--- /dev/null
+/* File object implementation */\r
+\r
+#define PY_SSIZE_T_CLEAN\r
+#include "Python.h"\r
+#include "structmember.h"\r
+\r
+#ifdef HAVE_SYS_TYPES_H\r
+#include <sys/types.h>\r
+#endif /* HAVE_SYS_TYPES_H */\r
+\r
+#ifdef MS_WINDOWS\r
+#define fileno _fileno\r
+/* can simulate truncate with Win32 API functions; see file_truncate */\r
+#define HAVE_FTRUNCATE\r
+#define WIN32_LEAN_AND_MEAN\r
+#include <windows.h>\r
+#endif\r
+\r
+#if defined(PYOS_OS2) && defined(PYCC_GCC)\r
+#include <io.h>\r
+#endif\r
+\r
+#define BUF(v) PyString_AS_STRING((PyStringObject *)v)\r
+\r
+#ifdef HAVE_ERRNO_H\r
+#include <errno.h>\r
+#endif\r
+\r
+#ifdef HAVE_GETC_UNLOCKED\r
+#define GETC(f) getc_unlocked(f)\r
+#define FLOCKFILE(f) flockfile(f)\r
+#define FUNLOCKFILE(f) funlockfile(f)\r
+#else\r
+#define GETC(f) getc(f)\r
+#define FLOCKFILE(f)\r
+#define FUNLOCKFILE(f)\r
+#endif\r
+\r
+/* Bits in f_newlinetypes */\r
+#define NEWLINE_UNKNOWN 0 /* No newline seen, yet */\r
+#define NEWLINE_CR 1 /* \r newline seen */\r
+#define NEWLINE_LF 2 /* \n newline seen */\r
+#define NEWLINE_CRLF 4 /* \r\n newline seen */\r
+\r
+/*\r
+ * These macros release the GIL while preventing the f_close() function being\r
+ * called in the interval between them. For that purpose, a running total of\r
+ * the number of currently running unlocked code sections is kept in\r
+ * the unlocked_count field of the PyFileObject. The close() method raises\r
+ * an IOError if that field is non-zero. See issue #815646, #595601.\r
+ */\r
+\r
+#define FILE_BEGIN_ALLOW_THREADS(fobj) \\r
+{ \\r
+ fobj->unlocked_count++; \\r
+ Py_BEGIN_ALLOW_THREADS\r
+\r
+#define FILE_END_ALLOW_THREADS(fobj) \\r
+ Py_END_ALLOW_THREADS \\r
+ fobj->unlocked_count--; \\r
+ assert(fobj->unlocked_count >= 0); \\r
+}\r
+\r
+#define FILE_ABORT_ALLOW_THREADS(fobj) \\r
+ Py_BLOCK_THREADS \\r
+ fobj->unlocked_count--; \\r
+ assert(fobj->unlocked_count >= 0);\r
+\r
+#ifdef __cplusplus\r
+extern "C" {\r
+#endif\r
+\r
+FILE *\r
+PyFile_AsFile(PyObject *f)\r
+{\r
+ if (f == NULL || !PyFile_Check(f))\r
+ return NULL;\r
+ else\r
+ return ((PyFileObject *)f)->f_fp;\r
+}\r
+\r
+void PyFile_IncUseCount(PyFileObject *fobj)\r
+{\r
+ fobj->unlocked_count++;\r
+}\r
+\r
+void PyFile_DecUseCount(PyFileObject *fobj)\r
+{\r
+ fobj->unlocked_count--;\r
+ assert(fobj->unlocked_count >= 0);\r
+}\r
+\r
+PyObject *\r
+PyFile_Name(PyObject *f)\r
+{\r
+ if (f == NULL || !PyFile_Check(f))\r
+ return NULL;\r
+ else\r
+ return ((PyFileObject *)f)->f_name;\r
+}\r
+\r
+/* This is a safe wrapper around PyObject_Print to print to the FILE\r
+ of a PyFileObject. PyObject_Print releases the GIL but knows nothing\r
+ about PyFileObject. */\r
static int
file_PyObject_Print(PyObject *op, PyFileObject *f, int flags)
{
    int result;
    /* Hold the use count across the call: PyObject_Print may release
       the GIL, and the count keeps close() from invalidating f->f_fp. */
    PyFile_IncUseCount(f);
    result = PyObject_Print(op, f->f_fp, flags);
    PyFile_DecUseCount(f);
    return result;
}
+\r
+/* On Unix, fopen will succeed for directories.\r
+ In Python, there should be no file objects referring to\r
+ directories, so we need a check. */\r
+\r
static PyFileObject*
dircheck(PyFileObject* f)
{
    /* Reject file objects opened on directories (Unix fopen() happily
       succeeds on them).  Returns f unchanged on success, or NULL with
       IOError(EISDIR) set when fstat() says the stream is a directory.
       A no-op where fstat/S_IFDIR/EISDIR are unavailable. */
#if defined(HAVE_FSTAT) && defined(S_IFDIR) && defined(EISDIR)
    struct stat buf;
    if (f->f_fp == NULL)
        return f;
    if (fstat(fileno(f->f_fp), &buf) == 0 &&
        S_ISDIR(buf.st_mode)) {
        char *msg = strerror(EISDIR);
        /* Build IOError(errno, message, filename) by calling the
           exception type so the filename attribute gets populated. */
        PyObject *exc = PyObject_CallFunction(PyExc_IOError, "(isO)",
                                              EISDIR, msg, f->f_name);
        PyErr_SetObject(PyExc_IOError, exc);
        Py_XDECREF(exc);
        return NULL;
    }
#endif
    return f;
}
+\r
+\r
static PyObject *
fill_file_fields(PyFileObject *f, FILE *fp, PyObject *name, char *mode,
                 int (*close)(FILE *))
{
    /* Initialize a freshly allocated PyFileObject: take ownership of fp,
       record name/mode, derive readable/writable and universal-newline
       flags from the mode string, and reject directories via dircheck().
       Returns f on success, NULL (with an exception set) on failure. */
    assert(name != NULL);
    assert(f != NULL);
    assert(PyFile_Check(f));
    assert(f->f_fp == NULL);

    /* Release the references currently held (installed when the object
       was created via PyFile_Type.tp_new; see PyFile_FromFile). */
    Py_DECREF(f->f_name);
    Py_DECREF(f->f_mode);
    Py_DECREF(f->f_encoding);
    Py_DECREF(f->f_errors);

    Py_INCREF(name);
    f->f_name = name;

    f->f_mode = PyString_FromString(mode);

    f->f_close = close;
    f->f_softspace = 0;
    f->f_binary = strchr(mode,'b') != NULL;
    f->f_buf = NULL;
    f->f_univ_newline = (strchr(mode, 'U') != NULL);
    f->f_newlinetypes = NEWLINE_UNKNOWN;
    f->f_skipnextlf = 0;
    Py_INCREF(Py_None);
    f->f_encoding = Py_None;
    Py_INCREF(Py_None);
    f->f_errors = Py_None;
    /* 'U' (universal newlines) implies reading; '+' means both. */
    f->readable = f->writable = 0;
    if (strchr(mode, 'r') != NULL || f->f_univ_newline)
        f->readable = 1;
    if (strchr(mode, 'w') != NULL || strchr(mode, 'a') != NULL)
        f->writable = 1;
    if (strchr(mode, '+') != NULL)
        f->readable = f->writable = 1;

    /* f_mode allocation is checked only now so the fields above are in
       a consistent state even on the error path. */
    if (f->f_mode == NULL)
        return NULL;
    f->f_fp = fp;
    f = dircheck(f);
    return (PyObject *) f;
}
+\r
+#if defined _MSC_VER && _MSC_VER >= 1400 && defined(__STDC_SECURE_LIB__)\r
+#define Py_VERIFY_WINNT\r
/* The CRT on Windows compiled with Visual Studio 2005 and higher may
 * assert if given invalid mode strings.  This is all fine and well
 * in static languages like C where the mode string is typically hard
 * coded.  But in Python, where we pass in the mode string from the
 * user, we need to verify it first manually.
 */
static int _PyVerify_Mode_WINNT(const char *mode)
{
    /* See if a mode string is valid on Windows, to avoid hard CRT
       assertions.  Returns 1 if valid, 0 if not.
       Grammar checked: leading spaces, one of "rwa", then any of
       '+','T','D' (each at most once), at most one letter from each of
       the pairs b/t, c/n, S/R, spaces and 'N' ignored, optionally
       followed by ",ccs=<UTF-8|UTF-16LE|UNICODE>" and trailing spaces. */
    int singles = 0;
    int pairs = 0;
    int encoding = 0;
    const char *s, *c;

    while(*mode == ' ') /* strip initial spaces */
        ++mode;
    if (!strchr("rwa", *mode)) /* must start with one of these */
        return 0;
    while (*++mode) {
        if (*mode == ' ' || *mode == 'N') /* ignore spaces and N */
            continue;
        s = "+TD"; /* each of these can appear only once */
        c = strchr(s, *mode);
        if (c) {
            /* c points into s, so c - s is the non-negative index of
               the matched character.  (s - c would be <= 0, and a shift
               by a negative count is undefined behavior.) */
            ptrdiff_t idx = c - s;
            if (singles & (1<<idx))
                return 0;
            singles |= (1<<idx);
            continue;
        }
        s = "btcnSR"; /* only one of each letter in the pairs allowed */
        c = strchr(s, *mode);
        if (c) {
            /* adjacent pair members (b/t, c/n, S/R) share one bit */
            ptrdiff_t idx = (c - s)/2;
            if (pairs & (1<<idx))
                return 0;
            pairs |= (1<<idx);
            continue;
        }
        if (*mode == ',') {
            encoding = 1;
            break;
        }
        return 0; /* found an invalid char */
    }

    if (encoding) {
        const char *e[] = {"UTF-8", "UTF-16LE", "UNICODE"};
        ++mode; /* step over the ',' the scan loop stopped on */
        while (*mode == ' ')
            ++mode;
        /* find 'ccs =' */
        if (strncmp(mode, "ccs", 3))
            return 0;
        mode += 3;
        while (*mode == ' ')
            ++mode;
        if (*mode != '=')
            return 0;
        ++mode; /* step over the '=' before matching the encoding name */
        while (*mode == ' ')
            ++mode;
        for(encoding = 0; encoding<_countof(e); ++encoding) {
            size_t l = strlen(e[encoding]);
            if (!strncmp(mode, e[encoding], l)) {
                mode += l; /* found a valid encoding */
                break;
            }
        }
        if (encoding == _countof(e))
            return 0;
    }
    /* skip trailing spaces */
    while (*mode == ' ')
        ++mode;

    return *mode == '\0'; /* must be at the end of the string */
}
+#endif\r
+\r
+/* check for known incorrect mode strings - problem is, platforms are\r
+ free to accept any mode characters they like and are supposed to\r
+ ignore stuff they don't understand... write or append mode with\r
+ universal newline support is expressly forbidden by PEP 278.\r
+ Additionally, remove the 'U' from the mode string as platforms\r
+ won't know what it is. Non-zero return signals an exception */\r
int
_PyFile_SanitizeMode(char *mode)
{
    /* Validate and normalize a mode string in place: strip 'U', force a
       leading 'r' and a 'b' for universal-newline reads, reject modes
       that don't start with r/w/a/U.  Returns 0 on success, -1 with a
       ValueError set on failure.  The buffer must have two spare bytes
       beyond the terminating NUL (open_the_file allocates strlen+3). */
    char *upos;
    size_t len = strlen(mode);

    if (!len) {
        PyErr_SetString(PyExc_ValueError, "empty mode string");
        return -1;
    }

    upos = strchr(mode, 'U');
    if (upos) {
        /* Remove the 'U'; platform fopen() wouldn't understand it. */
        memmove(upos, upos+1, len-(upos-mode)); /* incl null char */

        if (mode[0] == 'w' || mode[0] == 'a') {
            /* PEP 278 forbids universal newlines with write/append. */
            PyErr_Format(PyExc_ValueError, "universal newline "
                         "mode can only be used with modes "
                         "starting with 'r'");
            return -1;
        }

        /* Force a leading 'r'; the spare bytes make this memmove safe. */
        if (mode[0] != 'r') {
            memmove(mode+1, mode, strlen(mode)+1);
            mode[0] = 'r';
        }

        /* Universal-newline translation needs the stream in binary. */
        if (!strchr(mode, 'b')) {
            memmove(mode+2, mode+1, strlen(mode));
            mode[1] = 'b';
        }
    } else if (mode[0] != 'r' && mode[0] != 'w' && mode[0] != 'a') {
        PyErr_Format(PyExc_ValueError, "mode string must begin with "
                     "one of 'r', 'w', 'a' or 'U', not '%.200s'", mode);
        return -1;
    }
#ifdef Py_VERIFY_WINNT
    /* additional checks on NT with visual studio 2005 and higher */
    if (!_PyVerify_Mode_WINNT(mode)) {
        PyErr_Format(PyExc_ValueError, "Invalid mode ('%.50s')", mode);
        return -1;
    }
#endif
    return 0;
}
+\r
static PyObject *
open_the_file(PyFileObject *f, char *name, char *mode)
{
    /* Open the underlying FILE* for f.  The mode string is sanitized
       first (see _PyFile_SanitizeMode); on Windows a unicode f_name is
       opened via _wfopen instead of the passed name.  Returns f on
       success, NULL with an exception set on failure. */
    char *newmode;
    assert(f != NULL);
    assert(PyFile_Check(f));
#ifdef MS_WINDOWS
    /* windows ignores the passed name in order to support Unicode */
    assert(f->f_name != NULL);
#else
    assert(name != NULL);
#endif
    assert(mode != NULL);
    assert(f->f_fp == NULL);

    /* probably need to replace 'U' by 'rb'; +3 leaves room for the two
       characters _PyFile_SanitizeMode may insert, plus the NUL */
    newmode = PyMem_MALLOC(strlen(mode) + 3);
    if (!newmode) {
        PyErr_NoMemory();
        return NULL;
    }
    strcpy(newmode, mode);

    if (_PyFile_SanitizeMode(newmode)) {
        f = NULL;
        goto cleanup;
    }

    /* rexec.py can't stop a user from getting the file() constructor --
       all they have to do is get *any* file object f, and then do
       type(f).  Here we prevent them from doing damage with it. */
    if (PyEval_GetRestricted()) {
        PyErr_SetString(PyExc_IOError,
                        "file() constructor not accessible in restricted mode");
        f = NULL;
        goto cleanup;
    }
    errno = 0;

#ifdef MS_WINDOWS
    if (PyUnicode_Check(f->f_name)) {
        PyObject *wmode;
        wmode = PyUnicode_DecodeASCII(newmode, strlen(newmode), NULL);
        if (f->f_name && wmode) {
            FILE_BEGIN_ALLOW_THREADS(f)
            /* PyUnicode_AS_UNICODE OK without thread
               lock as it is a simple dereference. */
            f->f_fp = _wfopen(PyUnicode_AS_UNICODE(f->f_name),
                              PyUnicode_AS_UNICODE(wmode));
            FILE_END_ALLOW_THREADS(f)
        }
        Py_XDECREF(wmode);
    }
#endif
    /* Fall back to the narrow-char path when _wfopen wasn't used. */
    if (NULL == f->f_fp && NULL != name) {
        FILE_BEGIN_ALLOW_THREADS(f)
        f->f_fp = fopen(name, newmode);
        FILE_END_ALLOW_THREADS(f)
    }

    if (f->f_fp == NULL) {
#if defined _MSC_VER && (_MSC_VER < 1400 || !defined(__STDC_SECURE_LIB__))
        /* MSVC 6 (Microsoft) leaves errno at 0 for bad mode strings,
         * across all Windows flavors.  When it sets EINVAL varies
         * across Windows flavors, the exact conditions aren't
         * documented, and the answer lies in the OS's implementation
         * of Win32's CreateFile function (whose source is secret).
         * Seems the best we can do is map EINVAL to ENOENT.
         * Starting with Visual Studio .NET 2005, EINVAL is correctly
         * set by our CRT error handler (set in exceptions.c.)
         */
        if (errno == 0)           /* bad mode string */
            errno = EINVAL;
        else if (errno == EINVAL) /* unknown, but not a mode string */
            errno = ENOENT;
#endif
        /* EINVAL is returned when an invalid filename or
         * an invalid mode is supplied. */
        if (errno == EINVAL) {
            PyObject *v;
            char message[100];
            PyOS_snprintf(message, 100,
                          "invalid mode ('%.50s') or filename", mode);
            v = Py_BuildValue("(isO)", errno, message, f->f_name);
            if (v != NULL) {
                PyErr_SetObject(PyExc_IOError, v);
                Py_DECREF(v);
            }
        }
        else
            PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, f->f_name);
        f = NULL;
    }
    if (f != NULL)
        f = dircheck(f);

cleanup:
    PyMem_FREE(newmode);

    return (PyObject *)f;
}
+\r
static PyObject *
close_the_file(PyFileObject *f)
{
    /* Close f's FILE* (if any) via the saved f_close function.
       Returns None on success, NULL with IOError if the file is busy in
       a GIL-released section (or SystemError from the destructor path),
       or an int object for a nonzero non-EOF close status.  Calling it
       on an already-closed file is a no-op returning None. */
    int sts = 0;
    int (*local_close)(FILE *);
    FILE *local_fp = f->f_fp;
    char *local_setbuf = f->f_setbuf;
    if (local_fp != NULL) {
        local_close = f->f_close;
        /* unlocked_count > 0 means some thread is inside a
           FILE_BEGIN/END_ALLOW_THREADS section using f_fp right now. */
        if (local_close != NULL && f->unlocked_count > 0) {
            if (f->ob_refcnt > 0) {
                PyErr_SetString(PyExc_IOError,
                                "close() called during concurrent "
                                "operation on the same file object.");
            } else {
                /* This should not happen unless someone is
                 * carelessly playing with the PyFileObject
                 * struct fields and/or its associated FILE
                 * pointer. */
                PyErr_SetString(PyExc_SystemError,
                                "PyFileObject locking error in "
                                "destructor (refcnt <= 0 at close).");
            }
            return NULL;
        }
        /* NULL out the FILE pointer before releasing the GIL, because
         * it will not be valid anymore after the close() function is
         * called. */
        f->f_fp = NULL;
        if (local_close != NULL) {
            /* Issue #9295: must temporarily reset f_setbuf so that another
               thread doesn't free it when running file_close() concurrently.
               Otherwise this close() will crash when flushing the buffer. */
            f->f_setbuf = NULL;
            Py_BEGIN_ALLOW_THREADS
            errno = 0;
            sts = (*local_close)(local_fp);
            Py_END_ALLOW_THREADS
            f->f_setbuf = local_setbuf;
            if (sts == EOF)
                return PyErr_SetFromErrno(PyExc_IOError);
            if (sts != 0)
                return PyInt_FromLong((long)sts);
        }
    }
    Py_RETURN_NONE;
}
+\r
PyObject *
PyFile_FromFile(FILE *fp, char *name, char *mode, int (*close)(FILE *))
{
    /* Wrap an already-open FILE* in a new file object.  'close' is the
       function used to close the stream later (may be NULL).  Returns a
       new reference, or NULL with an exception set. */
    PyFileObject *f;
    PyObject *o_name;

    f = (PyFileObject *)PyFile_Type.tp_new(&PyFile_Type, NULL, NULL);
    if (f == NULL)
        return NULL;
    o_name = PyString_FromString(name);
    if (o_name == NULL) {
        /* Nothing owns fp yet, so close it here to avoid leaking it. */
        if (close != NULL && fp != NULL)
            close(fp);
        Py_DECREF(f);
        return NULL;
    }
    if (fill_file_fields(f, fp, o_name, mode, close) == NULL) {
        Py_DECREF(f);
        Py_DECREF(o_name);
        return NULL;
    }
    Py_DECREF(o_name); /* fill_file_fields took its own reference */
    return (PyObject *)f;
}
+\r
+PyObject *\r
+PyFile_FromString(char *name, char *mode)\r
+{\r
+ extern int fclose(FILE *);\r
+ PyFileObject *f;\r
+\r
+ f = (PyFileObject *)PyFile_FromFile((FILE *)NULL, name, mode, fclose);\r
+ if (f != NULL) {\r
+ if (open_the_file(f, name, mode) == NULL) {\r
+ Py_DECREF(f);\r
+ f = NULL;\r
+ }\r
+ }\r
+ return (PyObject *)f;\r
+}\r
+\r
void
PyFile_SetBufSize(PyObject *f, int bufsize)
{
    /* Configure stdio buffering for the file: 0 -> unbuffered,
       1 -> line-buffered (only where setvbuf exists), >1 -> fully
       buffered with the given size.  Negative sizes are ignored. */
    PyFileObject *file = (PyFileObject *)f;
    if (bufsize >= 0) {
        int type;
        switch (bufsize) {
        case 0:
            type = _IONBF;
            break;
#ifdef HAVE_SETVBUF
        case 1:
            type = _IOLBF;
            bufsize = BUFSIZ;
            break;
#endif
        default:
            type = _IOFBF;
#ifndef HAVE_SETVBUF
            bufsize = BUFSIZ;
#endif
            break;
        }
        /* Flush before switching buffers so no buffered data is lost. */
        fflush(file->f_fp);
        if (type == _IONBF) {
            PyMem_Free(file->f_setbuf);
            file->f_setbuf = NULL;
        } else {
            /* NOTE(review): realloc result is assigned directly; on
               failure the old buffer leaks and setvbuf receives NULL
               (stdio then allocates internally) -- confirm acceptable. */
            file->f_setbuf = (char *)PyMem_Realloc(file->f_setbuf,
                                                   bufsize);
        }
#ifdef HAVE_SETVBUF
        setvbuf(file->f_fp, file->f_setbuf, type, bufsize);
#else /* !HAVE_SETVBUF */
        setbuf(file->f_fp, file->f_setbuf);
#endif /* !HAVE_SETVBUF */
    }
}
+\r
+/* Set the encoding used to output Unicode strings.\r
+ Return 1 on success, 0 on failure. */\r
+\r
int
PyFile_SetEncoding(PyObject *f, const char *enc)
{
    /* Convenience wrapper: set the output encoding with the default
       error policy (errors attribute becomes None).  Returns 1 on
       success, 0 on failure. */
    return PyFile_SetEncodingAndErrors(f, enc, NULL);
}
+\r
int
PyFile_SetEncodingAndErrors(PyObject *f, const char *enc, char* errors)
{
    /* Set the encoding (and optional error policy) used when writing
       unicode objects to the file.  errors == NULL stores None.
       Returns 1 on success, 0 on failure (exception set). */
    PyFileObject *file = (PyFileObject*)f;
    PyObject *str, *oerrors;

    assert(PyFile_Check(f));
    str = PyString_FromString(enc);
    if (!str)
        return 0;
    if (errors) {
        oerrors = PyString_FromString(errors);
        if (!oerrors) {
            Py_DECREF(str);
            return 0;
        }
    } else {
        oerrors = Py_None;
        Py_INCREF(Py_None);
    }
    /* Swap in the new values only after both allocations succeeded. */
    Py_DECREF(file->f_encoding);
    file->f_encoding = str;
    Py_DECREF(file->f_errors);
    file->f_errors = oerrors;
    return 1;
}
+\r
static PyObject *
err_closed(void)
{
    /* Helper: raise ValueError for an operation on a closed file. */
    PyErr_SetString(PyExc_ValueError, "I/O operation on closed file");
    return NULL;
}
+\r
static PyObject *
err_mode(char *action)
{
    /* Helper: raise IOError when the file's mode forbids 'action'
       (e.g. writing to a file opened read-only). */
    PyErr_Format(PyExc_IOError, "File not open for %s", action);
    return NULL;
}
+\r
+/* Refuse regular file I/O if there's data in the iteration-buffer.\r
+ * Mixing them would cause data to arrive out of order, as the read*\r
+ * methods don't use the iteration buffer. */\r
static PyObject *
err_iterbuffered(void)
{
    /* Helper: raise ValueError when a read method would bypass data
       already sitting in the iteration readahead buffer. */
    PyErr_SetString(PyExc_ValueError,
                    "Mixing iteration and read methods would lose data");
    return NULL;
}
+\r
+static void drop_readahead(PyFileObject *);\r
+\r
+/* Methods */\r
+\r
static void
file_dealloc(PyFileObject *f)
{
    /* Destructor: clear weakrefs, close the stream (reporting but not
       propagating errors -- destructors cannot raise), then release the
       setvbuf buffer, attribute objects and readahead buffer. */
    PyObject *ret;
    if (f->weakreflist != NULL)
        PyObject_ClearWeakRefs((PyObject *) f);
    ret = close_the_file(f);
    if (!ret) {
        PySys_WriteStderr("close failed in file object destructor:\n");
        PyErr_Print();
    }
    else {
        Py_DECREF(ret);
    }
    PyMem_Free(f->f_setbuf);
    Py_XDECREF(f->f_name);
    Py_XDECREF(f->f_mode);
    Py_XDECREF(f->f_encoding);
    Py_XDECREF(f->f_errors);
    drop_readahead(f);
    Py_TYPE(f)->tp_free((PyObject *)f);
}
+\r
static PyObject *
file_repr(PyFileObject *f)
{
    /* repr(): "<open file NAME, mode 'MODE' at ADDR>".  Unicode names
       are unicode-escaped; other names are shown via their repr(). */
    PyObject *ret = NULL;
    PyObject *name = NULL;
    if (PyUnicode_Check(f->f_name)) {
#ifdef Py_USING_UNICODE
        const char *name_str;
        name = PyUnicode_AsUnicodeEscapeString(f->f_name);
        /* On escape failure fall back to "?" (error state dropped). */
        name_str = name ? PyString_AsString(name) : "?";
        ret = PyString_FromFormat("<%s file u'%s', mode '%s' at %p>",
                                  f->f_fp == NULL ? "closed" : "open",
                                  name_str,
                                  PyString_AsString(f->f_mode),
                                  f);
        Py_XDECREF(name);
        return ret;
#endif
    } else {
        name = PyObject_Repr(f->f_name);
        if (name == NULL)
            return NULL;
        ret = PyString_FromFormat("<%s file %s, mode '%s' at %p>",
                                  f->f_fp == NULL ? "closed" : "open",
                                  PyString_AsString(name),
                                  PyString_AsString(f->f_mode),
                                  f);
        Py_XDECREF(name);
        return ret;
    }
}
+\r
+static PyObject *\r
+file_close(PyFileObject *f)\r
+{\r
+ PyObject *sts = close_the_file(f);\r
+ if (sts) {\r
+ PyMem_Free(f->f_setbuf);\r
+ f->f_setbuf = NULL;\r
+ }\r
+ return sts;\r
+}\r
+\r
+\r
+/* Our very own off_t-like type, 64-bit if possible */\r
+#if !defined(HAVE_LARGEFILE_SUPPORT)\r
+typedef off_t Py_off_t;\r
+#elif SIZEOF_OFF_T >= 8\r
+typedef off_t Py_off_t;\r
+#elif SIZEOF_FPOS_T >= 8\r
+typedef fpos_t Py_off_t;\r
+#else\r
+#error "Large file support, but neither off_t nor fpos_t is large enough."\r
+#endif\r
+\r
+\r
+/* a portable fseek() function\r
+ return 0 on success, non-zero on failure (with errno set) */\r
static int
_portable_fseek(FILE *fp, Py_off_t offset, int whence)
{
    /* fseek() variant that supports 64-bit offsets where the platform
       provides a way to.  Returns 0 on success, non-zero on failure
       (with errno set). */
#if !defined(HAVE_LARGEFILE_SUPPORT)
    return fseek(fp, offset, whence);
#elif defined(HAVE_FSEEKO) && SIZEOF_OFF_T >= 8
    return fseeko(fp, offset, whence);
#elif defined(HAVE_FSEEK64)
    return fseek64(fp, offset, whence);
#elif defined(__BEOS__)
    return _fseek(fp, offset, whence);
#elif SIZEOF_FPOS_T >= 8
    /* lacking a 64-bit capable fseek(), use a 64-bit capable fsetpos()
       and fgetpos() to implement fseek().  SEEK_END/SEEK_CUR are first
       converted into an absolute position. */
    fpos_t pos;
    switch (whence) {
    case SEEK_END:
#ifdef MS_WINDOWS
        fflush(fp);
        if (_lseeki64(fileno(fp), 0, 2) == -1)
            return -1;
#else
        if (fseek(fp, 0, SEEK_END) != 0)
            return -1;
#endif
        /* fall through */
    case SEEK_CUR:
        if (fgetpos(fp, &pos) != 0)
            return -1;
        offset += pos;
        break;
    /* case SEEK_SET: break; */
    }
    return fsetpos(fp, &offset);
#else
#error "Large file support, but no way to fseek."
#endif
}
+\r
+\r
+/* a portable ftell() function\r
+ Return -1 on failure with errno set appropriately, current file\r
+ position on success */\r
static Py_off_t
_portable_ftell(FILE* fp)
{
    /* ftell() variant that supports 64-bit positions where available.
       Returns the current position, or -1 with errno set on failure. */
#if !defined(HAVE_LARGEFILE_SUPPORT)
    return ftell(fp);
#elif defined(HAVE_FTELLO) && SIZEOF_OFF_T >= 8
    return ftello(fp);
#elif defined(HAVE_FTELL64)
    return ftell64(fp);
#elif SIZEOF_FPOS_T >= 8
    fpos_t pos;
    if (fgetpos(fp, &pos) != 0)
        return -1;
    return pos;
#else
#error "Large file support, but no way to ftell."
#endif
}
+\r
+\r
static PyObject *
file_seek(PyFileObject *f, PyObject *args)
{
    /* seek(offset[, whence]) -> None.  Accepts any object supporting
       __index__ (floats are allowed with a DeprecationWarning).  Drops
       the readahead buffer and the universal-newline skip-LF state. */
    int whence;
    int ret;
    Py_off_t offset;
    PyObject *offobj, *off_index;

    if (f->f_fp == NULL)
        return err_closed();
    drop_readahead(f);
    whence = 0;
    if (!PyArg_ParseTuple(args, "O|i:seek", &offobj, &whence))
        return NULL;
    off_index = PyNumber_Index(offobj);
    if (!off_index) {
        if (!PyFloat_Check(offobj))
            return NULL;
        /* Deprecated in 2.6 */
        PyErr_Clear();
        if (PyErr_WarnEx(PyExc_DeprecationWarning,
                         "integer argument expected, got float",
                         1) < 0)
            return NULL;
        off_index = offobj;
        Py_INCREF(offobj);
    }
#if !defined(HAVE_LARGEFILE_SUPPORT)
    offset = PyInt_AsLong(off_index);
#else
    offset = PyLong_Check(off_index) ?
        PyLong_AsLongLong(off_index) : PyInt_AsLong(off_index);
#endif
    Py_DECREF(off_index);
    if (PyErr_Occurred())
        return NULL;

    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    ret = _portable_fseek(f->f_fp, offset, whence);
    FILE_END_ALLOW_THREADS(f)

    if (ret != 0) {
        PyErr_SetFromErrno(PyExc_IOError);
        clearerr(f->f_fp);
        return NULL;
    }
    /* A successful seek invalidates any pending skipped-\n state. */
    f->f_skipnextlf = 0;
    Py_INCREF(Py_None);
    return Py_None;
}
+\r
+\r
+#ifdef HAVE_FTRUNCATE\r
static PyObject *
file_truncate(PyFileObject *f, PyObject *args)
{
    /* truncate([size]) -> None.  Truncate (or extend!) the file to
       'size' bytes, defaulting to the current position; the original
       file position is restored afterwards. */
    Py_off_t newsize;
    PyObject *newsizeobj = NULL;
    Py_off_t initialpos;
    int ret;

    if (f->f_fp == NULL)
        return err_closed();
    if (!f->writable)
        return err_mode("writing");
    if (!PyArg_UnpackTuple(args, "truncate", 0, 1, &newsizeobj))
        return NULL;

    /* Get current file position.  If the file happens to be open for
     * update and the last operation was an input operation, C doesn't
     * define what the later fflush() will do, but we promise truncate()
     * won't change the current position (and fflush() *does* change it
     * then at least on Windows).  The easiest thing is to capture
     * current pos now and seek back to it at the end.
     */
    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    initialpos = _portable_ftell(f->f_fp);
    FILE_END_ALLOW_THREADS(f)
    if (initialpos == -1)
        goto onioerror;

    /* Set newsize to current position if newsizeobj NULL, else to the
     * specified value.
     */
    if (newsizeobj != NULL) {
#if !defined(HAVE_LARGEFILE_SUPPORT)
        newsize = PyInt_AsLong(newsizeobj);
#else
        newsize = PyLong_Check(newsizeobj) ?
            PyLong_AsLongLong(newsizeobj) :
            PyInt_AsLong(newsizeobj);
#endif
        if (PyErr_Occurred())
            return NULL;
    }
    else /* default to current position */
        newsize = initialpos;

    /* Flush the stream.  We're mixing stream-level I/O with lower-level
     * I/O, and a flush may be necessary to synch both platform views
     * of the current file state.
     */
    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    ret = fflush(f->f_fp);
    FILE_END_ALLOW_THREADS(f)
    if (ret != 0)
        goto onioerror;

#ifdef MS_WINDOWS
    /* MS _chsize doesn't work if newsize doesn't fit in 32 bits,
       so don't even try using it. */
    {
        HANDLE hFile;

        /* Have to move current pos to desired endpoint on Windows. */
        FILE_BEGIN_ALLOW_THREADS(f)
        errno = 0;
        ret = _portable_fseek(f->f_fp, newsize, SEEK_SET) != 0;
        FILE_END_ALLOW_THREADS(f)
        if (ret)
            goto onioerror;

        /* Truncate.  Note that this may grow the file! */
        FILE_BEGIN_ALLOW_THREADS(f)
        errno = 0;
        hFile = (HANDLE)_get_osfhandle(fileno(f->f_fp));
        ret = hFile == (HANDLE)-1;
        if (ret == 0) {
            ret = SetEndOfFile(hFile) == 0;
            if (ret)
                errno = EACCES;
        }
        FILE_END_ALLOW_THREADS(f)
        if (ret)
            goto onioerror;
    }
#else
    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    ret = ftruncate(fileno(f->f_fp), newsize);
    FILE_END_ALLOW_THREADS(f)
    if (ret != 0)
        goto onioerror;
#endif /* !MS_WINDOWS */

    /* Restore original file position. */
    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    ret = _portable_fseek(f->f_fp, initialpos, SEEK_SET) != 0;
    FILE_END_ALLOW_THREADS(f)
    if (ret)
        goto onioerror;

    Py_INCREF(Py_None);
    return Py_None;

onioerror:
    PyErr_SetFromErrno(PyExc_IOError);
    clearerr(f->f_fp);
    return NULL;
}
+#endif /* HAVE_FTRUNCATE */\r
+\r
static PyObject *
file_tell(PyFileObject *f)
{
    /* tell() -> current position.  In universal-newline mode, if the
       last read stopped between the \r and \n of a \r\n pair, peek at
       the \n so the reported position is past the whole newline. */
    Py_off_t pos;

    if (f->f_fp == NULL)
        return err_closed();
    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    pos = _portable_ftell(f->f_fp);
    FILE_END_ALLOW_THREADS(f)

    if (pos == -1) {
        PyErr_SetFromErrno(PyExc_IOError);
        clearerr(f->f_fp);
        return NULL;
    }
    if (f->f_skipnextlf) {
        int c;
        c = GETC(f->f_fp);
        if (c == '\n') {
            /* The \n completes a \r\n newline; account for it. */
            f->f_newlinetypes |= NEWLINE_CRLF;
            pos++;
            f->f_skipnextlf = 0;
        } else if (c != EOF) ungetc(c, f->f_fp);
    }
#if !defined(HAVE_LARGEFILE_SUPPORT)
    return PyInt_FromLong(pos);
#else
    return PyLong_FromLongLong(pos);
#endif
}
+\r
static PyObject *
file_fileno(PyFileObject *f)
{
    /* fileno() -> the underlying OS file descriptor as an int. */
    if (f->f_fp == NULL)
        return err_closed();
    return PyInt_FromLong((long) fileno(f->f_fp));
}
+\r
static PyObject *
file_flush(PyFileObject *f)
{
    /* flush() -> None.  Flush stdio's output buffer; raises IOError on
       failure and clears the stream's error indicator. */
    int res;

    if (f->f_fp == NULL)
        return err_closed();
    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    res = fflush(f->f_fp);
    FILE_END_ALLOW_THREADS(f)
    if (res != 0) {
        PyErr_SetFromErrno(PyExc_IOError);
        clearerr(f->f_fp);
        return NULL;
    }
    Py_INCREF(Py_None);
    return Py_None;
}
+\r
static PyObject *
file_isatty(PyFileObject *f)
{
    /* isatty() -> True if the descriptor refers to a terminal. */
    long res;
    if (f->f_fp == NULL)
        return err_closed();
    FILE_BEGIN_ALLOW_THREADS(f)
    res = isatty((int)fileno(f->f_fp));
    FILE_END_ALLOW_THREADS(f)
    return PyBool_FromLong(res);
}
+\r
+\r
+#if BUFSIZ < 8192\r
+#define SMALLCHUNK 8192\r
+#else\r
+#define SMALLCHUNK BUFSIZ\r
+#endif\r
+\r
static size_t
new_buffersize(PyFileObject *f, size_t currentsize)
{
    /* Pick the next buffer size for an unsized read(): when the number
       of remaining bytes can be learned via fstat()/ftell(), request
       exactly that plus one (to detect growth); otherwise grow the
       buffer by roughly 12.5%. */
#ifdef HAVE_FSTAT
    off_t pos, end;
    struct stat st;
    if (fstat(fileno(f->f_fp), &st) == 0) {
        end = st.st_size;
        /* The following is not a bug: we really need to call lseek()
           *and* ftell().  The reason is that some stdio libraries
           mistakenly flush their buffer when ftell() is called and
           the lseek() call it makes fails, thereby throwing away
           data that cannot be recovered in any way.  To avoid this,
           we first test lseek(), and only call ftell() if lseek()
           works.  We can't use the lseek() value either, because we
           need to take the amount of buffered data into account.
           (Yet another reason why stdio stinks. :-) */
        pos = lseek(fileno(f->f_fp), 0L, SEEK_CUR);
        if (pos >= 0) {
            pos = ftell(f->f_fp);
        }
        if (pos < 0)
            clearerr(f->f_fp);
        if (end > pos && pos >= 0)
            return currentsize + end - pos + 1;
        /* Add 1 so if the file were to grow we'd notice. */
    }
#endif
    /* Expand the buffer by an amount proportional to the current size,
       giving us amortized linear-time behavior.  Use a less-than-double
       growth factor to avoid excessive allocation. */
    return currentsize + (currentsize >> 3) + 6;
}
+\r
+#if defined(EWOULDBLOCK) && defined(EAGAIN) && EWOULDBLOCK != EAGAIN\r
+#define BLOCKED_ERRNO(x) ((x) == EWOULDBLOCK || (x) == EAGAIN)\r
+#else\r
+#ifdef EWOULDBLOCK\r
+#define BLOCKED_ERRNO(x) ((x) == EWOULDBLOCK)\r
+#else\r
+#ifdef EAGAIN\r
+#define BLOCKED_ERRNO(x) ((x) == EAGAIN)\r
+#else\r
+#define BLOCKED_ERRNO(x) 0\r
+#endif\r
+#endif\r
+#endif\r
+\r
static PyObject *
file_read(PyFileObject *f, PyObject *args)
{
    /* read([size]) -> string.  size < 0 (the default) reads to EOF,
       growing the buffer via new_buffersize(); otherwise at most size
       bytes are returned.  EINTR is retried after running signal
       handlers; in non-blocking mode a short read is returned rather
       than discarded. */
    long bytesrequested = -1;
    size_t bytesread, buffersize, chunksize;
    PyObject *v;

    if (f->f_fp == NULL)
        return err_closed();
    if (!f->readable)
        return err_mode("reading");
    /* refuse to mix with f.next() */
    if (f->f_buf != NULL &&
        (f->f_bufend - f->f_bufptr) > 0 &&
        f->f_buf[0] != '\0')
        return err_iterbuffered();
    if (!PyArg_ParseTuple(args, "|l:read", &bytesrequested))
        return NULL;
    if (bytesrequested < 0)
        buffersize = new_buffersize(f, (size_t)0);
    else
        buffersize = bytesrequested;
    if (buffersize > PY_SSIZE_T_MAX) {
        PyErr_SetString(PyExc_OverflowError,
            "requested number of bytes is more than a Python string can hold");
        return NULL;
    }
    v = PyString_FromStringAndSize((char *)NULL, buffersize);
    if (v == NULL)
        return NULL;
    bytesread = 0;
    for (;;) {
        int interrupted;
        FILE_BEGIN_ALLOW_THREADS(f)
        errno = 0;
        chunksize = Py_UniversalNewlineFread(BUF(v) + bytesread,
                  buffersize - bytesread, f->f_fp, (PyObject *)f);
        interrupted = ferror(f->f_fp) && errno == EINTR;
        FILE_END_ALLOW_THREADS(f)
        if (interrupted) {
            /* Give Python-level signal handlers a chance to run; they
               may raise (e.g. KeyboardInterrupt). */
            clearerr(f->f_fp);
            if (PyErr_CheckSignals()) {
                Py_DECREF(v);
                return NULL;
            }
        }
        if (chunksize == 0) {
            if (interrupted)
                continue;
            if (!ferror(f->f_fp))
                break;
            clearerr(f->f_fp);
            /* When in non-blocking mode, data shouldn't
             * be discarded if a blocking signal was
             * received. That will also happen if
             * chunksize != 0, but bytesread < buffersize. */
            if (bytesread > 0 && BLOCKED_ERRNO(errno))
                break;
            PyErr_SetFromErrno(PyExc_IOError);
            Py_DECREF(v);
            return NULL;
        }
        bytesread += chunksize;
        if (bytesread < buffersize && !interrupted) {
            clearerr(f->f_fp);
            break;
        }
        if (bytesrequested < 0) {
            buffersize = new_buffersize(f, buffersize);
            if (_PyString_Resize(&v, buffersize) < 0)
                return NULL;
        } else {
            /* Got what was requested. */
            break;
        }
    }
    /* Shrink the string to the number of bytes actually read. */
    if (bytesread != buffersize && _PyString_Resize(&v, bytesread))
        return NULL;
    return v;
}
+\r
static PyObject *
file_readinto(PyFileObject *f, PyObject *args)
{
    /* readinto(buffer) -> number of bytes read.  Fills a writable
       buffer object, looping until it is full or EOF/error; EINTR is
       retried after running Python signal handlers. */
    char *ptr;
    Py_ssize_t ntodo;
    Py_ssize_t ndone, nnow;
    Py_buffer pbuf;

    if (f->f_fp == NULL)
        return err_closed();
    if (!f->readable)
        return err_mode("reading");
    /* refuse to mix with f.next() */
    if (f->f_buf != NULL &&
        (f->f_bufend - f->f_bufptr) > 0 &&
        f->f_buf[0] != '\0')
        return err_iterbuffered();
    if (!PyArg_ParseTuple(args, "w*", &pbuf))
        return NULL;
    ptr = pbuf.buf;
    ntodo = pbuf.len;
    ndone = 0;
    while (ntodo > 0) {
        int interrupted;
        FILE_BEGIN_ALLOW_THREADS(f)
        errno = 0;
        nnow = Py_UniversalNewlineFread(ptr+ndone, ntodo, f->f_fp,
                                        (PyObject *)f);
        interrupted = ferror(f->f_fp) && errno == EINTR;
        FILE_END_ALLOW_THREADS(f)
        if (interrupted) {
            /* Let Python-level signal handlers run; they may raise. */
            clearerr(f->f_fp);
            if (PyErr_CheckSignals()) {
                PyBuffer_Release(&pbuf);
                return NULL;
            }
        }
        if (nnow == 0) {
            if (interrupted)
                continue;
            if (!ferror(f->f_fp))
                break;          /* plain EOF */
            PyErr_SetFromErrno(PyExc_IOError);
            clearerr(f->f_fp);
            PyBuffer_Release(&pbuf);
            return NULL;
        }
        ndone += nnow;
        ntodo -= nnow;
    }
    PyBuffer_Release(&pbuf);
    return PyInt_FromSsize_t(ndone);
}
+\r
+/**************************************************************************\r
+Routine to get next line using platform fgets().\r
+\r
+Under MSVC 6:\r
+\r
++ MS threadsafe getc is very slow (multiple layers of function calls before+\r
+ after each character, to lock+unlock the stream).\r
++ The stream-locking functions are MS-internal -- can't access them from user\r
+ code.\r
++ There's nothing Tim could find in the MS C or platform SDK libraries that\r
+ can worm around this.\r
++ MS fgets locks/unlocks only once per line; it's the only hook we have.\r
+\r
+So we use fgets for speed(!), despite that it's painful.\r
+\r
+MS realloc is also slow.\r
+\r
+Reports from other platforms on this method vs getc_unlocked (which MS doesn't\r
+have):\r
+ Linux a wash\r
+ Solaris a wash\r
+ Tru64 Unix getline_via_fgets significantly faster\r
+\r
+CAUTION: The C std isn't clear about this: in those cases where fgets\r
+writes something into the buffer, can it write into any position beyond the\r
+required trailing null byte? MSVC 6 fgets does not, and no platform is (yet)\r
+known on which it does; and it would be a strange way to code fgets. Still,\r
+getline_via_fgets may not work correctly if it does. The std test\r
+test_bufio.py should fail if platform fgets() routinely writes beyond the\r
+trailing null byte. #define DONT_USE_FGETS_IN_GETLINE to disable this code.\r
+**************************************************************************/\r
+\r
+/* Use this routine if told to, or by default on non-get_unlocked()\r
+ * platforms unless told not to. Yikes! Let's spell that out:\r
+ * On a platform with getc_unlocked():\r
+ * By default, use getc_unlocked().\r
+ * If you want to use fgets() instead, #define USE_FGETS_IN_GETLINE.\r
+ * On a platform without getc_unlocked():\r
+ * By default, use fgets().\r
+ * If you don't want to use fgets(), #define DONT_USE_FGETS_IN_GETLINE.\r
+ */\r
+#if !defined(USE_FGETS_IN_GETLINE) && !defined(HAVE_GETC_UNLOCKED)\r
+#define USE_FGETS_IN_GETLINE\r
+#endif\r
+\r
+#if defined(DONT_USE_FGETS_IN_GETLINE) && defined(USE_FGETS_IN_GETLINE)\r
+#undef USE_FGETS_IN_GETLINE\r
+#endif\r
+\r
+#ifdef USE_FGETS_IN_GETLINE\r
/* Fast readline() path for when universal-newline translation is OFF and no
 * size limit was given: read the line with one or two fgets() calls instead
 * of a getc() per character.  The buffer is pre-filled with '\n' so we can
 * tell how far fgets() wrote (fgets gives no length back).  Returns a new
 * string (newline retained when one was read; empty string at EOF), or NULL
 * with an exception set.
 */
static PyObject*
getline_via_fgets(PyFileObject *f, FILE *fp)
{
/* INITBUFSIZE is the maximum line length that lets us get away with the fast
 * no-realloc, one-fgets()-call path.  Boosting it isn't free, because we have
 * to fill this much of the buffer with a known value in order to figure out
 * how much of the buffer fgets() overwrites.  So if INITBUFSIZE is larger
 * than "most" lines, we waste time filling unused buffer slots.  100 is
 * surely adequate for most peoples' email archives, chewing over source code,
 * etc -- "regular old text files".
 * MAXBUFSIZE is the maximum line length that lets us get away with the less
 * fast (but still zippy) no-realloc, two-fgets()-call path.  See above for
 * cautions about boosting that.  300 was chosen because the worst real-life
 * text-crunching job reported on Python-Dev was a mail-log crawler where over
 * half the lines were 254 chars.
 */
#define INITBUFSIZE 100
#define MAXBUFSIZE 300
    char* p;                    /* temp */
    char buf[MAXBUFSIZE];
    PyObject* v;                /* the string object result */
    char* pvfree;               /* address of next free slot */
    char* pvend;                /* address one beyond last free slot */
    size_t nfree;               /* # of free buffer slots; pvend-pvfree */
    size_t total_v_size;        /* total # of slots in buffer */
    size_t increment;           /* amount to increment the buffer */
    size_t prev_v_size;

    /* Optimize for normal case:  avoid _PyString_Resize if at all
     * possible via first reading into stack buffer "buf".
     */
    total_v_size = INITBUFSIZE;         /* start small and pray */
    pvfree = buf;
    for (;;) {
        FILE_BEGIN_ALLOW_THREADS(f)
        pvend = buf + total_v_size;
        nfree = pvend - pvfree;
        memset(pvfree, '\n', nfree);
        assert(nfree < INT_MAX); /* Should be atmost MAXBUFSIZE */
        p = fgets(pvfree, (int)nfree, fp);
        FILE_END_ALLOW_THREADS(f)

        if (p == NULL) {
            /* EOF (or read error) with nothing read this call:
             * return whatever we accumulated so far. */
            clearerr(fp);
            if (PyErr_CheckSignals())
                return NULL;
            v = PyString_FromStringAndSize(buf, pvfree - buf);
            return v;
        }
        /* fgets read *something* */
        p = memchr(pvfree, '\n', nfree);
        if (p != NULL) {
            /* Did the \n come from fgets or from us?
             * Since fgets stops at the first \n, and then writes
             * \0, if it's from fgets a \0 must be next.  But if
             * that's so, it could not have come from us, since
             * the \n's we filled the buffer with have only more
             * \n's to the right.
             */
            if (p+1 < pvend && *(p+1) == '\0') {
                /* It's from fgets: we win!  In particular,
                 * we haven't done any mallocs yet, and can
                 * build the final result on the first try.
                 */
                ++p;            /* include \n from fgets */
            }
            else {
                /* Must be from us: fgets didn't fill the
                 * buffer and didn't find a newline, so it
                 * must be the last and newline-free line of
                 * the file.
                 */
                assert(p > pvfree && *(p-1) == '\0');
                --p;            /* don't include \0 from fgets */
            }
            v = PyString_FromStringAndSize(buf, p - buf);
            return v;
        }
        /* yuck:  fgets overwrote all the newlines, i.e. the entire
         * buffer.  So this line isn't over yet, or maybe it is but
         * we're exactly at EOF.  If we haven't already, try using the
         * rest of the stack buffer.
         */
        assert(*(pvend-1) == '\0');
        if (pvfree == buf) {
            pvfree = pvend - 1;         /* overwrite trailing null */
            total_v_size = MAXBUFSIZE;
        }
        else
            break;
    }

    /* The stack buffer isn't big enough; malloc a string object and read
     * into its buffer.
     */
    total_v_size = MAXBUFSIZE << 1;
    v = PyString_FromStringAndSize((char*)NULL, (int)total_v_size);
    if (v == NULL)
        return v;
    /* copy over everything except the last null byte */
    memcpy(BUF(v), buf, MAXBUFSIZE-1);
    pvfree = BUF(v) + MAXBUFSIZE - 1;

    /* Keep reading stuff into v; if it ever ends successfully, break
     * after setting p one beyond the end of the line.  The code here is
     * very much like the code above, except reads into v's buffer; see
     * the code above for detailed comments about the logic.
     */
    for (;;) {
        FILE_BEGIN_ALLOW_THREADS(f)
        pvend = BUF(v) + total_v_size;
        nfree = pvend - pvfree;
        memset(pvfree, '\n', nfree);
        assert(nfree < INT_MAX);
        p = fgets(pvfree, (int)nfree, fp);
        FILE_END_ALLOW_THREADS(f)

        if (p == NULL) {
            clearerr(fp);
            if (PyErr_CheckSignals()) {
                Py_DECREF(v);
                return NULL;
            }
            p = pvfree;
            break;
        }
        p = memchr(pvfree, '\n', nfree);
        if (p != NULL) {
            if (p+1 < pvend && *(p+1) == '\0') {
                /* \n came from fgets */
                ++p;
                break;
            }
            /* \n came from us; last line of file, no newline */
            assert(p > pvfree && *(p-1) == '\0');
            --p;
            break;
        }
        /* expand buffer and try again */
        assert(*(pvend-1) == '\0');
        increment = total_v_size >> 2;  /* mild exponential growth */
        prev_v_size = total_v_size;
        total_v_size += increment;
        /* check for overflow */
        if (total_v_size <= prev_v_size ||
            total_v_size > PY_SSIZE_T_MAX) {
            PyErr_SetString(PyExc_OverflowError,
                "line is longer than a Python string can hold");
            Py_DECREF(v);
            return NULL;
        }
        if (_PyString_Resize(&v, (int)total_v_size) < 0)
            return NULL;
        /* overwrite the trailing null byte */
        pvfree = BUF(v) + (prev_v_size - 1);
    }
    /* Shrink v to the exact line length (p points one past the line end). */
    if (BUF(v) + total_v_size != p && _PyString_Resize(&v, p - BUF(v)))
        return NULL;
    return v;
#undef INITBUFSIZE
#undef MAXBUFSIZE
}
+#endif /* ifdef USE_FGETS_IN_GETLINE */\r
+\r
+/* Internal routine to get a line.\r
+ Size argument interpretation:\r
+ > 0: max length;\r
+ <= 0: read arbitrary line\r
+*/\r
+\r
/* Read one line from f into a new string object.
 * n > 0: read at most n bytes; n <= 0: read until newline or EOF, growing
 * the buffer as needed.  Handles universal-newline translation (\r and \r\n
 * become \n, and f_newlinetypes/f_skipnextlf are updated) and restarts reads
 * interrupted by signals (EINTR).  Returns NULL with an exception on error.
 */
static PyObject *
get_line(PyFileObject *f, int n)
{
    FILE *fp = f->f_fp;
    int c;
    char *buf, *end;
    size_t total_v_size;        /* total # of slots in buffer */
    size_t used_v_size;         /* # used slots in buffer */
    size_t increment;           /* amount to increment the buffer */
    PyObject *v;
    /* Work on local copies of the newline state; written back to f
     * every time the lock is released, so other code sees it current. */
    int newlinetypes = f->f_newlinetypes;
    int skipnextlf = f->f_skipnextlf;
    int univ_newline = f->f_univ_newline;

#if defined(USE_FGETS_IN_GETLINE)
    /* Unbounded read without newline translation: take the fgets fast path. */
    if (n <= 0 && !univ_newline )
        return getline_via_fgets(f, fp);
#endif
    total_v_size = n > 0 ? n : 100;
    v = PyString_FromStringAndSize((char *)NULL, total_v_size);
    if (v == NULL)
        return NULL;
    buf = BUF(v);
    end = buf + total_v_size;

    for (;;) {
        FILE_BEGIN_ALLOW_THREADS(f)
        FLOCKFILE(fp);
        if (univ_newline) {
            c = 'x'; /* Shut up gcc warning */
            while ( buf != end && (c = GETC(fp)) != EOF ) {
                if (skipnextlf ) {
                    skipnextlf = 0;
                    if (c == '\n') {
                        /* Seeing a \n here with
                         * skipnextlf true means we
                         * saw a \r before.
                         */
                        newlinetypes |= NEWLINE_CRLF;
                        c = GETC(fp);
                        if (c == EOF) break;
                    } else {
                        newlinetypes |= NEWLINE_CR;
                    }
                }
                if (c == '\r') {
                    /* Translate \r (and a following \n, next
                     * iteration) into a single \n. */
                    skipnextlf = 1;
                    c = '\n';
                } else if ( c == '\n')
                    newlinetypes |= NEWLINE_LF;
                *buf++ = c;
                if (c == '\n') break;
            }
            if (c == EOF) {
                if (ferror(fp) && errno == EINTR) {
                    /* Interrupted by a signal: unlock, run Python
                     * signal handlers, then retry the whole read. */
                    FUNLOCKFILE(fp);
                    FILE_ABORT_ALLOW_THREADS(f)
                    f->f_newlinetypes = newlinetypes;
                    f->f_skipnextlf = skipnextlf;

                    if (PyErr_CheckSignals()) {
                        Py_DECREF(v);
                        return NULL;
                    }
                    /* We executed Python signal handlers and got no exception.
                     * Now back to reading the line where we left off. */
                    clearerr(fp);
                    continue;
                }
                /* File ended in \r: count it as a lone CR. */
                if (skipnextlf)
                    newlinetypes |= NEWLINE_CR;
            }
        } else /* If not universal newlines use the normal loop */
            while ((c = GETC(fp)) != EOF &&
                   (*buf++ = c) != '\n' &&
                buf != end)
                ;
        FUNLOCKFILE(fp);
        FILE_END_ALLOW_THREADS(f)
        f->f_newlinetypes = newlinetypes;
        f->f_skipnextlf = skipnextlf;
        if (c == '\n')
            break;
        if (c == EOF) {
            if (ferror(fp)) {
                if (errno == EINTR) {
                    if (PyErr_CheckSignals()) {
                        Py_DECREF(v);
                        return NULL;
                    }
                    /* We executed Python signal handlers and got no exception.
                     * Now back to reading the line where we left off. */
                    clearerr(fp);
                    continue;
                }
                PyErr_SetFromErrno(PyExc_IOError);
                clearerr(fp);
                Py_DECREF(v);
                return NULL;
            }
            clearerr(fp);
            if (PyErr_CheckSignals()) {
                Py_DECREF(v);
                return NULL;
            }
            break;
        }
        /* Must be because buf == end */
        if (n > 0)
            break;
        /* Unbounded read: grow the string and keep going. */
        used_v_size = total_v_size;
        increment = total_v_size >> 2; /* mild exponential growth */
        total_v_size += increment;
        if (total_v_size > PY_SSIZE_T_MAX) {
            PyErr_SetString(PyExc_OverflowError,
                "line is longer than a Python string can hold");
            Py_DECREF(v);
            return NULL;
        }
        if (_PyString_Resize(&v, total_v_size) < 0)
            return NULL;
        buf = BUF(v) + used_v_size;
        end = BUF(v) + total_v_size;
    }

    /* Shrink the string to the number of bytes actually read. */
    used_v_size = buf - BUF(v);
    if (used_v_size != total_v_size && _PyString_Resize(&v, used_v_size))
        return NULL;
    return v;
}
+\r
+/* External C interface */\r
+\r
/* Public C API: read one line from any file-like object f.
 * For real file objects this goes straight to get_line(); for anything else
 * it calls f.readline().  n > 0 limits the line length; n == 0 reads an
 * arbitrary line; n < 0 additionally strips a trailing '\n' and raises
 * EOFError on an empty result.  Returns a new reference, or NULL with an
 * exception set.
 */
PyObject *
PyFile_GetLine(PyObject *f, int n)
{
    PyObject *result;

    if (f == NULL) {
        PyErr_BadInternalCall();
        return NULL;
    }

    if (PyFile_Check(f)) {
        PyFileObject *fo = (PyFileObject *)f;
        if (fo->f_fp == NULL)
            return err_closed();
        if (!fo->readable)
            return err_mode("reading");
        /* refuse to mix with f.next() */
        if (fo->f_buf != NULL &&
            (fo->f_bufend - fo->f_bufptr) > 0 &&
            fo->f_buf[0] != '\0')
            return err_iterbuffered();
        result = get_line(fo, n);
    }
    else {
        /* Generic path: delegate to the object's readline() method. */
        PyObject *reader;
        PyObject *args;

        reader = PyObject_GetAttrString(f, "readline");
        if (reader == NULL)
            return NULL;
        if (n <= 0)
            args = PyTuple_New(0);
        else
            args = Py_BuildValue("(i)", n);
        if (args == NULL) {
            Py_DECREF(reader);
            return NULL;
        }
        result = PyEval_CallObject(reader, args);
        Py_DECREF(reader);
        Py_DECREF(args);
        if (result != NULL && !PyString_Check(result) &&
            !PyUnicode_Check(result)) {
            Py_DECREF(result);
            result = NULL;
            PyErr_SetString(PyExc_TypeError,
                       "object.readline() returned non-string");
        }
    }

    if (n < 0 && result != NULL && PyString_Check(result)) {
        char *s = PyString_AS_STRING(result);
        Py_ssize_t len = PyString_GET_SIZE(result);
        if (len == 0) {
            /* n < 0 promises a non-empty line: empty means EOF. */
            Py_DECREF(result);
            result = NULL;
            PyErr_SetString(PyExc_EOFError,
                            "EOF when reading a line");
        }
        else if (s[len-1] == '\n') {
            /* Strip the newline; resize in place only if we own
             * the sole reference, else build a fresh string. */
            if (result->ob_refcnt == 1) {
                if (_PyString_Resize(&result, len-1))
                    return NULL;
            }
            else {
                PyObject *v;
                v = PyString_FromStringAndSize(s, len-1);
                Py_DECREF(result);
                result = v;
            }
        }
    }
#ifdef Py_USING_UNICODE
    /* Same EOF/newline-stripping treatment for unicode results. */
    if (n < 0 && result != NULL && PyUnicode_Check(result)) {
        Py_UNICODE *s = PyUnicode_AS_UNICODE(result);
        Py_ssize_t len = PyUnicode_GET_SIZE(result);
        if (len == 0) {
            Py_DECREF(result);
            result = NULL;
            PyErr_SetString(PyExc_EOFError,
                            "EOF when reading a line");
        }
        else if (s[len-1] == '\n') {
            if (result->ob_refcnt == 1)
                PyUnicode_Resize(&result, len-1);
            else {
                PyObject *v;
                v = PyUnicode_FromUnicode(s, len-1);
                Py_DECREF(result);
                result = v;
            }
        }
    }
#endif
    return result;
}
+\r
+/* Python method */\r
+\r
+static PyObject *\r
+file_readline(PyFileObject *f, PyObject *args)\r
+{\r
+ int n = -1;\r
+\r
+ if (f->f_fp == NULL)\r
+ return err_closed();\r
+ if (!f->readable)\r
+ return err_mode("reading");\r
+ /* refuse to mix with f.next() */\r
+ if (f->f_buf != NULL &&\r
+ (f->f_bufend - f->f_bufptr) > 0 &&\r
+ f->f_buf[0] != '\0')\r
+ return err_iterbuffered();\r
+ if (!PyArg_ParseTuple(args, "|i:readline", &n))\r
+ return NULL;\r
+ if (n == 0)\r
+ return PyString_FromString("");\r
+ if (n < 0)\r
+ n = 0;\r
+ return get_line(f, n);\r
+}\r
+\r
/* file.readlines([sizehint]) -> list of line strings.
 * Reads in SMALLCHUNK-sized gulps, splitting on '\n' as it goes; a line
 * longer than the current buffer doubles the buffer (moving to a heap
 * string object once the stack buffer is outgrown).  A positive sizehint
 * stops reading once roughly that many bytes have been consumed, after
 * completing the current line.
 */
static PyObject *
file_readlines(PyFileObject *f, PyObject *args)
{
    long sizehint = 0;
    PyObject *list = NULL;
    PyObject *line;
    char small_buffer[SMALLCHUNK];
    char *buffer = small_buffer;
    size_t buffersize = SMALLCHUNK;
    PyObject *big_buffer = NULL;
    size_t nfilled = 0;         /* bytes of an incomplete line held over */
    size_t nread;
    size_t totalread = 0;
    char *p, *q, *end;
    int err;
    int shortread = 0;          /* bool, did the previous read come up short? */

    if (f->f_fp == NULL)
        return err_closed();
    if (!f->readable)
        return err_mode("reading");
    /* refuse to mix with f.next() */
    if (f->f_buf != NULL &&
        (f->f_bufend - f->f_bufptr) > 0 &&
        f->f_buf[0] != '\0')
        return err_iterbuffered();
    if (!PyArg_ParseTuple(args, "|l:readlines", &sizehint))
        return NULL;
    if ((list = PyList_New(0)) == NULL)
        return NULL;
    for (;;) {
        if (shortread)
            /* Previous read came up short, so we're at EOF (or were
             * interrupted); don't issue another read. */
            nread = 0;
        else {
            FILE_BEGIN_ALLOW_THREADS(f)
            errno = 0;
            nread = Py_UniversalNewlineFread(buffer+nfilled,
                buffersize-nfilled, f->f_fp, (PyObject *)f);
            FILE_END_ALLOW_THREADS(f)
            shortread = (nread < buffersize-nfilled);
        }
        if (nread == 0) {
            sizehint = 0;
            if (!ferror(f->f_fp))
                break;
            if (errno == EINTR) {
                /* Interrupted by a signal: run handlers, retry. */
                if (PyErr_CheckSignals()) {
                    goto error;
                }
                clearerr(f->f_fp);
                shortread = 0;
                continue;
            }
            PyErr_SetFromErrno(PyExc_IOError);
            clearerr(f->f_fp);
            goto error;
        }
        totalread += nread;
        p = (char *)memchr(buffer+nfilled, '\n', nread);
        if (p == NULL) {
            /* Need a larger buffer to fit this line */
            nfilled += nread;
            buffersize *= 2;
            if (buffersize > PY_SSIZE_T_MAX) {
                PyErr_SetString(PyExc_OverflowError,
                "line is longer than a Python string can hold");
                goto error;
            }
            if (big_buffer == NULL) {
                /* Create the big buffer */
                big_buffer = PyString_FromStringAndSize(
                    NULL, buffersize);
                if (big_buffer == NULL)
                    goto error;
                buffer = PyString_AS_STRING(big_buffer);
                memcpy(buffer, small_buffer, nfilled);
            }
            else {
                /* Grow the big buffer */
                if ( _PyString_Resize(&big_buffer, buffersize) < 0 )
                    goto error;
                buffer = PyString_AS_STRING(big_buffer);
            }
            continue;
        }
        end = buffer+nfilled+nread;
        q = buffer;
        do {
            /* Process complete lines */
            p++;
            line = PyString_FromStringAndSize(q, p-q);
            if (line == NULL)
                goto error;
            err = PyList_Append(list, line);
            Py_DECREF(line);
            if (err != 0)
                goto error;
            q = p;
            p = (char *)memchr(q, '\n', end-q);
        } while (p != NULL);
        /* Move the remaining incomplete line to the start */
        nfilled = end-q;
        memmove(buffer, q, nfilled);
        if (sizehint > 0)
            if (totalread >= (size_t)sizehint)
                break;
    }
    if (nfilled != 0) {
        /* Partial last line */
        line = PyString_FromStringAndSize(buffer, nfilled);
        if (line == NULL)
            goto error;
        if (sizehint > 0) {
            /* Need to complete the last line */
            PyObject *rest = get_line(f, 0);
            if (rest == NULL) {
                Py_DECREF(line);
                goto error;
            }
            PyString_Concat(&line, rest);
            Py_DECREF(rest);
            if (line == NULL)
                goto error;
        }
        err = PyList_Append(list, line);
        Py_DECREF(line);
        if (err != 0)
            goto error;
    }

cleanup:
    Py_XDECREF(big_buffer);
    return list;

error:
    Py_CLEAR(list);
    goto cleanup;
}
+\r
/* file.write(str) -> None.
 * In binary mode the argument is taken via the buffer protocol ("s*");
 * in text mode, str objects are written as-is, unicode objects are encoded
 * with the file's encoding/errors, and anything else must supply a char
 * buffer.  The fwrite() runs with the GIL released; errno is captured
 * inside the unlocked region, since reacquiring the GIL can clobber it.
 */
static PyObject *
file_write(PyFileObject *f, PyObject *args)
{
    Py_buffer pbuf;
    const char *s;
    Py_ssize_t n, n2;
    PyObject *encoded = NULL;
    int err_flag = 0, err;      /* err is only read when err_flag is set */

    if (f->f_fp == NULL)
        return err_closed();
    if (!f->writable)
        return err_mode("writing");
    if (f->f_binary) {
        if (!PyArg_ParseTuple(args, "s*", &pbuf))
            return NULL;
        s = pbuf.buf;
        n = pbuf.len;
    }
    else {
        PyObject *text;
        if (!PyArg_ParseTuple(args, "O", &text))
            return NULL;

        if (PyString_Check(text)) {
            s = PyString_AS_STRING(text);
            n = PyString_GET_SIZE(text);
#ifdef Py_USING_UNICODE
        } else if (PyUnicode_Check(text)) {
            /* Encode unicode using the file's encoding/errors,
             * falling back to the interpreter defaults. */
            const char *encoding, *errors;
            if (f->f_encoding != Py_None)
                encoding = PyString_AS_STRING(f->f_encoding);
            else
                encoding = PyUnicode_GetDefaultEncoding();
            if (f->f_errors != Py_None)
                errors = PyString_AS_STRING(f->f_errors);
            else
                errors = "strict";
            encoded = PyUnicode_AsEncodedString(text, encoding, errors);
            if (encoded == NULL)
                return NULL;
            s = PyString_AS_STRING(encoded);
            n = PyString_GET_SIZE(encoded);
#endif
        } else {
            if (PyObject_AsCharBuffer(text, &s, &n))
                return NULL;
        }
    }
    f->f_softspace = 0;
    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    n2 = fwrite(s, 1, n, f->f_fp);
    if (n2 != n || ferror(f->f_fp)) {
        err_flag = 1;
        err = errno;            /* save errno before the GIL is retaken */
    }
    FILE_END_ALLOW_THREADS(f)
    Py_XDECREF(encoded);
    if (f->f_binary)
        PyBuffer_Release(&pbuf);
    if (err_flag) {
        errno = err;
        PyErr_SetFromErrno(PyExc_IOError);
        clearerr(f->f_fp);
        return NULL;
    }
    Py_INCREF(Py_None);
    return Py_None;
}
+\r
/* file.writelines(sequence_of_strings) -> None.
 * Processes the sequence in CHUNKSIZE batches: each batch is collected
 * into a private list, coerced to strings where needed (conversion may run
 * Python code, so it happens with the GIL held), then written with the GIL
 * released.  Lists are sliced directly; other iterables go through iter().
 */
static PyObject *
file_writelines(PyFileObject *f, PyObject *seq)
{
#define CHUNKSIZE 1000
    PyObject *list, *line;
    PyObject *it;       /* iter(seq) */
    PyObject *result;
    int index, islist;
    Py_ssize_t i, j, nwritten, len;

    assert(seq != NULL);
    if (f->f_fp == NULL)
        return err_closed();
    if (!f->writable)
        return err_mode("writing");

    result = NULL;
    list = NULL;
    islist = PyList_Check(seq);
    if (islist)
        it = NULL;
    else {
        it = PyObject_GetIter(seq);
        if (it == NULL) {
            PyErr_SetString(PyExc_TypeError,
                "writelines() requires an iterable argument");
            return NULL;
        }
        /* From here on, fail by going to error, to reclaim "it". */
        list = PyList_New(CHUNKSIZE);
        if (list == NULL)
            goto error;
    }

    /* Strategy: slurp CHUNKSIZE lines into a private list,
       checking that they are all strings, then write that list
       without holding the interpreter lock, then come back for more. */
    for (index = 0; ; index += CHUNKSIZE) {
        if (islist) {
            Py_XDECREF(list);
            list = PyList_GetSlice(seq, index, index+CHUNKSIZE);
            if (list == NULL)
                goto error;
            j = PyList_GET_SIZE(list);
        }
        else {
            for (j = 0; j < CHUNKSIZE; j++) {
                line = PyIter_Next(it);
                if (line == NULL) {
                    if (PyErr_Occurred())
                        goto error;
                    break;
                }
                PyList_SetItem(list, j, line);
            }
            /* The iterator might have closed the file on us. */
            if (f->f_fp == NULL) {
                err_closed();
                goto error;
            }
        }
        if (j == 0)
            break;

        /* Check that all entries are indeed strings. If not,
           apply the same rules as for file.write() and
           convert the results to strings. This is slow, but
           seems to be the only way since all conversion APIs
           could potentially execute Python code. */
        for (i = 0; i < j; i++) {
            PyObject *v = PyList_GET_ITEM(list, i);
            if (!PyString_Check(v)) {
                const char *buffer;
                int res;
                if (f->f_binary) {
                    res = PyObject_AsReadBuffer(v, (const void**)&buffer, &len);
                } else {
                    res = PyObject_AsCharBuffer(v, &buffer, &len);
                }
                if (res) {
                    PyErr_SetString(PyExc_TypeError,
                        "writelines() argument must be a sequence of strings");
                    goto error;
                }
                line = PyString_FromStringAndSize(buffer,
                                                  len);
                if (line == NULL)
                    goto error;
                Py_DECREF(v);
                PyList_SET_ITEM(list, i, line);
            }
        }

        /* Since we are releasing the global lock, the
           following code may *not* execute Python code. */
        f->f_softspace = 0;
        FILE_BEGIN_ALLOW_THREADS(f)
        errno = 0;
        for (i = 0; i < j; i++) {
            line = PyList_GET_ITEM(list, i);
            len = PyString_GET_SIZE(line);
            nwritten = fwrite(PyString_AS_STRING(line),
                              1, len, f->f_fp);
            if (nwritten != len) {
                FILE_ABORT_ALLOW_THREADS(f)
                PyErr_SetFromErrno(PyExc_IOError);
                clearerr(f->f_fp);
                goto error;
            }
        }
        FILE_END_ALLOW_THREADS(f)

        /* A short batch means the iterator/list is exhausted. */
        if (j < CHUNKSIZE)
            break;
    }

    Py_INCREF(Py_None);
    result = Py_None;
  error:
    Py_XDECREF(list);
    Py_XDECREF(it);
    return result;
#undef CHUNKSIZE
}
+\r
+static PyObject *\r
+file_self(PyFileObject *f)\r
+{\r
+ if (f->f_fp == NULL)\r
+ return err_closed();\r
+ Py_INCREF(f);\r
+ return (PyObject *)f;\r
+}\r
+\r
+static PyObject *\r
+file_xreadlines(PyFileObject *f)\r
+{\r
+ if (PyErr_WarnPy3k("f.xreadlines() not supported in 3.x, "\r
+ "try 'for line in f' instead", 1) < 0)\r
+ return NULL;\r
+ return file_self(f);\r
+}\r
+\r
+static PyObject *\r
+file_exit(PyObject *f, PyObject *args)\r
+{\r
+ PyObject *ret = PyObject_CallMethod(f, "close", NULL);\r
+ if (!ret)\r
+ /* If error occurred, pass through */\r
+ return NULL;\r
+ Py_DECREF(ret);\r
+ /* We cannot return the result of close since a true\r
+ * value will be interpreted as "yes, swallow the\r
+ * exception if one was raised inside the with block". */\r
+ Py_RETURN_NONE;\r
+}\r
+\r
+PyDoc_STRVAR(readline_doc,\r
+"readline([size]) -> next line from the file, as a string.\n"\r
+"\n"\r
+"Retain newline. A non-negative size argument limits the maximum\n"\r
+"number of bytes to return (an incomplete line may be returned then).\n"\r
+"Return an empty string at EOF.");\r
+\r
+PyDoc_STRVAR(read_doc,\r
+"read([size]) -> read at most size bytes, returned as a string.\n"\r
+"\n"\r
+"If the size argument is negative or omitted, read until EOF is reached.\n"\r
+"Notice that when in non-blocking mode, less data than what was requested\n"\r
+"may be returned, even if no size parameter was given.");\r
+\r
+PyDoc_STRVAR(write_doc,\r
+"write(str) -> None. Write string str to file.\n"\r
+"\n"\r
+"Note that due to buffering, flush() or close() may be needed before\n"\r
+"the file on disk reflects the data written.");\r
+\r
+PyDoc_STRVAR(fileno_doc,\r
+"fileno() -> integer \"file descriptor\".\n"\r
+"\n"\r
+"This is needed for lower-level file interfaces, such os.read().");\r
+\r
+PyDoc_STRVAR(seek_doc,\r
+"seek(offset[, whence]) -> None. Move to new file position.\n"\r
+"\n"\r
+"Argument offset is a byte count. Optional argument whence defaults to\n"\r
+"0 (offset from start of file, offset should be >= 0); other values are 1\n"\r
+"(move relative to current position, positive or negative), and 2 (move\n"\r
+"relative to end of file, usually negative, although many platforms allow\n"\r
+"seeking beyond the end of a file). If the file is opened in text mode,\n"\r
+"only offsets returned by tell() are legal. Use of other offsets causes\n"\r
+"undefined behavior."\r
+"\n"\r
+"Note that not all file objects are seekable.");\r
+\r
+#ifdef HAVE_FTRUNCATE\r
+PyDoc_STRVAR(truncate_doc,\r
+"truncate([size]) -> None. Truncate the file to at most size bytes.\n"\r
+"\n"\r
+"Size defaults to the current file position, as returned by tell().");\r
+#endif\r
+\r
+PyDoc_STRVAR(tell_doc,\r
+"tell() -> current file position, an integer (may be a long integer).");\r
+\r
+PyDoc_STRVAR(readinto_doc,\r
+"readinto() -> Undocumented. Don't use this; it may go away.");\r
+\r
+PyDoc_STRVAR(readlines_doc,\r
+"readlines([size]) -> list of strings, each a line from the file.\n"\r
+"\n"\r
+"Call readline() repeatedly and return a list of the lines so read.\n"\r
+"The optional size argument, if given, is an approximate bound on the\n"\r
+"total number of bytes in the lines returned.");\r
+\r
+PyDoc_STRVAR(xreadlines_doc,\r
+"xreadlines() -> returns self.\n"\r
+"\n"\r
+"For backward compatibility. File objects now include the performance\n"\r
+"optimizations previously implemented in the xreadlines module.");\r
+\r
+PyDoc_STRVAR(writelines_doc,\r
+"writelines(sequence_of_strings) -> None. Write the strings to the file.\n"\r
+"\n"\r
+"Note that newlines are not added. The sequence can be any iterable object\n"\r
+"producing strings. This is equivalent to calling write() for each string.");\r
+\r
+PyDoc_STRVAR(flush_doc,\r
+"flush() -> None. Flush the internal I/O buffer.");\r
+\r
+PyDoc_STRVAR(close_doc,\r
+"close() -> None or (perhaps) an integer. Close the file.\n"\r
+"\n"\r
+"Sets data attribute .closed to True. A closed file cannot be used for\n"\r
+"further I/O operations. close() may be called more than once without\n"\r
+"error. Some kinds of file objects (for example, opened by popen())\n"\r
+"may return an exit status upon closing.");\r
+\r
+PyDoc_STRVAR(isatty_doc,\r
+"isatty() -> true or false. True if the file is connected to a tty device.");\r
+\r
+PyDoc_STRVAR(enter_doc,\r
+ "__enter__() -> self.");\r
+\r
+PyDoc_STRVAR(exit_doc,\r
+ "__exit__(*excinfo) -> None. Closes the file.");\r
+\r
/* Method table for the file type; the read/seek/close entries reference
 * functions defined elsewhere in this file. */
static PyMethodDef file_methods[] = {
    {"readline",  (PyCFunction)file_readline,   METH_VARARGS, readline_doc},
    {"read",      (PyCFunction)file_read,       METH_VARARGS, read_doc},
    {"write",     (PyCFunction)file_write,      METH_VARARGS, write_doc},
    {"fileno",    (PyCFunction)file_fileno,     METH_NOARGS,  fileno_doc},
    {"seek",      (PyCFunction)file_seek,       METH_VARARGS, seek_doc},
#ifdef HAVE_FTRUNCATE
    {"truncate",  (PyCFunction)file_truncate,   METH_VARARGS, truncate_doc},
#endif
    {"tell",      (PyCFunction)file_tell,       METH_NOARGS,  tell_doc},
    {"readinto",  (PyCFunction)file_readinto,   METH_VARARGS, readinto_doc},
    {"readlines", (PyCFunction)file_readlines,  METH_VARARGS, readlines_doc},
    {"xreadlines",(PyCFunction)file_xreadlines, METH_NOARGS,  xreadlines_doc},
    {"writelines",(PyCFunction)file_writelines, METH_O,       writelines_doc},
    {"flush",     (PyCFunction)file_flush,      METH_NOARGS,  flush_doc},
    {"close",     (PyCFunction)file_close,      METH_NOARGS,  close_doc},
    {"isatty",    (PyCFunction)file_isatty,     METH_NOARGS,  isatty_doc},
    {"__enter__", (PyCFunction)file_self,       METH_NOARGS,  enter_doc},
    {"__exit__",  (PyCFunction)file_exit,       METH_VARARGS, exit_doc},
    {NULL,            NULL}             /* sentinel */
};
+\r
/* Shorthand for member offsets within PyFileObject. */
#define OFF(x) offsetof(PyFileObject, x)

/* Read-only data attributes exposed on file objects. */
static PyMemberDef file_memberlist[] = {
    {"mode",            T_OBJECT,       OFF(f_mode),    RO,
     "file mode ('r', 'U', 'w', 'a', possibly with 'b' or '+' added)"},
    {"name",            T_OBJECT,       OFF(f_name),    RO,
     "file name"},
    {"encoding",        T_OBJECT,       OFF(f_encoding),        RO,
     "file encoding"},
    {"errors",          T_OBJECT,       OFF(f_errors),  RO,
     "Unicode error handler"},
    /* getattr(f, "closed") is implemented without this table */
    {NULL}      /* Sentinel */
};
+\r
+static PyObject *\r
+get_closed(PyFileObject *f, void *closure)\r
+{\r
+ return PyBool_FromLong((long)(f->f_fp == 0));\r
+}\r
+static PyObject *\r
+get_newlines(PyFileObject *f, void *closure)\r
+{\r
+ switch (f->f_newlinetypes) {\r
+ case NEWLINE_UNKNOWN:\r
+ Py_INCREF(Py_None);\r
+ return Py_None;\r
+ case NEWLINE_CR:\r
+ return PyString_FromString("\r");\r
+ case NEWLINE_LF:\r
+ return PyString_FromString("\n");\r
+ case NEWLINE_CR|NEWLINE_LF:\r
+ return Py_BuildValue("(ss)", "\r", "\n");\r
+ case NEWLINE_CRLF:\r
+ return PyString_FromString("\r\n");\r
+ case NEWLINE_CR|NEWLINE_CRLF:\r
+ return Py_BuildValue("(ss)", "\r", "\r\n");\r
+ case NEWLINE_LF|NEWLINE_CRLF:\r
+ return Py_BuildValue("(ss)", "\n", "\r\n");\r
+ case NEWLINE_CR|NEWLINE_LF|NEWLINE_CRLF:\r
+ return Py_BuildValue("(sss)", "\r", "\n", "\r\n");\r
+ default:\r
+ PyErr_Format(PyExc_SystemError,\r
+ "Unknown newlines value 0x%x\n",\r
+ f->f_newlinetypes);\r
+ return NULL;\r
+ }\r
+}\r
+\r
+static PyObject *\r
+get_softspace(PyFileObject *f, void *closure)\r
+{\r
+ if (PyErr_WarnPy3k("file.softspace not supported in 3.x", 1) < 0)\r
+ return NULL;\r
+ return PyInt_FromLong(f->f_softspace);\r
+}\r
+\r
+static int\r
+set_softspace(PyFileObject *f, PyObject *value)\r
+{\r
+ int new;\r
+ if (PyErr_WarnPy3k("file.softspace not supported in 3.x", 1) < 0)\r
+ return -1;\r
+\r
+ if (value == NULL) {\r
+ PyErr_SetString(PyExc_TypeError,\r
+ "can't delete softspace attribute");\r
+ return -1;\r
+ }\r
+\r
+ new = PyInt_AsLong(value);\r
+ if (new == -1 && PyErr_Occurred())\r
+ return -1;\r
+ f->f_softspace = new;\r
+ return 0;\r
+}\r
+\r
/* Computed attributes (closed/newlines/softspace) for file objects. */
static PyGetSetDef file_getsetlist[] = {
    {"closed", (getter)get_closed, NULL, "True if the file is closed"},
    {"newlines", (getter)get_newlines, NULL,
     "end-of-line convention used in this file"},
    {"softspace", (getter)get_softspace, (setter)set_softspace,
     "flag indicating that a space needs to be printed; used by print"},
    {0},
};
+\r
+static void\r
+drop_readahead(PyFileObject *f)\r
+{\r
+ if (f->f_buf != NULL) {\r
+ PyMem_Free(f->f_buf);\r
+ f->f_buf = NULL;\r
+ }\r
+}\r
+\r
/* Make sure that file has a readahead buffer with at least one byte
   (unless at EOF) and no more than bufsize.  Returns negative value on
   error, will set MemoryError if bufsize bytes cannot be allocated.
   On success f_bufptr/f_bufend delimit the unread bytes; at EOF the
   buffer is present but empty (bufptr == bufend). */
static int
readahead(PyFileObject *f, Py_ssize_t bufsize)
{
    Py_ssize_t chunksize;

    if (f->f_buf != NULL) {
        /* Existing buffer still has unread bytes: nothing to do. */
        if( (f->f_bufend - f->f_bufptr) >= 1)
            return 0;
        else
            drop_readahead(f);
    }
    if ((f->f_buf = (char *)PyMem_Malloc(bufsize)) == NULL) {
        PyErr_NoMemory();
        return -1;
    }
    FILE_BEGIN_ALLOW_THREADS(f)
    errno = 0;
    chunksize = Py_UniversalNewlineFread(
        f->f_buf, bufsize, f->f_fp, (PyObject *)f);
    FILE_END_ALLOW_THREADS(f)
    if (chunksize == 0) {
        /* 0 bytes: either EOF (fine, keep the empty buffer) or error. */
        if (ferror(f->f_fp)) {
            PyErr_SetFromErrno(PyExc_IOError);
            clearerr(f->f_fp);
            drop_readahead(f);
            return -1;
        }
    }
    f->f_bufptr = f->f_buf;
    f->f_bufend = f->f_buf + chunksize;
    return 0;
}
+\r
/* Used by file_iternext.  The returned string will start with 'skip'
   uninitialized bytes followed by the remainder of the line. Don't be
   horrified by the recursive call: maximum recursion depth is limited by
   logarithmic buffer growth to about 50 even when reading a 1gb line.
   Each recursion level reads one more buffer-full; the line's pieces are
   copied into the final string as the recursion unwinds. */

static PyStringObject *
readahead_get_line_skip(PyFileObject *f, Py_ssize_t skip, Py_ssize_t bufsize)
{
    PyStringObject* s;
    char *bufptr;
    char *buf;
    Py_ssize_t len;

    if (f->f_buf == NULL)
        if (readahead(f, bufsize) < 0)
            return NULL;

    len = f->f_bufend - f->f_bufptr;
    if (len == 0)
        /* EOF: hand back just the reserved 'skip' bytes for callers
         * above us to fill in. */
        return (PyStringObject *)
            PyString_FromStringAndSize(NULL, skip);
    bufptr = (char *)memchr(f->f_bufptr, '\n', len);
    if (bufptr != NULL) {
        /* The line ends inside the current buffer: build the result
         * and advance the readahead pointer past the newline. */
        bufptr++;                               /* Count the '\n' */
        len = bufptr - f->f_bufptr;
        s = (PyStringObject *)
            PyString_FromStringAndSize(NULL, skip + len);
        if (s == NULL)
            return NULL;
        memcpy(PyString_AS_STRING(s) + skip, f->f_bufptr, len);
        f->f_bufptr = bufptr;
        if (bufptr == f->f_bufend)
            drop_readahead(f);
    } else {
        /* No newline yet: detach the current buffer, recurse for the
         * rest of the line (with a larger buffer), then splice this
         * buffer's bytes into the reserved gap. */
        bufptr = f->f_bufptr;
        buf = f->f_buf;
        f->f_buf = NULL;        /* Force new readahead buffer */
        assert(len <= PY_SSIZE_T_MAX - skip);
        s = readahead_get_line_skip(f, skip + len, bufsize + (bufsize>>2));
        if (s == NULL) {
            PyMem_Free(buf);
            return NULL;
        }
        memcpy(PyString_AS_STRING(s) + skip, bufptr, len);
        PyMem_Free(buf);
    }
    return s;
}
+\r
+/* A larger buffer size may actually decrease performance. */\r
+#define READAHEAD_BUFSIZE 8192\r
+\r
+static PyObject *\r
+file_iternext(PyFileObject *f)\r
+{\r
+ PyStringObject* l;\r
+\r
+ if (f->f_fp == NULL)\r
+ return err_closed();\r
+ if (!f->readable)\r
+ return err_mode("reading");\r
+\r
+ l = readahead_get_line_skip(f, 0, READAHEAD_BUFSIZE);\r
+ if (l == NULL || PyString_GET_SIZE(l) == 0) {\r
+ Py_XDECREF(l);\r
+ return NULL;\r
+ }\r
+ return (PyObject *)l;\r
+}\r
+\r
+\r
+static PyObject *\r
+file_new(PyTypeObject *type, PyObject *args, PyObject *kwds)\r
+{\r
+ PyObject *self;\r
+ static PyObject *not_yet_string;\r
+\r
+ assert(type != NULL && type->tp_alloc != NULL);\r
+\r
+ if (not_yet_string == NULL) {\r
+ not_yet_string = PyString_InternFromString("<uninitialized file>");\r
+ if (not_yet_string == NULL)\r
+ return NULL;\r
+ }\r
+\r
+ self = type->tp_alloc(type, 0);\r
+ if (self != NULL) {\r
+ /* Always fill in the name and mode, so that nobody else\r
+ needs to special-case NULLs there. */\r
+ Py_INCREF(not_yet_string);\r
+ ((PyFileObject *)self)->f_name = not_yet_string;\r
+ Py_INCREF(not_yet_string);\r
+ ((PyFileObject *)self)->f_mode = not_yet_string;\r
+ Py_INCREF(Py_None);\r
+ ((PyFileObject *)self)->f_encoding = Py_None;\r
+ Py_INCREF(Py_None);\r
+ ((PyFileObject *)self)->f_errors = Py_None;\r
+ ((PyFileObject *)self)->weakreflist = NULL;\r
+ ((PyFileObject *)self)->unlocked_count = 0;\r
+ }\r
+ return self;\r
+}\r
+\r
/* tp_init slot: file(name[, mode[, buffering]]).  (Re)opens the named
   file on this object, closing any previously open stream first.
   Returns 0 on success, -1 with an exception set on failure. */
static int
file_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    PyFileObject *foself = (PyFileObject *)self;
    int ret = 0;
    static char *kwlist[] = {"name", "mode", "buffering", 0};
    char *name = NULL;
    char *mode = "r";
    int bufsize = -1;
    int wideargument = 0;
#ifdef MS_WINDOWS
    PyObject *po;
#endif

    assert(PyFile_Check(self));
    if (foself->f_fp != NULL) {
        /* Have to close the existing file first. */
        PyObject *closeresult = file_close(foself);
        if (closeresult == NULL)
            return -1;
        Py_DECREF(closeresult);
    }

#ifdef MS_WINDOWS
    /* Try the wide (unicode) filename form first; on this path 'name'
       stays NULL and open_the_file() presumably uses f_name instead --
       TODO confirm against open_the_file, which is outside this view. */
    if (PyArg_ParseTupleAndKeywords(args, kwds, "U|si:file",
                                    kwlist, &po, &mode, &bufsize)) {
        wideargument = 1;
        if (fill_file_fields(foself, NULL, po, mode,
                             fclose) == NULL)
            goto Error;
    } else {
        /* Drop the argument parsing error as narrow
           strings are also valid. */
        PyErr_Clear();
    }
#endif

    if (!wideargument) {
        PyObject *o_name;

        /* First parse: encode the name with the filesystem default
           encoding ('et' allocates 'name'; freed at Done). */
        if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|si:file", kwlist,
                                         Py_FileSystemDefaultEncoding,
                                         &name,
                                         &mode, &bufsize))
            return -1;

        /* We parse again to get the name as a PyObject */
        if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|si:file",
                                         kwlist, &o_name, &mode,
                                         &bufsize))
            goto Error;

        if (fill_file_fields(foself, NULL, o_name, mode,
                             fclose) == NULL)
            goto Error;
    }
    if (open_the_file(foself, name, mode) == NULL)
        goto Error;
    foself->f_setbuf = NULL;
    PyFile_SetBufSize(self, bufsize);
    goto Done;

Error:
    ret = -1;
    /* fall through */
Done:
    PyMem_Free(name); /* free the encoded string */
    return ret;
}
+\r
/* Docstring for the file type.  Built from two adjacent PyDoc_STR()
   chunks joined by string-literal concatenation; the second chunk
   documents universal-newline ('U') mode. */
PyDoc_VAR(file_doc) =
PyDoc_STR(
"file(name[, mode[, buffering]]) -> file object\n"
"\n"
"Open a file. The mode can be 'r', 'w' or 'a' for reading (default),\n"
"writing or appending. The file will be created if it doesn't exist\n"
"when opened for writing or appending; it will be truncated when\n"
"opened for writing. Add a 'b' to the mode for binary files.\n"
"Add a '+' to the mode to allow simultaneous reading and writing.\n"
"If the buffering argument is given, 0 means unbuffered, 1 means line\n"
"buffered, and larger numbers specify the buffer size. The preferred way\n"
"to open a file is with the builtin open() function.\n"
)
PyDoc_STR(
"Add a 'U' to mode to open the file for input with universal newline\n"
"support. Any line ending in the input file will be seen as a '\\n'\n"
"in Python. Also, a file so opened gains the attribute 'newlines';\n"
"the value for this attribute is one of None (no newline read yet),\n"
"'\\r', '\\n', '\\r\\n' or a tuple containing all the newline types seen.\n"
"\n"
"'U' cannot be combined with 'w' or '+' mode.\n"
);
+\r
/* Type object for the built-in 'file' type.  Subclassable
   (Py_TPFLAGS_BASETYPE), supports weak references, and iterates by
   line via the readahead-based file_iternext. */
PyTypeObject PyFile_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "file",
    sizeof(PyFileObject),
    0,
    (destructor)file_dealloc,                   /* tp_dealloc */
    0,                                          /* tp_print */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_compare */
    (reprfunc)file_repr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    /* softspace is writable:  we must supply tp_setattro */
    PyObject_GenericSetAttr,                    /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_WEAKREFS, /* tp_flags */
    file_doc,                                   /* tp_doc */
    0,                                          /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    offsetof(PyFileObject, weakreflist),        /* tp_weaklistoffset */
    (getiterfunc)file_self,                     /* tp_iter */
    (iternextfunc)file_iternext,                /* tp_iternext */
    file_methods,                               /* tp_methods */
    file_memberlist,                            /* tp_members */
    file_getsetlist,                            /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    file_init,                                  /* tp_init */
    PyType_GenericAlloc,                        /* tp_alloc */
    file_new,                                   /* tp_new */
    PyObject_Del,                               /* tp_free */
};
+\r
+/* Interface for the 'soft space' between print items. */\r
+\r
+int\r
+PyFile_SoftSpace(PyObject *f, int newflag)\r
+{\r
+ long oldflag = 0;\r
+ if (f == NULL) {\r
+ /* Do nothing */\r
+ }\r
+ else if (PyFile_Check(f)) {\r
+ oldflag = ((PyFileObject *)f)->f_softspace;\r
+ ((PyFileObject *)f)->f_softspace = newflag;\r
+ }\r
+ else {\r
+ PyObject *v;\r
+ v = PyObject_GetAttrString(f, "softspace");\r
+ if (v == NULL)\r
+ PyErr_Clear();\r
+ else {\r
+ if (PyInt_Check(v))\r
+ oldflag = PyInt_AsLong(v);\r
+ assert(oldflag < INT_MAX);\r
+ Py_DECREF(v);\r
+ }\r
+ v = PyInt_FromLong((long)newflag);\r
+ if (v == NULL)\r
+ PyErr_Clear();\r
+ else {\r
+ if (PyObject_SetAttrString(f, "softspace", v) != 0)\r
+ PyErr_Clear();\r
+ Py_DECREF(v);\r
+ }\r
+ }\r
+ return (int)oldflag;\r
+}\r
+\r
/* Interfaces to write objects/strings to file-like objects */

/* Write object v to file-like object f.  If flags & Py_PRINT_RAW,
   str()-style output is produced, otherwise repr().  Returns 0 on
   success, -1 with an exception set on error. */
int
PyFile_WriteObject(PyObject *v, PyObject *f, int flags)
{
    PyObject *writer, *value, *args, *result;
    if (f == NULL) {
        PyErr_SetString(PyExc_TypeError, "writeobject with NULL file");
        return -1;
    }
    else if (PyFile_Check(f)) {
        /* Real file object: print straight to the stdio stream. */
        PyFileObject *fobj = (PyFileObject *) f;
#ifdef Py_USING_UNICODE
        PyObject *enc = fobj->f_encoding;
        /* NOTE: this int deliberately shadows the outer PyObject *result;
           only the int is used on this branch. */
        int result;
#endif
        if (fobj->f_fp == NULL) {
            err_closed();
            return -1;
        }
#ifdef Py_USING_UNICODE
        /* Raw-printing unicode to a file with a known encoding: encode
           first, honoring f_errors ("strict" when f_errors is None). */
        if ((flags & Py_PRINT_RAW) &&
            PyUnicode_Check(v) && enc != Py_None) {
            char *cenc = PyString_AS_STRING(enc);
            char *errors = fobj->f_errors == Py_None ?
                "strict" : PyString_AS_STRING(fobj->f_errors);
            value = PyUnicode_AsEncodedString(v, cenc, errors);
            if (value == NULL)
                return -1;
        } else {
            value = v;
            Py_INCREF(value);
        }
        result = file_PyObject_Print(value, fobj, flags);
        Py_DECREF(value);
        return result;
#else
        return file_PyObject_Print(v, fobj, flags);
#endif
    }
    /* Generic file-like object: call its write() method. */
    writer = PyObject_GetAttrString(f, "write");
    if (writer == NULL)
        return -1;
    if (flags & Py_PRINT_RAW) {
        /* Pass unicode through unchanged so write() can handle it. */
        if (PyUnicode_Check(v)) {
            value = v;
            Py_INCREF(value);
        } else
            value = PyObject_Str(v);
    }
    else
        value = PyObject_Repr(v);
    if (value == NULL) {
        Py_DECREF(writer);
        return -1;
    }
    args = PyTuple_Pack(1, value);
    if (args == NULL) {
        Py_DECREF(value);
        Py_DECREF(writer);
        return -1;
    }
    result = PyEval_CallObject(writer, args);
    Py_DECREF(args);
    Py_DECREF(value);
    Py_DECREF(writer);
    if (result == NULL)
        return -1;
    Py_DECREF(result);
    return 0;
}
+\r
/* Write the C string s to file-like object f.  Returns 0 on success,
   -1 on error.  For non-file objects, falls back to PyFile_WriteObject
   (i.e. the object's write() method), but only when no exception is
   already pending. */
int
PyFile_WriteString(const char *s, PyObject *f)
{

    if (f == NULL) {
        /* Should be caused by a pre-existing error */
        if (!PyErr_Occurred())
            PyErr_SetString(PyExc_SystemError,
                            "null file for PyFile_WriteString");
        return -1;
    }
    else if (PyFile_Check(f)) {
        PyFileObject *fobj = (PyFileObject *) f;
        FILE *fp = PyFile_AsFile(f);
        if (fp == NULL) {
            err_closed();
            return -1;
        }
        FILE_BEGIN_ALLOW_THREADS(fobj)
        /* NOTE(review): the fputs() result is ignored here, so write
           errors on real file objects are silently dropped. */
        fputs(s, fp);
        FILE_END_ALLOW_THREADS(fobj)
        return 0;
    }
    else if (!PyErr_Occurred()) {
        /* Generic file-like object: route through its write() method. */
        PyObject *v = PyString_FromString(s);
        int err;
        if (v == NULL)
            return -1;
        err = PyFile_WriteObject(v, f, Py_PRINT_RAW);
        Py_DECREF(v);
        return err;
    }
    else
        return -1;
}
+\r
+/* Try to get a file-descriptor from a Python object. If the object\r
+ is an integer or long integer, its value is returned. If not, the\r
+ object's fileno() method is called if it exists; the method must return\r
+ an integer or long integer, which is returned as the file descriptor value.\r
+ -1 is returned on failure.\r
+*/\r
+\r
+int PyObject_AsFileDescriptor(PyObject *o)\r
+{\r
+ int fd;\r
+ PyObject *meth;\r
+\r
+ if (PyInt_Check(o)) {\r
+ fd = _PyInt_AsInt(o);\r
+ }\r
+ else if (PyLong_Check(o)) {\r
+ fd = _PyLong_AsInt(o);\r
+ }\r
+ else if ((meth = PyObject_GetAttrString(o, "fileno")) != NULL)\r
+ {\r
+ PyObject *fno = PyEval_CallObject(meth, NULL);\r
+ Py_DECREF(meth);\r
+ if (fno == NULL)\r
+ return -1;\r
+\r
+ if (PyInt_Check(fno)) {\r
+ fd = _PyInt_AsInt(fno);\r
+ Py_DECREF(fno);\r
+ }\r
+ else if (PyLong_Check(fno)) {\r
+ fd = _PyLong_AsInt(fno);\r
+ Py_DECREF(fno);\r
+ }\r
+ else {\r
+ PyErr_SetString(PyExc_TypeError,\r
+ "fileno() returned a non-integer");\r
+ Py_DECREF(fno);\r
+ return -1;\r
+ }\r
+ }\r
+ else {\r
+ PyErr_SetString(PyExc_TypeError,\r
+ "argument must be an int, or have a fileno() method.");\r
+ return -1;\r
+ }\r
+\r
+ if (fd < 0) {\r
+ PyErr_Format(PyExc_ValueError,\r
+ "file descriptor cannot be a negative integer (%i)",\r
+ fd);\r
+ return -1;\r
+ }\r
+ return fd;\r
+}\r
+\r
+/* From here on we need access to the real fgets and fread */\r
+#undef fgets\r
+#undef fread\r
+\r
/*
** Py_UniversalNewlineFgets is an fgets variation that understands
** all of \r, \n and \r\n conventions.
** The stream should be opened in binary mode.
** If fobj is NULL the routine always does newline conversion, and
** it may peek one char ahead to gobble the second char in \r\n.
** If fobj is non-NULL it must be a PyFileObject. In this case there
** is no readahead but in stead a flag is used to skip a following
** \n on the next read. Also, if the file is open in binary mode
** the whole conversion is skipped. Finally, the routine keeps track of
** the different types of newlines seen.
** Note that we need no error handling: fgets() treats error and eof
** identically.
*/
char *
Py_UniversalNewlineFgets(char *buf, int n, FILE *stream, PyObject *fobj)
{
    char *p = buf;
    int c;
    int newlinetypes = 0;       /* NEWLINE_* bits observed this call */
    int skipnextlf = 0;         /* saw a \r; swallow an immediately following \n */
    int univ_newline = 1;

    if (fobj) {
        if (!PyFile_Check(fobj)) {
            errno = ENXIO;      /* What can you do... */
            return NULL;
        }
        /* Universal newlines disabled: plain fgets() semantics. */
        univ_newline = ((PyFileObject *)fobj)->f_univ_newline;
        if ( !univ_newline )
            return fgets(buf, n, stream);
        /* Resume the newline state carried over from previous calls. */
        newlinetypes = ((PyFileObject *)fobj)->f_newlinetypes;
        skipnextlf = ((PyFileObject *)fobj)->f_skipnextlf;
    }
    FLOCKFILE(stream);
    c = 'x';                    /* Shut up gcc warning */
    while (--n > 0 && (c = GETC(stream)) != EOF ) {
        if (skipnextlf ) {
            skipnextlf = 0;
            if (c == '\n') {
                /* Seeing a \n here with skipnextlf true
                ** means we saw a \r before.
                */
                newlinetypes |= NEWLINE_CRLF;
                c = GETC(stream);
                if (c == EOF) break;
            } else {
                /*
                ** Note that c == EOF also brings us here,
                ** so we're okay if the last char in the file
                ** is a CR.
                */
                newlinetypes |= NEWLINE_CR;
            }
        }
        if (c == '\r') {
            /* A \r is translated into a \n, and we skip
            ** an adjacent \n, if any. We don't set the
            ** newlinetypes flag until we've seen the next char.
            */
            skipnextlf = 1;
            c = '\n';
        } else if ( c == '\n') {
            newlinetypes |= NEWLINE_LF;
        }
        *p++ = c;
        if (c == '\n') break;
    }
    /* A \r immediately before EOF still counts as a CR-style newline. */
    if ( c == EOF && skipnextlf )
        newlinetypes |= NEWLINE_CR;
    FUNLOCKFILE(stream);
    *p = '\0';
    if (fobj) {
        /* Persist the newline state on the file object for the next call. */
        ((PyFileObject *)fobj)->f_newlinetypes = newlinetypes;
        ((PyFileObject *)fobj)->f_skipnextlf = skipnextlf;
    } else if ( skipnextlf ) {
        /* If we have no file object we cannot save the
        ** skipnextlf flag. We have to readahead, which
        ** will cause a pause if we're reading from an
        ** interactive stream, but that is very unlikely
        ** unless we're doing something silly like
        ** execfile("/dev/tty").
        */
        c = GETC(stream);
        if ( c != '\n' )
            ungetc(c, stream);
    }
    if (p == buf)
        return NULL;
    return buf;
}
+\r
/*
** Py_UniversalNewlineFread is an fread variation that understands
** all of \r, \n and \r\n conventions.
** The stream should be opened in binary mode.
** fobj must be a PyFileObject. In this case there
** is no readahead but in stead a flag is used to skip a following
** \n on the next read. Also, if the file is open in binary mode
** the whole conversion is skipped. Finally, the routine keeps track of
** the different types of newlines seen.
**
** Returns the number of bytes stored in buf (which may be smaller than
** n even before EOF, since each \r\n pair shrinks to a single \n).
*/
size_t
Py_UniversalNewlineFread(char *buf, size_t n,
                         FILE *stream, PyObject *fobj)
{
    char *dst = buf;
    PyFileObject *f = (PyFileObject *)fobj;
    int newlinetypes, skipnextlf;

    assert(buf != NULL);
    assert(stream != NULL);

    if (!fobj || !PyFile_Check(fobj)) {
        errno = ENXIO;          /* What can you do... */
        return 0;
    }
    /* Universal newlines disabled: plain fread() semantics. */
    if (!f->f_univ_newline)
        return fread(buf, 1, n, stream);
    newlinetypes = f->f_newlinetypes;
    skipnextlf = f->f_skipnextlf;
    /* Invariant:  n is the number of bytes remaining to be filled
     * in the buffer.
     */
    while (n) {
        size_t nread;
        int shortread;
        char *src = dst;        /* translate in place: read cursor chases dst */

        nread = fread(dst, 1, n, stream);
        assert(nread <= n);
        if (nread == 0)
            break;

        n -= nread; /* assuming 1 byte out for each in; will adjust */
        shortread = n != 0;             /* true iff EOF or error */
        while (nread--) {
            char c = *src++;
            if (c == '\r') {
                /* Save as LF and set flag to skip next LF. */
                *dst++ = '\n';
                skipnextlf = 1;
            }
            else if (skipnextlf && c == '\n') {
                /* Skip LF, and remember we saw CR LF. */
                skipnextlf = 0;
                newlinetypes |= NEWLINE_CRLF;
                /* Re-credit the swallowed byte so the buffer can still
                   be filled to n bytes overall. */
                ++n;
            }
            else {
                /* Normal char to be stored in buffer.  Also
                 * update the newlinetypes flag if either this
                 * is an LF or the previous char was a CR.
                 */
                if (c == '\n')
                    newlinetypes |= NEWLINE_LF;
                else if (skipnextlf)
                    newlinetypes |= NEWLINE_CR;
                *dst++ = c;
                skipnextlf = 0;
            }
        }
        if (shortread) {
            /* If this is EOF, update type flags. */
            if (skipnextlf && feof(stream))
                newlinetypes |= NEWLINE_CR;
            break;
        }
    }
    /* Persist the newline state on the file object for the next call. */
    f->f_newlinetypes = newlinetypes;
    f->f_skipnextlf = skipnextlf;
    return dst - buf;
}
+\r
+#ifdef __cplusplus\r
+}\r
+#endif\r