/** @file
  Parser-tokenizer link implementation.

  Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
  This program and the accompanying materials are licensed and made available under
  the terms and conditions of the BSD License that accompanies this distribution.
  The full text of the license may be found at
  http://opensource.org/licenses/bsd-license.php.

  THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
  WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
**/
#include "pgenheaders.h"
#include "tokenizer.h"
#include "node.h"
#include "grammar.h"
#include "parser.h"
#include "parsetok.h"
#include "errcode.h"
#include "graminit.h"

int Py_TabcheckFlag;


/* Forward */
static node *parsetok(struct tok_state *, grammar *, int, perrdetail *, int *);
static void initerr(perrdetail *err_ret, const char* filename);

/* Parse input coming from a string. Return error code, print some errors. */
node *
PyParser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret)
{
    return PyParser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0);
}

node *
PyParser_ParseStringFlags(const char *s, grammar *g, int start,
                          perrdetail *err_ret, int flags)
{
    return PyParser_ParseStringFlagsFilename(s, NULL,
                                             g, start, err_ret, flags);
}

node *
PyParser_ParseStringFlagsFilename(const char *s, const char *filename,
                                  grammar *g, int start,
                                  perrdetail *err_ret, int flags)
{
    int iflags = flags;
    return PyParser_ParseStringFlagsFilenameEx(s, filename, g, start,
                                               err_ret, &iflags);
}

node *
PyParser_ParseStringFlagsFilenameEx(const char *s, const char *filename,
                                    grammar *g, int start,
                                    perrdetail *err_ret, int *flags)
{
    struct tok_state *tok;

    initerr(err_ret, filename);

    if ((tok = PyTokenizer_FromString(s, start == file_input)) == NULL) {
        err_ret->error = PyErr_Occurred() ? E_DECODE : E_NOMEM;
        return NULL;
    }

    tok->filename = filename ? filename : "<string>";
    if (Py_TabcheckFlag || Py_VerboseFlag) {
        tok->altwarning = (tok->filename != NULL);
        if (Py_TabcheckFlag >= 2)
            tok->alterror++;
    }

    return parsetok(tok, g, start, err_ret, flags);
}

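#if 0
/* Usage sketch (not compiled): one way an embedder might drive the string
   entry point above.  It assumes the grammar object `_PyParser_Grammar`
   generated into graminit.c by standard CPython is linked in; the names in
   this example are illustrative and are not part of this module. */
extern grammar _PyParser_Grammar;

static node *
parse_string_example(const char *source)
{
    perrdetail err;
    node *n = PyParser_ParseString(source, &_PyParser_Grammar,
                                   file_input, &err);
    if (n == NULL) {
        /* err.error holds an E_* code from errcode.h; err.lineno and
           err.offset locate the failure, and err.text (when non-NULL)
           must be released by the caller with PyObject_FREE. */
        return NULL;
    }
    return n;   /* caller frees the tree with PyNode_Free(n) */
}
#endif
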
/* Parse input coming from a file. Return error code, print some errors. */

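/* In the file-based entry points below, ps1 and ps2 are the primary and
   secondary interactive prompts (typically ">>> " and "... ") passed on
   to the tokenizer; they are NULL for non-interactive input. */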
node *
PyParser_ParseFile(FILE *fp, const char *filename, grammar *g, int start,
                   char *ps1, char *ps2, perrdetail *err_ret)
{
    return PyParser_ParseFileFlags(fp, filename, g, start, ps1, ps2,
                                   err_ret, 0);
}

node *
PyParser_ParseFileFlags(FILE *fp, const char *filename, grammar *g, int start,
                        char *ps1, char *ps2, perrdetail *err_ret, int flags)
{
    int iflags = flags;
    return PyParser_ParseFileFlagsEx(fp, filename, g, start, ps1, ps2, err_ret, &iflags);
}

node *
PyParser_ParseFileFlagsEx(FILE *fp, const char *filename, grammar *g, int start,
                          char *ps1, char *ps2, perrdetail *err_ret, int *flags)
{
    struct tok_state *tok;

    initerr(err_ret, filename);

    if ((tok = PyTokenizer_FromFile(fp, ps1, ps2)) == NULL) {
        err_ret->error = E_NOMEM;
        return NULL;
    }
    tok->filename = filename;
    if (Py_TabcheckFlag || Py_VerboseFlag) {
        tok->altwarning = (filename != NULL);
        if (Py_TabcheckFlag >= 2)
            tok->alterror++;
    }

    return parsetok(tok, g, start, err_ret, flags);
}

#if 0
static char with_msg[] =
"%s:%d: Warning: 'with' will become a reserved keyword in Python 2.6\n";

static char as_msg[] =
"%s:%d: Warning: 'as' will become a reserved keyword in Python 2.6\n";

static void
warn(const char *msg, const char *filename, int lineno)
{
    if (filename == NULL)
        filename = "<string>";
    PySys_WriteStderr(msg, filename, lineno);
}
#endif

/* Parse input coming from the given tokenizer structure.
   Return error code. */

static node *
parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
         int *flags)
{
    parser_state *ps;
    node *n;
    int started = 0;
    //int handling_import = 0;
    //int handling_with = 0;

    if ((ps = PyParser_New(g, start)) == NULL) {
        fprintf(stderr, "no mem for new parser\n");
        err_ret->error = E_NOMEM;
        PyTokenizer_Free(tok);
        return NULL;
    }
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    if (*flags & PyPARSE_PRINT_IS_FUNCTION) {
        ps->p_flags |= CO_FUTURE_PRINT_FUNCTION;
    }
    if (*flags & PyPARSE_UNICODE_LITERALS) {
        ps->p_flags |= CO_FUTURE_UNICODE_LITERALS;
    }

#endif

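    /* Main loop: fetch one token at a time from the tokenizer and feed it
       to the parser until the start symbol is complete (E_DONE) or an
       error occurs. */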
    for (;;) {
        char *a, *b;
        int type;
        size_t len;
        char *str;
        int col_offset;

        type = PyTokenizer_Get(tok, &a, &b);
        if (type == ERRORTOKEN) {
            err_ret->error = tok->done;
            break;
        }
        if (type == ENDMARKER && started) {
            type = NEWLINE; /* Add an extra newline */
            //handling_with = 0;
            //handling_import = 0;
            started = 0;
            /* Add the right number of dedent tokens,
               except if a certain flag is given --
               codeop.py uses this. */
            if (tok->indent &&
                !(*flags & PyPARSE_DONT_IMPLY_DEDENT))
            {
                tok->pendin = -tok->indent;
                tok->indent = 0;
            }
        }
        else
            started = 1;
        len = b - a; /* XXX this may compute NULL - NULL */
        str = (char *) PyObject_MALLOC(len + 1);
        if (str == NULL) {
            fprintf(stderr, "no mem for next token\n");
            err_ret->error = E_NOMEM;
            break;
        }
        if (len > 0)
            strncpy(str, a, len);
        str[len] = '\0';

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
#endif
        if (a >= tok->line_start)
            col_offset = a - tok->line_start;
        else
            col_offset = -1;

        if ((err_ret->error =
             PyParser_AddToken(ps, (int)type, str, tok->lineno, col_offset,
                               &(err_ret->expected))) != E_OK) {
            if (err_ret->error != E_DONE) {
                PyObject_FREE(str);
                err_ret->token = type;
            }
            break;
        }
    }

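    /* E_DONE means the grammar's start symbol was completely matched;
       detach the finished tree from the parser state so PyParser_Delete()
       below does not free it. */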
    if (err_ret->error == E_DONE) {
        n = ps->p_tree;
        ps->p_tree = NULL;
    }
    else
        n = NULL;

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    *flags = ps->p_flags;
#endif
    PyParser_Delete(ps);

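    /* On failure, record where the tokenizer stopped (line number, offset
       and, when available, the text of the offending line) in err_ret so
       callers can report a useful syntax error. */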
    if (n == NULL) {
        if (tok->lineno <= 1 && tok->done == E_EOF)
            err_ret->error = E_EOF;
        err_ret->lineno = tok->lineno;
        if (tok->buf != NULL) {
            char *text = NULL;
            size_t len;
            assert(tok->cur - tok->buf < INT_MAX);
            err_ret->offset = (int)(tok->cur - tok->buf);
            len = tok->inp - tok->buf;
#ifdef Py_USING_UNICODE
            text = PyTokenizer_RestoreEncoding(tok, len, &err_ret->offset);

#endif
            if (text == NULL) {
                text = (char *) PyObject_MALLOC(len + 1);
                if (text != NULL) {
                    if (len > 0)
                        strncpy(text, tok->buf, len);
                    text[len] = '\0';
                }
            }
            err_ret->text = text;
        }
    } else if (tok->encoding != NULL) {
        /* 'nodes->n_str' uses PyObject_*, while 'tok->encoding' was
         * allocated using PyMem_
         */
        node* r = PyNode_New(encoding_decl);
        if (r)
            r->n_str = PyObject_MALLOC(strlen(tok->encoding)+1);
        if (!r || !r->n_str) {
            err_ret->error = E_NOMEM;
            if (r)
                PyObject_FREE(r);
            n = NULL;
            goto done;
        }
        strcpy(r->n_str, tok->encoding);
        PyMem_FREE(tok->encoding);
        tok->encoding = NULL;
        r->n_nchildren = 1;
        r->n_child = n;
        n = r;
    }

done:
    PyTokenizer_Free(tok);

    return n;
}

static void
initerr(perrdetail *err_ret, const char *filename)
{
    err_ret->error = E_OK;
    err_ret->filename = filename;
    err_ret->lineno = 0;
    err_ret->offset = 0;
    err_ret->text = NULL;
    err_ret->token = -1;
    err_ret->expected = -1;
}