/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 * Copyright (c) 2014-2018 Paul Sokolovsky
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include <stdarg.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "py/parsenum.h"
#include "py/compile.h"
#include "py/objstr.h"
#include "py/objtuple.h"
#include "py/objlist.h"
#include "py/objtype.h"
#include "py/objmodule.h"
#include "py/objgenerator.h"
#include "py/smallint.h"
#include "py/runtime.h"
#include "py/builtin.h"
#include "py/stackctrl.h"
#include "py/gc.h"

#if MICROPY_DEBUG_VERBOSE // print debugging info
#define DEBUG_PRINT (1)
#define DEBUG_printf DEBUG_printf
#define DEBUG_OP_printf(...) DEBUG_printf(__VA_ARGS__)
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#define DEBUG_OP_printf(...) (void)0
#endif

const mp_obj_module_t mp_module___main__ = {
    .base = { &mp_type_module },
    .globals = (mp_obj_dict_t*)&MP_STATE_VM(dict_main),
};

void mp_init(void) {
    qstr_init();

    // no pending exceptions to start with
    MP_STATE_VM(mp_pending_exception) = MP_OBJ_NULL;
    #if MICROPY_ENABLE_SCHEDULER
    MP_STATE_VM(sched_state) = MP_SCHED_IDLE;
    MP_STATE_VM(sched_idx) = 0;
    MP_STATE_VM(sched_len) = 0;
    #endif

#if MICROPY_ENABLE_EMERGENCY_EXCEPTION_BUF
    mp_init_emergency_exception_buf();
#endif

    #if MICROPY_KBD_EXCEPTION
    // initialise the exception object for raising KeyboardInterrupt
    MP_STATE_VM(mp_kbd_exception).base.type = &mp_type_KeyboardInterrupt;
    MP_STATE_VM(mp_kbd_exception).traceback_alloc = 0;
    MP_STATE_VM(mp_kbd_exception).traceback_len = 0;
    MP_STATE_VM(mp_kbd_exception).traceback_data = NULL;
    MP_STATE_VM(mp_kbd_exception).args = (mp_obj_tuple_t*)&mp_const_empty_tuple_obj;
    #endif

    #if MICROPY_ENABLE_COMPILER
    // optimization disabled by default
    MP_STATE_VM(mp_optimise_value) = 0;
    #if MICROPY_EMIT_NATIVE
    MP_STATE_VM(default_emit_opt) = MP_EMIT_OPT_NONE;
    #endif
    #endif

    // init global module dict
    mp_obj_dict_init(&MP_STATE_VM(mp_loaded_modules_dict), 3);

    // initialise the __main__ module
    mp_obj_dict_init(&MP_STATE_VM(dict_main), 1);
    mp_obj_dict_store(MP_OBJ_FROM_PTR(&MP_STATE_VM(dict_main)), MP_OBJ_NEW_QSTR(MP_QSTR___name__), MP_OBJ_NEW_QSTR(MP_QSTR___main__));

    // locals = globals for outer module (see Objects/frameobject.c/PyFrame_New())
    mp_locals_set(&MP_STATE_VM(dict_main));
    mp_globals_set(&MP_STATE_VM(dict_main));

    #if MICROPY_CAN_OVERRIDE_BUILTINS
    // start with no extensions to builtins
    MP_STATE_VM(mp_module_builtins_override_dict) = NULL;
    #endif

    #if MICROPY_PY_OS_DUPTERM
    for (size_t i = 0; i < MICROPY_PY_OS_DUPTERM; ++i) {
        MP_STATE_VM(dupterm_objs[i]) = MP_OBJ_NULL;
    }
    #endif

    #if MICROPY_VFS
    // initialise the VFS sub-system
    MP_STATE_VM(vfs_cur) = NULL;
    MP_STATE_VM(vfs_mount_table) = NULL;
    #endif

    #if MICROPY_PY_SYS_ATEXIT
    MP_STATE_VM(sys_exitfunc) = mp_const_none;
    #endif

    #if MICROPY_PY_SYS_SETTRACE
    MP_STATE_THREAD(prof_trace_callback) = MP_OBJ_NULL;
    MP_STATE_THREAD(prof_callback_is_executing) = false;
    MP_STATE_THREAD(current_code_state) = NULL;
    #endif

    #if MICROPY_PY_BLUETOOTH
    MP_STATE_VM(bluetooth) = MP_OBJ_NULL;
    #endif

    #if MICROPY_PY_THREAD_GIL
    mp_thread_mutex_init(&MP_STATE_VM(gil_mutex));
    #endif

    // call port specific initialization if any
    #ifdef MICROPY_PORT_INIT_FUNC
    MICROPY_PORT_INIT_FUNC;
    #endif

    MP_THREAD_GIL_ENTER();
}

void mp_deinit(void) {
    MP_THREAD_GIL_EXIT();

    // call port specific deinitialization if any
    #ifdef MICROPY_PORT_DEINIT_FUNC
    MICROPY_PORT_DEINIT_FUNC;
    #endif

    //mp_obj_dict_free(&dict_main);
    //mp_map_deinit(&MP_STATE_VM(mp_loaded_modules_map));
}

mp_obj_t mp_load_name(qstr qst) {
    // logic: search locals, globals, builtins
    DEBUG_OP_printf("load name %s\n", qstr_str(qst));
    // If we're at the outer scope (locals == globals), dispatch to load_global right away
    if (mp_locals_get() != mp_globals_get()) {
        mp_map_elem_t *elem = mp_map_lookup(&mp_locals_get()->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP);
        if (elem != NULL) {
            return elem->value;
        }
    }
    return mp_load_global(qst);
}

mp_obj_t mp_load_global(qstr qst) {
    // logic: search globals, builtins
    DEBUG_OP_printf("load global %s\n", qstr_str(qst));
    mp_map_elem_t *elem = mp_map_lookup(&mp_globals_get()->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP);
    if (elem == NULL) {
        #if MICROPY_CAN_OVERRIDE_BUILTINS
        if (MP_STATE_VM(mp_module_builtins_override_dict) != NULL) {
            // lookup in additional dynamic table of builtins first
            elem = mp_map_lookup(&MP_STATE_VM(mp_module_builtins_override_dict)->map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP);
            if (elem != NULL) {
                return elem->value;
            }
        }
        #endif
        elem = mp_map_lookup((mp_map_t*)&mp_module_builtins_globals.map, MP_OBJ_NEW_QSTR(qst), MP_MAP_LOOKUP);
        if (elem == NULL) {
            if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
                mp_raise_msg(&mp_type_NameError, "name not defined");
            } else {
                mp_raise_msg_varg(&mp_type_NameError, "name '%q' isn't defined", qst);
            }
        }
    }
    return elem->value;
}

mp_obj_t mp_load_build_class(void) {
    DEBUG_OP_printf("load_build_class\n");
    #if MICROPY_CAN_OVERRIDE_BUILTINS
    if (MP_STATE_VM(mp_module_builtins_override_dict) != NULL) {
        // lookup in additional dynamic table of builtins first
        mp_map_elem_t *elem = mp_map_lookup(&MP_STATE_VM(mp_module_builtins_override_dict)->map, MP_OBJ_NEW_QSTR(MP_QSTR___build_class__), MP_MAP_LOOKUP);
        if (elem != NULL) {
            return elem->value;
        }
    }
    #endif
    return MP_OBJ_FROM_PTR(&mp_builtin___build_class___obj);
}

void mp_store_name(qstr qst, mp_obj_t obj) {
    DEBUG_OP_printf("store name %s <- %p\n", qstr_str(qst), obj);
    mp_obj_dict_store(MP_OBJ_FROM_PTR(mp_locals_get()), MP_OBJ_NEW_QSTR(qst), obj);
}

void mp_delete_name(qstr qst) {
    DEBUG_OP_printf("delete name %s\n", qstr_str(qst));
    // TODO convert KeyError to NameError if qst not found
    mp_obj_dict_delete(MP_OBJ_FROM_PTR(mp_locals_get()), MP_OBJ_NEW_QSTR(qst));
}

void mp_store_global(qstr qst, mp_obj_t obj) {
    DEBUG_OP_printf("store global %s <- %p\n", qstr_str(qst), obj);
    mp_obj_dict_store(MP_OBJ_FROM_PTR(mp_globals_get()), MP_OBJ_NEW_QSTR(qst), obj);
}

void mp_delete_global(qstr qst) {
    DEBUG_OP_printf("delete global %s\n", qstr_str(qst));
    // TODO convert KeyError to NameError if qst not found
    mp_obj_dict_delete(MP_OBJ_FROM_PTR(mp_globals_get()), MP_OBJ_NEW_QSTR(qst));
}

mp_obj_t mp_unary_op(mp_unary_op_t op, mp_obj_t arg) {
    DEBUG_OP_printf("unary " UINT_FMT " %q %p\n", op, mp_unary_op_method_name[op], arg);

    if (op == MP_UNARY_OP_NOT) {
        // "not x" is the negative of whether "x" is true per Python semantics
        return mp_obj_new_bool(mp_obj_is_true(arg) == 0);
    } else if (mp_obj_is_small_int(arg)) {
        mp_int_t val = MP_OBJ_SMALL_INT_VALUE(arg);
        switch (op) {
            case MP_UNARY_OP_BOOL:
                return mp_obj_new_bool(val != 0);
            case MP_UNARY_OP_HASH:
                return arg;
            case MP_UNARY_OP_POSITIVE:
            case MP_UNARY_OP_INT:
                return arg;
            case MP_UNARY_OP_NEGATIVE:
                // check for overflow
                if (val == MP_SMALL_INT_MIN) {
                    return mp_obj_new_int(-val);
                } else {
                    return MP_OBJ_NEW_SMALL_INT(-val);
                }
            case MP_UNARY_OP_ABS:
                if (val >= 0) {
                    return arg;
                } else if (val == MP_SMALL_INT_MIN) {
                    // check for overflow
                    return mp_obj_new_int(-val);
                } else {
                    return MP_OBJ_NEW_SMALL_INT(-val);
                }
            default:
                assert(op == MP_UNARY_OP_INVERT);
                return MP_OBJ_NEW_SMALL_INT(~val);
        }
    } else if (op == MP_UNARY_OP_HASH && mp_obj_is_str_or_bytes(arg)) {
        // fast path for hashing str/bytes
        GET_STR_HASH(arg, h);
        if (h == 0) {
            GET_STR_DATA_LEN(arg, data, len);
            h = qstr_compute_hash(data, len);
        }
        return MP_OBJ_NEW_SMALL_INT(h);
    } else {
        const mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(op, arg);
            if (result != MP_OBJ_NULL) {
                return result;
            }
        }
        // With MP_UNARY_OP_INT, mp_unary_op() becomes a fallback for mp_obj_get_int().
        // In this case provide a more focused error message to avoid confusion, e.g. for chr(1.0)
        if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
            if (op == MP_UNARY_OP_INT) {
                mp_raise_TypeError("can't convert to int");
            } else {
                mp_raise_TypeError("unsupported type for operator");
            }
        } else {
            if (op == MP_UNARY_OP_INT) {
                mp_raise_msg_varg(&mp_type_TypeError,
                    "can't convert %s to int", mp_obj_get_type_str(arg));
            } else {
                mp_raise_msg_varg(&mp_type_TypeError,
                    "unsupported type for %q: '%s'",
                    mp_unary_op_method_name[op], mp_obj_get_type_str(arg));
            }
        }
    }
}

mp_obj_t mp_binary_op(mp_binary_op_t op, mp_obj_t lhs, mp_obj_t rhs) {
    DEBUG_OP_printf("binary " UINT_FMT " %q %p %p\n", op, mp_binary_op_method_name[op], lhs, rhs);

    // TODO correctly distinguish inplace operators for mutable objects
    // lookup logic that CPython uses for +=:
    //   check for implemented +=
    //   then check for implemented +
    //   then check for implemented seq.inplace_concat
    //   then check for implemented seq.concat
    //   then fail
    // note that list does not implement + or +=, so that inplace_concat is reached first for +=

    // deal with is
    if (op == MP_BINARY_OP_IS) {
        return mp_obj_new_bool(lhs == rhs);
    }

    // deal with == and != for all types
    if (op == MP_BINARY_OP_EQUAL || op == MP_BINARY_OP_NOT_EQUAL) {
        // mp_obj_equal_not_equal supports a bunch of shortcuts
        return mp_obj_equal_not_equal(op, lhs, rhs);
    }

    // deal with exception_match for all types
    if (op == MP_BINARY_OP_EXCEPTION_MATCH) {
        // rhs must be an exception type, i.e. issubclass(rhs, BaseException), or a tuple of such
        if (mp_obj_is_exception_type(rhs)) {
            if (mp_obj_exception_match(lhs, rhs)) {
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        } else if (mp_obj_is_type(rhs, &mp_type_tuple)) {
            mp_obj_tuple_t *tuple = MP_OBJ_TO_PTR(rhs);
            for (size_t i = 0; i < tuple->len; i++) {
                rhs = tuple->items[i];
                if (!mp_obj_is_exception_type(rhs)) {
                    goto unsupported_op;
                }
                if (mp_obj_exception_match(lhs, rhs)) {
                    return mp_const_true;
                }
            }
            return mp_const_false;
        }
        goto unsupported_op;
    }

    if (mp_obj_is_small_int(lhs)) {
        mp_int_t lhs_val = MP_OBJ_SMALL_INT_VALUE(lhs);
        if (mp_obj_is_small_int(rhs)) {
            mp_int_t rhs_val = MP_OBJ_SMALL_INT_VALUE(rhs);
            // This is a binary operation: lhs_val op rhs_val
            // We need to be careful to handle overflow; see CERT INT32-C
            // Operations that can overflow:
            //      +       result always fits in mp_int_t, then handled by SMALL_INT check
            //      -       result always fits in mp_int_t, then handled by SMALL_INT check
            //      *       checked explicitly
            //      /       if lhs=MIN and rhs=-1; result always fits in mp_int_t, then handled by SMALL_INT check
            //      %       if lhs=MIN and rhs=-1; result always fits in mp_int_t, then handled by SMALL_INT check
            //      <<      checked explicitly
            switch (op) {
                case MP_BINARY_OP_OR:
                case MP_BINARY_OP_INPLACE_OR: lhs_val |= rhs_val; break;
                case MP_BINARY_OP_XOR:
                case MP_BINARY_OP_INPLACE_XOR: lhs_val ^= rhs_val; break;
                case MP_BINARY_OP_AND:
                case MP_BINARY_OP_INPLACE_AND: lhs_val &= rhs_val; break;
                case MP_BINARY_OP_LSHIFT:
                case MP_BINARY_OP_INPLACE_LSHIFT: {
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        mp_raise_ValueError("negative shift count");
                    } else if (rhs_val >= (mp_int_t)BITS_PER_WORD || lhs_val > (MP_SMALL_INT_MAX >> rhs_val) || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
                        // left-shift will overflow, so use higher precision integer
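                        // (Illustrative note, assuming a port with roughly 31-bit small ints:
                        // a shift like 1 << 40 exceeds MP_SMALL_INT_MAX and so takes this
                        // branch, computing the result as a multi-precision int instead.)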
                        lhs = mp_obj_new_int_from_ll(lhs_val);
                        goto generic_binary_op;
                    } else {
                        // use standard precision
                        lhs_val <<= rhs_val;
                    }
                    break;
                }
                case MP_BINARY_OP_RSHIFT:
                case MP_BINARY_OP_INPLACE_RSHIFT:
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        mp_raise_ValueError("negative shift count");
                    } else {
                        // standard precision is enough for right-shift
                        if (rhs_val >= (mp_int_t)BITS_PER_WORD) {
                            // Shifting by large amounts is undefined behaviour
                            // in C and is CPU-dependent; propagate the sign bit.
                            rhs_val = BITS_PER_WORD - 1;
                        }
                        lhs_val >>= rhs_val;
                    }
                    break;
                case MP_BINARY_OP_ADD:
                case MP_BINARY_OP_INPLACE_ADD: lhs_val += rhs_val; break;
                case MP_BINARY_OP_SUBTRACT:
                case MP_BINARY_OP_INPLACE_SUBTRACT: lhs_val -= rhs_val; break;
                case MP_BINARY_OP_MULTIPLY:
                case MP_BINARY_OP_INPLACE_MULTIPLY: {

                    // If the long long type exists and is larger than mp_int_t, then
                    // we can use the following code to perform overflow-checked multiplication.
                    // Otherwise (e.g. on x64) we must use mp_small_int_mul_overflow.
                    #if 0
                    // compute result using long long precision
                    long long res = (long long)lhs_val * (long long)rhs_val;
                    if (res > MP_SMALL_INT_MAX || res < MP_SMALL_INT_MIN) {
                        // result overflowed SMALL_INT, so return higher precision integer
                        return mp_obj_new_int_from_ll(res);
                    } else {
                        // use standard precision
                        lhs_val = (mp_int_t)res;
                    }
                    #endif

                    if (mp_small_int_mul_overflow(lhs_val, rhs_val)) {
                        // use higher precision
                        lhs = mp_obj_new_int_from_ll(lhs_val);
                        goto generic_binary_op;
                    } else {
                        // use standard precision
                        return MP_OBJ_NEW_SMALL_INT(lhs_val * rhs_val);
                    }
                }
                case MP_BINARY_OP_FLOOR_DIVIDE:
                case MP_BINARY_OP_INPLACE_FLOOR_DIVIDE:
                    if (rhs_val == 0) {
                        goto zero_division;
                    }
                    lhs_val = mp_small_int_floor_divide(lhs_val, rhs_val);
                    break;

                #if MICROPY_PY_BUILTINS_FLOAT
                case MP_BINARY_OP_TRUE_DIVIDE:
                case MP_BINARY_OP_INPLACE_TRUE_DIVIDE:
                    if (rhs_val == 0) {
                        goto zero_division;
                    }
                    return mp_obj_new_float((mp_float_t)lhs_val / (mp_float_t)rhs_val);
                #endif

                case MP_BINARY_OP_MODULO:
                case MP_BINARY_OP_INPLACE_MODULO: {
                    if (rhs_val == 0) {
                        goto zero_division;
                    }
                    lhs_val = mp_small_int_modulo(lhs_val, rhs_val);
                    break;
                }

                case MP_BINARY_OP_POWER:
                case MP_BINARY_OP_INPLACE_POWER:
                    if (rhs_val < 0) {
                        #if MICROPY_PY_BUILTINS_FLOAT
                        return mp_obj_float_binary_op(op, lhs_val, rhs);
                        #else
                        mp_raise_ValueError("negative power with no float support");
                        #endif
                    } else {
                        mp_int_t ans = 1;
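                        // Exponentiation by squaring: examine rhs_val bit by bit, squaring
                        // lhs_val each round and multiplying it into ans when the low bit is
                        // set; if any multiply would overflow a small int, fall back to the
                        // multi-precision path via power_overflow below.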
                        while (rhs_val > 0) {
                            if (rhs_val & 1) {
                                if (mp_small_int_mul_overflow(ans, lhs_val)) {
                                    goto power_overflow;
                                }
                                ans *= lhs_val;
                            }
                            if (rhs_val == 1) {
                                break;
                            }
                            rhs_val /= 2;
                            if (mp_small_int_mul_overflow(lhs_val, lhs_val)) {
                                goto power_overflow;
                            }
                            lhs_val *= lhs_val;
                        }
                        lhs_val = ans;
                    }
                    break;

                power_overflow:
                    // use higher precision
                    lhs = mp_obj_new_int_from_ll(MP_OBJ_SMALL_INT_VALUE(lhs));
                    goto generic_binary_op;

                case MP_BINARY_OP_DIVMOD: {
                    if (rhs_val == 0) {
                        goto zero_division;
                    }
                    // to reduce stack usage we don't pass a temp array of the 2 items
                    mp_obj_tuple_t *tuple = MP_OBJ_TO_PTR(mp_obj_new_tuple(2, NULL));
                    tuple->items[0] = MP_OBJ_NEW_SMALL_INT(mp_small_int_floor_divide(lhs_val, rhs_val));
                    tuple->items[1] = MP_OBJ_NEW_SMALL_INT(mp_small_int_modulo(lhs_val, rhs_val));
                    return MP_OBJ_FROM_PTR(tuple);
                }

                case MP_BINARY_OP_LESS: return mp_obj_new_bool(lhs_val < rhs_val);
                case MP_BINARY_OP_MORE: return mp_obj_new_bool(lhs_val > rhs_val);
                case MP_BINARY_OP_LESS_EQUAL: return mp_obj_new_bool(lhs_val <= rhs_val);
                case MP_BINARY_OP_MORE_EQUAL: return mp_obj_new_bool(lhs_val >= rhs_val);

                default:
                    goto unsupported_op;
            }
            // This is an inlined version of mp_obj_new_int, for speed
            if (MP_SMALL_INT_FITS(lhs_val)) {
                return MP_OBJ_NEW_SMALL_INT(lhs_val);
            } else {
                return mp_obj_new_int_from_ll(lhs_val);
            }
#if MICROPY_PY_BUILTINS_FLOAT
        } else if (mp_obj_is_float(rhs)) {
            mp_obj_t res = mp_obj_float_binary_op(op, lhs_val, rhs);
            if (res == MP_OBJ_NULL) {
                goto unsupported_op;
            } else {
                return res;
            }
#endif
#if MICROPY_PY_BUILTINS_COMPLEX
        } else if (mp_obj_is_type(rhs, &mp_type_complex)) {
            mp_obj_t res = mp_obj_complex_binary_op(op, lhs_val, 0, rhs);
            if (res == MP_OBJ_NULL) {
                goto unsupported_op;
            } else {
                return res;
            }
#endif
        }
    }

    // Convert MP_BINARY_OP_IN to MP_BINARY_OP_CONTAINS with swapped args.
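    // (In other words, "x in y" is evaluated as a containment test on y with x as the
    // operand, mirroring Python's y.__contains__(x).)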
    if (op == MP_BINARY_OP_IN) {
        op = MP_BINARY_OP_CONTAINS;
        mp_obj_t temp = lhs;
        lhs = rhs;
        rhs = temp;
    }

    // generic binary_op supplied by type
    const mp_obj_type_t *type;
generic_binary_op:
    type = mp_obj_get_type(lhs);
    if (type->binary_op != NULL) {
        mp_obj_t result = type->binary_op(op, lhs, rhs);
        if (result != MP_OBJ_NULL) {
            return result;
        }
    }

#if MICROPY_PY_REVERSE_SPECIAL_METHODS
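    // The left operand's type didn't handle the op, so retry with the operands swapped and
    // the corresponding reflected op (e.g. __radd__ for __add__); if that also fails, swap
    // back so the error message below names the original op.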
    if (op >= MP_BINARY_OP_OR && op <= MP_BINARY_OP_POWER) {
        mp_obj_t t = rhs;
        rhs = lhs;
        lhs = t;
        op += MP_BINARY_OP_REVERSE_OR - MP_BINARY_OP_OR;
        goto generic_binary_op;
    } else if (op >= MP_BINARY_OP_REVERSE_OR) {
        // Convert __rop__ back to __op__ for error message
        mp_obj_t t = rhs;
        rhs = lhs;
        lhs = t;
        op -= MP_BINARY_OP_REVERSE_OR - MP_BINARY_OP_OR;
    }
#endif

    if (op == MP_BINARY_OP_CONTAINS) {
        // If type didn't support containment then explicitly walk the iterator.
        // mp_getiter will raise the appropriate exception if lhs is not iterable.
        mp_obj_iter_buf_t iter_buf;
        mp_obj_t iter = mp_getiter(lhs, &iter_buf);
        mp_obj_t next;
        while ((next = mp_iternext(iter)) != MP_OBJ_STOP_ITERATION) {
            if (mp_obj_equal(next, rhs)) {
                return mp_const_true;
            }
        }
        return mp_const_false;
    }

unsupported_op:
    if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
        mp_raise_TypeError("unsupported type for operator");
    } else {
        mp_raise_msg_varg(&mp_type_TypeError,
            "unsupported types for %q: '%s', '%s'",
            mp_binary_op_method_name[op], mp_obj_get_type_str(lhs), mp_obj_get_type_str(rhs));
    }

zero_division:
    mp_raise_msg(&mp_type_ZeroDivisionError, "divide by zero");
}

mp_obj_t mp_call_function_0(mp_obj_t fun) {
    return mp_call_function_n_kw(fun, 0, 0, NULL);
}

mp_obj_t mp_call_function_1(mp_obj_t fun, mp_obj_t arg) {
    return mp_call_function_n_kw(fun, 1, 0, &arg);
}

mp_obj_t mp_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2) {
    mp_obj_t args[2];
    args[0] = arg1;
    args[1] = arg2;
    return mp_call_function_n_kw(fun, 2, 0, args);
}

// args contains, eg: arg0  arg1  key0  value0  key1  value1
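// Illustrative sketch (not from the original source): a call like fun(1, 2, k=3) would
// arrive here with n_args=2, n_kw=1 and args = { 1, 2, key 'k', 3 }, i.e. positional
// arguments first, followed by alternating key/value pairs.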
mp_obj_t mp_call_function_n_kw(mp_obj_t fun_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
    // TODO improve this: the fun object could specify its type so that the arguments are
    // parsed here and passed to the function as arrays of positional and keyword arguments

    DEBUG_OP_printf("calling function %p(n_args=" UINT_FMT ", n_kw=" UINT_FMT ", args=%p)\n", fun_in, n_args, n_kw, args);

    // get the type
    const mp_obj_type_t *type = mp_obj_get_type(fun_in);

    // do the call
    if (type->call != NULL) {
        return type->call(fun_in, n_args, n_kw, args);
    }

    if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
        mp_raise_TypeError("object not callable");
    } else {
        mp_raise_msg_varg(&mp_type_TypeError,
            "'%s' object isn't callable", mp_obj_get_type_str(fun_in));
    }
}

// args contains: fun  self/NULL  arg(0)  ...  arg(n_args-2)  arg(n_args-1)  kw_key(0)  kw_val(0)  ... kw_key(n_kw-1)  kw_val(n_kw-1)
// if n_args==0 and n_kw==0 then there are only fun and self/NULL
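// Hedged example: for a bound call obj.meth(x, k=1) the layout would be
//     args = { meth, obj, x, key 'k', 1 }  with n_args=1, n_kw=1,
// and 'adjust' below prepends obj as the first positional argument; when args[1] is
// MP_OBJ_NULL the self slot is skipped entirely.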
mp_obj_t mp_call_method_n_kw(size_t n_args, size_t n_kw, const mp_obj_t *args) {
    DEBUG_OP_printf("call method (fun=%p, self=%p, n_args=" UINT_FMT ", n_kw=" UINT_FMT ", args=%p)\n", args[0], args[1], n_args, n_kw, args);
    int adjust = (args[1] == MP_OBJ_NULL) ? 0 : 1;
    return mp_call_function_n_kw(args[0], n_args + adjust, n_kw, args + 2 - adjust);
}

// This function only needs to be exposed externally when in stackless mode.
#if !MICROPY_STACKLESS
STATIC
#endif
void mp_call_prepare_args_n_kw_var(bool have_self, size_t n_args_n_kw, const mp_obj_t *args, mp_call_args_t *out_args) {
    mp_obj_t fun = *args++;
    mp_obj_t self = MP_OBJ_NULL;
    if (have_self) {
        self = *args++; // may be MP_OBJ_NULL
    }
    uint n_args = n_args_n_kw & 0xff;
    uint n_kw = (n_args_n_kw >> 8) & 0xff;
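    // (Illustrative note: both counts are packed into one word, so e.g. 3 positional and
    // 1 keyword argument would be encoded as n_args_n_kw = 3 | (1 << 8).)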
    mp_obj_t pos_seq = args[n_args + 2 * n_kw]; // may be MP_OBJ_NULL
    mp_obj_t kw_dict = args[n_args + 2 * n_kw + 1]; // may be MP_OBJ_NULL

    DEBUG_OP_printf("call method var (fun=%p, self=%p, n_args=%u, n_kw=%u, args=%p, seq=%p, dict=%p)\n", fun, self, n_args, n_kw, args, pos_seq, kw_dict);

    // We need to create the following array of objects:
    //     args[0 .. n_args]  unpacked(pos_seq)  args[n_args .. n_args + 2 * n_kw]  unpacked(kw_dict)
    // TODO: optimize one day to avoid constructing new arg array? Will be hard.

    // The new args array
    mp_obj_t *args2;
    uint args2_alloc;
    uint args2_len = 0;

    // Try to get a hint for the size of the kw_dict
    uint kw_dict_len = 0;
    if (kw_dict != MP_OBJ_NULL && mp_obj_is_type(kw_dict, &mp_type_dict)) {
        kw_dict_len = mp_obj_dict_len(kw_dict);
    }

    // Extract the pos_seq sequence to the new args array.
    // Note that it can be an arbitrary iterator.
    if (pos_seq == MP_OBJ_NULL) {
        // no sequence

        // allocate memory for the new array of args
        args2_alloc = 1 + n_args + 2 * (n_kw + kw_dict_len);
        args2 = mp_nonlocal_alloc(args2_alloc * sizeof(mp_obj_t));

        // copy the self
        if (self != MP_OBJ_NULL) {
            args2[args2_len++] = self;
        }

        // copy the fixed pos args
        mp_seq_copy(args2 + args2_len, args, n_args, mp_obj_t);
        args2_len += n_args;

    } else if (mp_obj_is_type(pos_seq, &mp_type_tuple) || mp_obj_is_type(pos_seq, &mp_type_list)) {
        // optimise the case of a tuple or list

        // get the items
        size_t len;
        mp_obj_t *items;
        mp_obj_get_array(pos_seq, &len, &items);

        // allocate memory for the new array of args
        args2_alloc = 1 + n_args + len + 2 * (n_kw + kw_dict_len);
        args2 = mp_nonlocal_alloc(args2_alloc * sizeof(mp_obj_t));

        // copy the self
        if (self != MP_OBJ_NULL) {
            args2[args2_len++] = self;
        }

        // copy the fixed and variable position args
        mp_seq_cat(args2 + args2_len, args, n_args, items, len, mp_obj_t);
        args2_len += n_args + len;

    } else {
        // generic iterator

        // allocate memory for the new array of args
        args2_alloc = 1 + n_args + 2 * (n_kw + kw_dict_len) + 3;
        args2 = mp_nonlocal_alloc(args2_alloc * sizeof(mp_obj_t));

        // copy the self
        if (self != MP_OBJ_NULL) {
            args2[args2_len++] = self;
        }

        // copy the fixed position args
        mp_seq_copy(args2 + args2_len, args, n_args, mp_obj_t);
        args2_len += n_args;

        // extract the variable position args from the iterator
        mp_obj_iter_buf_t iter_buf;
        mp_obj_t iterable = mp_getiter(pos_seq, &iter_buf);
        mp_obj_t item;
        while ((item = mp_iternext(iterable)) != MP_OBJ_STOP_ITERATION) {
            if (args2_len >= args2_alloc) {
                args2 = mp_nonlocal_realloc(args2, args2_alloc * sizeof(mp_obj_t), args2_alloc * 2 * sizeof(mp_obj_t));
                args2_alloc *= 2;
            }
            args2[args2_len++] = item;
        }
    }

    // The size of the args2 array now is the number of positional args.
    uint pos_args_len = args2_len;

    // Copy the fixed kw args.
    mp_seq_copy(args2 + args2_len, args + n_args, 2 * n_kw, mp_obj_t);
    args2_len += 2 * n_kw;

    // Extract (key,value) pairs from kw_dict dictionary and append to args2.
    // Note that it can be an arbitrary iterator.
    if (kw_dict == MP_OBJ_NULL) {
        // pass
    } else if (mp_obj_is_type(kw_dict, &mp_type_dict)) {
        // dictionary
        mp_map_t *map = mp_obj_dict_get_map(kw_dict);
        assert(args2_len + 2 * map->used <= args2_alloc); // should have enough, since kw_dict_len is in this case hinted correctly above
        for (size_t i = 0; i < map->alloc; i++) {
            if (mp_map_slot_is_filled(map, i)) {
                // the key must be a qstr, so intern it if it's a string
                mp_obj_t key = map->table[i].key;
                if (!mp_obj_is_qstr(key)) {
                    key = mp_obj_str_intern_checked(key);
                }
                args2[args2_len++] = key;
                args2[args2_len++] = map->table[i].value;
            }
        }
    } else {
        // generic mapping:
        // - call keys() to get an iterable of all keys in the mapping
        // - call __getitem__ for each key to get the corresponding value

        // get the keys iterable
        mp_obj_t dest[3];
        mp_load_method(kw_dict, MP_QSTR_keys, dest);
        mp_obj_t iterable = mp_getiter(mp_call_method_n_kw(0, 0, dest), NULL);

        mp_obj_t key;
        while ((key = mp_iternext(iterable)) != MP_OBJ_STOP_ITERATION) {
            // expand size of args array if needed
            if (args2_len + 1 >= args2_alloc) {
                uint new_alloc = args2_alloc * 2;
                if (new_alloc < 4) {
                    new_alloc = 4;
                }
                args2 = mp_nonlocal_realloc(args2, args2_alloc * sizeof(mp_obj_t), new_alloc * sizeof(mp_obj_t));
                args2_alloc = new_alloc;
            }

            // the key must be a qstr, so intern it if it's a string
            if (!mp_obj_is_qstr(key)) {
                key = mp_obj_str_intern_checked(key);
            }

            // get the value corresponding to the key
            mp_load_method(kw_dict, MP_QSTR___getitem__, dest);
            dest[2] = key;
            mp_obj_t value = mp_call_method_n_kw(1, 0, dest);

            // store the key/value pair in the argument array
            args2[args2_len++] = key;
            args2[args2_len++] = value;
        }
    }

    out_args->fun = fun;
    out_args->args = args2;
    out_args->n_args = pos_args_len;
    out_args->n_kw = (args2_len - pos_args_len) / 2;
    out_args->n_alloc = args2_alloc;
}

mp_obj_t mp_call_method_n_kw_var(bool have_self, size_t n_args_n_kw, const mp_obj_t *args) {
    mp_call_args_t out_args;
    mp_call_prepare_args_n_kw_var(have_self, n_args_n_kw, args, &out_args);

    mp_obj_t res = mp_call_function_n_kw(out_args.fun, out_args.n_args, out_args.n_kw, out_args.args);
    mp_nonlocal_free(out_args.args, out_args.n_alloc * sizeof(mp_obj_t));

    return res;
}

// unpacked items are stored in reverse order into the array pointed to by items
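// Illustrative example: unpacking (1, 2, 3) with num=3 stores items[0]=3, items[1]=2,
// items[2]=1, matching the reverse-order convention stated above.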
void mp_unpack_sequence(mp_obj_t seq_in, size_t num, mp_obj_t *items) {
    size_t seq_len;
    if (mp_obj_is_type(seq_in, &mp_type_tuple) || mp_obj_is_type(seq_in, &mp_type_list)) {
        mp_obj_t *seq_items;
        mp_obj_get_array(seq_in, &seq_len, &seq_items);
        if (seq_len < num) {
            goto too_short;
        } else if (seq_len > num) {
            goto too_long;
        }
        for (size_t i = 0; i < num; i++) {
            items[i] = seq_items[num - 1 - i];
        }
    } else {
        mp_obj_iter_buf_t iter_buf;
        mp_obj_t iterable = mp_getiter(seq_in, &iter_buf);

        for (seq_len = 0; seq_len < num; seq_len++) {
            mp_obj_t el = mp_iternext(iterable);
            if (el == MP_OBJ_STOP_ITERATION) {
                goto too_short;
            }
            items[num - 1 - seq_len] = el;
        }
        if (mp_iternext(iterable) != MP_OBJ_STOP_ITERATION) {
            goto too_long;
        }
    }
    return;

too_short:
    if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
        mp_raise_ValueError("wrong number of values to unpack");
    } else {
        mp_raise_msg_varg(&mp_type_ValueError, "need more than %d values to unpack", (int)seq_len);
    }
too_long:
    if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
        mp_raise_ValueError("wrong number of values to unpack");
    } else {
        mp_raise_msg_varg(&mp_type_ValueError, "too many values to unpack (expected %d)", (int)num);
    }
}

// unpacked items are stored in reverse order into the array pointed to by items
void mp_unpack_ex(mp_obj_t seq_in, size_t num_in, mp_obj_t *items) {
    size_t num_left = num_in & 0xff;
    size_t num_right = (num_in >> 8) & 0xff;
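    // (Hedged example: for "a, *b, c = seq" the compiler would encode one name on each
    // side of the star, i.e. num_in = 1 | (1 << 8).)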
    DEBUG_OP_printf("unpack ex " UINT_FMT " " UINT_FMT "\n", num_left, num_right);
    size_t seq_len;
    if (mp_obj_is_type(seq_in, &mp_type_tuple) || mp_obj_is_type(seq_in, &mp_type_list)) {
        // Make the seq variable volatile so the compiler keeps a reference to it,
        // since if it's a tuple then seq_items points to the interior of the GC cell
        // and mp_obj_new_list may trigger a GC which doesn't trace this and reclaims seq.
        volatile mp_obj_t seq = seq_in;
        mp_obj_t *seq_items;
        mp_obj_get_array(seq, &seq_len, &seq_items);
        if (seq_len < num_left + num_right) {
            goto too_short;
        }
        for (size_t i = 0; i < num_right; i++) {
            items[i] = seq_items[seq_len - 1 - i];
        }
        items[num_right] = mp_obj_new_list(seq_len - num_left - num_right, seq_items + num_left);
        for (size_t i = 0; i < num_left; i++) {
            items[num_right + 1 + i] = seq_items[num_left - 1 - i];
        }
        seq = MP_OBJ_NULL;
    } else {
        // Generic iterable; this gets a bit messy: we unpack known left length to the
        // items destination array, then the rest to a dynamically created list.  Once the
        // iterable is exhausted, we take from this list for the right part of the items.
        // TODO Improve to waste less memory in the dynamically created list.
        mp_obj_t iterable = mp_getiter(seq_in, NULL);
        mp_obj_t item;
        for (seq_len = 0; seq_len < num_left; seq_len++) {
            item = mp_iternext(iterable);
            if (item == MP_OBJ_STOP_ITERATION) {
                goto too_short;
            }
            items[num_left + num_right + 1 - 1 - seq_len] = item;
        }
        mp_obj_list_t *rest = MP_OBJ_TO_PTR(mp_obj_new_list(0, NULL));
        while ((item = mp_iternext(iterable)) != MP_OBJ_STOP_ITERATION) {
            mp_obj_list_append(MP_OBJ_FROM_PTR(rest), item);
        }
        if (rest->len < num_right) {
            goto too_short;
        }
        items[num_right] = MP_OBJ_FROM_PTR(rest);
        for (size_t i = 0; i < num_right; i++) {
            items[num_right - 1 - i] = rest->items[rest->len - num_right + i];
        }
        mp_obj_list_set_len(MP_OBJ_FROM_PTR(rest), rest->len - num_right);
    }
    return;

too_short:
    if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
        mp_raise_ValueError("wrong number of values to unpack");
    } else {
        mp_raise_msg_varg(&mp_type_ValueError, "need more than %d values to unpack", (int)seq_len);
    }
}

mp_obj_t mp_load_attr(mp_obj_t base, qstr attr) {
    DEBUG_OP_printf("load attr %p.%s\n", base, qstr_str(attr));
    // use load_method
    mp_obj_t dest[2];
    mp_load_method(base, attr, dest);
    if (dest[1] == MP_OBJ_NULL) {
        // load_method returned just a normal attribute
        return dest[0];
    } else {
        // load_method returned a method, so build a bound method object
        return mp_obj_new_bound_meth(dest[0], dest[1]);
    }
}

#if MICROPY_BUILTIN_METHOD_CHECK_SELF_ARG

// The following "checked fun" type is local to the mp_convert_member_lookup
// function, and serves to check that the first argument to a builtin function
// has the correct type.
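//
// Illustrative example (hypothetical values): extracting list.append from the type and
// calling it as list.append(42, x) is rejected here, because 42 is an int rather than
// the expected 'list' instance.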

typedef struct _mp_obj_checked_fun_t {
    mp_obj_base_t base;
    const mp_obj_type_t *type;
    mp_obj_t fun;
} mp_obj_checked_fun_t;

STATIC mp_obj_t checked_fun_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
    mp_obj_checked_fun_t *self = MP_OBJ_TO_PTR(self_in);
    if (n_args > 0) {
        const mp_obj_type_t *arg0_type = mp_obj_get_type(args[0]);
        if (arg0_type != self->type) {
            if (MICROPY_ERROR_REPORTING != MICROPY_ERROR_REPORTING_DETAILED) {
                mp_raise_TypeError("argument has wrong type");
            } else {
                mp_raise_msg_varg(&mp_type_TypeError,
                    "argument should be a '%q' not a '%q'", self->type->name, arg0_type->name);
            }
        }
    }
    return mp_call_function_n_kw(self->fun, n_args, n_kw, args);
}

STATIC const mp_obj_type_t mp_type_checked_fun = {
    { &mp_type_type },
    .name = MP_QSTR_function,
    .call = checked_fun_call,
};

STATIC mp_obj_t mp_obj_new_checked_fun(const mp_obj_type_t *type, mp_obj_t fun) {
    mp_obj_checked_fun_t *o = m_new_obj(mp_obj_checked_fun_t);
    o->base.type = &mp_type_checked_fun;
    o->type = type;
    o->fun = fun;
    return MP_OBJ_FROM_PTR(o);
}

#endif // MICROPY_BUILTIN_METHOD_CHECK_SELF_ARG

// Given a member that was extracted from an instance, convert it correctly
// and put the result in the dest[] array for a possible method call.
// Conversion means dealing with static/class methods, callables, and values.
// see http://docs.python.org/3/howto/descriptor.html
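// Illustrative summary of the cases handled below (a sketch, not exhaustive):
//     @staticmethod  -> dest[0] = plain function, no self
//     @classmethod   -> dest[0] = function, dest[1] = type of the instance
//     def m(self)    -> dest[0] = function, dest[1] = the instance itself
//     data attribute -> dest[0] = the value unchanged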
void mp_convert_member_lookup(mp_obj_t self, const mp_obj_type_t *type, mp_obj_t member, mp_obj_t *dest) {
    if (mp_obj_is_type(member, &mp_type_staticmethod)) {
        // return just the function
        dest[0] = ((mp_obj_static_class_method_t*)MP_OBJ_TO_PTR(member))->fun;
    } else if (mp_obj_is_type(member, &mp_type_classmethod)) {
        // return a bound method, with self being the type of this object
        // this type should be the type of the original instance, not the base
        // type (which is what is passed in the 'type' argument to this function)
        if (self != MP_OBJ_NULL) {
            type = mp_obj_get_type(self);
        }
        dest[0] = ((mp_obj_static_class_method_t*)MP_OBJ_TO_PTR(member))->fun;
        dest[1] = MP_OBJ_FROM_PTR(type);
    } else if (mp_obj_is_type(member, &mp_type_type)) {
        // Don't try to bind types (even though they're callable)
        dest[0] = member;
    } else if (mp_obj_is_fun(member)
        || (mp_obj_is_obj(member)
            && (((mp_obj_base_t*)MP_OBJ_TO_PTR(member))->type->name == MP_QSTR_closure
                || ((mp_obj_base_t*)MP_OBJ_TO_PTR(member))->type->name == MP_QSTR_generator))) {
        // only function, closure and generator objects can be bound to self
        #if MICROPY_BUILTIN_METHOD_CHECK_SELF_ARG
        const mp_obj_type_t *m_type = ((mp_obj_base_t*)MP_OBJ_TO_PTR(member))->type;
        if (self == MP_OBJ_NULL
            && (m_type == &mp_type_fun_builtin_0
                || m_type == &mp_type_fun_builtin_1
                || m_type == &mp_type_fun_builtin_2
                || m_type == &mp_type_fun_builtin_3
                || m_type == &mp_type_fun_builtin_var)
            && type != &mp_type_object) {
            // we extracted a builtin method without a first argument, so we must
            // wrap this function in a type checker
            // Note that the 'object' type will do its own checking so shouldn't be wrapped.
            dest[0] = mp_obj_new_checked_fun(type, member);
        } else
        #endif
        {
            // return a bound method, with self being this object
            dest[0] = member;
            dest[1] = self;
        }
    } else {
        // class member is a value, so just return that value
        dest[0] = member;
    }
}

// no attribute found, returns:     dest[0] == MP_OBJ_NULL, dest[1] == MP_OBJ_NULL
// normal attribute found, returns: dest[0] == <attribute>, dest[1] == MP_OBJ_NULL
// method attribute found, returns: dest[0] == <method>,    dest[1] == <self>
void mp_load_method_maybe(mp_obj_t obj, qstr attr, mp_obj_t *dest) {
    // clear output to indicate no attribute/method found yet
    dest[0] = MP_OBJ_NULL;
    dest[1] = MP_OBJ_NULL;

    // get the type
    const mp_obj_type_t *type = mp_obj_get_type(obj);

    // look for built-in names
    #if MICROPY_CPYTHON_COMPAT
    if (attr == MP_QSTR___class__) {
        // a.__class__ is equivalent to type(a)
        dest[0] = MP_OBJ_FROM_PTR(type);
        return;
    }
    #endif

    if (attr == MP_QSTR___next__ && type->iternext != NULL) {
        dest[0] = MP_OBJ_FROM_PTR(&mp_builtin_next_obj);
        dest[1] = obj;

    } else if (type->attr != NULL) {
        // this type can do its own load, so call it
        type->attr(obj, attr, dest);

    } else if (type->locals_dict != NULL) {
        // generic method lookup
        // this is a lookup in the object (ie not class or type)
        assert(type->locals_dict->base.type == &mp_type_dict); // MicroPython restriction, for now
        mp_map_t *locals_map = &type->locals_dict->map;
        mp_map_elem_t *elem = mp_map_lookup(locals_map, MP_OBJ_NEW_QSTR(attr), MP_MAP_LOOKUP);
        if (elem != NULL) {
            mp_convert_member_lookup(obj, type, elem->value, dest);
        }
    }
}

void mp_load_method(mp_obj_t base, qstr attr, mp_obj_t *dest) {
    DEBUG_OP_printf("load method %p.%s\n", base, qstr_str(attr));

    mp_load_method_maybe(base, attr, dest);

    if (dest[0] == MP_OBJ_NULL) {
        // no attribute/method called attr
        if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
            mp_raise_msg(&mp_type_AttributeError, "no such attribute");
        } else {
            // following CPython, we give a more detailed error message for type objects
            if (mp_obj_is_type(base, &mp_type_type)) {
                mp_raise_msg_varg(&mp_type_AttributeError,
                    "type object '%q' has no attribute '%q'",
                    ((mp_obj_type_t*)MP_OBJ_TO_PTR(base))->name, attr);
            } else {
                mp_raise_msg_varg(&mp_type_AttributeError,
                    "'%s' object has no attribute '%q'",
                    mp_obj_get_type_str(base), attr);
            }
        }
    }
}

// Acts like mp_load_method_maybe but catches AttributeError, and all other exceptions if requested
void mp_load_method_protected(mp_obj_t obj, qstr attr, mp_obj_t *dest, bool catch_all_exc) {
    nlr_buf_t nlr;
    if (nlr_push(&nlr) == 0) {
        mp_load_method_maybe(obj, attr, dest);
        nlr_pop();
    } else {
        if (!catch_all_exc
            && !mp_obj_is_subclass_fast(MP_OBJ_FROM_PTR(((mp_obj_base_t*)nlr.ret_val)->type),
                MP_OBJ_FROM_PTR(&mp_type_AttributeError))) {
            // Re-raise the exception
            nlr_raise(MP_OBJ_FROM_PTR(nlr.ret_val));
        }
    }
}

void mp_store_attr(mp_obj_t base, qstr attr, mp_obj_t value) {
    DEBUG_OP_printf("store attr %p.%s <- %p\n", base, qstr_str(attr), value);
    const mp_obj_type_t *type = mp_obj_get_type(base);
    if (type->attr != NULL) {
        mp_obj_t dest[2] = {MP_OBJ_SENTINEL, value};
        type->attr(base, attr, dest);
        if (dest[0] == MP_OBJ_NULL) {
            // success
            return;
        }
    }
    if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
        mp_raise_msg(&mp_type_AttributeError, "no such attribute");
    } else {
        mp_raise_msg_varg(&mp_type_AttributeError,
            "'%s' object has no attribute '%q'",
            mp_obj_get_type_str(base), attr);
    }
}

mp_obj_t mp_getiter(mp_obj_t o_in, mp_obj_iter_buf_t *iter_buf) {
    assert(o_in);
    const mp_obj_type_t *type = mp_obj_get_type(o_in);

    // Check for native getiter which is the identity.  We handle this case explicitly
    // so we don't unnecessarily allocate any RAM for the iter_buf, which won't be used.
    if (type->getiter == mp_identity_getiter) {
        return o_in;
    }

    // check for native getiter (corresponds to __iter__)
    if (type->getiter != NULL) {
        if (iter_buf == NULL && type->getiter != mp_obj_instance_getiter) {
            // if caller did not provide a buffer then allocate one on the heap
            // mp_obj_instance_getiter is special: it will allocate only if needed
            iter_buf = m_new_obj(mp_obj_iter_buf_t);
        }
        mp_obj_t iter = type->getiter(o_in, iter_buf);
        if (iter != MP_OBJ_NULL) {
            return iter;
        }
    }

    // check for __getitem__
    mp_obj_t dest[2];
    mp_load_method_maybe(o_in, MP_QSTR___getitem__, dest);
    if (dest[0] != MP_OBJ_NULL) {
        // __getitem__ exists, create and return an iterator
        if (iter_buf == NULL) {
            // if caller did not provide a buffer then allocate one on the heap
            iter_buf = m_new_obj(mp_obj_iter_buf_t);
        }
        return mp_obj_new_getitem_iter(dest, iter_buf);
    }

    // object not iterable
    if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
        mp_raise_TypeError("object not iterable");
    } else {
        mp_raise_msg_varg(&mp_type_TypeError,
            "'%s' object isn't iterable", mp_obj_get_type_str(o_in));
    }
}

// may return MP_OBJ_STOP_ITERATION as an optimisation instead of raising StopIteration()
// may also raise StopIteration()
mp_obj_t mp_iternext_allow_raise(mp_obj_t o_in) {
    const mp_obj_type_t *type = mp_obj_get_type(o_in);
    if (type->iternext != NULL) {
        return type->iternext(o_in);
    } else {
        // check for __next__ method
        mp_obj_t dest[2];
        mp_load_method_maybe(o_in, MP_QSTR___next__, dest);
        if (dest[0] != MP_OBJ_NULL) {
            // __next__ exists, call it and return its result
            return mp_call_method_n_kw(0, 0, dest);
        } else {
            if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
                mp_raise_TypeError("object not an iterator");
            } else {
                mp_raise_msg_varg(&mp_type_TypeError,
                    "'%s' object isn't an iterator", mp_obj_get_type_str(o_in));
            }
        }
    }
}

// will always return MP_OBJ_STOP_ITERATION instead of raising StopIteration() (or any subclass thereof)
// may raise other exceptions
mp_obj_t mp_iternext(mp_obj_t o_in) {
    MP_STACK_CHECK(); // enumerate, filter, map and zip can recursively call mp_iternext
    const mp_obj_type_t *type = mp_obj_get_type(o_in);
    if (type->iternext != NULL) {
        return type->iternext(o_in);
    } else {
        // check for __next__ method
        mp_obj_t dest[2];
        mp_load_method_maybe(o_in, MP_QSTR___next__, dest);
        if (dest[0] != MP_OBJ_NULL) {
            // __next__ exists, call it and return its result
            nlr_buf_t nlr;
            if (nlr_push(&nlr) == 0) {
                mp_obj_t ret = mp_call_method_n_kw(0, 0, dest);
                nlr_pop();
                return ret;
            } else {
                if (mp_obj_is_subclass_fast(MP_OBJ_FROM_PTR(((mp_obj_base_t*)nlr.ret_val)->type), MP_OBJ_FROM_PTR(&mp_type_StopIteration))) {
                    return MP_OBJ_STOP_ITERATION;
                } else {
                    nlr_jump(nlr.ret_val);
                }
            }
        } else {
            if (MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE) {
                mp_raise_TypeError("object not an iterator");
            } else {
                mp_raise_msg_varg(&mp_type_TypeError,
                    "'%s' object isn't an iterator", mp_obj_get_type_str(o_in));
            }
        }
    }
}

// TODO: Unclear what to do with a StopIteration exception here.
mp_vm_return_kind_t mp_resume(mp_obj_t self_in, mp_obj_t send_value, mp_obj_t throw_value, mp_obj_t *ret_val) {
    assert((send_value != MP_OBJ_NULL) ^ (throw_value != MP_OBJ_NULL));
    const mp_obj_type_t *type = mp_obj_get_type(self_in);

    if (type == &mp_type_gen_instance) {
        return mp_obj_gen_resume(self_in, send_value, throw_value, ret_val);
    }

    if (type->iternext != NULL && send_value == mp_const_none) {
        mp_obj_t ret = type->iternext(self_in);
        *ret_val = ret;
        if (ret != MP_OBJ_STOP_ITERATION) {
            return MP_VM_RETURN_YIELD;
        } else {
            // Emulate raise StopIteration()
            // Special case, handled in vm.c
            return MP_VM_RETURN_NORMAL;
        }
    }

    mp_obj_t dest[3]; // Reserve slot for send() arg

    // Python instance iterator protocol
    if (send_value == mp_const_none) {
        mp_load_method_maybe(self_in, MP_QSTR___next__, dest);
        if (dest[0] != MP_OBJ_NULL) {
            *ret_val = mp_call_method_n_kw(0, 0, dest);
            return MP_VM_RETURN_YIELD;
        }
    }

    // Either python instance generator protocol, or native object
    // generator protocol.
    if (send_value != MP_OBJ_NULL) {
        mp_load_method(self_in, MP_QSTR_send, dest);
        dest[2] = send_value;
        *ret_val = mp_call_method_n_kw(1, 0, dest);
        return MP_VM_RETURN_YIELD;
    }

    assert(throw_value != MP_OBJ_NULL);
    {
        if (mp_obj_is_subclass_fast(MP_OBJ_FROM_PTR(mp_obj_get_type(throw_value)), MP_OBJ_FROM_PTR(&mp_type_GeneratorExit))) {
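            // A GeneratorExit (or subclass) being thrown in is delivered by
            // calling the object's close() method, mirroring generator.close().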
            mp_load_method_maybe(self_in, MP_QSTR_close, dest);
            if (dest[0] != MP_OBJ_NULL) {
                // TODO: Exceptions raised in close() are not propagated, but
                // printed to sys.stderr
                *ret_val = mp_call_method_n_kw(0, 0, dest);
                // We assume one can't "yield" from close()
                return MP_VM_RETURN_NORMAL;
            }
        } else {
            mp_load_method_maybe(self_in, MP_QSTR_throw, dest);
            if (dest[0] != MP_OBJ_NULL) {
                dest[2] = throw_value;
                *ret_val = mp_call_method_n_kw(1, 0, dest);
                // If the .throw() method returned, we assume it's a value to
                // yield - any exception would have been raised via nlr_raise().
                return MP_VM_RETURN_YIELD;
            }
        }
        // If there's nowhere to throw the exception into, then we assume the
        // object is simply incapable of handling it, so any exception thrown
        // into it is propagated up. This behavior is confirmed by test_pep380.py:
        // test_delegation_of_close_to_non_generator(),
        // test_delegating_throw_to_non_generator()
        if (mp_obj_exception_match(throw_value, MP_OBJ_FROM_PTR(&mp_type_StopIteration))) {
            // PEP479: if StopIteration is raised inside a generator it is replaced with RuntimeError
            *ret_val = mp_obj_new_exception_msg(&mp_type_RuntimeError, "generator raised StopIteration");
        } else {
            *ret_val = mp_make_raise_obj(throw_value);
        }
        return MP_VM_RETURN_EXCEPTION;
    }
}

mp_obj_t mp_make_raise_obj(mp_obj_t o) {
    DEBUG_printf("raise %p\n", o);
    if (mp_obj_is_exception_type(o)) {
        // o is an exception type (it is derived from BaseException (or is BaseException))
        // create and return a new exception instance by calling o
        // TODO could have an option to disable traceback, then builtin exceptions (eg TypeError)
        // could have const instances in ROM which we return here instead
        return mp_call_function_n_kw(o, 0, 0, NULL);
    } else if (mp_obj_is_exception_instance(o)) {
        // o is an instance of an exception, so use it as the exception
        return o;
    } else {
        // o cannot be used as an exception, so return a type error (which will be raised by the caller)
        return mp_obj_new_exception_msg(&mp_type_TypeError, "exceptions must derive from BaseException");
    }
}

mp_obj_t mp_import_name(qstr name, mp_obj_t fromlist, mp_obj_t level) {
    DEBUG_printf("import name '%s' level=%d\n", qstr_str(name), MP_OBJ_SMALL_INT_VALUE(level));

    // build args array
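    // layout matches the builtin __import__(name, globals, locals, fromlist, level)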
    mp_obj_t args[5];
    args[0] = MP_OBJ_NEW_QSTR(name);
    args[1] = mp_const_none; // TODO should be globals
    args[2] = mp_const_none; // TODO should be locals
    args[3] = fromlist;
    args[4] = level;

    #if MICROPY_CAN_OVERRIDE_BUILTINS
    // Lookup __import__ and call that if it exists
    mp_obj_dict_t *bo_dict = MP_STATE_VM(mp_module_builtins_override_dict);
    if (bo_dict != NULL) {
        mp_map_elem_t *import = mp_map_lookup(&bo_dict->map, MP_OBJ_NEW_QSTR(MP_QSTR___import__), MP_MAP_LOOKUP);
        if (import != NULL) {
            return mp_call_function_n_kw(import->value, 5, 0, args);
        }
    }
    #endif

    return mp_builtin___import__(5, args);
}

mp_obj_t mp_import_from(mp_obj_t module, qstr name) {
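    // Implements "from module import name": first look for the name as an
    // attribute of the module; failing that, if the module is a package, try
    // to import "module.name" as a submodule.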
    DEBUG_printf("import from %p %s\n", module, qstr_str(name));

    mp_obj_t dest[2];

    mp_load_method_maybe(module, name, dest);

    if (dest[1] != MP_OBJ_NULL) {
        // Hopefully we can't import a bound method from an object
import_error:
        mp_raise_msg_varg(&mp_type_ImportError, "cannot import name %q", name);
    }

    if (dest[0] != MP_OBJ_NULL) {
        return dest[0];
    }

    #if MICROPY_ENABLE_EXTERNAL_IMPORT

    // See if it's a package; if so, we can try a filesystem import
    if (!mp_obj_is_package(module)) {
        goto import_error;
    }

    mp_load_method_maybe(module, MP_QSTR___name__, dest);
    size_t pkg_name_len;
    const char *pkg_name = mp_obj_str_get_data(dest[0], &pkg_name_len);

    const uint dot_name_len = pkg_name_len + 1 + qstr_len(name);
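    // Construct the dotted submodule name "<package>.<name>" in a temporary
    // buffer, then intern it as a qstr so it can be passed to mp_import_name().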
    char *dot_name = mp_local_alloc(dot_name_len);
    memcpy(dot_name, pkg_name, pkg_name_len);
    dot_name[pkg_name_len] = '.';
    memcpy(dot_name + pkg_name_len + 1, qstr_str(name), qstr_len(name));
    qstr dot_name_q = qstr_from_strn(dot_name, dot_name_len);
    mp_local_free(dot_name);

    // For fromlist, pass a sentinel "non-empty" value to force returning the leaf module
    return mp_import_name(dot_name_q, mp_const_true, MP_OBJ_NEW_SMALL_INT(0));

    #else

    // Package import not supported with external imports disabled
    goto import_error;

    #endif
}

void mp_import_all(mp_obj_t module) {
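    // Implements "from module import *": store every public name (ie not
    // starting with '_') from the module's globals into the current scope.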
    DEBUG_printf("import all %p\n", module);

    // TODO: Support __all__
    mp_map_t *map = &mp_obj_module_get_globals(module)->map;
    for (size_t i = 0; i < map->alloc; i++) {
        if (mp_map_slot_is_filled(map, i)) {
            // An entry in the module's global scope may be generated
            // programmatically (and thus not be a qstr, for longer names).
            // Avoid interning it as a qstr if it starts with '_' - skipping
            // the conversion is exactly what saves memory.
            const char *name = mp_obj_str_get_str(map->table[i].key);
            if (*name != '_') {
                qstr qname = mp_obj_str_get_qstr(map->table[i].key);
                mp_store_name(qname, map->table[i].value);
            }
        }
    }
}

#if MICROPY_ENABLE_COMPILER

mp_obj_t mp_parse_compile_execute(mp_lexer_t *lex, mp_parse_input_kind_t parse_input_kind, mp_obj_dict_t *globals, mp_obj_dict_t *locals) {
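    // Parse and compile the source from the lexer, then (unless compiling
    // only) execute it in the given globals/locals; the caller's context is
    // restored even if an exception propagates.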
    // save context
    mp_obj_dict_t *volatile old_globals = mp_globals_get();
    mp_obj_dict_t *volatile old_locals = mp_locals_get();

    // set new context
    mp_globals_set(globals);
    mp_locals_set(locals);

    nlr_buf_t nlr;
    if (nlr_push(&nlr) == 0) {
        qstr source_name = lex->source_name;
        mp_parse_tree_t parse_tree = mp_parse(lex, parse_input_kind);
        mp_obj_t module_fun = mp_compile(&parse_tree, source_name, false);

        mp_obj_t ret;
        if (MICROPY_PY_BUILTINS_COMPILE && globals == NULL) {
            // for compile only, return value is the module function
            ret = module_fun;
        } else {
            // execute module function and get return value
            ret = mp_call_function_0(module_fun);
        }

        // finish nlr block, restore context and return value
        nlr_pop();
        mp_globals_set(old_globals);
        mp_locals_set(old_locals);
        return ret;
    } else {
        // exception; restore context and re-raise same exception
        mp_globals_set(old_globals);
        mp_locals_set(old_locals);
        nlr_jump(nlr.ret_val);
    }
}

#endif // MICROPY_ENABLE_COMPILER

NORETURN void m_malloc_fail(size_t num_bytes) {
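    // Allocation failed: raise MemoryError, with a specific message when the
    // GC heap is locked.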
    DEBUG_printf("memory allocation failed, allocating %u bytes\n", (uint)num_bytes);
    #if MICROPY_ENABLE_GC
    if (gc_is_locked()) {
        mp_raise_msg(&mp_type_MemoryError, "memory allocation failed, heap is locked");
    }
    #endif
    mp_raise_msg_varg(&mp_type_MemoryError,
        "memory allocation failed, allocating %u bytes", (uint)num_bytes);
}

NORETURN void mp_raise_msg(const mp_obj_type_t *exc_type, const char *msg) {
    if (msg == NULL) {
        nlr_raise(mp_obj_new_exception(exc_type));
    } else {
        nlr_raise(mp_obj_new_exception_msg(exc_type, msg));
    }
}

NORETURN void mp_raise_msg_varg(const mp_obj_type_t *exc_type, const char *fmt, ...) {
    va_list args;
    va_start(args, fmt);
    mp_obj_t exc = mp_obj_new_exception_msg_vlist(exc_type, fmt, args);
    va_end(args);
    nlr_raise(exc);
}

NORETURN void mp_raise_ValueError(const char *msg) {
    mp_raise_msg(&mp_type_ValueError, msg);
}

NORETURN void mp_raise_TypeError(const char *msg) {
    mp_raise_msg(&mp_type_TypeError, msg);
}

NORETURN void mp_raise_OSError(int errno_) {
    nlr_raise(mp_obj_new_exception_arg1(&mp_type_OSError, MP_OBJ_NEW_SMALL_INT(errno_)));
}

NORETURN void mp_raise_NotImplementedError(const char *msg) {
    mp_raise_msg(&mp_type_NotImplementedError, msg);
}

#if MICROPY_STACK_CHECK || MICROPY_ENABLE_PYSTACK
NORETURN void mp_raise_recursion_depth(void) {
    nlr_raise(mp_obj_new_exception_arg1(&mp_type_RuntimeError,
        MP_OBJ_NEW_QSTR(MP_QSTR_maximum_space_recursion_space_depth_space_exceeded)));
}
#endif