forked from public/swade-fr-content

Ajout des compendiums light

This commit is contained in:
parent 0365cc213f
commit f1da8606f2
module/compendium_light/swade-core-rules.swade-edges.json (Normal file, 698 lines)
@@ -0,0 +1,698 @@
{
    "entries": [
        { "description": "Voir SWADE p.49", "id": "Ace", "name": "As" },
        { "description": "Voir SWADE p.49", "id": "Acrobat", "name": "Acrobate" },
        { "description": "Voir SWADE p.41", "id": "Alertness", "name": "Vigilant" },
        { "description": "Voir SWADE p.37", "id": "Ambidextrous", "name": "Ambidextre" },
        { "description": "Voir SWADE p.148", "id": "Arcane Background (Gifted)", "name": "Arcane (Don)" },
        { "description": "Voir SWADE p.148", "id": "Arcane Background (Magic)", "name": "Arcane (Magie)" },
        { "description": "Voir SWADE p.148", "id": "Arcane Background (Miracles)", "name": "Arcane (Miracles)" },
        { "description": "Voir SWADE p.148", "id": "Arcane Background (Psionics)", "name": "Arcane (Psionique)" },
        { "description": "Voir SWADE p.148", "id": "Arcane Background (Weird Science)", "name": "Arcane (Science Étrange)" },
        { "description": "Voir SWADE p.39", "id": "Arcane Resistance", "name": "Résistance aux Arcanes" },
        { "description": "Voir SWADE p.37", "id": "Aristocrat", "name": "Aristocrate" },
        { "description": "Voir SWADE p.46", "id": "Artificer", "name": "Artificier" },
        { "description": "Voir SWADE p.49", "id": "Assassin", "name": "Assassin" },
        { "description": "Voir SWADE p.40", "id": "Attractive", "name": "Séduisant" },
        { "description": "Voir SWADE p.48", "id": "Beast Bond", "name": "Lien Animal" },
        { "description": "Voir SWADE p.48", "id": "Beast Master", "name": "Maître des Bêtes" },
        { "description": "Voir SWADE p.38", "id": "Berserk", "name": "Enragé" },
        { "description": "Voir SWADE p.41", "id": "Block", "name": "Blocage" },
        { "description": "Voir SWADE p.52", "id": "Bolster", "name": "Pique Revigorante" },
        { "description": "Voir SWADE p.37", "id": "Brave", "name": "Brave" },
        { "description": "Voir SWADE p.41", "id": "Brawler", "name": "Bagarreur" },
        { "description": "Voir SWADE p.38", "id": "Brawny", "name": "Costaud" },
        { "description": "Voir SWADE p.41", "id": "Bruiser", "name": "Cogneur" },
        { "description": "Voir SWADE p.37", "id": "Brute", "name": "Brute" },
        { "description": "Voir SWADE p.42", "id": "Calculating", "name": "Calculateur" },
        { "description": "Voir SWADE p.48", "id": "Champion", "name": "Champion" },
        { "description": "Voir SWADE p.46", "id": "Channeling", "name": "Canalisation" },
        { "description": "Voir SWADE p.38", "id": "Charismatic", "name": "Charismatique" },
        { "description": "Voir SWADE p.48", "id": "Chi", "name": "Chi" },
        { "description": "Voir SWADE p.49", "id": "Combat Acrobat", "name": "Acrobate de Combat" },
        { "description": "Voir SWADE p.42", "id": "Combat Reflexes", "name": "Combatif" },
        { "description": "Voir SWADE p.45", "id": "Command", "name": "Commandement" },
        { "description": "Voir SWADE p.45", "id": "Command Presence", "name": "Grande Aura de Commandement" },
        { "description": "Voir SWADE p.52", "id": "Common Bond", "name": "Lien Mutuel" },
        { "description": "Voir SWADE p.46", "id": "Concentration", "name": "Concentration" },
        { "description": "Voir SWADE p.51", "id": "Connections", "name": "Contacts" },
        { "description": "Voir SWADE p.42", "id": "Counterattack", "name": "Contre-Attaque" },
        { "description": "Voir SWADE p.49", "id": "Danger Sense", "name": "Sixième Sens" },
        { "description": "Voir SWADE p.42", "id": "Dead Shot", "name": "Dans le Mille !" },
        { "description": "Voir SWADE p.43", "id": "Dodge", "name": "Esquive" },
        { "description": "Voir SWADE p.42", "id": "Double Tap", "name": "Double Détente" },
        { "description": "Voir SWADE p.39", "id": "Elan", "name": "Panache" },
        { "description": "Voir SWADE p.52", "id": "Expert", "name": "Expert" },
        { "description": "Voir SWADE p.46", "id": "Extra Effort", "name": "Effort Supplémentaire" },
        { "description": "Voir SWADE p.43", "id": "Extraction", "name": "Extraction" },
        { "description": "Voir SWADE p.38", "id": "Fame", "name": "Notoriété" },
        { "description": "Voir SWADE p.39", "id": "Famous", "name": "Célébrité" },
        { "description": "Voir SWADE p.38", "id": "Fast Healer", "name": "Guérison Rapide" },
        { "description": "Voir SWADE p.43", "id": "Feint", "name": "Feinte" },
        { "description": "Voir SWADE p.45", "id": "Fervor", "name": "Ferveur" },
        { "description": "Voir SWADE p.40", "id": "Filthy Rich", "name": "Très Riche" },
        { "description": "Voir SWADE p.43", "id": "First Strike", "name": "Frappe Éclair" },
        { "description": "Voir SWADE p.40", "id": "Fleet-Footed", "name": "Véloce" },
        { "description": "Voir SWADE p.53", "id": "Followers", "name": "Suivants" },
        { "description": "Voir SWADE p.42", "id": "Free Runner", "name": "Coureur" },
        { "description": "Voir SWADE p.43", "id": "Frenzy", "name": "Frénésie" },
        { "description": "Voir SWADE p.46", "id": "Gadgeteer", "name": "Bricoleur de Génie" },
        { "description": "Voir SWADE p.44", "id": "Giant Killer", "name": "Tueur de Géant" },
        { "description": "Voir SWADE p.38", "id": "Great Luck", "name": "Très Chanceux" },
        { "description": "Voir SWADE p.43", "id": "Hard To Kill", "name": "Increvable" },
        { "description": "Voir SWADE p.43", "id": "Harder To Kill", "name": "Trompe-la-Mort" },
        { "description": "Voir SWADE p.48", "id": "Healer", "name": "Guérisseur" },
        { "description": "Voir SWADE p.45", "id": "Hold the Line!", "name": "Serrez les rangs !" },
        { "description": "Voir SWADE p.47", "id": "Holy/Unholy Warrior", "name": "Guerrier Saint/Impie" },
        { "description": "Voir SWADE p.52", "id": "Humiliate", "name": "Humiliation" },
        { "description": "Voir SWADE p.39", "id": "Improved Arcane Resistance", "name": "Grande Résistance aux Arcanes" },
        { "description": "Voir SWADE p.41", "id": "Improved Block", "name": "Grand Blocage" },
        { "description": "Voir SWADE p.42", "id": "Improved Counterattack", "name": "Grande Contre-Attaque" },
        { "description": "Voir SWADE p.43", "id": "Improved Dodge", "name": "Grande Esquive" },
        { "description": "Voir SWADE p.43", "id": "Improved Extraction", "name": "Grande Extraction" },
        { "description": "Voir SWADE p.43", "id": "Improved First Strike", "name": "Frappe Froudroyante" },
        { "description": "Voir SWADE p.43", "id": "Improved Frenzy", "name": "Frénésie Suprême" },
        { "description": "Voir SWADE p.44", "id": "Improved Level Headed", "name": "Sang-Froid" },
        { "description": "Voir SWADE p.44", "id": "Improved Nerves of Steel", "name": "Nerfs d'Acier Trempé" },
        { "description": "Voir SWADE p.44", "id": "Improved Rapid Fire", "name": "Tir Très Rapide" },
        { "description": "Voir SWADE p.47", "id": "Improved Rapid Recharge", "name": "Grande Source de Pouvoir" },
        { "description": "Voir SWADE p.41", "id": "Improved Sweep", "name": "Grand Balayage" },
        { "description": "Voir SWADE p.41", "id": "Improved Trademark Weapon", "name": "Arme Fétiche Adorée" },
        { "description": "Voir SWADE p.43", "id": "Improvisational Fighter", "name": "Improvisation Martiale" },
        { "description": "Voir SWADE p.45", "id": "Inspire", "name": "Inspiration" },
        { "description": "Voir SWADE p.50", "id": "Investigator", "name": "Investigateur" },
        { "description": "Voir SWADE p.44", "id": "Iron Jaw", "name": "Mâchoire d'Acier" },
        { "description": "Voir SWADE p.52", "id": "Iron Will", "name": "Volonté de Fer" },
        { "description": "Voir SWADE p.51", "id": "Jack-Of-All-Trades", "name": "Touche-à-Tout" },
        { "description": "Voir SWADE p.44", "id": "Killer Instinct", "name": "Instinct de Tueur" },
        { "description": "Voir SWADE p.44", "id": "Level Headed", "name": "Tête Froide" },
        { "description": "Voir SWADE p.38", "id": "Linguist", "name": "Linguiste" },
        { "description": "Voir SWADE p.48", "id": "Liquid Courage", "name": "Courage Liquide" },
        { "description": "Voir SWADE p.38", "id": "Luck", "name": "Chanceux" },
        { "description": "Voir SWADE p.44", "id": "Marksman", "name": "Tireur d'Élite" },
        { "description": "Voir SWADE p.41", "id": "Martial Artist", "name": "Arts Martiaux" },
        { "description": "Voir SWADE p.41", "id": "Martial Warrior", "name": "Maîtrise des Arts Martiaux" },
        { "description": "Voir SWADE p.53", "id": "Master", "name": "Maître" },
        { "description": "Voir SWADE p.53", "id": "Master of Arms", "name": "Maître d'Armes Légendaire" },
        { "description": "Voir SWADE p.46", "id": "Master Tactician", "name": "Maître Tacticien" },
        { "description": "Voir SWADE p.49", "id": "Mcgyver", "name": "Débrouillard" },
        { "description": "Voir SWADE p.52", "id": "Menacing", "name": "Menaçant" },
        { "description": "Voir SWADE p.47", "id": "Mentalist", "name": "Mentaliste" },
        { "description": "Voir SWADE p.42", "id": "Mighty Blow", "name": "Coup Puissant" },
        { "description": "Voir SWADE p.49", "id": "Mr Fix It", "name": "Bidouilleur" },
        { "description": "Voir SWADE p.45", "id": "Natural Leader", "name": "Leader Naturel" },
        { "description": "Voir SWADE p.44", "id": "Nerves of Steel", "name": "Nerfs d'Acier" },
        { "description": "Voir SWADE p.47", "id": "New Powers", "name": "Nouveaux Pouvoirs" },
        { "description": "Voir SWADE p.44", "id": "No Mercy", "name": "Sans Pitié" },
        { "description": "Voir SWADE p.47", "id": "Power Points", "name": "Points de Pouvoir" },
        { "description": "Voir SWADE p.46", "id": "Power Surge", "name": "Afflux de Pouvoir" },
        { "description": "Voir SWADE p.53", "id": "Professional", "name": "Professionnel" },
        { "description": "Voir SWADE p.52", "id": "Provoke", "name": "Provocateur" },
        { "description": "Voir SWADE p.40", "id": "Quick", "name": "Vif" },
        { "description": "Voir SWADE p.51", "id": "Rabble-Rouser", "name": "Agitateur" },
        { "description": "Voir SWADE p.44", "id": "Rapid Fire", "name": "Tir Rapide" },
        { "description": "Voir SWADE p.47", "id": "Rapid Recharge", "name": "Source de Pouvoir" },
        { "description": "Voir SWADE p.52", "id": "Reliable", "name": "Fiable" },
        { "description": "Voir SWADE p.52", "id": "Retort", "name": "Réplique Acerbe" },
        { "description": "Voir SWADE p.40", "id": "Rich", "name": "Riche" },
        { "description": "Voir SWADE p.44", "id": "Rock and Roll!", "name": "Rock'n Roll!" },
        { "description": "Voir SWADE p.49", "id": "Scavenger", "name": "Recycleur" },
        { "description": "Voir SWADE p.50", "id": "Scholar", "name": "Érudit" },
        { "description": "Voir SWADE p.53", "id": "Sidekick", "name": "Acolyte" },
        { "description": "Voir SWADE p.51", "id": "Soldier", "name": "Soldat" },
        { "description": "Voir SWADE p.46", "id": "Soul Drain", "name": "Drain de l'Âme" },
        { "description": "Voir SWADE p.44", "id": "Steady Hands", "name": "Poigne Ferme" },
        { "description": "Voir SWADE p.52", "id": "Streetwise", "name": "Réseau" },
        { "description": "Voir SWADE p.52", "id": "Strong Willed", "name": "Déterminé" },
        { "description": "Voir SWADE p.41", "id": "Sweep", "name": "Balayage" },
        { "description": "Voir SWADE p.45", "id": "Tactician", "name": "Tacticien" },
        { "description": "Voir SWADE p.51", "id": "Thief", "name": "Voleur" },
        { "description": "Voir SWADE p.53", "id": "Tough As Nails", "name": "Endurci" },
        { "description": "Voir SWADE p.53", "id": "Tougher Than Nails", "name": "Coriace" },
        { "description": "Voir SWADE p.41", "id": "Trademark Weapon", "name": "Arme Fétiche" },
        { "description": "Voir SWADE p.42", "id": "Two-Fisted", "name": "Combat à deux Armes" },
        { "description": "Voir SWADE p.42", "id": "Two-Gun Kid", "name": "Double Flingue" },
        { "description": "Voir SWADE p.40", "id": "Very Attractive", "name": "Très Séduisant" },
        { "description": "Voir SWADE p.53", "id": "Weapon Master", "name": "Maître d'Armes" },
        { "description": "Voir SWADE p.47", "id": "Wizard", "name": "Mage" },
        { "description": "Voir SWADE p.50", "id": "Woodsman", "name": "Forestier" },
        { "description": "Voir SWADE p.51", "id": "Work the Crowd", "name": "Chauffeur de Foule" },
        { "description": "Voir SWADE p.51", "id": "Work The Room", "name": "Chauffeur de Salle" }
    ],
    "label": "SWADE Atouts",
    "mapping": { "description": "data.description" }
}
module/compendium_light/swade-core-rules.swade-hindrances.json (Normal file, 293 lines)
@@ -0,0 +1,293 @@
{
    "entries": [
        { "description": "Voir SWADE p.23", "id": "All Thumbs", "name": "2 mains gauches" },
        { "description": "Voir SWADE p.22", "id": "Anemic", "name": "Anémique" },
        { "description": "Voir SWADE p.22", "id": "Arrogant", "name": "Arrogant" },
        { "description": "Voir SWADE p.26", "id": "Bad Eyes", "name": "Myope" },
        { "description": "Voir SWADE p.25", "id": "Bad Luck", "name": "Malchanceux" },
        { "description": "Voir SWADE p.22", "id": "Big Mouth", "name": "Bavard" },
        { "description": "Voir SWADE p.22", "id": "Blind", "name": "Aveugle" },
        { "description": "Voir SWADE p.28", "id": "Bloodthirsty", "name": "Sanguinaire" },
        { "description": "Voir SWADE p.26", "id": "Can't Swim", "name": "Mauvais nageur" },
        { "description": "Voir SWADE p.27", "id": "Cautious", "name": "Prudent" },
        { "description": "Voir SWADE p.24", "id": "Clueless", "name": "Ignorant" },
        { "description": "Voir SWADE p.22", "id": "Clumsy", "name": "Lourdaud" },
        { "description": "Voir SWADE p.23", "id": "Code of Honor", "name": "Code d'honneur" },
        { "description": "Voir SWADE p.23", "id": "Curious", "name": "Curieux" },
        { "description": "Voir SWADE p.28", "id": "Death Wish", "name": "Rien à perdre" },
        { "description": "Voir SWADE p.23", "id": "Delusional", "name": "Chimère" },
        { "description": "Voir SWADE p.28", "id": "Doubting Thomas", "name": "Sceptique" },
        { "description": "Voir SWADE p.26", "id": "Driven", "name": "Obsession" },
        { "description": "Voir SWADE p.22", "id": "Elderly", "name": "Agé" },
        { "description": "Voir SWADE p.24", "id": "Enemy", "name": "Ennemis" },
        { "description": "Voir SWADE p.23", "id": "Greedy", "name": "Cupide" },
        { "description": "Voir SWADE p.25/26", "id": "Habit", "name": "Mauvaise habitude" },
        { "description": "Voir SWADE p.24", "id": "Hard of Hearing", "name": "Dur d'oreille" },
        { "description": "Voir SWADE p.24", "id": "Heroic", "name": "Héroïque" },
        { "description": "Voir SWADE p.24", "id": "Hesitant", "name": "Hésitant" },
        { "description": "Voir SWADE p.24/25", "id": "Illiterate", "name": "Illettré" },
        { "description": "Voir SWADE p.25", "id": "Impulsive", "name": "Impulsif" },
        { "description": "Voir SWADE p.25", "id": "Jealous", "name": "Jaloux" },
        { "description": "Voir SWADE p.25", "id": "Loyal", "name": "Loyal" },
        { "description": "Voir SWADE p.28", "id": "Mean", "name": "Sale caractère" },
        { "description": "Voir SWADE p.28", "id": "Mild Mannered", "name": "Timoré" },
        { "description": "Voir SWADE p.26", "id": "Mute", "name": "Muet" },
        { "description": "Voir SWADE p.26", "id": "Obese", "name": "Obèse" },
        { "description": "Voir SWADE p.26", "id": "Obligation", "name": "Obligation" },
        { "description": "Voir SWADE p.25", "id": "One Arm", "name": "Manchot" },
        { "description": "Voir SWADE p.23", "id": "One Eye", "name": "Borgne" },
        { "description": "Voir SWADE p.24", "id": "Outsider", "name": "Etranger" },
        { "description": "Voir SWADE p.27", "id": "Overconfident", "name": "Présomptueux" },
        { "description": "Voir SWADE p.26", "id": "Pacifist", "name": "Pacifiste" },
        { "description": "Voir SWADE p.27", "id": "Phobia", "name": "Phobie" },
        { "description": "Voir SWADE p.27", "id": "Poverty", "name": "Poches percées" },
        { "description": "Voir SWADE p.23", "id": "Quirk", "name": "Bizarrerie" },
        { "description": "Voir SWADE p.25", "id": "Ruthless", "name": "Impitoyable" },
        { "description": "Voir SWADE p.28", "id": "Secret", "name": "Secret" },
        { "description": "Voir SWADE p.23", "id": "Shamed", "name": "Déshonoré" },
        { "description": "Voir SWADE p.25", "id": "Slow", "name": "Lent" },
        { "description": "Voir SWADE p.24", "id": "Small", "name": "Frêle" },
        { "description": "Voir SWADE p.28", "id": "Stubborn", "name": "Tétu" },
        { "description": "Voir SWADE p.28", "id": "Suspicious", "name": "Suspicieux" },
        { "description": "Voir SWADE p.22", "id": "Thin Skinned", "name": "Thin Skinned" },
        { "description": "Voir SWADE p.23", "id": "Tongue-Tied", "name": "Défaut d'élocution" },
        { "description": "Voir SWADE p.26", "id": "Ugly", "name": "Moche" },
        { "description": "Voir SWADE p.27", "id": "Vengeful", "name": "Rancunier" },
        { "description": "Voir SWADE p.28", "id": "Vow", "name": "Serment" },
        { "description": "Voir SWADE p.27", "id": "Wanted", "name": "Recherché" },
        { "description": "Voir SWADE p.23", "id": "Yellow", "name": "Couard" },
        { "description": "Voir SWADE p.22", "id": "Young", "name": "Gamin" }
    ],
    "label": "SWADE Handicaps",
    "mapping": { "description": "data.description" }
}
module/compendium_light/swade-core-rules.swade-skills.json (Normal file, 173 lines)
@@ -0,0 +1,173 @@
{
    "entries": [
        { "description": "Voir SWADE p.30", "id": "Academics", "name": "Éducation" },
        { "description": "Voir SWADE p.29", "id": "Athletics", "name": "Athlétisme" },
        { "description": "Voir SWADE p.35", "id": "Battle", "name": "Stratégie" },
        { "description": "Voir SWADE p.33", "id": "Boating", "name": "Navigation" },
        { "description": "Voir SWADE p.30", "id": "Common Knowledge", "name": "Culture Générale" },
        { "description": "Voir SWADE p.29", "id": "Driving", "name": "Conduite" },
        { "description": "Voir SWADE p.31", "id": "Electronics", "name": "Électronique" },
        { "description": "Voir SWADE p.31", "id": "Faith", "name": "Foi" },
        { "description": "Voir SWADE p.29", "id": "Fighting", "name": "Combat" },
        { "description": "Voir SWADE p.31", "id": "Focus", "name": "Focus" },
        { "description": "Voir SWADE p.31", "id": "Gambling", "name": "Jeu" },
        { "description": "Voir SWADE p.31", "id": "Hacking", "name": "Informatique" },
        { "description": "Voir SWADE p.35", "id": "Healing", "name": "Soins" },
        { "description": "Voir SWADE p.31", "id": "Intimidation", "name": "Intimidation" },
        { "description": "Voir SWADE p.31", "id": "Language", "name": "Langue" },
        { "description": "Voir SWADE p.33", "id": "Notice", "name": "Perception" },
        { "description": "Voir SWADE p.33", "id": "Occult", "name": "Occultisme" },
        { "description": "Voir SWADE p.29", "id": "Performance", "name": "Performance" },
        { "description": "Voir SWADE p.34", "id": "Persuasion", "name": "Persuasion" },
        { "description": "Voir SWADE p.34", "id": "Piloting", "name": "Pilotage" },
        { "description": "Voir SWADE p.34", "id": "Psionics", "name": "Psioniques" },
        { "description": "Voir SWADE p.35", "id": "Repair", "name": "Réparation" },
        { "description": "Voir SWADE p.34", "id": "Research", "name": "Recherche" },
        { "description": "Voir SWADE p.31", "id": "Riding", "name": "Équitation" },
        { "description": "Voir SWADE p.35", "id": "Science", "name": "Science" },
        { "description": "Voir SWADE p.36", "id": "Shooting", "name": "Tir" },
        { "description": "Voir SWADE p.33", "id": "Spellcasting", "name": "Magie" },
        { "description": "Voir SWADE p.30", "id": "Stealth", "name": "Discrétion" },
        { "description": "Voir SWADE p.36", "id": "Survival", "name": "Survie" },
        { "description": "Voir SWADE p.34", "id": "Taunt", "name": " Provocation" },
        { "description": "Voir SWADE p.35", "id": "Thievery", "name": "Subterfuge" },
        { "description": "page inconnue", "id": "Unskilled Attempt", "name": "Tentative sans qualification" },
        { "description": "Voir SWADE p.35", "id": "Weird Science", "name": "Science Étrange" }
    ],
    "label": "SWADE Compétences",
    "mapping": { "description": "data.description" }
}
tools/create_light_json.lua (Normal file, 25 lines)
@@ -0,0 +1,25 @@
local jsonList = { 'swade-core-rules.swade-hindrances.json', 'swade-core-rules.swade-edges.json', 'swade-core-rules.swade-skills.json' }
local inFolder = '../module/compendiums/'
local outFolder = '../module/compendium_light/'

package.path = package.path .. ";luajson/?.lua"
local JSON = require'json'

for _, jsonFile in pairs(jsonList) do
    local f = io.open(inFolder..jsonFile, "r")
    local jsonIn = f:read("*a")
    f:close()

    local jsonInData = JSON.decode(jsonIn)
    local jsonOutData = { label = jsonInData.label, entries = {}, mapping = { description = "data.description" } }
    for key, data in pairs(jsonInData.entries) do
        jsonOutData.entries[key] = { id = data.id, name = data.name, description = data.description }
    end

    local jsonOut = JSON.encode(jsonOutData)
    f = io.open(outFolder..jsonFile, "w+")
    f:write(jsonOut)
    f:close()
end
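The script above generates the three light compendium files shown earlier in this commit, keeping only label, mapping and, per entry, id/name/description. As a quick sanity check on a generated file, a minimal sketch, assuming the same bundled luajson module and tools/ as the working directory (the file path below is just an example):

package.path = package.path .. ";luajson/?.lua"
local JSON = require'json'

-- hypothetical check: any of the three generated light files works the same way
local path = '../module/compendium_light/swade-core-rules.swade-skills.json'
local f = assert(io.open(path, "r"))
local data = JSON.decode(f:read("*a"))
f:close()

-- a light compendium keeps only the label, the description mapping and id/name/description per entry
assert(data.label and data.mapping.description == "data.description")
for _, entry in pairs(data.entries) do
    assert(entry.id and entry.name and entry.description)
end
print(("%s: %d entries"):format(data.label, #data.entries))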
tools/lpeg/lpcap.lua (Normal file, 625 lines)
@@ -0,0 +1,625 @@
--[[
LPEGLJ
lpcap.lua
Capture functions
Copyright (C) 2014 Rostislav Sacek.
based on LPeg v1.0 - PEG pattern matching for Lua
Lua.org & PUC-Rio written by Roberto Ierusalimschy
http://www.inf.puc-rio.br/~roberto/lpeg/

** Permission is hereby granted, free of charge, to any person obtaining
** a copy of this software and associated documentation files (the
** "Software"), to deal in the Software without restriction, including
** without limitation the rights to use, copy, modify, merge, publish,
** distribute, sublicense, and/or sell copies of the Software, and to
** permit persons to whom the Software is furnished to do so, subject to
** the following conditions:
**
** The above copyright notice and this permission notice shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
** SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**
** [ MIT license: http://www.opensource.org/licenses/mit-license.php ]
--]]

local ffi = require "ffi"

local Cclose = 0
local Cposition = 1
local Cconst = 2
local Cbackref = 3
local Carg = 4
local Csimple = 5
local Ctable = 6
local Cfunction = 7
local Cquery = 8
local Cstring = 9
local Cnum = 10
local Csubst = 11
local Cfold = 12
local Cruntime = 13
local Cgroup = 14

local MAXSTRCAPS = 10

local pushcapture
local addonestring

-- Goes back in a list of captures looking for an open capture
-- corresponding to a close

local function findopen(cs, index)
    local n = 0; -- number of closes waiting an open
    while true do
        index = index - 1
        if cs.ocap[index].kind == Cclose then
            n = n + 1 -- one more open to skip
        elseif cs.ocap[index].siz == 0 then
            if n == 0 then
                return index
            end
            n = n - 1
        end
    end
end

local function checknextcap(cs, captop)
    local cap = cs.cap;
    -- not a single capture? ((cap)->siz != 0)
    if cs.ocap[cap].siz == 0 then
        local n = 0; -- number of opens waiting a close
        -- look for corresponding close
        while true do
            cap = cap + 1
            if cap > captop then return end
            if cs.ocap[cap].kind == Cclose then
                n = n - 1
                if n + 1 == 0 then
                    break;
                end
            elseif cs.ocap[cap].siz == 0 then
                n = n + 1
            end
        end
    end
    cap = cap + 1; -- + 1 to skip last close (or entire single capture)
    if cap > captop then return end
    return true
end

-- Go to the next capture

local function nextcap(cs)
    local cap = cs.cap;
    -- not a single capture? ((cap)->siz != 0)
    if cs.ocap[cap].siz == 0 then
        local n = 0; -- number of opens waiting a close
        -- look for corresponding close
        while true do
            cap = cap + 1
            if cs.ocap[cap].kind == Cclose then
                n = n - 1
                if n + 1 == 0 then
                    break;
                end
            elseif cs.ocap[cap].siz == 0 then
                n = n + 1
            end
        end
    end
    cs.cap = cap + 1; -- + 1 to skip last close (or entire single capture)
end

-- Push on the Lua stack all values generated by nested captures inside
-- the current capture. Returns number of values pushed. 'addextra'
-- makes it push the entire match after all captured values. The
-- entire match is pushed also if there are no other nested values,
-- so the function never returns zero.

local function pushnestedvalues(cs, addextra, out, valuetable)
    local co = cs.cap
    cs.cap = cs.cap + 1
    -- no nested captures?
    if cs.ocap[cs.cap - 1].siz ~= 0 then
        local st = cs.ocap[co].s
        local l = cs.ocap[co].siz - 1
        out.outindex = out.outindex + 1
        out.out[out.outindex] = cs.s and cs.s:sub(st, st + l - 1) or cs.stream(st, st + l - 1)
        return 1; -- that is it
    else
        local n = 0;
        while cs.ocap[cs.cap].kind ~= Cclose do -- repeat for all nested patterns
            n = n + pushcapture(cs, out, valuetable);
        end
        -- need extra?
        if addextra or n == 0 then
            local st = cs.ocap[co].s
            local l = cs.ocap[cs.cap].s - cs.ocap[co].s
            out.outindex = out.outindex + 1
            out.out[out.outindex] = cs.s and cs.s:sub(st, st + l - 1) or cs.stream(st, st + l - 1)
            n = n + 1
        end
        cs.cap = cs.cap + 1 -- skip close entry
        return n;
    end
end

-- Push only the first value generated by nested captures

local function pushonenestedvalue(cs, out, valuetable)
    local n = pushnestedvalues(cs, false, out, valuetable)
    for i = n, 2, -1 do
        out.out[out.outindex] = nil
        out.outindex = out.outindex - 1
    end
end

-- Try to find a named group capture with the name given at the top of
-- the stack; goes backward from 'cap'.

local function findback(cs, cap, name, valuetable)
    -- repeat until end of list
    while cap > 0 do
        cap = cap - 1
        local continue
        if cs.ocap[cap].kind == Cclose then
            cap = findopen(cs, cap); -- skip nested captures
        elseif cs.ocap[cap].siz == 0 then
            continue = true -- opening an enclosing capture: skip and get previous
        end
        if not continue and cs.ocap[cap].kind == Cgroup and cs.ocap[cap].idx ~= 0 then
            local gname = valuetable[cs.ocap[cap].idx] -- get group name
            -- right group?
            if name == gname then
                return cap;
            end
        end
    end
    error(("back reference '%s' not found"):format(name), 0)
end

-- Back-reference capture. Return number of values pushed.

local function backrefcap(cs, out, valuetable)
    local curr = cs.cap;
    local name = valuetable[cs.ocap[cs.cap].idx] -- reference name
    cs.cap = findback(cs, curr, name, valuetable) -- find corresponding group
    local n = pushnestedvalues(cs, false, out, valuetable); -- push group's values
    cs.cap = curr + 1;
    return n;
end

-- Table capture: creates a new table and populates it with nested
-- captures.

local function tablecap(cs, out, valuetable)
    local n = 0;
    local t = {}
    cs.cap = cs.cap + 1
    -- table is empty
    if cs.ocap[cs.cap - 1].siz == 0 then
        while cs.ocap[cs.cap].kind ~= Cclose do
            local subout = { outindex = 0, out = {} }
            -- named group?
            if cs.ocap[cs.cap].kind == Cgroup and cs.ocap[cs.cap].idx ~= 0 then
                local groupname = valuetable[cs.ocap[cs.cap].idx] -- push group name
                pushonenestedvalue(cs, subout, valuetable)
                t[groupname] = subout.out[1]
            else
                -- not a named group
                local k = pushcapture(cs, subout, valuetable)
                -- store all values into table
                for i = 1, subout.outindex do
                    t[i + n] = subout.out[i]
                end
                n = n + k;
            end
        end
        cs.cap = cs.cap + 1 -- skip close entry
    end
    out.outindex = out.outindex + 1
    out.out[out.outindex] = t
    return 1; -- number of values pushed (only the table)
end

-- Table-query capture

local function querycap(cs, out, valuetable)
    local table = valuetable[cs.ocap[cs.cap].idx]
    local subout = { outindex = 0, out = {} }
    pushonenestedvalue(cs, subout, valuetable) -- get nested capture
    -- query cap. value at table
    if table[subout.out[1]] ~= nil then
        out.outindex = out.outindex + 1
        out.out[out.outindex] = table[subout.out[1]]
        return 1
    end
    return 0
end

-- Fold capture

local function foldcap(cs, out, valuetable)
    local fce = valuetable[cs.ocap[cs.cap].idx]
    cs.cap = cs.cap + 1
    -- no nested captures?
    -- or no nested captures (large subject)?
    if cs.ocap[cs.cap - 1].siz ~= 0 or
        cs.ocap[cs.cap].kind == Cclose then
        error("no initial value for fold capture", 0);
    end
    local subout = { outindex = 0; out = {} }
    local n = pushcapture(cs, subout, valuetable) -- nested captures with no values?
    if n == 0 then
        error("no initial value for fold capture", 0);
    end
    local acumulator = subout.out[1] -- leave only one result for accumulator
    while cs.ocap[cs.cap].kind ~= Cclose do
        local subout = { outindex = 0; out = {} }
        n = pushcapture(cs, subout, valuetable); -- get next capture's values
        acumulator = fce(acumulator, unpack(subout.out, 1, subout.outindex)) -- call folding function
    end
    cs.cap = cs.cap + 1; -- skip close entry
    out.outindex = out.outindex + 1
    out.out[out.outindex] = acumulator
    return 1; -- only accumulator left on the stack
end

local function retcount(...)
    return select('#', ...), { ... }
end

-- Function capture

local function functioncap(cs, out, valuetable)
    local fce = valuetable[cs.ocap[cs.cap].idx] -- push function
    local subout = { outindex = 0, out = {} }
    local n = pushnestedvalues(cs, false, subout, valuetable); -- push nested captures
    local count, ret = retcount(fce(unpack(subout.out, 1, n))) -- call function
    for i = 1, count do
        out.outindex = out.outindex + 1
        out.out[out.outindex] = ret[i]
    end
    return count
end

-- Select capture

local function numcap(cs, out, valuetable)
    local idx = valuetable[cs.ocap[cs.cap].idx] -- value to select
    -- no values?
    if idx == 0 then
        nextcap(cs); -- skip entire capture
        return 0; -- no value produced
    else
        local subout = { outindex = 0, out = {} }
        local n = pushnestedvalues(cs, false, subout, valuetable)
        -- invalid index?
        if n < idx then
            error(("no capture '%d'"):format(idx), 0)
        else
            out.outindex = out.outindex + 1
            out.out[out.outindex] = subout.out[idx] -- get selected capture
            return 1;
        end
    end
end

-- Calls a runtime capture. Returns number of captures removed by
-- the call, including the initial Cgroup. (Captures to be added are
-- on the Lua stack.)

local function runtimecap(cs, close, s, out, valuetable)
    local open = findopen(cs, close)
    assert(cs.ocap[open].kind == Cgroup)
    cs.ocap[close].kind = Cclose; -- closes the group
    cs.ocap[close].s = s;
    cs.cap = open;
    local fce = valuetable[cs.ocap[cs.cap].idx] -- push function to be called
    local subout = { outindex = 0, out = {} }
    local n = pushnestedvalues(cs, false, subout, valuetable); -- push nested captures
    local count, ret = retcount(fce(cs.s or cs.stream, s, unpack(subout.out, 1, n))) -- call dynamic function
    for i = 1, count do
        out.outindex = out.outindex + 1
        out.out[out.outindex] = ret[i]
    end
    return close - open -- number of captures of all kinds removed
end

-- Collect values from current capture into array 'cps'. Current
-- capture must be Cstring (first call) or Csimple (recursive calls).
-- (In first call, fills %0 with whole match for Cstring.)
-- Returns number of elements in the array that were filled.

local function getstrcaps(cs, cps, n)
    local k = n
    n = n + 1
    cps[k + 1].isstring = true; -- get string value
    cps[k + 1].startstr = cs.ocap[cs.cap].s; -- starts here
    cs.cap = cs.cap + 1
    -- nested captures?
    if cs.ocap[cs.cap - 1].siz == 0 then
        -- traverse them
        while cs.ocap[cs.cap].kind ~= Cclose do
            -- too many captures?
            if n >= MAXSTRCAPS then
                nextcap(cs); -- skip extra captures (will not need them)
            elseif cs.ocap[cs.cap].kind == Csimple then
                -- string?
                n = getstrcaps(cs, cps, n); -- put info. into array
            else
                cps[n + 1].isstring = false; -- not a string
                cps[n + 1].origcap = cs.cap; -- keep original capture
                nextcap(cs);
                n = n + 1;
            end
        end
        cs.cap = cs.cap + 1 -- skip close
    end
    cps[k + 1].endstr = cs.ocap[cs.cap - 1].s + cs.ocap[cs.cap - 1].siz - 1 -- ends here
    return n;
end

-- add next capture value (which should be a string) to buffer 'b'

-- String capture: add result to buffer 'b' (instead of pushing
-- it into the stack)

local function stringcap(cs, b, valuetable)
    local cps = {}
    for i = 1, MAXSTRCAPS do
        cps[#cps + 1] = {}
    end
    local fmt = valuetable[cs.ocap[cs.cap].idx]
    local n = getstrcaps(cs, cps, 0) - 1; -- collect nested captures
    local i = 1
    -- traverse them
    while i <= #fmt do
        local c = fmt:sub(i, i)
        -- not an escape?
        if c ~= '%' then
            b[#b + 1] = c -- add it to buffer
        elseif fmt:sub(i + 1, i + 1) < '0' or fmt:sub(i + 1, i + 1) > '9' then
            -- not followed by a digit?
            i = i + 1
            b[#b + 1] = fmt:sub(i, i)
        else
            i = i + 1
            local l = fmt:sub(i, i) - '0'; -- capture index
            if l > n then
                error(("invalid capture index (%d)"):format(l), 0)
            elseif cps[l + 1].isstring then
                b[#b + 1] = cs.s and cs.s:sub(cps[l + 1].startstr, cps[l + 1].endstr - cps[l + 1].startstr + cps[l + 1].startstr - 1) or
                    cs.stream(cps[l + 1].startstr, cps[l + 1].endstr - cps[l + 1].startstr + cps[l + 1].startstr - 1)
            else
                local curr = cs.cap;
                cs.cap = cps[l + 1].origcap; -- go back to evaluate that nested capture
                if not addonestring(cs, b, "capture", valuetable) then
                    error(("no values in capture index %d"):format(l), 0)
                end
                cs.cap = curr; -- continue from where it stopped
            end
        end
        i = i + 1
    end
end

-- Substitution capture: add result to buffer 'b'

local function substcap(cs, b, valuetable)
    local curr = cs.ocap[cs.cap].s;
    -- no nested captures?
    if cs.ocap[cs.cap].siz ~= 0 then
        -- keep original text
        b[#b + 1] = cs.s and cs.s:sub(curr, cs.ocap[cs.cap].siz - 1 + curr - 1) or
            cs.stream(curr, cs.ocap[cs.cap].siz - 1 + curr - 1)
    else
        cs.cap = cs.cap + 1 -- skip open entry
        -- traverse nested captures
        while cs.ocap[cs.cap].kind ~= Cclose do
            local next = cs.ocap[cs.cap].s;
            b[#b + 1] = cs.s and cs.s:sub(curr, next - curr + curr - 1) or
                cs.stream(curr, next - curr + curr - 1) -- add text up to capture
            if addonestring(cs, b, "replacement", valuetable) then
                curr = cs.ocap[cs.cap - 1].s + cs.ocap[cs.cap - 1].siz - 1; -- continue after match
            else
                -- no capture value
                curr = next; -- keep original text in final result
            end
        end
        b[#b + 1] = cs.s and cs.s:sub(curr, curr + cs.ocap[cs.cap].s - curr - 1) or
            cs.stream(curr, curr + cs.ocap[cs.cap].s - curr - 1) -- add last piece of text
    end
    cs.cap = cs.cap + 1 -- go to next capture
end

-- Evaluates a capture and adds its first value to buffer 'b'; returns
-- whether there was a value

function addonestring(cs, b, what, valuetable)
    local tag = cs.ocap[cs.cap].kind
    if tag == Cstring then
        stringcap(cs, b, valuetable); -- add capture directly to buffer
        return 1
    elseif tag == Csubst then
        substcap(cs, b, valuetable); -- add capture directly to buffer
        return 1
    else
        local subout = { outindex = 0, out = {} }
        local n = pushcapture(cs, subout, valuetable);
        if n > 0 then
            if type(subout.out[1]) ~= 'string' and type(subout.out[1]) ~= 'number' then
                error(("invalid %s value (a %s)"):format(what, type(subout.out[1])), 0)
            end
            b[#b + 1] = subout.out[1]
            return n
        end
    end
end

-- Push all values of the current capture into the stack; returns
-- number of values pushed

function pushcapture(cs, out, valuetable)
    local type = cs.ocap[cs.cap].kind
    if type == Cposition then
        out.outindex = out.outindex + 1
        out.out[out.outindex] = cs.ocap[cs.cap].s
        cs.cap = cs.cap + 1;
        return 1;
    elseif type == Cconst then
        out.outindex = out.outindex + 1
        out.out[out.outindex] = valuetable[cs.ocap[cs.cap].idx]
        cs.cap = cs.cap + 1
        return 1;
    elseif type == Carg then
        local arg = valuetable[cs.ocap[cs.cap].idx]
        cs.cap = cs.cap + 1
        if arg > cs.ptopcount then
            error(("reference to absent extra argument #%d"):format(arg), 0)
        end
        out.outindex = out.outindex + 1
        out.out[out.outindex] = cs.ptop[arg]
        return 1;
    elseif type == Csimple then
        local k = pushnestedvalues(cs, true, out, valuetable)
        local index = out.outindex
        table.insert(out.out, index - k + 1, out.out[index])
        out[index + 1] = nil
        return k;
    elseif type == Cruntime then
        out.outindex = out.outindex + 1
        out.out[out.outindex] = valuetable[cs.ocap[cs.cap].idx]
        cs.cap = cs.cap + 1;
        return 1;
    elseif type == Cstring then
        local b = {}
        stringcap(cs, b, valuetable)
        out.outindex = out.outindex + 1
        out.out[out.outindex] = table.concat(b)
        return 1;
    elseif type == Csubst then
        local b = {}
        substcap(cs, b, valuetable);
        out.outindex = out.outindex + 1
        out.out[out.outindex] = table.concat(b)
        return 1;
    elseif type == Cgroup then
        -- anonymous group?
        if cs.ocap[cs.cap].idx == 0 then
            return pushnestedvalues(cs, false, out, valuetable); -- add all nested values
        else
            -- named group: add no values
            nextcap(cs); -- skip capture
            return 0
        end
    elseif type == Cbackref then
        return backrefcap(cs, out, valuetable)
    elseif type == Ctable then
        return tablecap(cs, out, valuetable)
    elseif type == Cfunction then
        return functioncap(cs, out, valuetable)
    elseif type == Cnum then
        return numcap(cs, out, valuetable)
    elseif type == Cquery then
        return querycap(cs, out, valuetable)
    elseif type == Cfold then
        return foldcap(cs, out, valuetable)
    else
        assert(false)
    end
end

-- Prepare a CapState structure and traverse the entire list of
-- captures in the stack pushing its results. 's' is the subject
-- string, 'r' is the final position of the match, and 'ptop'
-- the index in the stack where some useful values were pushed.
-- Returns the number of results pushed. (If the list produces no
-- results, push the final position of the match.)

local function getcaptures(capture, s, stream, r, valuetable, ...)
    local n = 0;
    local cs = { cap = 0 }
    local out = { outindex = 0; out = {} }
    -- is there any capture?
    if capture[cs.cap].kind ~= Cclose then
        cs.ocap = capture
        cs.s = s;
        cs.stream = stream
        cs.ptopcount, cs.ptop = retcount(...)
        repeat -- collect their values
            n = n + pushcapture(cs, out, valuetable)
        until cs.ocap[cs.cap].kind == Cclose
    end
    -- no capture values?
    if n == 0 then
        if not r then
            return
        else
            return r
        end
    end
    assert(out.outindex < 7998, "(too many captures)")
    return unpack(out.out, 1, out.outindex)
end

local function getcapturesruntime(capture, s, stream, notdelete, min, max, captop, valuetable, ...)
    local n = 0;
    local cs = { cap = min }
    local out = { outindex = 0; out = {} }
    cs.ocap = capture
    cs.s = s
    cs.stream = stream
    cs.ptopcount, cs.ptop = retcount(...)
    local start = 0
    repeat -- collect their values
        if not checknextcap(cs, max) then break end
        local notdelete = notdelete or capture[cs.cap].kind == Cgroup and capture[cs.cap].idx ~= 0 and capture[cs.cap].candelete == 0
        pushcapture(cs, out, valuetable)
        if notdelete then
            start = cs.cap
        else
            n = n + cs.cap - start
            for i = 0, captop - cs.cap - 1 do
                ffi.copy(capture + start + i, capture + cs.cap + i, ffi.sizeof('CAPTURE'))
            end
            max = max - (cs.cap - start)
            captop = captop - (cs.cap - start)
            cs.cap = start
        end
    until cs.cap == max
    assert(out.outindex < 7998, "(too many captures)")
    return n, out.out, out.outindex
end

return {
    getcaptures = getcaptures,
    runtimecap = runtimecap,
    getcapturesruntime = getcapturesruntime,
}
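lpcap.lua only implements the capture machinery; the public constructors live in lpeg.lua below. A minimal usage sketch of the bundled library, assuming it runs under LuaJIT (these files rely on the ffi and bit modules) from the tools/ directory so that lpeg/?.lua resolves; the pattern is only an illustration of the table, group and fold captures implemented above:

package.path = package.path .. ";lpeg/?.lua"
local lpeg = require'lpeg'

-- fold "key=value" pairs into a table: Ct starts an empty table,
-- Cg groups each key/value pair, Cf folds the pairs with rawset
local item = lpeg.C((1 - lpeg.S('=,'))^1)
local pair = lpeg.Cg(item * '=' * item)
local list = lpeg.Cf(lpeg.Ct('') * pair * (',' * pair)^0, rawset)

local t = list:match('id=Ace,name=As')
print(t.id, t.name)   --> Ace     As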
tools/lpeg/lpcode.lua (Normal file, 1057 lines)
File diff suppressed because it is too large

tools/lpeg/lpeg.lua (Normal file, 1373 lines)
File diff suppressed because it is too large
tools/lpeg/lpprint.lua (Normal file, 356 lines)
@@ -0,0 +1,356 @@
--[[
LPEGLJ
lpprint.lua
Tree, code and debug print function (only for debuging)
Copyright (C) 2014 Rostislav Sacek.
based on LPeg v1.0 - PEG pattern matching for Lua
Lua.org & PUC-Rio written by Roberto Ierusalimschy
http://www.inf.puc-rio.br/~roberto/lpeg/

** Permission is hereby granted, free of charge, to any person obtaining
** a copy of this software and associated documentation files (the
** "Software"), to deal in the Software without restriction, including
** without limitation the rights to use, copy, modify, merge, publish,
** distribute, sublicense, and/or sell copies of the Software, and to
** permit persons to whom the Software is furnished to do so, subject to
** the following conditions:
**
** The above copyright notice and this permission notice shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
** SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**
** [ MIT license: http://www.opensource.org/licenses/mit-license.php ]
--]]

local ffi = require"ffi"
local band, rshift, lshift = bit.band, bit.rshift, bit.lshift

ffi.cdef[[
int isprint ( int c );
]]

local RuleLR = 0x10000
local Ruleused = 0x20000

-- {======================================================
-- Printing patterns (for debugging)
-- =======================================================

local TChar = 0
local TSet = 1
local TAny = 2 -- standard PEG elements
local TTrue = 3
local TFalse = 4
local TRep = 5
local TSeq = 6
local TChoice = 7
local TNot = 8
local TAnd = 9
local TCall = 10
local TOpenCall = 11
local TRule = 12 -- sib1 is rule's pattern, sib2 is 'next' rule
local TGrammar = 13 -- sib1 is initial (and first) rule
local TBehind = 14 -- match behind
local TCapture = 15 -- regular capture
local TRunTime = 16 -- run-time capture

local IAny = 0 -- if no char, fail
local IChar = 1 -- if char != aux, fail
local ISet = 2 -- if char not in val, fail
local ITestAny = 3 -- in no char, jump to 'offset'
local ITestChar = 4 -- if char != aux, jump to 'offset'
local ITestSet = 5 -- if char not in val, jump to 'offset'
local ISpan = 6 -- read a span of chars in val
local IBehind = 7 -- walk back 'aux' characters (fail if not possible)
local IRet = 8 -- return from a rule
local IEnd = 9 -- end of pattern
local IChoice = 10 -- stack a choice; next fail will jump to 'offset'
local IJmp = 11 -- jump to 'offset'
local ICall = 12 -- call rule at 'offset'
local IOpenCall = 13 -- call rule number 'offset' (must be closed to a ICall)
local ICommit = 14 -- pop choice and jump to 'offset'
local IPartialCommit = 15 -- update top choice to current position and jump
local IBackCommit = 16 -- "fails" but jump to its own 'offset'
local IFailTwice = 17 -- pop one choice and then fail
local IFail = 18 -- go back to saved state on choice and jump to saved offset
local IGiveup = 19 -- internal use
local IFullCapture = 20 -- complete capture of last 'off' chars
local IOpenCapture = 21 -- start a capture
local ICloseCapture = 22
local ICloseRunTime = 23

local Cclose = 0
local Cposition = 1
local Cconst = 2
local Cbackref = 3
local Carg = 4
local Csimple = 5
local Ctable = 6
local Cfunction = 7
local Cquery = 8
local Cstring = 9
local Cnum = 10
local Csubst = 11
local Cfold = 12
local Cruntime = 13
local Cgroup = 14

-- number of siblings for each tree
local numsiblings = {
    [TRep] = 1,
    [TSeq] = 2,
    [TChoice] = 2,
    [TNot] = 1,
    [TAnd] = 1,
    [TRule] = 2,
    [TGrammar] = 1,
    [TBehind] = 1,
    [TCapture] = 1,
    [TRunTime] = 1,
}

local names = {
    [IAny] = "any",
    [IChar] = "char",
    [ISet] = "set",
    [ITestAny] = "testany",
    [ITestChar] = "testchar",
    [ITestSet] = "testset",
    [ISpan] = "span",
    [IBehind] = "behind",
    [IRet] = "ret",
    [IEnd] = "end",
    [IChoice] = "choice",
    [IJmp] = "jmp",
    [ICall] = "call",
    [IOpenCall] = "open_call",
    [ICommit] = "commit",
    [IPartialCommit] = "partial_commit",
    [IBackCommit] = "back_commit",
    [IFailTwice] = "failtwice",
    [IFail] = "fail",
    [IGiveup] = "giveup",
    [IFullCapture] = "fullcapture",
    [IOpenCapture] = "opencapture",
    [ICloseCapture] = "closecapture",
    [ICloseRunTime] = "closeruntime"
}

local function printcharset(st)
    io.write("[");
    local i = 0
    while i <= 255 do
        local first = i;
        while band(st[rshift(i, 5)], lshift(1, band(i, 31))) ~= 0 and i <= 255 do
            i = i + 1
        end
        if i - 1 == first then -- unary range?
            io.write(("(%02x)"):format(first))
        elseif i - 1 > first then -- non-empty range?
            io.write(("(%02x-%02x)"):format(first, i - 1))
        end
        i = i + 1
    end
    io.write("]")
end

local modes = {
    [Cclose] = "close",
    [Cposition] = "position",
    [Cconst] = "constant",
    [Cbackref] = "backref",
    [Carg] = "argument",
    [Csimple] = "simple",
    [Ctable] = "table",
    [Cfunction] = "function",
    [Cquery] = "query",
    [Cstring] = "string",
    [Cnum] = "num",
    [Csubst] = "substitution",
    [Cfold] = "fold",
    [Cruntime] = "runtime",
    [Cgroup] = "group"
}

local function printcapkind(kind)
    io.write(("%s"):format(modes[kind]))
end

local function printjmp(p, index)
    io.write(("-> %d"):format(index + p[index].offset))
end

local function printrulename(p, index, rulenames)
    if rulenames and rulenames[index + p[index].offset] then
        io.write(' ', rulenames[index + p[index].offset])
    end
end

local function printinst(p, index, valuetable, rulenames)
    local code = p[index].code
    if rulenames and rulenames[index] then
        io.write(rulenames[index], '\n')
    end
    io.write(("%04d: %s "):format(index, names[code]))
    if code == IChar then
        io.write(("'%s'"):format(string.char(p[index].val)))
    elseif code == ITestChar then
        io.write(("'%s'"):format(string.char(p[index].val)))
        printjmp(p, index)
        printrulename(p, index, rulenames)
    elseif code == IFullCapture then
        printcapkind(band(p[index].val, 0x0f));
        io.write((" (size = %d) (idx = %s)"):format(band(rshift(p[index].val, 4), 0xF), tostring(valuetable[p[index].offset])))
    elseif code == IOpenCapture then
        printcapkind(band(p[index].val, 0x0f))
        io.write((" (idx = %s)"):format(tostring(valuetable[p[index].offset])))
    elseif code == ISet then
        printcharset(valuetable[p[index].val]);
    elseif code == ITestSet then
        printcharset(valuetable[p[index].val])
        printjmp(p, index);
        printrulename(p, index, rulenames)
    elseif code == ISpan then
        printcharset(valuetable[p[index].val]);
    elseif code == IOpenCall then
        io.write(("-> %d"):format(p[index].offset))
    elseif code == IBehind then
        io.write(("%d"):format(p[index].val))
    elseif code == IJmp or code == ICall or code == ICommit or code == IChoice or
        code == IPartialCommit or code == IBackCommit or code == ITestAny then
        printjmp(p, index);
        if (code == ICall or code == IJmp) and p[index].aux > 0 then
            io.write(' ', valuetable[p[index].aux])
        else
            printrulename(p, index, rulenames)
        end
    end
    io.write("\n")
end

local function printpatt(p, valuetable)
    local ruleNames = {}
    for i = 0, p.size - 1 do
        local code = p.p[i].code
        if (code == ICall or code == IJmp) and p.p[i].aux > 0 then
            local index = i + p.p[i].offset
            ruleNames[index] = valuetable[p.p[i].aux]
        end
    end
    for i = 0, p.size - 1 do
        printinst(p.p, i, valuetable, ruleNames)
    end
end

local function printcap(cap, index, valuetable)
    printcapkind(cap[index].kind)
    io.write((" (idx: %s - size: %d) -> %d\n"):format(valuetable[cap[index].idx], cap[index].siz, cap[index].s))
end

local function printcaplist(cap, limit, valuetable)
|
||||
io.write(">======\n")
|
||||
local index = 0
|
||||
while cap[index].s and index < limit do
|
||||
printcap(cap, index, valuetable)
|
||||
index = index + 1
|
||||
end
|
||||
io.write("=======\n")
|
||||
end
|
||||
|
||||
-- ======================================================
|
||||
|
||||
|
||||
|
||||
-- {======================================================
|
||||
-- Printing trees (for debugging)
|
||||
-- =======================================================
|
||||
|
||||
local tagnames = {
|
||||
[TChar] = "char",
|
||||
[TSet] = "set",
|
||||
[TAny] = "any",
|
||||
[TTrue] = "true",
|
||||
[TFalse] = "false",
|
||||
[TRep] = "rep",
|
||||
[TSeq] = "seq",
|
||||
[TChoice] = "choice",
|
||||
[TNot] = "not",
|
||||
[TAnd] = "and",
|
||||
[TCall] = "call",
|
||||
[TOpenCall] = "opencall",
|
||||
[TRule] = "rule",
|
||||
[TGrammar] = "grammar",
|
||||
[TBehind] = "behind",
|
||||
[TCapture] = "capture",
|
||||
[TRunTime] = "run-time"
|
||||
}
|
||||
|
||||
|
||||
local function printtree(tree, ident, index, valuetable)
|
||||
for i = 1, ident do
|
||||
io.write(" ")
|
||||
end
|
||||
local tag = tree[index].tag
|
||||
io.write(("%s"):format(tagnames[tag]))
|
||||
if tag == TChar then
|
||||
local c = tree[index].val
|
||||
if ffi.C.isprint(c) then
|
||||
io.write((" '%c'\n"):format(c))
|
||||
else
|
||||
io.write((" (%02X)\n"):format(c))
|
||||
end
|
||||
elseif tag == TSet then
|
||||
printcharset(valuetable[tree[index].val]);
|
||||
io.write("\n")
|
||||
elseif tag == TOpenCall or tag == TCall then
|
||||
io.write((" key: %s\n"):format(tostring(valuetable[tree[index].val])))
|
||||
elseif tag == TBehind then
|
||||
io.write((" %d\n"):format(tree[index].val))
|
||||
printtree(tree, ident + 2, index + 1, valuetable);
|
||||
elseif tag == TCapture then
|
||||
io.write((" cap: %s n: %s\n"):format(modes[bit.band(tree[index].cap, 0xffff)], valuetable[tree[index].val]))
|
||||
printtree(tree, ident + 2, index + 1, valuetable);
|
||||
elseif tag == TRule then
|
||||
local extra = bit.band(tree[index].cap, RuleLR) == RuleLR and ' left recursive' or ''
|
||||
extra = extra .. (bit.band(tree[index].cap, Ruleused) ~= Ruleused and ' not used' or '')
|
||||
io.write((" n: %d key: %s%s\n"):format(bit.band(tree[index].cap, 0xffff) - 1, valuetable[tree[index].val], extra))
|
||||
printtree(tree, ident + 2, index + 1, valuetable);
|
||||
-- do not print next rule as a sibling
|
||||
elseif tag == TGrammar then
|
||||
local ruleindex = index + 1
|
||||
io.write((" %d\n"):format(tree[index].val)) -- number of rules
|
||||
for i = 1, tree[index].val do
|
||||
printtree(tree, ident + 2, ruleindex, valuetable);
|
||||
ruleindex = ruleindex + tree[ruleindex].ps
|
||||
end
|
||||
assert(tree[ruleindex].tag == TTrue); -- sentinel
|
||||
else
|
||||
local sibs = numsiblings[tree[index].tag] or 0
|
||||
io.write("\n")
|
||||
if sibs >= 1 then
|
||||
printtree(tree, ident + 2, index + 1, valuetable);
|
||||
if sibs >= 2 then
|
||||
printtree(tree, ident + 2, index + tree[index].ps, valuetable)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- }======================================================
|
||||
|
||||
return {
|
||||
printtree = printtree,
|
||||
printpatt = printpatt,
|
||||
printcaplist = printcaplist,
|
||||
printinst = printinst
|
||||
}
|
tools/lpeg/lpvm.lua (new file, 1041 lines; file diff suppressed because it is too large)
tools/lpeg/re.lua (new file, 286 lines)
@@ -0,0 +1,286 @@
|
||||
-- $Id: re.lua,v 1.44 2013/03/26 20:11:40 roberto Exp $
|
||||
-- 2014/08/15 changes rostislav
|
||||
|
||||
-- imported functions and modules
|
||||
local tonumber, print, error = tonumber, print, error
|
||||
local setmetatable = setmetatable
|
||||
local m = require"lpeglj"
|
||||
|
||||
-- 'm' will be used to parse expressions, and 'mm' will be used to
|
||||
-- create expressions; that is, 're' runs on 'm', creating patterns
|
||||
-- on 'mm'
|
||||
local mm = m
|
||||
|
||||
-- pattern's metatable
|
||||
local mt = getmetatable(mm.P(0))
|
||||
mt = m.version() == "1.0.0.0LJ" and m or mt
|
||||
|
||||
|
||||
|
||||
-- No more global accesses after this point
|
||||
local version = _VERSION
|
||||
if version == "Lua 5.2" then _ENV = nil end
|
||||
|
||||
|
||||
local any = m.P(1)
|
||||
|
||||
|
||||
-- Pre-defined names
|
||||
local Predef = { nl = m.P"\n" }
|
||||
|
||||
|
||||
local mem
|
||||
local fmem
|
||||
local gmem
|
||||
|
||||
|
||||
local function updatelocale ()
|
||||
mm.locale(Predef)
|
||||
Predef.a = Predef.alpha
|
||||
Predef.c = Predef.cntrl
|
||||
Predef.d = Predef.digit
|
||||
Predef.g = Predef.graph
|
||||
Predef.l = Predef.lower
|
||||
Predef.p = Predef.punct
|
||||
Predef.s = Predef.space
|
||||
Predef.u = Predef.upper
|
||||
Predef.w = Predef.alnum
|
||||
Predef.x = Predef.xdigit
|
||||
Predef.A = any - Predef.a
|
||||
Predef.C = any - Predef.c
|
||||
Predef.D = any - Predef.d
|
||||
Predef.G = any - Predef.g
|
||||
Predef.L = any - Predef.l
|
||||
Predef.P = any - Predef.p
|
||||
Predef.S = any - Predef.s
|
||||
Predef.U = any - Predef.u
|
||||
Predef.W = any - Predef.w
|
||||
Predef.X = any - Predef.x
|
||||
mem = {} -- restart memoization
|
||||
fmem = {}
|
||||
gmem = {}
|
||||
local mt = {__mode = "v"}
|
||||
setmetatable(mem, mt)
|
||||
setmetatable(fmem, mt)
|
||||
setmetatable(gmem, mt)
|
||||
end
|
||||
|
||||
|
||||
updatelocale()
|
||||
|
||||
|
||||
|
||||
local I = m.P(function (s,i) print(i, s:sub(1, i-1)); return i end)
|
||||
|
||||
|
||||
local function getdef (id, defs)
|
||||
local c = defs and defs[id]
|
||||
if not c then error("undefined name: " .. id) end
|
||||
return c
|
||||
end
|
||||
|
||||
|
||||
local function patt_error (s, i)
|
||||
local msg = (#s < i + 20) and s:sub(i)
|
||||
or s:sub(i,i+20) .. "..."
|
||||
msg = ("pattern error near '%s'"):format(msg)
|
||||
error(msg, 2)
|
||||
end
|
||||
|
||||
local function mult (p, n)
|
||||
local np = mm.P(true)
|
||||
while n >= 1 do
|
||||
if n%2 >= 1 then np = np * p end
|
||||
p = p * p
|
||||
n = n/2
|
||||
end
|
||||
return np
|
||||
end
|
||||
|
||||
local function equalcap (s, i, c)
|
||||
if type(c) ~= "string" then return nil end
|
||||
local e = #c + i
|
||||
if type(s) == 'function' then -- stream mode
|
||||
if s(i, e - 1) == c then return e else return nil end
|
||||
else
|
||||
if s:sub(i, e - 1) == c then return e else return nil end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local S = (Predef.space + "--" * (any - Predef.nl)^0)^0
|
||||
|
||||
local name = m.R("AZ", "az", "__") * m.R("AZ", "az", "__", "09")^0
|
||||
|
||||
local arrow = S * "<-"
|
||||
|
||||
local seq_follow = m.P"/" + ")" + "}" + ":}" + "~}" + "|}" + (name * arrow) + -1
|
||||
|
||||
name = m.C(name)
|
||||
|
||||
|
||||
-- a defined name only has meaning in a given environment
|
||||
local Def = name * m.Carg(1)
|
||||
|
||||
local num = m.C(m.R"09"^1) * S / tonumber
|
||||
|
||||
local String = "'" * m.C((any - "'")^0) * "'" +
|
||||
'"' * m.C((any - '"')^0) * '"'
|
||||
|
||||
|
||||
local defined = "%" * Def / function (c,Defs)
|
||||
local cat = Defs and Defs[c] or Predef[c]
|
||||
if not cat then error ("name '" .. c .. "' undefined") end
|
||||
return cat
|
||||
end
|
||||
|
||||
local Range = m.Cs(any * (m.P"-"/"") * (any - "]")) / mm.R
|
||||
|
||||
local item = defined + Range + m.C(any)
|
||||
|
||||
local Class =
|
||||
"["
|
||||
* (m.C(m.P"^"^-1)) -- optional complement symbol
|
||||
* m.Cf(item * (item - "]")^0, mt.__add) /
|
||||
function (c, p) return c == "^" and any - p or p end
|
||||
* "]"
|
||||
|
||||
local function adddef (t, k, exp)
|
||||
if t[k] then
|
||||
error("'"..k.."' already defined as a rule")
|
||||
else
|
||||
t[k] = exp
|
||||
end
|
||||
return t
|
||||
end
|
||||
|
||||
local function firstdef (n, r) return adddef({n}, n, r) end
|
||||
|
||||
|
||||
local function NT (n, b, p)
|
||||
if not b then
|
||||
error("rule '"..n.."' used outside a grammar")
|
||||
else return mm.V(n, p or 0)
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local exp = m.P{ "Exp",
|
||||
Exp = S * ( m.V"Grammar"
|
||||
+ m.Cf(m.V"Seq" * ("/" * S * m.V"Seq")^0, mt.__add) );
|
||||
Seq = m.Cf(m.Cc(m.P"") * m.V"Prefix"^0 , mt.__mul)
|
||||
* (#seq_follow + patt_error);
|
||||
Prefix = "&" * S * m.V"Prefix" / mt.__len
|
||||
+ "!" * S * m.V"Prefix" / mt.__unm
|
||||
+ m.V"Suffix";
|
||||
Suffix = m.Cf(m.V"Primary" * S *
|
||||
( ( m.P"+" * m.Cc(1, mt.__pow)
|
||||
+ m.P"*" * m.Cc(0, mt.__pow)
|
||||
+ m.P"?" * m.Cc(-1, mt.__pow)
|
||||
+ "^" * ( m.Cg(num * m.Cc(mult))
|
||||
+ m.Cg(m.C(m.S"+-" * m.R"09"^1) * m.Cc(mt.__pow))
|
||||
)
|
||||
+ "->" * S * ( m.Cg((String + num) * m.Cc(mt.__div))
|
||||
+ m.P"{}" * m.Cc(nil, m.Ct)
|
||||
+ m.Cg(Def / getdef * m.Cc(mt.__div))
|
||||
)
|
||||
+ "=>" * S * m.Cg(Def / getdef * m.Cc(m.Cmt))
|
||||
) * S
|
||||
)^0, function (a,b,f) return f(a,b) end );
|
||||
Primary = "(" * m.V"Exp" * ")"
|
||||
+ String / mm.P
|
||||
+ Class
|
||||
+ defined
|
||||
+ "{:" * (name * ":" + m.Cc(nil)) * m.V"Exp" * ":}" /
|
||||
function (n, p) return mm.Cg(p, n) end
|
||||
+ "=" * name / function (n) return mm.Cmt(mm.Cb(n), equalcap) end
|
||||
+ m.P"{}" / mm.Cp
|
||||
+ "{~" * m.V"Exp" * "~}" / mm.Cs
|
||||
+ "{|" * m.V"Exp" * "|}" / mm.Ct
|
||||
+ "{" * m.V"Exp" * "}" / mm.C
|
||||
+ m.P"." * m.Cc(any)
|
||||
+ (name * m.Cb("G") * (S * ":" * S * num)^-1 * -arrow + "<" * name * m.Cb("G") * (S * ":" * S * num)^-1 * ">") / NT;
|
||||
Definition = name * arrow * m.V"Exp";
|
||||
Grammar = m.Cg(m.Cc(true), "G") *
|
||||
m.Cf(m.V"Definition" / firstdef * m.Cg(m.V"Definition")^0,
|
||||
adddef) / mm.P
|
||||
}
|
||||
|
||||
local pattern = S * m.Cg(m.Cc(false), "G") * exp / mm.P * (-any + patt_error)
|
||||
|
||||
|
||||
local function compile (p, defs)
|
||||
if mm.type(p) == "pattern" then return p end -- already compiled
|
||||
local cp = pattern:match(p, 1, defs)
|
||||
if not cp then error("incorrect pattern", 3) end
|
||||
return cp
|
||||
end
|
||||
|
||||
local function match (s, p, i)
|
||||
local cp = mem[p]
|
||||
if not cp then
|
||||
cp = compile(p)
|
||||
mem[p] = cp
|
||||
end
|
||||
return cp:match(s, i or 1)
|
||||
end
|
||||
|
||||
local function streammatch (p, i)
|
||||
local cp = mem[p]
|
||||
if not cp then
|
||||
cp = compile(p)
|
||||
mem[p] = cp
|
||||
end
|
||||
return cp:streammatch(i or 1)
|
||||
end
|
||||
|
||||
-- Only for testing purposes
|
||||
local function emulatestreammatch(s, p, i)
|
||||
local cp = mem[p]
|
||||
if not cp then
|
||||
cp = compile(p)
|
||||
mem[p] = cp
|
||||
end
|
||||
return cp:emulatestreammatch(s, i or 1)
|
||||
end
|
||||
|
||||
local function find (s, p, i)
|
||||
local cp = fmem[p]
|
||||
if not cp then
|
||||
cp = compile(p) / 0
|
||||
cp = mm.P{ mm.Cp() * cp * mm.Cp() + 1 * mm.V(1) }
|
||||
fmem[p] = cp
|
||||
end
|
||||
local i, e = cp:match(s, i or 1)
|
||||
if i then return i, e - 1
|
||||
else return i
|
||||
end
|
||||
end
|
||||
|
||||
local function gsub (s, p, rep)
|
||||
local g = gmem[p] or {} -- ensure gmem[p] is not collected while here
|
||||
gmem[p] = g
|
||||
local cp = g[rep]
|
||||
if not cp then
|
||||
cp = compile(p)
|
||||
cp = mm.Cs((cp / rep + 1)^0)
|
||||
g[rep] = cp
|
||||
end
|
||||
return cp:match(s)
|
||||
end
|
||||
|
||||
|
||||
-- exported names
|
||||
local re = {
|
||||
compile = compile,
|
||||
match = match,
|
||||
streammatch = streammatch,
|
||||
emulatestreammatch = emulatestreammatch,
|
||||
find = find,
|
||||
gsub = gsub,
|
||||
updatelocale = updatelocale,
|
||||
}
|
||||
|
||||
if version == "Lua 5.1" then _G.re = re end
|
||||
|
||||
return re
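-- Usage sketch (assumptions: package.path is set so that this file loads as the
-- module "re" and "lpeglj" is reachable); these are the standard LPeg 're'
-- examples and should behave the same with this backend.
--[[
local re = require("re")

-- collect every word of a sentence
re.match("the number 423 is odd", "({%a+} / .)*")      --> "the" "number" "is" "odd"

-- find the first numeral, or nil if there is none
re.match("the number 423 is odd", "s <- {%d+} / . s")  --> "423"

-- replace every vowel with a dot
re.gsub("hello World", "[aeiou]", ".")                  --> "h.ll. W.rld"
]]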
|
tools/luajson/json.lua (new file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
package.path = package.path .. ";lpeg/?.lua"
|
||||
local decode = require("json.decode")
|
||||
local encode = require("json.encode")
|
||||
local util = require("json.util")
|
||||
|
||||
local _G = _G
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
local json = {
|
||||
_VERSION = "1.3.4",
|
||||
_DESCRIPTION = "LuaJSON : customizable JSON decoder/encoder",
|
||||
_COPYRIGHT = "Copyright (c) 2007-2014 Thomas Harning Jr. <harningt@gmail.com>",
|
||||
decode = decode,
|
||||
encode = encode,
|
||||
util = util
|
||||
}
|
||||
|
||||
_G.json = json
|
||||
|
||||
return json
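-- Usage sketch (assumption: this directory is on package.path so the module
-- loads as "json" together with the json.* submodules required above).
--[[
local json = require("json")

local text    = json.encode({ name = "Zed", level = 3 })  -- e.g. '{"level":3,"name":"Zed"}' (key order not guaranteed)
local decoded = json.decode(text)
print(decoded.name, decoded.level)                        --> Zed     3
]]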
|
tools/luajson/json/decode.lua (new file, 171 lines)
@@ -0,0 +1,171 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
local lpeg = require("lpeg")
|
||||
|
||||
local error = error
|
||||
local pcall = pcall
|
||||
|
||||
local jsonutil = require("json.util")
|
||||
local merge = jsonutil.merge
|
||||
local util = require("json.decode.util")
|
||||
|
||||
local decode_state = require("json.decode.state")
|
||||
|
||||
local setmetatable, getmetatable = setmetatable, getmetatable
|
||||
local assert = assert
|
||||
local ipairs, pairs = ipairs, pairs
|
||||
local string_char = require("string").char
|
||||
|
||||
local type = type
|
||||
|
||||
local require = require
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
local modulesToLoad = {
|
||||
"composite",
|
||||
"strings",
|
||||
"number",
|
||||
"others"
|
||||
}
|
||||
local loadedModules = {
|
||||
}
|
||||
|
||||
local json_decode = {}
|
||||
|
||||
json_decode.default = {
|
||||
unicodeWhitespace = true,
|
||||
initialObject = false,
|
||||
nothrow = false
|
||||
}
|
||||
|
||||
local modes_defined = { "default", "strict", "simple" }
|
||||
|
||||
json_decode.simple = {}
|
||||
|
||||
json_decode.strict = {
|
||||
unicodeWhitespace = true,
|
||||
initialObject = true,
|
||||
nothrow = false
|
||||
}
|
||||
|
||||
for _,name in ipairs(modulesToLoad) do
|
||||
local mod = require("json.decode." .. name)
|
||||
if mod.mergeOptions then
|
||||
for _, mode in pairs(modes_defined) do
|
||||
mod.mergeOptions(json_decode[mode], mode)
|
||||
end
|
||||
end
|
||||
loadedModules[#loadedModules + 1] = mod
|
||||
end
|
||||
|
||||
-- Shift over default into defaultOptions to permit build optimization
|
||||
local defaultOptions = json_decode.default
|
||||
json_decode.default = nil
|
||||
|
||||
local function generateDecoder(lexer, options)
|
||||
-- Marker to permit detection of final end
|
||||
local marker = {}
|
||||
local parser = lpeg.Ct((options.ignored * lexer)^0 * lpeg.Cc(marker)) * options.ignored * (lpeg.P(-1) + util.unexpected())
|
||||
local decoder = function(data)
|
||||
local state = decode_state.create(options)
|
||||
local parsed = parser:match(data)
|
||||
assert(parsed, "Invalid JSON data")
|
||||
local i = 0
|
||||
while true do
|
||||
i = i + 1
|
||||
local item = parsed[i]
|
||||
if item == marker then break end
|
||||
if type(item) == 'function' and item ~= jsonutil.undefined and item ~= jsonutil.null then
|
||||
item(state)
|
||||
else
|
||||
state:set_value(item)
|
||||
end
|
||||
end
|
||||
if options.initialObject then
|
||||
assert(type(state.previous) == 'table', "Initial value not an object or array")
|
||||
end
|
||||
-- Make sure stack is empty
|
||||
assert(state.i == 0, "Unclosed elements present")
|
||||
return state.previous
|
||||
end
|
||||
if options.nothrow then
|
||||
return function(data)
|
||||
local status, rv = pcall(decoder, data)
|
||||
if status then
|
||||
return rv
|
||||
else
|
||||
return nil, rv
|
||||
end
|
||||
end
|
||||
end
|
||||
return decoder
|
||||
end
|
||||
|
||||
local function buildDecoder(mode)
|
||||
mode = mode and merge({}, defaultOptions, mode) or defaultOptions
|
||||
for _, mod in ipairs(loadedModules) do
|
||||
if mod.mergeOptions then
|
||||
mod.mergeOptions(mode)
|
||||
end
|
||||
end
|
||||
local ignored = mode.unicodeWhitespace and util.unicode_ignored or util.ascii_ignored
|
||||
-- Store 'ignored' in the global options table
|
||||
mode.ignored = ignored
|
||||
|
||||
--local grammar = {
|
||||
-- [1] = mode.initialObject and (ignored * (object_type + array_type)) or value_type
|
||||
--}
|
||||
local lexer
|
||||
for _, mod in ipairs(loadedModules) do
|
||||
local new_lexer = mod.generateLexer(mode)
|
||||
lexer = lexer and lexer + new_lexer or new_lexer
|
||||
end
|
||||
return generateDecoder(lexer, mode)
|
||||
end
|
||||
|
||||
-- Since 'default' is nil now, the loop below cannot pick it up; build its decoder separately
|
||||
local defaultDecoder = buildDecoder(json_decode.default)
|
||||
local prebuilt_decoders = {}
|
||||
for _, mode in pairs(modes_defined) do
|
||||
if json_decode[mode] ~= nil then
|
||||
prebuilt_decoders[json_decode[mode]] = buildDecoder(json_decode[mode])
|
||||
end
|
||||
end
|
||||
|
||||
--[[
|
||||
Options:
|
||||
number => number decode options
|
||||
string => string decode options
|
||||
array => array decode options
|
||||
object => object decode options
|
||||
initialObject => whether or not to require the initial object to be a table/array
|
||||
allowUndefined => whether or not to allow undefined values
|
||||
]]
|
||||
local function getDecoder(mode)
|
||||
mode = mode == true and json_decode.strict or mode or json_decode.default
|
||||
local decoder = mode == nil and defaultDecoder or prebuilt_decoders[mode]
|
||||
if decoder then
|
||||
return decoder
|
||||
end
|
||||
return buildDecoder(mode)
|
||||
end
|
||||
|
||||
local function decode(data, mode)
|
||||
local decoder = getDecoder(mode)
|
||||
return decoder(data)
|
||||
end
|
||||
|
||||
local mt = {}
|
||||
mt.__call = function(self, ...)
|
||||
return decode(...)
|
||||
end
|
||||
|
||||
json_decode.getDecoder = getDecoder
|
||||
json_decode.decode = decode
|
||||
json_decode.util = util
|
||||
setmetatable(json_decode, mt)
|
||||
|
||||
return json_decode
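-- Usage sketch: the module table is callable (see the metatable above), and
-- getDecoder builds or reuses decoders for the predefined modes; the exact
-- behaviour of the non-default modes depends on the per-module options merged
-- above.
--[[
local decode = require("json.decode")

-- the default mode tolerates comments and trailing commas
local value = decode("// a comment\n[1, 2, 3, ]")

-- passing 'true' selects the strict mode table
local strict = decode.getDecoder(true)
local ok, err = pcall(strict, "[1, 2, 3, ]")   -- fails: trailing comma denied in strict mode
]]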
|
tools/luajson/json/decode/composite.lua (new file, 190 lines)
@@ -0,0 +1,190 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
local pairs = pairs
|
||||
local type = type
|
||||
|
||||
local lpeg = require("lpeg")
|
||||
|
||||
local util = require("json.decode.util")
|
||||
local jsonutil = require("json.util")
|
||||
|
||||
local rawset = rawset
|
||||
|
||||
local assert = assert
|
||||
local tostring = tostring
|
||||
|
||||
local error = error
|
||||
local getmetatable = getmetatable
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
local defaultOptions = {
|
||||
array = {
|
||||
trailingComma = true
|
||||
},
|
||||
object = {
|
||||
trailingComma = true,
|
||||
number = true,
|
||||
identifier = true,
|
||||
setObjectKey = rawset
|
||||
},
|
||||
calls = {
|
||||
defs = nil,
|
||||
-- By default, do not allow undefined calls to be de-serialized as call objects
|
||||
allowUndefined = false
|
||||
}
|
||||
}
|
||||
|
||||
local modeOptions = {
|
||||
default = nil,
|
||||
strict = {
|
||||
array = {
|
||||
trailingComma = false
|
||||
},
|
||||
object = {
|
||||
trailingComma = false,
|
||||
number = false,
|
||||
identifier = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
local function BEGIN_ARRAY(state)
|
||||
state:push()
|
||||
state:new_array()
|
||||
end
|
||||
local function END_ARRAY(state)
|
||||
state:end_array()
|
||||
state:pop()
|
||||
end
|
||||
|
||||
local function BEGIN_OBJECT(state)
|
||||
state:push()
|
||||
state:new_object()
|
||||
end
|
||||
local function END_OBJECT(state)
|
||||
state:end_object()
|
||||
state:pop()
|
||||
end
|
||||
|
||||
local function END_CALL(state)
|
||||
state:end_call()
|
||||
state:pop()
|
||||
end
|
||||
|
||||
local function SET_KEY(state)
|
||||
state:set_key()
|
||||
end
|
||||
|
||||
local function NEXT_VALUE(state)
|
||||
state:put_value()
|
||||
end
|
||||
|
||||
local function mergeOptions(options, mode)
|
||||
jsonutil.doOptionMerge(options, true, 'array', defaultOptions, mode and modeOptions[mode])
|
||||
jsonutil.doOptionMerge(options, true, 'object', defaultOptions, mode and modeOptions[mode])
|
||||
jsonutil.doOptionMerge(options, true, 'calls', defaultOptions, mode and modeOptions[mode])
|
||||
end
|
||||
|
||||
|
||||
local isPattern
|
||||
if lpeg.type then
|
||||
function isPattern(value)
|
||||
return lpeg.type(value) == 'pattern'
|
||||
end
|
||||
else
|
||||
local metaAdd = getmetatable(lpeg.P("")).__add
|
||||
function isPattern(value)
|
||||
return getmetatable(value).__add == metaAdd
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
local function generateSingleCallLexer(name, func)
|
||||
if type(name) ~= 'string' and not isPattern(name) then
|
||||
error("Invalid functionCalls name: " .. tostring(name) .. " not a string or LPEG pattern")
|
||||
end
|
||||
-- Allow boolean or function to match up w/ encoding permissions
|
||||
if type(func) ~= 'boolean' and type(func) ~= 'function' then
|
||||
error("Invalid functionCalls item: " .. name .. " not a function")
|
||||
end
|
||||
local function buildCallCapture(name)
|
||||
return function(state)
|
||||
if func == false then
|
||||
error("Function call on '" .. name .. "' not permitted")
|
||||
end
|
||||
state:push()
|
||||
state:new_call(name, func)
|
||||
end
|
||||
end
|
||||
local nameCallCapture
|
||||
if type(name) == 'string' then
|
||||
nameCallCapture = lpeg.P(name .. "(") * lpeg.Cc(name) / buildCallCapture
|
||||
else
|
||||
-- Name matcher expected to produce a capture
|
||||
nameCallCapture = name * "(" / buildCallCapture
|
||||
end
|
||||
-- Call func over nameCallCapture and value to permit function receiving name
|
||||
return nameCallCapture
|
||||
end
|
||||
|
||||
local function generateNamedCallLexers(options)
|
||||
if not options.calls or not options.calls.defs then
|
||||
return
|
||||
end
|
||||
local callCapture
|
||||
for name, func in pairs(options.calls.defs) do
|
||||
local newCapture = generateSingleCallLexer(name, func)
|
||||
if not callCapture then
|
||||
callCapture = newCapture
|
||||
else
|
||||
callCapture = callCapture + newCapture
|
||||
end
|
||||
end
|
||||
return callCapture
|
||||
end
|
||||
|
||||
local function generateCallLexer(options)
|
||||
local lexer
|
||||
local namedCapture = generateNamedCallLexers(options)
|
||||
if options.calls and options.calls.allowUndefined then
|
||||
lexer = generateSingleCallLexer(lpeg.C(util.identifier), true)
|
||||
end
|
||||
if namedCapture then
|
||||
lexer = lexer and lexer + namedCapture or namedCapture
|
||||
end
|
||||
if lexer then
|
||||
lexer = lexer + lpeg.P(")") * lpeg.Cc(END_CALL)
|
||||
end
|
||||
return lexer
|
||||
end
|
||||
|
||||
local function generateLexer(options)
|
||||
local ignored = options.ignored
|
||||
local array_options, object_options = options.array, options.object
|
||||
local lexer =
|
||||
lpeg.P("[") * lpeg.Cc(BEGIN_ARRAY)
|
||||
+ lpeg.P("]") * lpeg.Cc(END_ARRAY)
|
||||
+ lpeg.P("{") * lpeg.Cc(BEGIN_OBJECT)
|
||||
+ lpeg.P("}") * lpeg.Cc(END_OBJECT)
|
||||
+ lpeg.P(":") * lpeg.Cc(SET_KEY)
|
||||
+ lpeg.P(",") * lpeg.Cc(NEXT_VALUE)
|
||||
if object_options.identifier then
|
||||
-- Add identifier match, validated by the following ':' so it is only accepted as a key
|
||||
lexer = lexer + lpeg.C(util.identifier) * ignored * lpeg.P(":") * lpeg.Cc(SET_KEY)
|
||||
end
|
||||
local callLexers = generateCallLexer(options)
|
||||
if callLexers then
|
||||
lexer = lexer + callLexers
|
||||
end
|
||||
return lexer
|
||||
end
|
||||
|
||||
local composite = {
|
||||
mergeOptions = mergeOptions,
|
||||
generateLexer = generateLexer
|
||||
}
|
||||
|
||||
return composite
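-- Usage sketch: function-call values can be decoded by registering handlers
-- under calls.defs; note (from json.decode.state) that the handler receives
-- the call name as its first argument. The option layout shown here is
-- assumed from the defaults above.
--[[
local decode = require("json.decode")

local decoder = decode.getDecoder({
    calls = {
        defs = {
            -- handles input such as  {"created": Date(2014, 8, 15)}
            Date = function(name, y, m, d)
                return os.time({ year = y, month = m, day = d })
            end
        }
    }
})
local obj = decoder('{"created": Date(2014, 8, 15)}')
]]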
|
tools/luajson/json/decode/number.lua (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
local lpeg = require("lpeg")
|
||||
local tonumber = tonumber
|
||||
local jsonutil = require("json.util")
|
||||
local merge = jsonutil.merge
|
||||
local util = require("json.decode.util")
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
local digit = lpeg.R("09")
|
||||
local digits = digit^1
|
||||
|
||||
-- Illegal octal declaration
|
||||
local illegal_octal_detect = #(lpeg.P('0') * digits) * util.denied("Octal numbers")
|
||||
|
||||
local int = (lpeg.P('-') + 0) * (lpeg.R("19") * digits + illegal_octal_detect + digit)
|
||||
|
||||
local frac = lpeg.P('.') * digits
|
||||
|
||||
local exp = lpeg.S("Ee") * (lpeg.S("-+") + 0) * digits
|
||||
|
||||
local nan = lpeg.S("Nn") * lpeg.S("Aa") * lpeg.S("Nn")
|
||||
local inf = lpeg.S("Ii") * lpeg.P("nfinity")
|
||||
local ninf = lpeg.P('-') * lpeg.S("Ii") * lpeg.P("nfinity")
|
||||
local hex = (lpeg.P("0x") + lpeg.P("0X")) * lpeg.R("09","AF","af")^1
|
||||
|
||||
local defaultOptions = {
|
||||
nan = true,
|
||||
inf = true,
|
||||
frac = true,
|
||||
exp = true,
|
||||
hex = false
|
||||
}
|
||||
|
||||
local modeOptions = {}
|
||||
|
||||
modeOptions.strict = {
|
||||
nan = false,
|
||||
inf = false
|
||||
}
|
||||
|
||||
local nan_value = 0/0
|
||||
local inf_value = 1/0
|
||||
local ninf_value = -1/0
|
||||
|
||||
--[[
|
||||
Options: configuration options for number rules
|
||||
nan: match NaN
|
||||
inf: match Infinity
|
||||
frac: match fraction portion (.0)
|
||||
exp: match exponent portion (e1)
|
||||
DEFAULT: nan, inf, frac, exp
|
||||
]]
|
||||
local function mergeOptions(options, mode)
|
||||
jsonutil.doOptionMerge(options, false, 'number', defaultOptions, mode and modeOptions[mode])
|
||||
end
|
||||
|
||||
local function generateLexer(options)
|
||||
options = options.number
|
||||
local ret = int
|
||||
if options.frac then
|
||||
ret = ret * (frac + 0)
|
||||
else
|
||||
ret = ret * (#frac * util.denied("Fractions", "number.frac") + 0)
|
||||
end
|
||||
if options.exp then
|
||||
ret = ret * (exp + 0)
|
||||
else
|
||||
ret = ret * (#exp * util.denied("Exponents", "number.exp") + 0)
|
||||
end
|
||||
if options.hex then
|
||||
ret = hex + ret
|
||||
else
|
||||
ret = #hex * util.denied("Hexadecimal", "number.hex") + ret
|
||||
end
|
||||
-- Capture number now
|
||||
ret = ret / tonumber
|
||||
if options.nan then
|
||||
ret = ret + nan / function() return nan_value end
|
||||
else
|
||||
ret = ret + #nan * util.denied("NaN", "number.nan")
|
||||
end
|
||||
if options.inf then
|
||||
ret = ret + ninf / function() return ninf_value end + inf / function() return inf_value end
|
||||
else
|
||||
ret = ret + (#ninf + #inf) * util.denied("+/-Inf", "number.inf")
|
||||
end
|
||||
return ret
|
||||
end
|
||||
|
||||
local number = {
|
||||
int = int,
|
||||
mergeOptions = mergeOptions,
|
||||
generateLexer = generateLexer
|
||||
}
|
||||
|
||||
return number
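-- Usage sketch of the number options above (frac and exp enabled, hex disabled
-- by default; strict mode rejects NaN and Infinity). The option layout passed
-- to getDecoder is assumed from the defaults above.
--[[
local decode = require("json.decode")

decode("[1.5e3, -0.25]")                 --> { 1500, -0.25 }
decode("[NaN, Infinity]")                -- default mode: yields nan / inf values
-- decode.getDecoder(true)("[NaN]")      -- strict mode: raises "'NaN' denied by option set 'number.nan'"

local hexDecoder = decode.getDecoder({ number = { hex = true } })
hexDecoder("[0x1F]")                     --> { 31 }
]]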
|
tools/luajson/json/decode/others.lua (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
local lpeg = require("lpeg")
|
||||
local jsonutil = require("json.util")
|
||||
local merge = jsonutil.merge
|
||||
local util = require("json.decode.util")
|
||||
|
||||
-- Container module for other JavaScript types (bool, null, undefined)
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
-- For null and undefined, use the util.null value to preserve null-ness
|
||||
local booleanCapture =
|
||||
lpeg.P("true") * lpeg.Cc(true)
|
||||
+ lpeg.P("false") * lpeg.Cc(false)
|
||||
|
||||
local nullCapture = lpeg.P("null")
|
||||
local undefinedCapture = lpeg.P("undefined")
|
||||
|
||||
local defaultOptions = {
|
||||
allowUndefined = true,
|
||||
null = jsonutil.null,
|
||||
undefined = jsonutil.undefined
|
||||
}
|
||||
|
||||
local modeOptions = {}
|
||||
|
||||
modeOptions.simple = {
|
||||
null = false, -- Mapped to nil
|
||||
undefined = false -- Mapped to nil
|
||||
}
|
||||
modeOptions.strict = {
|
||||
allowUndefined = false
|
||||
}
|
||||
|
||||
local function mergeOptions(options, mode)
|
||||
jsonutil.doOptionMerge(options, false, 'others', defaultOptions, mode and modeOptions[mode])
|
||||
end
|
||||
|
||||
local function generateLexer(options)
|
||||
-- The 'or nil' clause allows false to map to a nil value since 'nil' cannot be merged
|
||||
options = options.others
|
||||
local valueCapture = (
|
||||
booleanCapture
|
||||
+ nullCapture * lpeg.Cc(options.null or nil)
|
||||
)
|
||||
if options.allowUndefined then
|
||||
valueCapture = valueCapture + undefinedCapture * lpeg.Cc(options.undefined or nil)
|
||||
else
|
||||
valueCapture = valueCapture + #undefinedCapture * util.denied("undefined", "others.allowUndefined")
|
||||
end
|
||||
return valueCapture
|
||||
end
|
||||
|
||||
local others = {
|
||||
mergeOptions = mergeOptions,
|
||||
generateLexer = generateLexer
|
||||
}
|
||||
|
||||
return others
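-- Usage sketch: in the default mode, null and undefined decode to the sentinel
-- values from json.util rather than to nil, so they survive as table values.
--[[
local json = require("json")

local obj = json.decode('{"a": null, "b": undefined, "c": true}')
-- obj.a == json.util.null, obj.b == json.util.undefined, obj.c == true
-- the 'simple' mode maps both sentinels to nil; 'strict' rejects undefined
]]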
|
tools/luajson/json/decode/state.lua (new file, 189 lines)
@@ -0,0 +1,189 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
|
||||
local setmetatable = setmetatable
|
||||
local jsonutil = require("json.util")
|
||||
local assert = assert
|
||||
local type = type
|
||||
local next = next
|
||||
local unpack = require("table").unpack or unpack
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
local state_ops = {}
|
||||
local state_mt = {
|
||||
__index = state_ops
|
||||
}
|
||||
|
||||
function state_ops.pop(self)
|
||||
self.previous_set = true
|
||||
self.previous = self.active
|
||||
local i = self.i
|
||||
-- Load in this array into the active item
|
||||
self.active = self.stack[i]
|
||||
self.active_state = self.state_stack[i]
|
||||
self.active_key = self.key_stack[i]
|
||||
self.stack[i] = nil
|
||||
self.state_stack[i] = nil
|
||||
self.key_stack[i] = nil
|
||||
|
||||
self.i = i - 1
|
||||
end
|
||||
|
||||
function state_ops.push(self)
|
||||
local i = self.i + 1
|
||||
self.i = i
|
||||
|
||||
self.stack[i] = self.active
|
||||
self.state_stack[i] = self.active_state
|
||||
self.key_stack[i] = self.active_key
|
||||
end
|
||||
|
||||
function state_ops.put_object_value(self, trailing)
|
||||
local object_options = self.options.object
|
||||
if trailing and object_options.trailingComma then
|
||||
if not self.active_key then
|
||||
return
|
||||
end
|
||||
end
|
||||
assert(self.active_key, "Missing key value")
|
||||
object_options.setObjectKey(self.active, self.active_key, self:grab_value())
|
||||
self.active_key = nil
|
||||
end
|
||||
|
||||
function state_ops.put_array_value(self, trailing)
|
||||
-- Safety check
|
||||
if trailing and not self.previous_set and self.options.array.trailingComma then
|
||||
return
|
||||
end
|
||||
local new_index = self.active_state + 1
|
||||
self.active_state = new_index
|
||||
self.active[new_index] = self:grab_value()
|
||||
end
|
||||
|
||||
function state_ops.put_value(self, trailing)
|
||||
if self.active_state == 'object' then
|
||||
self:put_object_value(trailing)
|
||||
else
|
||||
self:put_array_value(trailing)
|
||||
end
|
||||
end
|
||||
|
||||
function state_ops.new_array(self)
|
||||
local new_array = {}
|
||||
if jsonutil.InitArray then
|
||||
new_array = jsonutil.InitArray(new_array) or new_array
|
||||
end
|
||||
self.active = new_array
|
||||
self.active_state = 0
|
||||
self.active_key = nil
|
||||
self:unset_value()
|
||||
end
|
||||
|
||||
function state_ops.end_array(self)
|
||||
if self.previous_set or self.active_state ~= 0 then
|
||||
-- Not an empty array
|
||||
self:put_value(true)
|
||||
end
|
||||
if self.active_state ~= #self.active then
|
||||
-- Store the length in
|
||||
self.active.n = self.active_state
|
||||
end
|
||||
end
|
||||
|
||||
function state_ops.new_object(self)
|
||||
local new_object = {}
|
||||
self.active = new_object
|
||||
self.active_state = 'object'
|
||||
self.active_key = nil
|
||||
self:unset_value()
|
||||
end
|
||||
|
||||
function state_ops.end_object(self)
|
||||
if self.previous_set or next(self.active) then
|
||||
-- Not an empty object
|
||||
self:put_value(true)
|
||||
end
|
||||
end
|
||||
|
||||
function state_ops.new_call(self, name, func)
|
||||
-- TODO setup properly
|
||||
local new_call = {}
|
||||
new_call.name = name
|
||||
new_call.func = func
|
||||
self.active = new_call
|
||||
self.active_state = 0
|
||||
self.active_key = nil
|
||||
self:unset_value()
|
||||
end
|
||||
|
||||
function state_ops.end_call(self)
|
||||
if self.previous_set or self.active_state ~= 0 then
|
||||
-- Not an empty array
|
||||
self:put_value(true)
|
||||
end
|
||||
if self.active_state ~= #self.active then
|
||||
-- Store the length in
|
||||
self.active.n = self.active_state
|
||||
end
|
||||
local func = self.active.func
|
||||
if func == true then
|
||||
func = jsonutil.buildCall
|
||||
end
|
||||
self.active = func(self.active.name, unpack(self.active, 1, self.active.n or #self.active))
|
||||
end
|
||||
|
||||
|
||||
function state_ops.unset_value(self)
|
||||
self.previous_set = false
|
||||
self.previous = nil
|
||||
end
|
||||
|
||||
function state_ops.grab_value(self)
|
||||
assert(self.previous_set, "Previous value not set")
|
||||
self.previous_set = false
|
||||
return self.previous
|
||||
end
|
||||
|
||||
function state_ops.set_value(self, value)
|
||||
assert(not self.previous_set, "Value set when one already in slot")
|
||||
self.previous_set = true
|
||||
self.previous = value
|
||||
end
|
||||
|
||||
function state_ops.set_key(self)
|
||||
assert(self.active_state == 'object', "Cannot set key on array")
|
||||
local value = self:grab_value()
|
||||
local value_type = type(value)
|
||||
if self.options.object.number then
|
||||
assert(value_type == 'string' or value_type == 'number', "As configured, a key must be a number or string")
|
||||
else
|
||||
assert(value_type == 'string', "As configured, a key must be a string")
|
||||
end
|
||||
self.active_key = value
|
||||
end
|
||||
|
||||
|
||||
local function create(options)
|
||||
local ret = {
|
||||
options = options,
|
||||
stack = {},
|
||||
state_stack = {},
|
||||
key_stack = {},
|
||||
i = 0,
|
||||
active = nil,
|
||||
active_key = nil,
|
||||
previous = nil,
|
||||
active_state = nil
|
||||
|
||||
}
|
||||
return setmetatable(ret, state_mt)
|
||||
end
|
||||
|
||||
local state = {
|
||||
create = create
|
||||
}
|
||||
|
||||
return state
|
tools/luajson/json/decode/strings.lua (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
local lpeg = require("lpeg")
|
||||
local jsonutil = require("json.util")
|
||||
local util = require("json.decode.util")
|
||||
local merge = jsonutil.merge
|
||||
|
||||
local tonumber = tonumber
|
||||
local string_char = require("string").char
|
||||
local floor = require("math").floor
|
||||
local table_concat = require("table").concat
|
||||
|
||||
local error = error
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
local function get_error(item)
|
||||
local fmt_string = item .. " in string [%q] @ %i:%i"
|
||||
return lpeg.P(function(data, index)
|
||||
local line, line_index, bad_char, last_line = util.get_invalid_character_info(data, index)
|
||||
local err = fmt_string:format(bad_char, line, line_index)
|
||||
error(err)
|
||||
end) * 1
|
||||
end
|
||||
|
||||
local bad_unicode = get_error("Illegal unicode escape")
|
||||
local bad_hex = get_error("Illegal hex escape")
|
||||
local bad_character = get_error("Illegal character")
|
||||
local bad_escape = get_error("Illegal escape")
|
||||
|
||||
local knownReplacements = {
|
||||
["'"] = "'",
|
||||
['"'] = '"',
|
||||
['\\'] = '\\',
|
||||
['/'] = '/',
|
||||
b = '\b',
|
||||
f = '\f',
|
||||
n = '\n',
|
||||
r = '\r',
|
||||
t = '\t',
|
||||
v = '\v',
|
||||
z = '\z'
|
||||
}
|
||||
|
||||
-- according to the table at http://da.wikipedia.org/wiki/UTF-8
|
||||
local function utf8DecodeUnicode(code1, code2)
|
||||
code1, code2 = tonumber(code1, 16), tonumber(code2, 16)
|
||||
if code1 == 0 and code2 < 0x80 then
|
||||
return string_char(code2)
|
||||
end
|
||||
if code1 < 0x08 then
|
||||
return string_char(
|
||||
0xC0 + code1 * 4 + floor(code2 / 64),
|
||||
0x80 + code2 % 64)
|
||||
end
|
||||
return string_char(
|
||||
0xE0 + floor(code1 / 16),
|
||||
0x80 + (code1 % 16) * 4 + floor(code2 / 64),
|
||||
0x80 + code2 % 64)
|
||||
end
|
||||
|
||||
local function decodeX(code)
|
||||
code = tonumber(code, 16)
|
||||
return string_char(code)
|
||||
end
|
||||
|
||||
local doSimpleSub = lpeg.C(lpeg.S("'\"\\/bfnrtvz")) / knownReplacements
|
||||
local doUniSub = lpeg.P('u') * (lpeg.C(util.hexpair) * lpeg.C(util.hexpair) + bad_unicode)
|
||||
local doXSub = lpeg.P('x') * (lpeg.C(util.hexpair) + bad_hex)
|
||||
|
||||
local defaultOptions = {
|
||||
badChars = '',
|
||||
additionalEscapes = false, -- disallow untranslated escapes
|
||||
escapeCheck = #lpeg.S('bfnrtv/\\"xu\'z'), -- these characters may follow a backslash
|
||||
decodeUnicode = utf8DecodeUnicode,
|
||||
strict_quotes = false
|
||||
}
|
||||
|
||||
local modeOptions = {}
|
||||
|
||||
modeOptions.strict = {
|
||||
badChars = '\b\f\n\r\t\v',
|
||||
additionalEscapes = false, -- no additional escapes
|
||||
escapeCheck = #lpeg.S('bfnrtv/\\"u'), --only these chars are allowed to be escaped
|
||||
strict_quotes = true
|
||||
}
|
||||
|
||||
local function mergeOptions(options, mode)
|
||||
jsonutil.doOptionMerge(options, false, 'strings', defaultOptions, mode and modeOptions[mode])
|
||||
end
|
||||
|
||||
local function buildCaptureString(quote, badChars, escapeMatch)
|
||||
local captureChar = (1 - lpeg.S("\\" .. badChars .. quote)) + (lpeg.P("\\") / "" * escapeMatch)
|
||||
-- During error, force end
|
||||
local captureString = captureChar^0 + (-#lpeg.P(quote) * bad_character + -1)
|
||||
return lpeg.P(quote) * lpeg.Cs(captureString) * lpeg.P(quote)
|
||||
end
|
||||
|
||||
local function generateLexer(options)
|
||||
options = options.strings
|
||||
local quotes = { '"' }
|
||||
if not options.strict_quotes then
|
||||
quotes[#quotes + 1] = "'"
|
||||
end
|
||||
local escapeMatch = doSimpleSub
|
||||
escapeMatch = escapeMatch + doXSub / decodeX
|
||||
escapeMatch = escapeMatch + doUniSub / options.decodeUnicode
|
||||
if options.escapeCheck then
|
||||
escapeMatch = options.escapeCheck * escapeMatch + bad_escape
|
||||
end
|
||||
if options.additionalEscapes then
|
||||
escapeMatch = options.additionalEscapes + escapeMatch
|
||||
end
|
||||
local captureString
|
||||
for i = 1, #quotes do
|
||||
local cap = buildCaptureString(quotes[i], options.badChars, escapeMatch)
|
||||
if captureString == nil then
|
||||
captureString = cap
|
||||
else
|
||||
captureString = captureString + cap
|
||||
end
|
||||
end
|
||||
return captureString
|
||||
end
|
||||
|
||||
local strings = {
|
||||
mergeOptions = mergeOptions,
|
||||
generateLexer = generateLexer
|
||||
}
|
||||
|
||||
return strings
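-- Usage sketch of the escape handling above: \uXXXX is decoded to UTF-8, while
-- \xXX and single-quoted strings are non-standard extensions allowed only by
-- the default options.
--[[
local decode = require("json.decode")

decode('["tab:\\t", "caf\\u00e9", "\\x41"]')
--> { "tab:<TAB>", "café", "A" }
]]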
|
tools/luajson/json/decode/util.lua (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
local lpeg = require("lpeg")
|
||||
local select = select
|
||||
local pairs, ipairs = pairs, ipairs
|
||||
local tonumber = tonumber
|
||||
local string_char = require("string").char
|
||||
local rawset = rawset
|
||||
local jsonutil = require("json.util")
|
||||
|
||||
local error = error
|
||||
local setmetatable = setmetatable
|
||||
|
||||
local table_concat = require("table").concat
|
||||
|
||||
local merge = require("json.util").merge
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
local function get_invalid_character_info(input, index)
|
||||
local parsed = input:sub(1, index)
|
||||
local bad_character = input:sub(index, index)
|
||||
local _, line_number = parsed:gsub('\n',{})
|
||||
local last_line = parsed:match("\n([^\n]+.)$") or parsed
|
||||
return line_number, #last_line, bad_character, last_line
|
||||
end
|
||||
|
||||
local function build_report(msg)
|
||||
local fmt = msg:gsub("%%", "%%%%") .. " @ character: %i %i:%i [%s] line:\n%s"
|
||||
return lpeg.P(function(data, pos)
|
||||
local line, line_index, bad_char, last_line = get_invalid_character_info(data, pos)
|
||||
local text = fmt:format(pos, line, line_index, bad_char, last_line)
|
||||
error(text)
|
||||
end) * 1
|
||||
end
|
||||
local function unexpected()
|
||||
local msg = "unexpected character"
|
||||
return build_report(msg)
|
||||
end
|
||||
local function denied(item, option)
|
||||
local msg
|
||||
if option then
|
||||
msg = ("'%s' denied by option set '%s'"):format(item, option)
|
||||
else
|
||||
msg = ("'%s' denied"):format(item)
|
||||
end
|
||||
return build_report(msg)
|
||||
end
|
||||
|
||||
-- 09, 0A, 0B, 0C, 0D, 20
|
||||
local ascii_space = lpeg.S("\t\n\v\f\r ")
|
||||
local unicode_space
|
||||
do
|
||||
local chr = string_char
|
||||
local u_space = ascii_space
|
||||
-- \u0085 \u00A0
|
||||
u_space = u_space + lpeg.P(chr(0xC2)) * lpeg.S(chr(0x85) .. chr(0xA0))
|
||||
-- \u1680 \u180E
|
||||
u_space = u_space + lpeg.P(chr(0xE1)) * (lpeg.P(chr(0x9A, 0x80)) + chr(0xA0, 0x8E))
|
||||
-- \u2000 - \u200A, also 200B
|
||||
local spacing_end = ""
|
||||
for i = 0x80,0x8b do
|
||||
spacing_end = spacing_end .. chr(i)
|
||||
end
|
||||
-- \u2028 \u2029 \u202F
|
||||
spacing_end = spacing_end .. chr(0xA8) .. chr(0xA9) .. chr(0xAF)
|
||||
u_space = u_space + lpeg.P(chr(0xE2, 0x80)) * lpeg.S(spacing_end)
|
||||
-- \u205F
|
||||
u_space = u_space + lpeg.P(chr(0xE2, 0x81, 0x9F))
|
||||
-- \u3000
|
||||
u_space = u_space + lpeg.P(chr(0xE3, 0x80, 0x80))
|
||||
-- BOM \uFEFF
|
||||
u_space = u_space + lpeg.P(chr(0xEF, 0xBB, 0xBF))
|
||||
unicode_space = u_space
|
||||
end
|
||||
|
||||
local identifier = lpeg.R("AZ","az","__") * lpeg.R("AZ","az", "__", "09") ^0
|
||||
|
||||
local hex = lpeg.R("09","AF","af")
|
||||
local hexpair = hex * hex
|
||||
|
||||
local comments = {
|
||||
cpp = lpeg.P("//") * (1 - lpeg.P("\n"))^0 * lpeg.P("\n"),
|
||||
c = lpeg.P("/*") * (1 - lpeg.P("*/"))^0 * lpeg.P("*/")
|
||||
}
|
||||
|
||||
local comment = comments.cpp + comments.c
|
||||
|
||||
local ascii_ignored = (ascii_space + comment)^0
|
||||
|
||||
local unicode_ignored = (unicode_space + comment)^0
|
||||
|
||||
-- Parse the lpeg version skipping patch-values
|
||||
-- LPEG <= 0.7 has no version() function, so fall back to 0.7
|
||||
local DecimalLpegVersion = lpeg.version and tonumber(lpeg.version():match("^(%d+%.%d+)")) or 0.7
|
||||
|
||||
local function setObjectKeyForceNumber(t, key, value)
|
||||
key = tonumber(key) or key
|
||||
return rawset(t, key, value)
|
||||
end
|
||||
|
||||
local util = {
|
||||
unexpected = unexpected,
|
||||
denied = denied,
|
||||
ascii_space = ascii_space,
|
||||
unicode_space = unicode_space,
|
||||
identifier = identifier,
|
||||
hex = hex,
|
||||
hexpair = hexpair,
|
||||
comments = comments,
|
||||
comment = comment,
|
||||
ascii_ignored = ascii_ignored,
|
||||
unicode_ignored = unicode_ignored,
|
||||
DecimalLpegVersion = DecimalLpegVersion,
|
||||
get_invalid_character_info = get_invalid_character_info,
|
||||
setObjectKeyForceNumber = setObjectKeyForceNumber
|
||||
}
|
||||
|
||||
return util
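-- Usage sketch: setObjectKeyForceNumber can be plugged into the object options
-- (see json.decode.composite) to coerce numeric-looking keys; the option path
-- shown here is assumed from the defaults in that module.
--[[
local decode = require("json.decode")
local dutil  = require("json.decode.util")

local decoder = decode.getDecoder({
    object = { setObjectKey = dutil.setObjectKeyForceNumber }
})
local t = decoder('{"1": "one", "2": "two"}')
-- t[1] == "one" and t[2] == "two" (keys converted to numbers)
]]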
|
tools/luajson/json/encode.lua (new file, 161 lines)
@@ -0,0 +1,161 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
local type = type
|
||||
local assert, error = assert, error
|
||||
local getmetatable, setmetatable = getmetatable, setmetatable
|
||||
|
||||
local ipairs, pairs = ipairs, pairs
|
||||
local require = require
|
||||
|
||||
local output = require("json.encode.output")
|
||||
|
||||
local util = require("json.util")
|
||||
local util_merge, isCall = util.merge, util.isCall
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
--[[
|
||||
List of encoding modules to load.
|
||||
Loaded in sequence such that earlier encoders get priority when
|
||||
duplicate type-handlers exist.
|
||||
]]
|
||||
local modulesToLoad = {
|
||||
"strings",
|
||||
"number",
|
||||
"calls",
|
||||
"others",
|
||||
"array",
|
||||
"object"
|
||||
}
|
||||
-- Modules that have been loaded
|
||||
local loadedModules = {}
|
||||
|
||||
local json_encode = {}
|
||||
|
||||
-- Configuration bases for client apps
|
||||
local modes_defined = { "default", "strict" }
|
||||
|
||||
json_encode.default = {}
|
||||
json_encode.strict = {
|
||||
initialObject = true -- Require an object at the root
|
||||
}
|
||||
|
||||
-- For each module, load it and its defaults
|
||||
for _,name in ipairs(modulesToLoad) do
|
||||
local mod = require("json.encode." .. name)
|
||||
if mod.mergeOptions then
|
||||
for _, mode in pairs(modes_defined) do
|
||||
mod.mergeOptions(json_encode[mode], mode)
|
||||
end
|
||||
end
|
||||
loadedModules[name] = mod
|
||||
end
|
||||
|
||||
-- NOTE: nested output tables have not been encountered here, so treat them as unsupported until a use case arises
|
||||
local function flattenOutput(out, value)
|
||||
assert(type(value) ~= 'table')
|
||||
out = out or {}
|
||||
out[#out + 1] = value
|
||||
return out
|
||||
end
|
||||
|
||||
-- Prepares the encoding map from the already provided modules and new config
|
||||
local function prepareEncodeMap(options)
|
||||
local map = {}
|
||||
for _, name in ipairs(modulesToLoad) do
|
||||
local encodermap = loadedModules[name].getEncoder(options[name])
|
||||
for valueType, encoderSet in pairs(encodermap) do
|
||||
map[valueType] = flattenOutput(map[valueType], encoderSet)
|
||||
end
|
||||
end
|
||||
return map
|
||||
end
|
||||
|
||||
--[[
|
||||
Encode a value with a given encoding map and state
|
||||
]]
|
||||
local function encodeWithMap(value, map, state, isObjectKey)
|
||||
local t = type(value)
|
||||
local encoderList = assert(map[t], "Failed to encode value, unhandled type: " .. t)
|
||||
for _, encoder in ipairs(encoderList) do
|
||||
local ret = encoder(value, state, isObjectKey)
|
||||
if false ~= ret then
|
||||
return ret
|
||||
end
|
||||
end
|
||||
error("Failed to encode value, encoders for " .. t .. " deny encoding")
|
||||
end
|
||||
|
||||
|
||||
local function getBaseEncoder(options)
|
||||
local encoderMap = prepareEncodeMap(options)
|
||||
if options.preProcess then
|
||||
local preProcess = options.preProcess
|
||||
return function(value, state, isObjectKey)
|
||||
local ret = preProcess(value, isObjectKey or false)
|
||||
if nil ~= ret then
|
||||
value = ret
|
||||
end
|
||||
return encodeWithMap(value, encoderMap, state)
|
||||
end
|
||||
end
|
||||
return function(value, state, isObjectKey)
|
||||
return encodeWithMap(value, encoderMap, state)
|
||||
end
|
||||
end
|
||||
--[[
|
||||
Retrieve an initial encoder instance based on the provided options
|
||||
the initial encoder is responsible for initializing state
|
||||
State has at least these values configured: encode, check_unique, already_encoded
|
||||
]]
|
||||
function json_encode.getEncoder(options)
|
||||
options = options and util_merge({}, json_encode.default, options) or json_encode.default
|
||||
local encode = getBaseEncoder(options)
|
||||
|
||||
local function initialEncode(value)
|
||||
if options.initialObject then
|
||||
local errorMessage = "Invalid arguments: expects a JSON Object or Array at the root"
|
||||
assert(type(value) == 'table' and not isCall(value, options), errorMessage)
|
||||
end
|
||||
|
||||
local alreadyEncoded = {}
|
||||
local function check_unique(value)
|
||||
assert(not alreadyEncoded[value], "Recursive encoding of value")
|
||||
alreadyEncoded[value] = true
|
||||
end
|
||||
|
||||
local outputEncoder = options.output and options.output() or output.getDefault()
|
||||
local state = {
|
||||
encode = encode,
|
||||
check_unique = check_unique,
|
||||
already_encoded = alreadyEncoded, -- To unmark encoding when moving up stack
|
||||
outputEncoder = outputEncoder
|
||||
}
|
||||
local ret = encode(value, state)
|
||||
if nil ~= ret then
|
||||
return outputEncoder.simple and outputEncoder.simple(ret) or ret
|
||||
end
|
||||
end
|
||||
return initialEncode
|
||||
end
|
||||
|
||||
-- CONSTRUCT STATE WITH FOLLOWING (at least)
|
||||
--[[
|
||||
encode
|
||||
check_unique -- used by inner encoders to make sure value is unique
|
||||
already_encoded -- used to unmark a value as unique
|
||||
]]
|
||||
function json_encode.encode(data, options)
|
||||
return json_encode.getEncoder(options)(data)
|
||||
end
|
||||
|
||||
local mt = {}
|
||||
mt.__call = function(self, ...)
|
||||
return json_encode.encode(...)
|
||||
end
|
||||
|
||||
setmetatable(json_encode, mt)
|
||||
|
||||
return json_encode
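-- Usage sketch: the module is callable; getEncoder(encode.strict) builds an
-- encoder that insists on an object or array at the root.
--[[
local encode = require("json.encode")

encode({ 1, 2, 3 })          --> "[1,2,3]"
encode("just a string")      --> '"just a string"'   (allowed in the default mode)

local strictEncode = encode.getEncoder(encode.strict)
-- strictEncode("just a string") raises
--   "Invalid arguments: expects a JSON Object or Array at the root"
]]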
|
tools/luajson/json/encode/array.lua (new file, 110 lines)
@@ -0,0 +1,110 @@
|
||||
--[[
|
||||
Licensed according to the included 'LICENSE' document
|
||||
Author: Thomas Harning Jr <harningt@gmail.com>
|
||||
]]
|
||||
local jsonutil = require("json.util")
|
||||
|
||||
local type = type
|
||||
local pairs = pairs
|
||||
local assert = assert
|
||||
|
||||
local table = require("table")
|
||||
local math = require("math")
|
||||
local table_concat = table.concat
|
||||
local math_floor, math_modf = math.floor, math.modf
|
||||
|
||||
local jsonutil = require("json.util")
|
||||
local util_IsArray = jsonutil.IsArray
|
||||
|
||||
local _ENV = nil
|
||||
|
||||
local defaultOptions = {
|
||||
isArray = util_IsArray
|
||||
}
|
||||
|
||||
local modeOptions = {}
|
||||
|
||||
local function mergeOptions(options, mode)
|
||||
jsonutil.doOptionMerge(options, false, 'array', defaultOptions, mode and modeOptions[mode])
|
||||
end
|
||||
|
||||
--[[
|
||||
Utility function to determine whether a table is an array or not.
|
||||
Criteria for it being an array:
|
||||
* the externalIsArray callback returns true (returning false reports not-an-array directly)
|
||||
* If the table has an 'n' value that is an integer >= 1 then it
|
||||
is an array... may result in false positives (should check some values
|
||||
before it)
|
||||
* It is a contiguous list of values with zero string-based keys
|
||||
]]
|
||||
local function isArray(val, options)
|
||||
local externalIsArray = options and options.isArray
|
||||
|
||||
if externalIsArray then
|
||||
local ret = externalIsArray(val)
|
||||
if ret == true or ret == false then
|
||||
return ret
|
||||
end
|
||||
end
|
||||
-- Use the 'n' element if it's a number
|
||||
if type(val.n) == 'number' and math_floor(val.n) == val.n and val.n >= 1 then
|
||||
return true
|
||||
end
|
||||
local len = #val
|
||||
for k,v in pairs(val) do
|
||||
if type(k) ~= 'number' then
|
||||
return false
|
||||
end
|
||||
local _, decim = math_modf(k)
|
||||
if not (decim == 0 and 1<=k) then
|
||||
return false
|
||||
end
|
||||
if k > len then -- Use Lua's length as absolute determiner
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
--[[
|
||||
Cleanup function to unmark a value as in the encoding process and return
|
||||
trailing results
|
||||
]]
|
||||
local function unmarkAfterEncode(tab, state, ...)
|
||||
state.already_encoded[tab] = nil
|
||||
return ...
|
||||
end
|
||||
local function getEncoder(options)
|
||||
options = options and jsonutil.merge({}, defaultOptions, options) or defaultOptions
|
||||
local function encodeArray(tab, state)
|
||||
if not isArray(tab, options) then
|
||||
return false
|
||||
end
|
||||
-- Make sure this value hasn't been encoded yet
|
||||
state.check_unique(tab)
|
||||
local encode = state.encode
|
||||
local compositeEncoder = state.outputEncoder.composite
|
||||
local valueEncoder = [[
|
||||
for i = 1, (composite.n or #composite) do
|
||||
local val = composite[i]
|
||||
PUTINNER(i ~= 1)
|
||||
val = encode(val, state)
|
||||
val = val or ''
|
||||
if val then
|
||||
PUTVALUE(val)
|
||||
end
|
||||
end
|
||||
]]
|
||||
return unmarkAfterEncode(tab, state, compositeEncoder(valueEncoder, '[', ']', ',', tab, encode, state))
|
||||
end
|
||||
return { table = encodeArray }
|
||||
end
|
||||
|
||||
local array = {
|
||||
mergeOptions = mergeOptions,
|
||||
isArray = isArray,
|
||||
getEncoder = getEncoder
|
||||
}
|
||||
|
||||
return array
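-- Usage sketch of the isArray heuristics above: contiguous integer keys, or an
-- explicit integer 'n' field, make a table encode as an array.
--[[
local encode = require("json.encode")

encode({ 1, 2, 3 })              --> "[1,2,3]"
encode({ x = 1 })                --> '{"x":1}'
encode({ n = 2, "yes", "no" })   --> '["yes","no"]'
]]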
|
tools/luajson/json/encode/calls.lua (new file, 68 lines)
@@ -0,0 +1,68 @@
--[[
	Licensed according to the included 'LICENSE' document
	Author: Thomas Harning Jr <harningt@gmail.com>
]]
local table = require("table")
local table_concat = table.concat

local select = select
local getmetatable, setmetatable = getmetatable, setmetatable
local assert = assert

local jsonutil = require("json.util")

local isCall, decodeCall = jsonutil.isCall, jsonutil.decodeCall

local _ENV = nil

local defaultOptions = {
}

-- No real default-option handling needed...
local modeOptions = {}

local function mergeOptions(options, mode)
	jsonutil.doOptionMerge(options, false, 'calls', defaultOptions, mode and modeOptions[mode])
end


--[[
	Encodes 'value' as a function call
	Must have parameters in the 'callData' field of the metatable
		name == name of the function call
		parameters == array of parameters to encode
]]
local function getEncoder(options)
	options = options and jsonutil.merge({}, defaultOptions, options) or defaultOptions
	local function encodeCall(value, state)
		if not isCall(value) then
			return false
		end
		local encode = state.encode
		local name, params = decodeCall(value)
		local compositeEncoder = state.outputEncoder.composite
		local valueEncoder = [[
		for i = 1, (composite.n or #composite) do
			local val = composite[i]
			PUTINNER(i ~= 1)
			val = encode(val, state)
			val = val or ''
			if val then
				PUTVALUE(val)
			end
		end
	]]
		return compositeEncoder(valueEncoder, name .. '(', ')', ',', params, encode, state)
	end
	return {
		table = encodeCall,
		['function'] = encodeCall
	}
end

local calls = {
	mergeOptions = mergeOptions,
	getEncoder = getEncoder
}

return calls
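A call value is built with json.util (added later in this patch); this encoder then renders it as name(arg1,arg2,...) instead of a plain JSON value. A hedged sketch; whether json.encode actually picks this encoder up depends on the encoder chain configured by the top-level module:

local jsonutil = require("json.util")

local call = jsonutil.buildCall("Date", 2014, 12, 30)  -- "Date" is just an example name
print(jsonutil.isCall(call))                 --> true
local name, params = jsonutil.decodeCall(call)
print(name, params.n)                        --> Date	3
-- Run through the calls encoder, this value should come out as: Date(2014,12,30)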
58
tools/luajson/json/encode/number.lua
Normal file
58
tools/luajson/json/encode/number.lua
Normal file
@ -0,0 +1,58 @@
--[[
	Licensed according to the included 'LICENSE' document
	Author: Thomas Harning Jr <harningt@gmail.com>
]]
local tostring = tostring
local assert = assert
local jsonutil = require("json.util")
local huge = require("math").huge

local _ENV = nil

local defaultOptions = {
	nan = true,
	inf = true
}

local modeOptions = {}
modeOptions.strict = {
	nan = false,
	inf = false
}

local function mergeOptions(options, mode)
	jsonutil.doOptionMerge(options, false, 'number', defaultOptions, mode and modeOptions[mode])
end


local function encodeNumber(number, options)
	if number ~= number then
		assert(options.nan, "Invalid number: NaN not enabled")
		return "NaN"
	end
	if number == huge then
		assert(options.inf, "Invalid number: Infinity not enabled")
		return "Infinity"
	end
	if number == -huge then
		assert(options.inf, "Invalid number: Infinity not enabled")
		return "-Infinity"
	end
	return tostring(number)
end

local function getEncoder(options)
	options = options and jsonutil.merge({}, defaultOptions, options) or defaultOptions
	return {
		number = function(number, state)
			return encodeNumber(number, options)
		end
	}
end

local number = {
	mergeOptions = mergeOptions,
	getEncoder = getEncoder
}

return number
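The nan/inf flags decide whether the non-standard NaN/Infinity literals are emitted or rejected; the strict mode table above turns both off. A small sketch using only this module:

local number = require("json.encode.number")

local enc = number.getEncoder()        -- defaults: nan = true, inf = true
print(enc.number(1/0))                 --> Infinity
print(enc.number(0/0))                 --> NaN
print(enc.number(42))                  --> 42

local noInf = number.getEncoder({ inf = false })
-- noInf.number(1/0) now raises: "Invalid number: Infinity not enabled"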
77
tools/luajson/json/encode/object.lua
Normal file
77
tools/luajson/json/encode/object.lua
Normal file
@ -0,0 +1,77 @@
--[[
	Licensed according to the included 'LICENSE' document
	Author: Thomas Harning Jr <harningt@gmail.com>
]]
local pairs = pairs
local assert = assert

local type = type
local tostring = tostring

local table_concat = require("table").concat
local jsonutil = require("json.util")

local _ENV = nil

local defaultOptions = {
}

local modeOptions = {}

local function mergeOptions(options, mode)
	jsonutil.doOptionMerge(options, false, 'object', defaultOptions, mode and modeOptions[mode])
end

--[[
	Cleanup function to unmark a value as in the encoding process and return
	trailing results
]]
local function unmarkAfterEncode(tab, state, ...)
	state.already_encoded[tab] = nil
	return ...
end
--[[
	Encode a table as a JSON Object ( keys = strings, values = anything else )
]]
local function encodeTable(tab, options, state)
	-- Make sure this value hasn't been encoded yet
	state.check_unique(tab)
	local encode = state.encode
	local compositeEncoder = state.outputEncoder.composite
	local valueEncoder = [[
	local first = true
	for k, v in pairs(composite) do
		local ti = type(k)
		assert(ti == 'string' or ti == 'number' or ti == 'boolean', "Invalid object index type: " .. ti)
		local name = encode(tostring(k), state, true)
		if first then
			first = false
		else
			name = ',' .. name
		end
		PUTVALUE(name .. ':')
		local val = encode(v, state)
		val = val or ''
		if val then
			PUTVALUE(val)
		end
	end
]]
	return unmarkAfterEncode(tab, state, compositeEncoder(valueEncoder, '{', '}', nil, tab, encode, state))
end

local function getEncoder(options)
	options = options and jsonutil.merge({}, defaultOptions, options) or defaultOptions
	return {
		table = function(tab, state)
			return encodeTable(tab, options, state)
		end
	}
end

local object = {
	mergeOptions = mergeOptions,
	getEncoder = getEncoder
}

return object
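Keys are passed through tostring before encoding, so numeric or boolean keys come out as JSON strings. A hedged sketch via the top-level module (used the same way by test_json.lua at the end of this patch); key order in the output may vary because pairs() is unordered:

local JSON = require("json")

print(JSON.encode({ status = "ok", retries = 3 }))
--> {"status":"ok","retries":3}

print(JSON.encode({ x = 1, [10] = true }))
--> {"x":1,"10":true}    (the numeric key 10 is stringified)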
66
tools/luajson/json/encode/others.lua
Normal file
66
tools/luajson/json/encode/others.lua
Normal file
@ -0,0 +1,66 @@
--[[
	Licensed according to the included 'LICENSE' document
	Author: Thomas Harning Jr <harningt@gmail.com>
]]
local tostring = tostring

local assert = assert
local jsonutil = require("json.util")
local type = type

local _ENV = nil

-- Shortcut that works
local encodeBoolean = tostring

local defaultOptions = {
	allowUndefined = true,
	null = jsonutil.null,
	undefined = jsonutil.undefined
}

local modeOptions = {}

modeOptions.strict = {
	allowUndefined = false
}

local function mergeOptions(options, mode)
	jsonutil.doOptionMerge(options, false, 'others', defaultOptions, mode and modeOptions[mode])
end
local function getEncoder(options)
	options = options and jsonutil.merge({}, defaultOptions, options) or defaultOptions
	local function encodeOthers(value, state)
		if value == options.null then
			return 'null'
		elseif value == options.undefined then
			assert(options.allowUndefined, "Invalid value: Unsupported 'Undefined' parameter")
			return 'undefined'
		else
			return false
		end
	end
	local function encodeBoolean(value, state)
		return value and 'true' or 'false'
	end
	local nullType = type(options.null)
	local undefinedType = options.undefined and type(options.undefined)
	-- Make sure that all of the types handled here are handled
	local ret = {
		boolean = encodeBoolean,
		['nil'] = function() return 'null' end,
		[nullType] = encodeOthers
	}
	if undefinedType then
		ret[undefinedType] = encodeOthers
	end
	return ret
end

local others = {
	encodeBoolean = encodeBoolean,
	mergeOptions = mergeOptions,
	getEncoder = getEncoder
}

return others
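null and undefined are sentinel functions from json.util, so a table can carry an explicit JSON null where a plain nil would simply make the key disappear. A minimal sketch of the dispatch table built above:

local jsonutil = require("json.util")
local others = require("json.encode.others")

local enc = others.getEncoder()
print(enc['nil']())                          --> null   (Lua nil maps to JSON null)
print(enc.boolean(false))                    --> false
-- The sentinels are functions, so they are looked up under the 'function' type key
print(enc['function'](jsonutil.null))        --> null
print(enc['function'](jsonutil.undefined))   --> undefined  (rejected in 'strict' mode)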
91
tools/luajson/json/encode/output.lua
Normal file
91
tools/luajson/json/encode/output.lua
Normal file
@ -0,0 +1,91 @@
--[[
	Licensed according to the included 'LICENSE' document
	Author: Thomas Harning Jr <harningt@gmail.com>
]]
local type = type
local assert, error = assert, error
local table_concat = require("table").concat
local loadstring = loadstring or load

local io = require("io")

local setmetatable = setmetatable

local output_utility = require("json.encode.output_utility")

local _ENV = nil

local tableCompositeCache = setmetatable({}, {__mode = 'v'})

local TABLE_VALUE_WRITER = [[
	ret[#ret + 1] = %VALUE%
]]

local TABLE_INNER_WRITER = ""

--[[
	nextValues can output a max of two values to throw into the data stream
	expected to be called until nil is first return value
	value separator should either be attached to v1 or in innerValue
]]
local function defaultTableCompositeWriter(nextValues, beginValue, closeValue, innerValue, composite, encode, state)
	if type(nextValues) == 'string' then
		local fun = output_utility.prepareEncoder(defaultTableCompositeWriter, nextValues, innerValue, TABLE_VALUE_WRITER, TABLE_INNER_WRITER)
		local ret = {}
		fun(composite, ret, encode, state)
		return beginValue .. table_concat(ret, innerValue) .. closeValue
	end
end

-- no 'simple' as default action is just to return the value
local function getDefault()
	return { composite = defaultTableCompositeWriter }
end

-- BEGIN IO-WRITER OUTPUT
local IO_INNER_WRITER = [[
	if %WRITE_INNER% then
		state.__outputFile:write(%INNER_VALUE%)
	end
]]
local IO_VALUE_WRITER = [[
	state.__outputFile:write(%VALUE%)
]]

local function buildIoWriter(output)
	if not output then -- Default to stdout
		output = io.output()
	end
	local function ioWriter(nextValues, beginValue, closeValue, innerValue, composite, encode, state)
		-- HOOK OUTPUT STATE
		state.__outputFile = output
		if type(nextValues) == 'string' then
			local fun = output_utility.prepareEncoder(ioWriter, nextValues, innerValue, IO_VALUE_WRITER, IO_INNER_WRITER)
			local ret = {}
			output:write(beginValue)
			fun(composite, ret, encode, state)
			output:write(closeValue)
			return nil
		end
	end

	local function ioSimpleWriter(encoded)
		if encoded then
			output:write(encoded)
		end
		return nil
	end
	return { composite = ioWriter, simple = ioSimpleWriter }
end
local function getIoWriter(output)
	return function()
		return buildIoWriter(output)
	end
end

local output = {
	getDefault = getDefault,
	getIoWriter = getIoWriter
}

return output
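getDefault collects encoded pieces into a string, while getIoWriter streams them straight to a file handle. A small sketch of the writer tables they return; how they get wired into the encoder state is up to json.encode's options:

local output = require("json.encode.output")

local default = output.getDefault()
print(type(default.composite))         --> function (builds and returns the encoded string)

local writers = output.getIoWriter(io.stdout)()
writers.simple('{"streamed":true}\n')  -- writes already-encoded text directly to stdout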
54
tools/luajson/json/encode/output_utility.lua
Normal file
54
tools/luajson/json/encode/output_utility.lua
Normal file
@ -0,0 +1,54 @@
--[[
	Licensed according to the included 'LICENSE' document
	Author: Thomas Harning Jr <harningt@gmail.com>
]]
local setmetatable = setmetatable
local assert, loadstring = assert, loadstring or load

local _ENV = nil

-- Key == weak, if main key goes away, then cache cleared
local outputCache = setmetatable({}, {__mode = 'k'})
-- TODO: inner tables weak?

local function buildFunction(nextValues, innerValue, valueWriter, innerWriter)
	local putInner = ""
	if innerValue and innerWriter then
		-- Prepare the lua-string representation of the separator to put in between values
		local formattedInnerValue = ("%q"):format(innerValue)
		-- Fill in the condition %WRITE_INNER% and the %INNER_VALUE% to actually write
		putInner = innerWriter:gsub("%%WRITE_INNER%%", "%%1"):gsub("%%INNER_VALUE%%", formattedInnerValue)
	end
	-- Template-in the value writer (if present) and its conditional argument
	local functionCode = nextValues:gsub("PUTINNER(%b())", putInner)
	-- %VALUE% is to be filled in by the value-to-write
	valueWriter = valueWriter:gsub("%%VALUE%%", "%%1")
	-- Template-in the value writer with its argument
	functionCode = functionCode:gsub("PUTVALUE(%b())", valueWriter)
	functionCode = [[
		return function(composite, ret, encode, state)
	]] .. functionCode .. [[
		end
	]]
	return assert(loadstring(functionCode))()
end

local function prepareEncoder(cacheKey, nextValues, innerValue, valueWriter, innerWriter)
	local cache = outputCache[cacheKey]
	if not cache then
		cache = {}
		outputCache[cacheKey] = cache
	end
	local fun = cache[nextValues]
	if not fun then
		fun = buildFunction(nextValues, innerValue, valueWriter, innerWriter)
		cache[nextValues] = fun
	end
	return fun
end

local output_utility = {
	prepareEncoder = prepareEncoder
}

return output_utility
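prepareEncoder compiles the PUTINNER/PUTVALUE templates used by array.lua, calls.lua and object.lua into a plain Lua function and caches it per writer. A standalone sketch with writer strings of my own (the cache key and writers here are illustrative, not from the library):

local output_utility = require("json.encode.output_utility")

local template = [[
	for i = 1, #composite do
		PUTINNER(i ~= 1)
		PUTVALUE(composite[i])
	end
]]
local fun = output_utility.prepareEncoder("demo-cache-key", template, ",",
	"ret[#ret + 1] = %VALUE%",
	"if %WRITE_INNER% then ret[#ret + 1] = %INNER_VALUE% end")

local ret = {}
fun({ "a", "b", "c" }, ret)            -- the encode/state arguments are unused by this template
print(table.concat(ret))               --> a,b,c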
88
tools/luajson/json/encode/strings.lua
Normal file
88
tools/luajson/json/encode/strings.lua
Normal file
@ -0,0 +1,88 @@
--[[
	Licensed according to the included 'LICENSE' document
	Author: Thomas Harning Jr <harningt@gmail.com>
]]
local string_char = require("string").char
local pairs = pairs

local jsonutil = require("json.util")
local util_merge = jsonutil.merge

local _ENV = nil

local normalEncodingMap = {
	['"'] = '\\"',
	['\\'] = '\\\\',
	['/'] = '\\/',
	['\b'] = '\\b',
	['\f'] = '\\f',
	['\n'] = '\\n',
	['\r'] = '\\r',
	['\t'] = '\\t',
	['\v'] = '\\v' -- not in official spec, on report, removing
}

local xEncodingMap = {}
for char, encoded in pairs(normalEncodingMap) do
	xEncodingMap[char] = encoded
end

-- Pre-encode the control characters to speed up encoding...
-- NOTE: UTF-8 may not work out right w/ JavaScript
-- JavaScript uses 2 bytes after a \u... yet UTF-8 is a
-- byte-stream encoding, not pairs of bytes (it does encode
-- some letters > 1 byte, but base case is 1)
for i = 0, 255 do
	local c = string_char(i)
	if c:match('[%z\1-\031\128-\255]') and not normalEncodingMap[c] then
		-- WARN: UTF8 specializes values >= 0x80 as parts of sequences...
		-- without \x encoding, do not allow encoding > 7F
		normalEncodingMap[c] = ('\\u%.4X'):format(i)
		xEncodingMap[c] = ('\\x%.2X'):format(i)
	end
end

local defaultOptions = {
	xEncode = false, -- Encode single-bytes as \xXX
	processor = nil, -- Simple processor for the string prior to quoting
	-- / is not required to be quoted but it helps with certain decoding
	-- Required encoded characters, " \, and 00-1F (0 - 31)
	encodeSet = '\\"/%z\1-\031',
	encodeSetAppend = nil -- Chars to append to the default set
}

local modeOptions = {}

local function mergeOptions(options, mode)
	jsonutil.doOptionMerge(options, false, 'strings', defaultOptions, mode and modeOptions[mode])
end

local function getEncoder(options)
	options = options and util_merge({}, defaultOptions, options) or defaultOptions
	local encodeSet = options.encodeSet
	if options.encodeSetAppend then
		encodeSet = encodeSet .. options.encodeSetAppend
	end
	local encodingMap = options.xEncode and xEncodingMap or normalEncodingMap
	local encodeString
	if options.processor then
		local processor = options.processor
		encodeString = function(s, state)
			return '"' .. processor(s:gsub('[' .. encodeSet .. ']', encodingMap)) .. '"'
		end
	else
		encodeString = function(s, state)
			return '"' .. s:gsub('[' .. encodeSet .. ']', encodingMap) .. '"'
		end
	end
	return {
		string = encodeString
	}
end

local strings = {
	mergeOptions = mergeOptions,
	getEncoder = getEncoder
}

return strings
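Net effect: quotes, backslashes, slashes and control bytes are escaped; everything else passes through untouched. A quick sketch using only this module:

local strings = require("json.encode.strings")

local enc = strings.getEncoder()
print(enc.string('say "hi"\npath/to/x'))
--> "say \"hi\"\npath\/to\/x"

-- With xEncode (and the high bytes added to the set), single bytes use \xXX instead of \uXXXX
local xenc = strings.getEncoder({ xEncode = true, encodeSetAppend = '\128-\255' })
print(xenc.string('caf\233'))
--> "caf\xE9"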
152
tools/luajson/json/util.lua
Normal file
152
tools/luajson/json/util.lua
Normal file
@ -0,0 +1,152 @@
--[[
	Licensed according to the included 'LICENSE' document
	Author: Thomas Harning Jr <harningt@gmail.com>
]]
local type = type
local print = print
local tostring = tostring
local pairs = pairs
local getmetatable, setmetatable = getmetatable, setmetatable
local select = select

local _ENV = nil

local function foreach(tab, func)
	for k, v in pairs(tab) do
		func(k, v)
	end
end
local function printValue(tab, name)
	local parsed = {}
	local function doPrint(key, value, space)
		space = space or ''
		if type(value) == 'table' then
			if parsed[value] then
				print(space .. key .. '= <' .. parsed[value] .. '>')
			else
				parsed[value] = key
				print(space .. key .. '= {')
				space = space .. ' '
				foreach(value, function(key, value) doPrint(key, value, space) end)
			end
		else
			if type(value) == 'string' then
				value = '[[' .. tostring(value) .. ']]'
			end
			print(space .. key .. '=' .. tostring(value))
		end
	end
	doPrint(name, tab)
end

local function clone(t)
	local ret = {}
	for k, v in pairs(t) do
		ret[k] = v
	end
	return ret
end

local function inner_merge(t, remaining, from, ...)
	if remaining == 0 then
		return t
	end
	if from then
		for k, v in pairs(from) do
			t[k] = v
		end
	end
	return inner_merge(t, remaining - 1, ...)
end

--[[*
	Shallow-merges tables in order onto the first table.

	@param t table to merge entries onto
	@param ... sequence of 0 or more tables to merge onto 't'

	@returns table 't' from input
]]
local function merge(t, ...)
	return inner_merge(t, select('#', ...), ...)
end

-- Function to insert nulls into the JSON stream
local function null()
	return null
end

-- Marker for 'undefined' values
local function undefined()
	return undefined
end

local ArrayMT = {}

--[[
	Returns true if the metatable marks the value as an array,
	false if it has no array component at all,
	otherwise nil to let the normal detection logic run.
]]
local function IsArray(value)
	if type(value) ~= 'table' then return false end
	local meta = getmetatable(value)
	local ret = meta == ArrayMT or (meta ~= nil and meta.__is_luajson_array)
	if not ret then
		if #value == 0 then return false end
	else
		return ret
	end
end
local function InitArray(array)
	setmetatable(array, ArrayMT)
	return array
end

local CallMT = {}

local function isCall(value)
	return CallMT == getmetatable(value)
end

local function buildCall(name, ...)
	local callData = {
		name = name,
		parameters = {n = select('#', ...), ...}
	}
	return setmetatable(callData, CallMT)
end

local function decodeCall(callData)
	if not isCall(callData) then return nil end
	return callData.name, callData.parameters
end

local function doOptionMerge(options, nested, name, defaultOptions, modeOptions)
	if nested then
		modeOptions = modeOptions and modeOptions[name]
		defaultOptions = defaultOptions and defaultOptions[name]
	end
	options[name] = merge(
		{},
		defaultOptions,
		modeOptions,
		options[name]
	)
end

local json_util = {
	printValue = printValue,
	clone = clone,
	merge = merge,
	null = null,
	undefined = undefined,
	IsArray = IsArray,
	InitArray = InitArray,
	isCall = isCall,
	buildCall = buildCall,
	decodeCall = decodeCall,
	doOptionMerge = doOptionMerge
}

return json_util
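A short sketch of the helpers the encode modules above rely on: merge for option handling, null/undefined as sentinels, InitArray/IsArray for explicit array marking:

local jsonutil = require("json.util")
local array = require("json.encode.array")

-- Shallow merge, later tables win
local opts = jsonutil.merge({}, { nan = true, inf = true }, { inf = false })
print(opts.nan, opts.inf)              --> true	false

-- A sentinel survives where a plain nil would make the key vanish
local doc = { title = "demo", note = jsonutil.null }
print(doc.note == jsonutil.null)       --> true

-- Mark an (even empty) table as an array; detection code can hook it via the isArray option
local empty = jsonutil.InitArray({})
print(jsonutil.IsArray(empty))                               --> true
print(array.isArray(empty, { isArray = jsonutil.IsArray }))  --> true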
95
tools/luajson/test_json.lua
Normal file
95
tools/luajson/test_json.lua
Normal file
@ -0,0 +1,95 @@
package.path = package.path .. ";lua_scripts/libraries/luajson/?.lua"

local JSON = require"json"

local jsontest = [[{ 1:{"scn_ptz_id":"",
"scn_ptz_prepos":"Preset 176",
"scn_ptz_order":1,
"scn_ptz_duration":"30",
"scn_ptz_rally_delay":"2"}
,
2:{"scn_ptz_id":"","scn_ptz_prepos":"route","scn_ptz_order":2,"scn_ptz_duration":"30","scn_ptz_rally_delay":"2"} }
]]
local jsontest2 = [[{
"extension":"mpg",
"id":1545148451781,
"name":"Foule_1280x720p.mpg",
"size":67240746,
"date":1545148451,
"mime":"video\/mpeg",
"filename":"1545148451781.mpg",
"dir":"\/home\/pixalarm_data\/fileprocessor_data",
"function_metadatas":
{
"function_faceblur":
{
"date":1545228627,
"current_frame":"845",
"polygons":[
{
"polygon_id":"new_1",
"polygon_vertex":"[
[0.14254859611231102,0.12476007677543186],[0.13174946004319654,0.4740882917466411],
[0.3898488120950324,0.6621880998080614],[0.4038876889848812,0.11516314779270634]
]",
"polygon_frame_start":"1",
"polygon_frame_stop":"300",
"polygon_type":"full_blur"
},
{
"polygon_id":"new_2",
"polygon_vertex":"[
[0.6198704103671706,0.1727447216890595],[0.5496760259179265,0.6007677543186181],
[0.7775377969762419,0.7946257197696737],[0.9028077753779697,0.761996161228407],
[0.9481641468682506,0.2821497120921305],[0.7829373650107991,0.04798464491362764]
]",
"polygon_frame_start":"200",
"polygon_frame_stop":"845",
"polygon_type":"no_blur"
}
],
"framecuts":[
["17","110"],
["248","298"],
["488","620"],
["378","428"]
],
"face_selection":[
{
"frame":"21",
"x":"0.5",
"y":"0.356"
},
{
"frame":"108",
"x":"0.4289",
"y":"0.275"
},
{
"frame":"294",
"x":"0.726",
"y":"0.2364"
}
],
"blur_type":"blur",
"blur_area":"face"
}
},
"total_frame":"845",
"status":"DECODE_FINISHED",
"fps":"25.00"
}]]

local res = JSON.decode(jsontest2)
for k, v in pairs(res) do
	print( k, v)
end

res = JSON.decode( '{"content" : {},"date" : "2014-12-30T08:29:48Z","error" : {"code" : 0,"httpcode" : 200,"message" : ""},"status" : 1}' )
for k, v in pairs(res) do
	print( k, v)
end

local jsondata = JSON.encode( res )
print(jsondata)