Skip to content

Commit e73fbe0

Browse files
committed
better evo tok
1 parent ebb56d6 commit e73fbe0

2 files changed

Lines changed: 52 additions & 2 deletions

File tree

src/EvoFormat.cpp

Lines changed: 48 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,11 @@
1818

1919
#include "TIVarTypes.h"
2020
#include "TypeHandlers/TypeHandlers.h"
21+
#include "json.hpp"
2122
#include "tivarslib_utils.h"
2223

24+
using json = nlohmann::ordered_json;
25+
2326
namespace tivars::EvoFormat
2427
{
2528
namespace
@@ -113,6 +116,27 @@ namespace tivars::EvoFormat
113116
codepoint = static_cast<uint16_t>(value);
114117
return true;
115118
}
119+
120+
// Decode a string of hexadecimal digits into a sequence of raw bytes.
// fieldName tags the error messages so callers can tell which JSON field
// was malformed. Throws std::invalid_argument on odd length or on any
// character that is not a hex digit.
data_t hex_string_to_bytes(const std::string& hex, const char* fieldName)
{
    if ((hex.size() % 2) != 0)
    {
        throw std::invalid_argument(std::string(fieldName) + " must contain an even number of hex digits");
    }

    const size_t byteCount = hex.size() / 2;
    data_t bytes;
    bytes.reserve(byteCount);

    for (size_t byteIdx = 0; byteIdx < byteCount; ++byteIdx)
    {
        const size_t pos = byteIdx * 2;
        // Cast to unsigned char before isxdigit to avoid UB on negative chars.
        const bool hiOk = std::isxdigit(static_cast<unsigned char>(hex[pos])) != 0;
        const bool loOk = std::isxdigit(static_cast<unsigned char>(hex[pos + 1])) != 0;
        if (!hiOk || !loOk)
        {
            throw std::invalid_argument(std::string(fieldName) + " must be valid hexadecimal");
        }
        bytes.push_back(hexdec(hex.substr(pos, 2)));
    }

    return bytes;
}
116140
}
117141

118142
uint16_t evo_checksum(const data_t& body)
@@ -804,10 +828,32 @@ data_t tokenize_evo_token_words(const std::string& source, const options_t& opti
804828
const bool deindent = options.contains("deindent") && options.at("deindent") == 1;
805829
const bool detectStrings = !options.contains("detect_strings") || options.at("detect_strings") != 0;
806830

831+
std::string sourceText = source;
832+
const std::string trimmed = trim(sourceText);
833+
if (!trimmed.empty() && trimmed.front() == '{')
834+
{
835+
try
836+
{
837+
const json j = json::parse(trimmed);
838+
if (j.contains("rawDataHex"))
839+
{
840+
return hex_string_to_bytes(j.at("rawDataHex").get<std::string>(), "rawDataHex");
841+
}
842+
if (j.contains("code"))
843+
{
844+
sourceText = j.at("code").get<std::string>();
845+
}
846+
}
847+
catch (const json::exception&)
848+
{
849+
// Ignore non-JSON input and fall back to regular Evo tokenized parsing.
850+
}
851+
}
852+
807853
std::string normalizedSource;
808854
if (deindent)
809855
{
810-
std::istringstream lines{source};
856+
std::istringstream lines{sourceText};
811857
std::string line;
812858
while (std::getline(lines, line))
813859
{
@@ -820,7 +866,7 @@ data_t tokenize_evo_token_words(const std::string& source, const options_t& opti
820866
}
821867
else
822868
{
823-
normalizedSource = source;
869+
normalizedSource = sourceText;
824870
}
825871

826872
static constexpr uint16_t legacyQuote = 0x2A;

tests.cpp

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1704,6 +1704,10 @@ int main(int argc, char** argv)
17041704
const json readable = json::parse(directYHatProgram.getReadableContent());
17051705
assert(readable["code"] == "Disp \"ŷ\"");
17061706
assert(readable["rawDataHex"].get<std::string>().find("7701") != std::string::npos);
1707+
1708+
TIVarFile recreatedYHatProgram = TIVarFile::createNew("Program", "YHAT3", "84Evo");
1709+
recreatedYHatProgram.setContentFromString(directYHatProgram.getReadableContent());
1710+
assert(recreatedYHatProgram.getRawContent() == directYHatProgram.getRawContent());
17071711
}
17081712

17091713
{

0 commit comments

Comments
 (0)