Use QFile/QString for C parser files and paths
parent 11dd7306d3
commit 6b70abaaf0
5 changed files with 14 additions and 36 deletions
@@ -4,6 +4,7 @@
 #include <cstdint>
 #include <string>
 #include <vector>
+#include <QString>
 
 namespace fex
 {
@@ -89,9 +90,7 @@ namespace fex
         Lexer() = default;
         ~Lexer() = default;
 
-        std::vector<Token> LexFile(const std::string &path);
-        std::vector<Token> LexString(const std::string &data);
-        void LexFileDumpTokens(const std::string &path, const std::string &out);
+        std::vector<Token> LexFile(const QString &path);
 
     private:
         std::vector<Token> Lex();
@@ -21,7 +21,7 @@ namespace fex
         std::vector<Array> ParseTopLevelArrays(std::vector<Token> tokens);
         std::map<std::string, ArrayValue> ParseTopLevelObjects(std::vector<Token> tokens);
 
-        std::map<std::string, int> ReadDefines(const std::string &filename, std::vector<std::string> matching);
+        std::map<std::string, int> ReadDefines(const QString &filename, std::vector<std::string> matching);
 
     private:
         int EvaluateExpression(std::vector<Token> tokens);
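With ReadDefines now taking its path as a QString, callers no longer convert to std::string before handing the path over. A minimal caller sketch under assumptions (the header path, project-root parameter, and "ITEM_" filter below are hypothetical examples, not part of this commit):

#include <QString>
#include "lib/fex/parser.h" // assumed header location

// Hypothetical caller: the path stays a QString end to end, so Qt's path and
// encoding handling applies before the parser ever opens the file.
std::map<std::string, int> readItemDefines(const QString &projectRoot)
{
    fex::Parser parser;
    const QString path = projectRoot + "/include/constants/items.h"; // example path
    return parser.ReadDefines(path, {"ITEM_"});                      // example name filter
}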
@@ -596,7 +596,7 @@ bool ParseUtil::gameStringToBool(QString gameString, bool * ok) {
 QMap<QString, QHash<QString, QString>> ParseUtil::readCStructs(const QString &filename, const QString &label, const QHash<int, QString> memberMap) {
     QString filePath = this->root + "/" + filename;
     auto cParser = fex::Parser();
-    auto tokens = fex::Lexer().LexFile(filePath.toStdString());
+    auto tokens = fex::Lexer().LexFile(filePath);
     auto structs = cParser.ParseTopLevelObjects(tokens);
     QMap<QString, QHash<QString, QString>> structMaps;
     for (auto it = structs.begin(); it != structs.end(); it++) {
@@ -3,6 +3,7 @@
 #include <fstream>
 #include <iostream>
 #include <sstream>
+#include <QFile>
 
 namespace fex
 {
@@ -155,48 +156,26 @@ namespace fex
         return Token(Token::Type::kDefine, filename_, line_number_);
     }
 
-    std::vector<Token> Lexer::LexString(const std::string &data)
-    {
-        filename_ = "string literal";
-        line_number_ = 1;
-        index_ = 0;
-        data_ = data;
-
-        return Lex();
-    }
-
-    std::vector<Token> Lexer::LexFile(const std::string &path)
-    {
-        filename_ = path;
+    std::vector<Token> Lexer::LexFile(const QString &path)
+    {
+        filename_ = path.toStdString();
         line_number_ = 1;
 
-        std::ifstream file;
-        file.open(path);
+        // Note: Using QFile instead of ifstream to handle encoding differences between platforms
+        // (specifically to handle accented characters on Windows)
+        QFile file(path);
+        file.open(QIODevice::ReadOnly);
 
-        std::stringstream stream;
-        stream << file.rdbuf();
+        const QByteArray data = file.readAll();
 
         index_ = 0;
-        data_ = stream.str();
+        data_ = data.toStdString();
 
         file.close();
 
         return Lex();
     }
 
-    void Lexer::LexFileDumpTokens(const std::string &path, const std::string &out)
-    {
-        std::ofstream file;
-        file.open(out);
-
-        for (Token token : LexFile(path))
-        {
-            file << token.ToString() << std::endl;
-        }
-
-        file.close();
-    }
-
     std::vector<Token> Lexer::Lex()
     {
         std::vector<Token> tokens;
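The hunk above swaps the std::ifstream / std::stringstream read for QFile so that both the path and the file contents go through Qt's file handling (the motivation given in the new comment is accented characters on Windows). A standalone sketch of the same read pattern, with a hypothetical helper name and an open-failure check that the commit itself does not add:

#include <string>
#include <QFile>
#include <QString>

// Sketch of the QFile-based read now used by Lexer::LexFile: open read-only,
// pull the raw bytes with readAll(), and convert once to std::string for the lexer.
// readFileToString is a hypothetical name, not part of this commit.
static std::string readFileToString(const QString &path)
{
    QFile file(path);
    if (!file.open(QIODevice::ReadOnly)) // the commit omits this check
        return std::string();
    const QByteArray data = file.readAll();
    file.close();
    return data.toStdString();
}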
@@ -337,7 +337,7 @@ namespace fex
         return DefineStatement(identifer, value);
     }
 
-    std::map<std::string, int> Parser::ReadDefines(const std::string &filename, std::vector<std::string> matching)
+    std::map<std::string, int> Parser::ReadDefines(const QString &filename, std::vector<std::string> matching)
     {
         std::map<std::string, int> out;
 