Added more support for writing hresultwrap itself. There is probably a more C++-y way to write all this :| If only I could use Go...
This commit is contained in:
parent
a07727515f
commit
8f5eba45bd
|
@ -6,13 +6,19 @@
|
|||
bool generate(const char *line, size_t n, FILE *fout)
|
||||
{
|
||||
std::vector<char> genout;
|
||||
std::vector<Slice *> *tokens;
|
||||
std::vector<Slice *>::const_iterator i;
|
||||
size_t nw;
|
||||
|
||||
genout.push_back('/');
|
||||
genout.push_back('/');
|
||||
genout.push_back(' ');
|
||||
genout.insert(genout.end(), line, line + n);
|
||||
genout.push_back('\n');
|
||||
tokens = TokenizeWhitespace(line, n);
|
||||
for (i = tokens->begin(); i < tokens->end(); i++) {
|
||||
genout.push_back('/');
|
||||
genout.push_back('/');
|
||||
genout.push_back(' ');
|
||||
AppendSlice(&genout, *i);
|
||||
genout.push_back('\n');
|
||||
}
|
||||
FreeTokenized(tokens);
|
||||
|
||||
genout.push_back('\n');
|
||||
nw = fwrite(genout.data(), sizeof (char), genout.size(), fout);
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
#else
|
||||
#endif
|
||||
#include <typeinfo>
|
||||
#include <algorithm>
|
||||
#include "lib.hpp"
|
||||
|
||||
class eofError : public Error {
|
||||
|
@ -65,6 +66,11 @@ bool IsEOF(Error *e)
|
|||
return typeid (*e) == typeid (eofError);
|
||||
}
|
||||
|
||||
// Write the entire contents of *v to w in a single Write call,
// returning whatever error (or NULL) the writer reports.
Error *WriteVector(WriteCloser *w, std::vector<char> *v)
{
	Error *err;

	err = w->Write(v->data(), v->size());
	return err;
}
|
||||
|
||||
#define nbuf 1024
|
||||
|
||||
Scanner::Scanner(ReadCloser *r)
|
||||
|
@ -139,3 +145,55 @@ Error *Scanner::Err(void) const
|
|||
return this->err;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Construct a Slice viewing n bytes starting at p. The Slice does not
// own or copy the memory; the caller must keep it alive.
Slice::Slice(const char *p, size_t n) :
	p(p),
	n(n)
{
}
|
||||
|
||||
const char *Slice::Data(void) const
|
||||
{
|
||||
return this->p;
|
||||
}
|
||||
|
||||
size_t Slice::Len(void) const
|
||||
{
|
||||
return this->n;
|
||||
}
|
||||
|
||||
std::vector<Slice *> *TokenizeWhitespace(const char *buf, size_t n)
|
||||
{
|
||||
std::vector<Slice *> *ret;
|
||||
const char *p, *q;
|
||||
const char *end;
|
||||
|
||||
ret = new std::vector<Slice *>;
|
||||
p = buf;
|
||||
end = buf + n;
|
||||
while (p < end) {
|
||||
if (*p == ' ' || *p == '\t') {
|
||||
p++;
|
||||
continue;
|
||||
}
|
||||
for (q = p; q < end; q++)
|
||||
if (*q == ' ' || *q == '\t')
|
||||
break;
|
||||
ret->push_back(new Slice(p, q - p));
|
||||
p = q;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Free a token list returned by TokenizeWhitespace(): delete every
// Slice it contains, then the vector itself.
void FreeTokenized(std::vector<Slice *> *v)
{
	for (Slice *tok : *v)
		delete tok;
	delete v;
}
|
||||
|
||||
void AppendSlice(std::vector<char> *v, Slice *s)
|
||||
{
|
||||
v->insert(v->end(), s->Data(), s->Data() + s->Len());
|
||||
}
|
||||
|
|
|
@ -27,6 +27,8 @@ public:
|
|||
};
|
||||
|
||||
// Open filename for reading; on success stores a new ReadCloser in *r
// and returns NULL, otherwise returns the error.
extern Error *OpenRead(const char *filename, ReadCloser **r);
// Create (or truncate) filename for writing; on success stores a new
// WriteCloser in *w and returns NULL, otherwise leaves *w NULL and
// returns the error.
extern Error *CreateWrite(const char *filename, WriteCloser **w);
// Write the entire contents of *v to w in one call.
extern Error *WriteVector(WriteCloser *w, std::vector<char> *v);
|
||||
|
||||
class Scanner {
|
||||
ReadCloser *r;
|
||||
|
@ -44,3 +46,17 @@ public:
|
|||
size_t Len(void) const;
|
||||
Error *Err(void) const;
|
||||
};
|
||||
|
||||
// Slice is a non-owning view of a byte range: a pointer plus a length.
// The viewed memory must outlive the Slice.
class Slice {
	const char *p;	// first byte of the range (not owned)
	size_t n;	// number of bytes
public:
	Slice(const char *p, size_t n);

	const char *Data(void) const;	// pointer to the first byte
	size_t Len(void) const;		// length in bytes
};
|
||||
|
||||
// Split buf[0..n) into tokens separated by spaces and tabs; each token
// is a newly allocated Slice pointing into buf. Free the result with
// FreeTokenized().
extern std::vector<Slice *> *TokenizeWhitespace(const char *buf, size_t n);
// Delete every Slice in v, then v itself.
extern void FreeTokenized(std::vector<Slice *> *v);
// Append the bytes viewed by s to the end of *v.
extern void AppendSlice(std::vector<char> *v, Slice *s);
|
||||
|
|
|
@ -104,3 +104,15 @@ Error *OpenRead(const char *filename, ReadCloser **r)
|
|||
*r = new posixReadCloser(fd);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Create (or truncate) filename for writing with mode 0644. On
// success stores a new posixWriteCloser in *w and returns NULL; on
// failure leaves *w NULL and returns a posixError wrapping errno.
Error *CreateWrite(const char *filename, WriteCloser **w)
{
	*w = NULL;
	int fd = open(filename, O_WRONLY | O_CREAT | O_TRUNC, 0644);
	if (fd < 0)
		return new posixError(errno);
	*w = new posixWriteCloser(fd);
	return NULL;
}
|
||||
|
|
Loading…
Reference in New Issue