Dataset schema, with the viewer's per-column statistics (string length range, list length range, or number of distinct classes):

  lang           stringclasses  (3 values)
  file_path      stringlengths  (5 to 150)
  repo_name      stringlengths  (6 to 110)
  commit         stringlengths  (40 to 40)
  file_code      stringlengths  (1.52k to 18.9k)
  prefix         stringlengths  (82 to 16.5k)
  suffix         stringlengths  (0 to 15.1k)
  middle         stringlengths  (121 to 8.18k)
  strategy       stringclasses  (8 values)
  context_items  listlengths    (0 to 100)
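For orientation, the schema above maps cleanly onto a plain record type. The sketch below is a hypothetical C++ mirror of one row, not an official loader: the field names come from the schema, while the concrete types (std::string everywhere, plus a small struct for the list entries) and the per-field comments are assumptions drawn from the example row shown next.

```cpp
// Hypothetical in-memory mirror of one dataset row (assumption: every string
// column maps to std::string; context_items entries carry the four fields
// visible in the example row below).
#include <string>
#include <vector>

struct ContextItem {
    std::string content;    // snippet text taken from another file
    std::string file_path;  // file the snippet comes from
    int rank = 0;           // retrieval rank (0 is the best match)
    double score = 0.0;     // retrieval score
};

struct Row {
    std::string lang;                        // one of 3 language classes
    std::string file_path;                   // path of the target file
    std::string repo_name;                   // owner/repo
    std::string commit;                      // 40-character commit hash
    std::string file_code;                   // full contents of the target file
    std::string prefix;                      // code before the masked span
    std::string suffix;                      // code after the masked span
    std::string middle;                      // the masked span itself
    std::string strategy;                    // one of 8 masking strategies
    std::vector<ContextItem> context_items;  // 0 to 100 retrieved snippets
};

int main() { Row row; return row.context_items.empty() ? 0 : 1; }
```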
Example row:

  lang:      C++
  file_path: treap_vis_multiset.cc
  repo_name: varqox/benchmarks
  commit:    7713013f1a6af368da4cf634a72ae69ca6d17334

  file_code (the complete file, stored as a single string):
#include <bits/stdc++.h> using namespace std; #define FOR(i,a,n) for (decltype(a) i = (a), i##__ = (n); i <= i##__; ++i) #define REP(i,n) FOR(i,0,(n)-1) #define FORD(i,a,n) for (decltype(n) i = (a), i##__ = (n); i >= i##__; --i) #define ALL(x) x.begin(), x.end() #define EB emplace_back #define ST first #define ND second #define OO(A) template<class... T> ostream& operator<<(ostream& os, const A<T...>& x) { return __o(os, ALL(x)); } #define SZ(x) ((int)x.size()) typedef long long LL; typedef pair<int, int> PII; typedef vector<int> VI; typedef vector<VI> VVI; typedef vector<PII> VPII; template<class A, class B> ostream& operator<<(ostream&, const pair<A, B>&); template<class I> ostream& __o(ostream&, I, I); template<class T, size_t N> ostream& operator<<(ostream& os, const array<T, N>& x) { return __o(os, ALL(x)); } OO(vector) OO(deque) OO(set) OO(multiset) OO(map) OO(multimap) template<class A, class B> ostream& operator<<(ostream& os, const pair<A, B>& p) { return os << "(" << p.ST << ", " << p.ND << ")"; } template<class I> ostream& __o(ostream& os, I a, I b) { os << "{"; for (; a != b;) os << *a++, os << (a == b ? "" : " "); return os << "}"; } template<class I> ostream& __d(ostream& os, I a, I b) { os << "{\n"; for (I c = a; a != b; ++a) os << " " << distance(c, a) << ": " << *a << endl; return os << "}"; } template<class... T> void __e(T&&... a) { int t[] = {(cerr << forward<T>(a), 0)...}; (void)t; cerr << endl; } template<class A, class B> inline void mini(A& a, B&& b) { if (b < a) a = b; } template<class A, class B> inline void maxi(A& a, B&& b) { if (b > a) a = b; } inline int ceil2(int x) { return (x < 2 ? 1 : 1 << (sizeof(x) * 8 - __builtin_clz(x - 1))); } #ifdef DEBUG # define D(...) __VA_ARGS__ #else # define D(...) #endif #define LOG(x) D(cerr << #x ": " << x) #define LOGN(x) D(LOG(x) << endl) #define DUMP(x) D(cerr << #x ": ", __d(cerr, ALL(x)) << endl) #define E(...) D(__e(__VA_ARGS__)) constexpr char nl = '\n'; struct treap { treap* left = nullptr; treap* right = nullptr; int val; int sz = 1; int rank = rand(); treap(int x = 0) : val(x) {} }; inline int sz(treap* x) { return (x ? 
x->sz : 0); } inline void update(treap* a) { a->sz = 1 + sz(a->left) + sz(a->right); } inline treap* merge(treap* a, treap* b) { if (!a) return b; if (!b) return a; if (a->rank > b->rank) { a->right = merge(a->right, b); update(a); return a; } else { b->left = merge(a, b->left); update(b); return b; } } inline pair<treap*, treap*> split(treap* a, int k) { if (!a) return {nullptr, nullptr}; int sl = sz(a->left); if (sl >= k) { auto p = split(a->left, k); a->left = p.ND; update(a); return {p.ST, a}; } else { auto p = split(a->right, k - sl - 1); a->right = p.ST; update(a); return {a, p.ND}; } } inline treap* insert(treap* a, int pos, int val) { auto p = split(a, pos); return merge(p.ST, merge(new treap(val), p.ND)); } inline treap* erase(treap* a, int pos) { auto p = split(a, pos); auto p1 = split(p.ND, 1); delete p1.ST; return merge(p.ST, p1.ND); } inline void _print(treap* a) { if (a) { _print(a->left); cerr << a->val << ' '; _print(a->right); } } inline void print(treap* a) { cerr << "{", _print(a), cerr << "}\n"; } inline int find(treap* a, int val) { if (!a) return -0x3f3f3f3f; if (a->val == val) return sz(a->left); else if (a->val > val) return find(a->left, val); else return sz(a->left) + 1 + find(a->right, val); } inline int upper_bound(treap* a, int val) { if (!a) return 0; if (a->val <= val) return sz(a->left) + 1 + upper_bound(a->right, val); else return upper_bound(a->left, val); } inline treap* insertVal(treap* a, int val) { return insert(a, upper_bound(a, val), val); } inline treap* eraseVal(treap* a, int val) { int pos = find(a, val); return (pos >= 0 ? erase(a, pos) : a); } #include <sys/time.h> class Timer { struct timeval begin; public: Timer() { start(); } void start() { gettimeofday(&begin, NULL); } long long microtime() { struct timeval end; gettimeofday(&end, NULL); return (end.tv_sec - begin.tv_sec) * 1000000LL + end.tv_usec - begin.tv_usec; } double time() { return microtime() * 0.000001; } }; int main() { ios::sync_with_stdio(false); cin.tie(nullptr); srand(10101029); constexpr int N = 1e6; VI v(N); for (int& x : v) x = rand(); VI v1 = v; random_shuffle(ALL(v1)); Timer timer; treap *t = nullptr; timer.start(); for (int x : v) t = insertVal(t, x); cout << "Treap - insert: " << fixed << setprecision(4) << timer.time() << " s" << endl; timer.start(); for (int x : v1) t = eraseVal(t, x); cout << "Treap - erase: " << fixed << setprecision(4) << timer.time() << " s" << endl; multiset<int> S; timer.start(); for (int x : v) S.insert(x); cout << "Multiset - insert: " << fixed << setprecision(4) << timer.time() << " s" << endl; timer.start(); for (int x : v1) S.erase(x); cout << "Multiset - erase: " << fixed << setprecision(4) << timer.time() << " s" << endl; return 0; }
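The file_code value above is a complete, self-contained benchmark: it builds a randomized treap, drives it as a multiset through insertVal/eraseVal, and times the same insert/erase workload against std::multiset. Because the dataset stores it as one long string, the sketch below restates just the treap core the file implements (size-augmented nodes, split by in-order position, merge by heap priority, sorted insertion via upper_bound). It is a reduced paraphrase for readability, with names lightly renamed, not a substitute for the stored code.

```cpp
// Readable restatement of the treap core inside file_code above.
// Priorities come from rand() (the original calls this field "rank");
// "size" caches subtree size so split() can cut at a position.
#include <cstdlib>
#include <utility>

struct Node {
    Node *left = nullptr, *right = nullptr;
    int val;
    int size = 1;
    int prio = rand();
    explicit Node(int v) : val(v) {}
};

int size(Node* t) { return t ? t->size : 0; }
void pull(Node* t) { t->size = 1 + size(t->left) + size(t->right); }

// Merge two treaps where every element of a precedes every element of b.
Node* merge(Node* a, Node* b) {
    if (!a) return b;
    if (!b) return a;
    if (a->prio > b->prio) { a->right = merge(a->right, b); pull(a); return a; }
    b->left = merge(a, b->left); pull(b); return b;
}

// Split into (first k elements, the rest) by in-order position.
std::pair<Node*, Node*> split(Node* t, int k) {
    if (!t) return {nullptr, nullptr};
    if (size(t->left) >= k) {
        auto p = split(t->left, k);
        t->left = p.second; pull(t);
        return {p.first, t};
    }
    auto p = split(t->right, k - size(t->left) - 1);
    t->right = p.first; pull(t);
    return {t, p.second};
}

// Position after the last element <= val; inserting there keeps the
// in-order sequence sorted, which is exactly multiset behaviour.
int upper_bound(Node* t, int val) {
    if (!t) return 0;
    if (t->val <= val) return size(t->left) + 1 + upper_bound(t->right, val);
    return upper_bound(t->left, val);
}

Node* insertVal(Node* t, int val) {
    auto cut = split(t, upper_bound(t, val));
    return merge(cut.first, merge(new Node(val), cut.second));
}

int main() {
    Node* t = nullptr;
    for (int x : {5, 1, 5, 3}) t = insertVal(t, x);
    return size(t) == 4 ? 0 : 1;  // four insertions, duplicates kept
}
```

The three unlabeled values that come next are this row's prefix, suffix, and middle fields, in the schema's column order: the prefix stops right before the template __d dump helper, the middle is that helper (the block the strategy below cut out), and the suffix carries the remainder of the file.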
#include <bits/stdc++.h> using namespace std; #define FOR(i,a,n) for (decltype(a) i = (a), i##__ = (n); i <= i##__; ++i) #define REP(i,n) FOR(i,0,(n)-1) #define FORD(i,a,n) for (decltype(n) i = (a), i##__ = (n); i >= i##__; --i) #define ALL(x) x.begin(), x.end() #define EB emplace_back #define ST first #define ND second #define OO(A) template<class... T> ostream& operator<<(ostream& os, const A<T...>& x) { return __o(os, ALL(x)); } #define SZ(x) ((int)x.size()) typedef long long LL; typedef pair<int, int> PII; typedef vector<int> VI; typedef vector<VI> VVI; typedef vector<PII> VPII; template<class A, class B> ostream& operator<<(ostream&, const pair<A, B>&); template<class I> ostream& __o(ostream&, I, I); template<class T, size_t N> ostream& operator<<(ostream& os, const array<T, N>& x) { return __o(os, ALL(x)); } OO(vector) OO(deque) OO(set) OO(multiset) OO(map) OO(multimap) template<class A, class B> ostream& operator<<(ostream& os, const pair<A, B>& p) { return os << "(" << p.ST << ", " << p.ND << ")"; } template<class I> ostream& __o(ostream& os, I a, I b) { os << "{"; for (; a != b;) os << *a++, os << (a == b ? "" : " "); return os << "}"; } template<class I>
template<class... T> void __e(T&&... a) { int t[] = {(cerr << forward<T>(a), 0)...}; (void)t; cerr << endl; } template<class A, class B> inline void mini(A& a, B&& b) { if (b < a) a = b; } template<class A, class B> inline void maxi(A& a, B&& b) { if (b > a) a = b; } inline int ceil2(int x) { return (x < 2 ? 1 : 1 << (sizeof(x) * 8 - __builtin_clz(x - 1))); } #ifdef DEBUG # define D(...) __VA_ARGS__ #else # define D(...) #endif #define LOG(x) D(cerr << #x ": " << x) #define LOGN(x) D(LOG(x) << endl) #define DUMP(x) D(cerr << #x ": ", __d(cerr, ALL(x)) << endl) #define E(...) D(__e(__VA_ARGS__)) constexpr char nl = '\n'; struct treap { treap* left = nullptr; treap* right = nullptr; int val; int sz = 1; int rank = rand(); treap(int x = 0) : val(x) {} }; inline int sz(treap* x) { return (x ? x->sz : 0); } inline void update(treap* a) { a->sz = 1 + sz(a->left) + sz(a->right); } inline treap* merge(treap* a, treap* b) { if (!a) return b; if (!b) return a; if (a->rank > b->rank) { a->right = merge(a->right, b); update(a); return a; } else { b->left = merge(a, b->left); update(b); return b; } } inline pair<treap*, treap*> split(treap* a, int k) { if (!a) return {nullptr, nullptr}; int sl = sz(a->left); if (sl >= k) { auto p = split(a->left, k); a->left = p.ND; update(a); return {p.ST, a}; } else { auto p = split(a->right, k - sl - 1); a->right = p.ST; update(a); return {a, p.ND}; } } inline treap* insert(treap* a, int pos, int val) { auto p = split(a, pos); return merge(p.ST, merge(new treap(val), p.ND)); } inline treap* erase(treap* a, int pos) { auto p = split(a, pos); auto p1 = split(p.ND, 1); delete p1.ST; return merge(p.ST, p1.ND); } inline void _print(treap* a) { if (a) { _print(a->left); cerr << a->val << ' '; _print(a->right); } } inline void print(treap* a) { cerr << "{", _print(a), cerr << "}\n"; } inline int find(treap* a, int val) { if (!a) return -0x3f3f3f3f; if (a->val == val) return sz(a->left); else if (a->val > val) return find(a->left, val); else return sz(a->left) + 1 + find(a->right, val); } inline int upper_bound(treap* a, int val) { if (!a) return 0; if (a->val <= val) return sz(a->left) + 1 + upper_bound(a->right, val); else return upper_bound(a->left, val); } inline treap* insertVal(treap* a, int val) { return insert(a, upper_bound(a, val), val); } inline treap* eraseVal(treap* a, int val) { int pos = find(a, val); return (pos >= 0 ? erase(a, pos) : a); } #include <sys/time.h> class Timer { struct timeval begin; public: Timer() { start(); } void start() { gettimeofday(&begin, NULL); } long long microtime() { struct timeval end; gettimeofday(&end, NULL); return (end.tv_sec - begin.tv_sec) * 1000000LL + end.tv_usec - begin.tv_usec; } double time() { return microtime() * 0.000001; } }; int main() { ios::sync_with_stdio(false); cin.tie(nullptr); srand(10101029); constexpr int N = 1e6; VI v(N); for (int& x : v) x = rand(); VI v1 = v; random_shuffle(ALL(v1)); Timer timer; treap *t = nullptr; timer.start(); for (int x : v) t = insertVal(t, x); cout << "Treap - insert: " << fixed << setprecision(4) << timer.time() << " s" << endl; timer.start(); for (int x : v1) t = eraseVal(t, x); cout << "Treap - erase: " << fixed << setprecision(4) << timer.time() << " s" << endl; multiset<int> S; timer.start(); for (int x : v) S.insert(x); cout << "Multiset - insert: " << fixed << setprecision(4) << timer.time() << " s" << endl; timer.start(); for (int x : v1) S.erase(x); cout << "Multiset - erase: " << fixed << setprecision(4) << timer.time() << " s" << endl; return 0; }
ostream& __d(ostream& os, I a, I b) { os << "{\n"; for (I c = a; a != b; ++a) os << " " << distance(c, a) << ": " << *a << endl; return os << "}"; }
  strategy: function_block-function_prefix_line
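Read literally, the strategy name says the masked span is a whole function block identified by the line that introduces the function, which matches the row: the middle above is exactly the __d helper. Under the usual fill-in-the-middle convention the three pieces concatenate back to file_code; the sketch below illustrates that split-and-reassemble round trip on placeholder strings, since the exact whitespace at the two joints is not recoverable from the flattened preview. The split_around helper and the placeholder values are illustrative assumptions, not part of the dataset.

```cpp
// Illustration of the prefix/middle/suffix layout: carve a known function
// block out of a file, then check that the three pieces concatenate back.
// The strings are placeholders standing in for the row's real field values.
#include <iostream>
#include <string>
#include <tuple>

std::tuple<std::string, std::string, std::string>
split_around(const std::string& file_code, const std::string& block) {
    auto pos = file_code.find(block);
    if (pos == std::string::npos)
        return {file_code, "", ""};                  // block not found
    return {file_code.substr(0, pos),                // prefix
            block,                                   // middle
            file_code.substr(pos + block.size())};   // suffix
}

int main() {
    std::string middle =
        "ostream& __d(ostream& os, I a, I b) { /* ... */ }\n";
    std::string file_code =
        "template<class I>\n" + middle +
        "template<class... T> void __e(T&&... a) { /* ... */ }\n";

    auto [prefix, mid, suffix] = split_around(file_code, middle);
    std::cout << std::boolalpha
              << (prefix + mid + suffix == file_code) << '\n';   // prints true
    return 0;
}
```

The last column, context_items, follows: a list of up to 100 code snippets, in this row all drawn from other files of the same repository, each carrying its content, file_path, retrieval rank, and score.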
[ { "content": "class Cstring {\n\nprivate:\n\n\tsize_t len_, real_len_;\n\npublic:\n\n\tchar *p;\n\n\n\n\texplicit Cstring(size_t len = 0, char c = '\\0'): len_(len),\n\n\t\t\treal_len_(len + 1), p((char*)malloc(real_len_)) {\n\n\t\tif (p == NULL)\n\n\t\t\tthrow std::bad_alloc();\n\n\n\n\t\tmemset(p, c, len);\n\n\t\tp[len] = '\\0';\n\n\t}\n\n\n\n\tCstring(const char* str, size_t len = -1)\n\n\t\t\t: len_(min(len, strlen(str))), real_len_(len_ + 1),\n\n\t\t\tp((char*)malloc(real_len_)) {\n\n\t\tif (p == NULL)\n\n\t\t\tthrow std::bad_alloc();\n", "file_path": "text_bech.cc", "rank": 0, "score": 41117.14248903029 }, { "content": "class Timer {\n\n\tstruct timeval begin;\n\n\n\npublic:\n\n\tTimer() { start(); }\n\n\n\n\tvoid start() { gettimeofday(&begin, NULL); }\n\n\n\n\tlong long microtime() {\n\n\t\tstruct timeval end;\n\n\t\tgettimeofday(&end, NULL);\n\n\t\treturn (end.tv_sec - begin.tv_sec) * 1000000LL + end.tv_usec -\n\n\t\t\tbegin.tv_usec;\n\n\t}\n\n\n\n\tdouble time() { return microtime() * 0.000001; }\n\n};\n\n\n\nint main() {\n\n\tstring a(NULL);\n", "file_path": "copying_memory.cc", "rank": 1, "score": 41117.14248903029 }, { "content": "class Stopwatch {\n\n\tstd::chrono::steady_clock::time_point begin;\n\n\n\npublic:\n\n\tStopwatch() noexcept { restart(); }\n\n\n\n\tvoid restart() noexcept { begin = std::chrono::steady_clock::now(); }\n\n\n\n\tlong long microtime() const noexcept {\n\n\t\tusing namespace std::chrono;\n\n\t\treturn duration_cast<microseconds>(steady_clock::now() - begin).count();\n\n\t}\n\n\n\n\tdouble time() const noexcept {\n\n\t\tusing namespace std::chrono;\n\n\t\treturn duration<double>(steady_clock::now() - begin).count();\n\n\t}\n\n};\n\n\n\n#include <sys/shm.h>\n\n#include <sys/stat.h>\n\n\n", "file_path": "ipc_bench.cc", "rank": 2, "score": 41117.14248903029 }, { "content": "class Stopwatch {\n\n\tstd::chrono::steady_clock::time_point begin;\n\n\n\npublic:\n\n\tStopwatch() noexcept { restart(); }\n\n\n\n\tvoid restart() noexcept { begin = std::chrono::steady_clock::now(); }\n\n\n\n\tlong long microtime() const noexcept {\n\n\t\tusing namespace std::chrono;\n\n\t\treturn duration_cast<microseconds>(steady_clock::now() - begin).count();\n\n\t}\n\n\n\n\tdouble time() const noexcept {\n\n\t\tusing namespace std::chrono;\n\n\t\treturn duration<double>(steady_clock::now() - begin).count();\n\n\t}\n\n};\n\n\n\n#include <sys/stat.h>\n", "file_path": "iostream_vis_cstdio.cc", "rank": 3, "score": 39550.37021544317 }, { "content": "class SharedMemorySegment {\n\nprivate:\n\n\tint id_;\n\n\tvoid* addr_;\n\n\tSharedMemorySegment(const SharedMemorySegment&);\n\n\tSharedMemorySegment& operator=(const SharedMemorySegment&);\n\n\n\npublic:\n\n\tSharedMemorySegment(size_t size) : id_(shmget(IPC_PRIVATE, size,\n\n\t\t\t\tIPC_CREAT | IPC_EXCL | S_IRUSR | S_IWUSR)),\n\n\t\t\taddr_(nullptr) {\n\n\n\n\t\tif (id_ != -1) {\n\n\t\t\tif ((addr_ = shmat(id_, nullptr, 0)) == (void*)-1)\n\n\t\t\t\taddr_ = nullptr;\n\n\t\t\tshmctl(id_, IPC_RMID, nullptr);\n\n\t\t}\n\n\t}\n\n\n\n\t~SharedMemorySegment() {\n", "file_path": "ipc_bench.cc", "rank": 5, "score": 38166.61836739824 }, { "content": "class Timer {\n\n\tstruct timeval begin;\n\n\n\npublic:\n\n\tTimer() { restart(); }\n\n\n\n\tvoid restart() { gettimeofday(&begin, NULL); }\n\n\n\n\tlong long microtime() {\n\n\t\tstruct timeval end;\n\n\t\tgettimeofday(&end, NULL);\n\n\t\treturn (end.tv_sec - begin.tv_sec) * 1000000LL + end.tv_usec -\n\n\t\t\tbegin.tv_usec;\n\n\t}\n\n\n\n\tdouble time() { return microtime() * 0.000001; 
}\n\n};\n\n\n\ntemplate<size_t N>\n\narray<char, N> make_concat(const string& a, const string& b, const string& c) {\n", "file_path": "buff_vis_string_concat.cc", "rank": 6, "score": 38166.61836739824 }, { "content": "class Timer {\n\n\tstruct timeval begin;\n\n\n\npublic:\n\n\tTimer() { start(); }\n\n\n\n\tvoid start() { gettimeofday(&begin, NULL); }\n\n\n\n\tlong long microtime() {\n\n\t\tstruct timeval end;\n\n\t\tgettimeofday(&end, NULL);\n\n\t\treturn (end.tv_sec - begin.tv_sec) * 1000000LL + end.tv_usec -\n\n\t\t\tbegin.tv_usec;\n\n\t}\n\n\n\n\tdouble time() { return microtime() * 0.000001; }\n\n};\n\n\n\ninline void foo1() noexcept { cout << \"foo1\" << endl; }\n\ninline void foo2() noexcept { cout << \"foo2\" << endl; }\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 7, "score": 34840.60950191859 }, { "content": "struct Poli6 : Polimorph { void operator()() const { foo6(); } };\n\n\n\ninline Polimorph* get_poli() noexcept {\n\n\tswitch (rand() % 6) {\n\n\t\tcase 0: return new Poli1;\n\n\t\tcase 1: return new Poli2;\n\n\t\tcase 2: return new Poli3;\n\n\t\tcase 3: return new Poli4;\n\n\t\tcase 4: return new Poli5;\n\n\t\tcase 5: return new Poli6;\n\n\t}\n\n\n\n\treturn new Poli1;\n\n}\n\n\n\nint main() {\n\n\tios::sync_with_stdio(false);\n\n\tcin.tie(nullptr);\n\n\n\n\tvector<void(*)()> u;\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 8, "score": 22722.987731663863 }, { "content": "struct Poli5 : Polimorph { void operator()() const { foo5(); } };\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 9, "score": 22722.987731663863 }, { "content": "struct Poli4 : Polimorph { void operator()() const { foo4(); } };\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 10, "score": 22722.987731663863 }, { "content": "struct Poli3 : Polimorph { void operator()() const { foo3(); } };\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 11, "score": 22722.987731663863 }, { "content": "struct Poli2 : Polimorph { void operator()() const { foo2(); } };\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 12, "score": 22722.987731663863 }, { "content": "struct Poli1 : Polimorph { void operator()() const { foo1(); } };\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 13, "score": 22722.987731663863 }, { "content": "// Krzysztof Małysa\n\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n\n\n#define FOR(i,a,n) for (int i = (a), i##__ = (n); i <= i##__; ++i)\n\n#define REP(i,n) FOR(i,0,n-1)\n\n#define FORD(i,a,n) for (int i = (a), i##__ = (n); i >= i##__; --i)\n\n#define ALL(x) x.begin(), x.end()\n\n#define EB emplace_back\n\n#define ST first\n\n#define ND second\n\n#define OO(A) template<class... 
T> ostream& operator<<(ostream& os, const A<T...>& x) { return __o(os, ALL(x)); }\n\n#define SZ(x) ((int)x.size())\n\n\n\ntypedef long long LL;\n\ntypedef pair<int, int> PII;\n\ntypedef vector<int> VI;\n\ntypedef vector<VI> VVI;\n\ntypedef vector<PII> VPII;\n\n\n", "file_path": "buff_vis_string_concat.cc", "rank": 14, "score": 30.180011342849625 }, { "content": "// Krzysztof Małysa\n\n#include <bits/stdc++.h>\n\nusing namespace std;\n\n\n\n#define FOR(i,a,n) for (auto i ## __ = (n), i = (a); i <= i ## __; ++i)\n\n#define FORD(i,a,n) for (auto i = (a), i ## __ = (n); i >= i ## __; --i)\n\n#define REP(i,n) FOR(i, 0, n - 1)\n\n#define ALL(x) x.begin(), x.end()\n\n#define SZ(x) (int(x.size()))\n\n#define EB emplace_back\n\n#define ST first\n\n#define ND second\n\n#define tpv typedef vector<\n\n\n\ntypedef long long LL;\n\ntypedef pair<int, int> PII;\n\ntpv int> VI;\n\ntpv VI> VVI;\n\ntpv PII> VPII;\n\ntpv LL> VLL;\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 16, "score": 26.525982971586636 }, { "content": "// Krzysztof Małysa\n\n// cin + cout vis printf + scanf benchmark\n\n#include <bits/stdc++.h>\n\n#include <unistd.h>\n\n\n\nusing namespace std;\n\n\n\n#define FOR(i,a,n) for (int i = (a), __n ## i = n; i <= __n ## i; ++i)\n\n#define REP(i,n) FOR(i,0,n)\n\n#define FORD(i,a,n) for (int i = (a), __n ## i = n; i >= __n ## i; --i)\n\n#define ALL(x) (x).begin(), (x).end()\n\n#define SZ(x) (int(x.size()))\n\n#define eprintf(...) fprintf(stderr, __VA_ARGS__)\n\n#define ST first\n\n#define ND second\n\n#define MP make_pair\n\n#define PB push_back\n\n#define EB emplace_back\n\n#define O(...) ostream& operator <<(ostream& os, const __VA_ARGS__& x)\n\n#define OO(...) O(__VA_ARGS__) { return __out(os, ALL(x)); }\n", "file_path": "iostream_vis_cstdio.cc", "rank": 17, "score": 19.574830471564574 }, { "content": "// Krzysztof Małysa\n\n// Interprocess communication speed test: pipe, shared memory + semaphores, sockets\n\n#include <bits/stdc++.h>\n\n#include <unistd.h>\n\n\n\nusing namespace std;\n\n\n\n#define FOR(i,a,n) for (int i = (a), __n ## i = n; i <= __n ## i; ++i)\n\n#define REP(i,n) FOR(i,0,n)\n\n#define FORD(i,a,n) for (int i = (a), __n ## i = n; i >= __n ## i; --i)\n\n#define ALL(x) (x).begin(), (x).end()\n\n#define SZ(x) (int(x.size()))\n\n#define eprintf(...) fprintf(stderr, __VA_ARGS__)\n\n#define ST first\n\n#define ND second\n\n#define MP make_pair\n\n#define PB push_back\n\n#define EB emplace_back\n\n#define O(...) ostream& operator <<(ostream& os, const __VA_ARGS__& x)\n\n#define OO(...) O(__VA_ARGS__) { return __out(os, ALL(x)); }\n", "file_path": "ipc_bench.cc", "rank": 18, "score": 19.12436091481195 }, { "content": "#define T template\n\n#define CL class\n\n\n\ntypedef unsigned uint;\n\ntypedef long long LL;\n\ntypedef unsigned long long ULL;\n\ntypedef vector<int> VI;\n\ntypedef vector<VI> VVI;\n\ntypedef pair<int, int> PII;\n\ntypedef vector<PII> VPII;\n\ntypedef pair<LL, LL> PLL;\n\n\n\nT<CL A> inline A abs(const A& a) { return a < A() ? 
-a : a; }\n\nT<CL A, CL B> inline void mini(A& a, const B& b) { if (b < a) a = b; }\n\nT<CL A, CL B> inline void maxi(A& a, const B& b) { if (b > a) a = b; }\n\n\n\nT<CL Iter> ostream& __out(ostream& os, Iter a, Iter b, const string& s = \", \");\n\nT<CL A, CL B> O(pair<A, B>);\n\n\n\nT<CL A> OO(vector<A>)\n", "file_path": "ipc_bench.cc", "rank": 19, "score": 18.65936257408674 }, { "content": "#define T template\n\n#define CL class\n\n\n\ntypedef unsigned uint;\n\ntypedef long long LL;\n\ntypedef unsigned long long ULL;\n\ntypedef vector<int> VI;\n\ntypedef vector<VI> VVI;\n\ntypedef pair<int, int> PII;\n\ntypedef vector<PII> VPII;\n\ntypedef pair<LL, LL> PLL;\n\n\n\nT<CL A> inline A abs(const A& a) { return a < A() ? -a : a; }\n\nT<CL A, CL B> inline void mini(A& a, const B& b) { if (b < a) a = b; }\n\nT<CL A, CL B> inline void maxi(A& a, const B& b) { if (b > a) a = b; }\n\n\n\nT<CL Iter> ostream& __out(ostream& os, Iter a, Iter b, const string& s = \", \");\n\nT<CL A, CL B> O(pair<A, B>);\n\n\n\nT<CL A> OO(vector<A>)\n", "file_path": "iostream_vis_cstdio.cc", "rank": 20, "score": 18.659362574086735 }, { "content": "// Krzysztof Małysa\n\n// Speed test of memory copying functions\n\n#include <bits/stdc++.h>\n\n#include <unistd.h>\n\n\n\nusing namespace std;\n\n\n\n#define FOR(i,a,n) for (int i = (a), __n ## i = n; i < __n ## i; ++i)\n\n#define REP(i,n) FOR(i,0,n)\n\n#define FORD(i,a,n) for (int i = (a), __n ## i = n; i >= __n ## i; --i)\n\n#define LET(x,a) __typeof(a) x = (a)\n\n#define FOREACH(i,x) for (LET(i, x.begin()), __n##i = x.end(); i != __n##i; ++i)\n\n#define ALL(x) x.begin(), x.end()\n\n#define SZ(x) (int(x.size()))\n\n#define eprintf(...) fprintf(stderr, __VA_ARGS__)\n\n#define ST first\n\n#define ND second\n\n#define MP make_pair\n\n#define PB push_back\n\n#define O(...) ostream& operator <<(ostream& os, const __VA_ARGS__& x)\n", "file_path": "copying_memory.cc", "rank": 21, "score": 17.875344322120167 }, { "content": "// Krzysztof Małysa\n\n// Program to test text structures/functions speed\n\n#include <bits/stdc++.h>\n\n#include <unistd.h>\n\n\n\nusing namespace std;\n\n\n\n#define FOR(i,a,n) for (int i = (a), __n ## i = n; i < __n ## i; ++i)\n\n#define REP(i,n) FOR(i,0,n)\n\n#define FORD(i,a,n) for (int i = (a), __n ## i = n; i >= __n ## i; --i)\n\n#define LET(x,a) __typeof(a) x = (a)\n\n#define FOREACH(i,x) for (LET(i, x.begin()), __n##i = x.end(); i != __n##i; ++i)\n\n#define ALL(x) x.begin(), x.end()\n\n#define SZ(x) (int(x.size()))\n\n#define eprintf(...) fprintf(stderr, __VA_ARGS__)\n\n#define ST first\n\n#define ND second\n\n#define MP make_pair\n\n#define PB push_back\n\n#define O(...) ostream& operator <<(ostream& os, const __VA_ARGS__& x)\n", "file_path": "text_bech.cc", "rank": 22, "score": 17.728971520983727 }, { "content": "template<class A, class B> ostream& operator<<(ostream&, const pair<A, B>&);\n\ntemplate<class I> ostream& __o(ostream&, I, I);\n\ntemplate<class T, size_t N> ostream& operator<<(ostream& os, const array<T, N>& x) { return __o(os, ALL(x)); }\n\nOO(vector) OO(deque) OO(set) OO(multiset) OO(map) OO(multimap)\n\ntemplate<class A, class B> ostream& operator<<(ostream& os, const pair<A, B>& p) {\n\n\treturn os << \"(\" << p.ST << \", \" << p.ND << \")\";\n\n}\n\ntemplate<class I> ostream& __o(ostream& os, I a, I b) {\n\n\tos << \"{\";\n\n\tfor (; a != b;)\n\n\t\tos << *a++, cerr << (a == b ? 
\"\" : \" \");\n\n\treturn os << \"}\";\n\n}\n\ntemplate<class I> ostream& __d(ostream& os, I a, I b) {\n\n\tos << \"{\\n\";\n\n\tfor (I c = a; a != b; ++a)\n\n\t\tos << \" \" << distance(c, a) << \": \" << *a << endl;\n\n\treturn os << \"}\";\n\n}\n\ntemplate<class... T> void __e(T&&... a) {\n", "file_path": "buff_vis_string_concat.cc", "rank": 24, "score": 16.260179497105966 }, { "content": "T<CL Iter>\n\nostream& __out(ostream& os, Iter a, Iter b, const string& s) {\n\n\tos << \"{\";\n\n\tif (a != b) {\n\n\t\tos << *a;\n\n\t\twhile (++a != b)\n\n\t\t\tos << s << *a;\n\n\t}\n\n\treturn os << \"}\";\n\n}\n\n\n\nT<CL A, CL B>\n\nO(pair<A, B>) {\n\n\treturn os << \"(\" << x.ST << \", \" << x.ND << \")\";\n\n}\n\n\n\nCL Input {\n\n\tstatic const int BUFF_SIZE = 1 << 16;\n\n\tunsigned char buff[BUFF_SIZE], *pos, *end;\n\n\n", "file_path": "text_bech.cc", "rank": 25, "score": 15.45745661124093 }, { "content": "T<CL Iter>\n\nostream& __out(ostream& os, Iter a, Iter b, const string& s) {\n\n\tos << \"{\";\n\n\tif (a != b) {\n\n\t\tos << *a;\n\n\t\twhile (++a != b)\n\n\t\t\tos << s << *a;\n\n\t}\n\n\treturn os << \"}\";\n\n}\n\n\n\nT<CL A, CL B>\n\nO(pair<A, B>) {\n\n\treturn os << \"(\" << x.ST << \", \" << x.ND << \")\";\n\n}\n\n\n\nCL Input {\n\n\tstatic const int BUFF_SIZE = 1 << 16;\n\n\tunsigned char buff[BUFF_SIZE], *pos, *end;\n\n\n", "file_path": "copying_memory.cc", "rank": 26, "score": 15.457456611240932 }, { "content": "#define OO(...) O(__VA_ARGS__) { return __out(os, ALL(x)); }\n\n#define T template\n\n#define CL class\n\n\n\ntypedef unsigned uint;\n\ntypedef long long LL;\n\ntypedef unsigned long long ULL;\n\ntypedef vector<int> VI;\n\ntypedef vector<VI> VVI;\n\ntypedef vector<LL> VLL;\n\ntypedef pair<int, int> PII;\n\ntypedef vector<PII> VPII;\n\ntypedef vector<VPII> VVPII;\n\ntypedef pair<LL, LL> PLLLL;\n\ntypedef vector<PLLLL> VPLLLL;\n\ntypedef vector<bool> VB;\n\ntypedef vector<char> VC;\n\n\n\nT<CL A>\n\ninline A abs(const A& a) { return a < A() ? -a : a; }\n", "file_path": "copying_memory.cc", "rank": 27, "score": 14.996010946181856 }, { "content": "#define OO(...) O(__VA_ARGS__) { return __out(os, ALL(x)); }\n\n#define T template\n\n#define CL class\n\n\n\ntypedef unsigned uint;\n\ntypedef long long LL;\n\ntypedef unsigned long long ULL;\n\ntypedef vector<int> VI;\n\ntypedef vector<VI> VVI;\n\ntypedef vector<LL> VLL;\n\ntypedef pair<int, int> PII;\n\ntypedef vector<PII> VPII;\n\ntypedef vector<VPII> VVPII;\n\ntypedef pair<LL, LL> PLLLL;\n\ntypedef vector<PLLLL> VPLLLL;\n\ntypedef vector<bool> VB;\n\ntypedef vector<char> VC;\n\n\n\nT<CL A>\n\ninline A abs(const A& a) { return a < A() ? 
-a : a; }\n", "file_path": "text_bech.cc", "rank": 28, "score": 14.996010946181855 }, { "content": "\twhile (a != b) {\n\n\t\tos << \" \" << a - beg << \": \" << *a << \"\\n\";\n\n\t\t++a;\n\n\t}\n\n\treturn os << \"}\";\n\n}\n\n\n\nT<CL A, CL B> O(pair<A, B>) { return os << \"(\" << x.ST << \", \" << x.ND << \")\"; }\n\n\n\nCL Input {\n\n\tstatic const int BUFF_SIZE = 1 << 16;\n\n\tconst int fd;\n\n\tunsigned char buff[BUFF_SIZE], *pos, *end;\n\n\n\n\tvoid grabBuffer() { end = (pos = buff) + read(fd, buff, BUFF_SIZE); }\n\n\n\npublic:\n\n\texplicit Input(int x) : fd(x), pos(buff), end(buff) {}\n\n\n\n\tint peek() {\n", "file_path": "ipc_bench.cc", "rank": 29, "score": 12.22116482584715 }, { "content": "\twhile (a != b) {\n\n\t\tos << \" \" << a - beg << \": \" << *a << \"\\n\";\n\n\t\t++a;\n\n\t}\n\n\treturn os << \"}\";\n\n}\n\n\n\nT<CL A, CL B> O(pair<A, B>) { return os << \"(\" << x.ST << \", \" << x.ND << \")\"; }\n\n\n\n#undef LIKELY\n\n#undef UNLIKELY\n\n\n\n#if defined(__GNUC__) && __GNUC__ >= 4\n\n# define LIKELY(x) (__builtin_expect((x), 1))\n\n# define UNLIKELY(x) (__builtin_expect((x), 0))\n\n#else\n\n# define LIKELY(x) (x)\n\n# define UNLIKELY(x) (x)\n\n#endif\n\n\n", "file_path": "iostream_vis_cstdio.cc", "rank": 30, "score": 10.487040207797966 }, { "content": "T<CL A> OO(deque<A>)\n\nT<CL A> OO(list<A>)\n\nT<CL A, CL B> OO(set<A, B>)\n\nT<CL A, CL B> OO(multiset<A, B>)\n\nT<CL A, CL B, CL C> OO(map<A, B, C>)\n\nT<CL A, CL B, CL C> OO(multimap<A, B, C>)\n\n\n\nT<CL Iter> ostream& __out(ostream& os, Iter a, Iter b, const string& s) {\n\n\tos << \"{\";\n\n\tif (a != b) {\n\n\t\tos << *a;\n\n\t\twhile (++a != b)\n\n\t\t\tos << s << *a;\n\n\t}\n\n\treturn os << \"}\";\n\n}\n\n\n\nT<CL Iter> ostream& __dump(ostream& os, Iter a, Iter b) {\n\n\tos << \"{\\n\";\n\n\tIter beg = a;\n", "file_path": "ipc_bench.cc", "rank": 31, "score": 10.250858884910748 }, { "content": "T<CL A> OO(deque<A>)\n\nT<CL A> OO(list<A>)\n\nT<CL A, CL B> OO(set<A, B>)\n\nT<CL A, CL B> OO(multiset<A, B>)\n\nT<CL A, CL B, CL C> OO(map<A, B, C>)\n\nT<CL A, CL B, CL C> OO(multimap<A, B, C>)\n\n\n\nT<CL Iter> ostream& __out(ostream& os, Iter a, Iter b, const string& s) {\n\n\tos << \"{\";\n\n\tif (a != b) {\n\n\t\tos << *a;\n\n\t\twhile (++a != b)\n\n\t\t\tos << s << *a;\n\n\t}\n\n\treturn os << \"}\";\n\n}\n\n\n\nT<CL Iter> ostream& __dump(ostream& os, Iter a, Iter b) {\n\n\tos << \"{\\n\";\n\n\tIter beg = a;\n", "file_path": "iostream_vis_cstdio.cc", "rank": 32, "score": 10.250858884910746 }, { "content": "\n\nT<CL A, CL B>\n\ninline void mini(A& a, const B& b) {\n\n\tif (b < a)\n\n\t\ta = b;\n\n}\n\n\n\nT<CL A, CL B>\n\ninline void maxi(A& a, const B& b) {\n\n\tif (b > a)\n\n\t\ta = b;\n\n}\n\n\n\nT<CL Iter>\n\nostream& __out(ostream& os, Iter a, Iter b, const string& s = \", \");\n\n\n\nT<CL A, CL B>\n\nO(pair<A, B>);\n\n\n\nT<CL A>\n", "file_path": "copying_memory.cc", "rank": 34, "score": 8.794182867214744 }, { "content": "\n\nT<CL A, CL B>\n\ninline void mini(A& a, const B& b) {\n\n\tif (b < a)\n\n\t\ta = b;\n\n}\n\n\n\nT<CL A, CL B>\n\ninline void maxi(A& a, const B& b) {\n\n\tif (b > a)\n\n\t\ta = b;\n\n}\n\n\n\nT<CL Iter>\n\nostream& __out(ostream& os, Iter a, Iter b, const string& s = \", \");\n\n\n\nT<CL A, CL B>\n\nO(pair<A, B>);\n\n\n\nT<CL A>\n", "file_path": "text_bech.cc", "rank": 35, "score": 8.794182867214744 }, { "content": "union semun {\n\n\tint val; /* Value for SETVAL */\n\n\tstruct semid_ds *buf; /* Buffer for IPC_STAT, IPC_SET */\n\n\tunsigned short *array; /* Array for GETALL, SETALL 
*/\n\n\tstruct seminfo *__buf; /* Buffer for IPC_INFO\n\n\t (Linux-specific) */\n\n};\n\n\n\n#include <sys/types.h>\n\n#include <sys/ipc.h>\n\n#include <sys/sem.h>\n\n\n\n/* Obtain a binary semaphore’s ID, allocating if necessary. */\n\nint binary_semaphore_allocation(key_t key, int sem_flags) {\n\n\treturn semget(key, 1, sem_flags);\n\n}\n\n\n\n/* Deallocate a binary semaphore. All users must have finished their\n\nuse. Returns -1 on failure. */\n\nint binary_semaphore_deallocate(int semid) {\n", "file_path": "ipc_bench.cc", "rank": 37, "score": 8.509449796669305 }, { "content": "\tStopwatch sw;\n\n\tlong long got = 0, x;\n\n\twhile (got < data_length &&\n\n\t\t\t(x = read(fd[0], buff, std::min<LL>(data_length - got, BUFF_SIZE))) >= 0)\n\n\t\tgot += x;\n\n\n\n\tclose(fd[0]);\n\n\tclose(fd[1]);\n\n\n\n\tif (got < data_length)\n\n\t\treturn -1;\n\n\n\n\treturn sw.time();\n\n}\n\n\n\nint main(int argc, char **argv) {\n\n\tlong long bytes = 1LL << 30;\n\n\tif (argc > 1)\n\n\t\tbytes = atoll(argv[1]);\n\n\n", "file_path": "ipc_bench.cc", "rank": 39, "score": 7.677721171884972 }, { "content": "This returns immediately. */\n\nint binary_semaphore_oper(int semid, int x) {\n\n\tstruct sembuf operations[1];\n\n\t/* Use the first (and only) semaphore. */\n\n\toperations[0].sem_num = 0;\n\n\t/* Increment by 1. */\n\n\toperations[0].sem_op = x;\n\n\t/* Permit undoing. */\n\n\toperations[0].sem_flg = 0;\n\n\treturn semop(semid, operations, 1);\n\n}\n\n\n\ndouble testSharedMemory(long long data_length) {\n\n\tconstexpr int BUFF_SIZE = 1 << 16;\n\n\n\n\tstruct xxx {\n\n\t\tint size;\n\n\t\tchar buff[BUFF_SIZE];\n\n\t};\n\n\n", "file_path": "ipc_bench.cc", "rank": 40, "score": 7.56051802930803 }, { "content": "\n\n\tint fd[2];\n\n\tif (socketpair(AF_LOCAL, SOCK_STREAM, 0, fd))\n\n\t\treturn -1;\n\n\n\n\tint cpid = fork();\n\n\tif (cpid == -1)\n\n\t\tabort();\n\n\n\n\telse if (cpid == 0) {\n\n\t\tlong long x;\n\n\t\tmemset(buff, '7', BUFF_SIZE);\n\n\n\n\t\twhile (data_length &&\n\n\t\t\t\t(x = write(fd[1], buff, std::min<LL>(BUFF_SIZE, data_length))) >= 0)\n\n\t\t\tdata_length -= x;\n\n\n\n\t\t_exit(0);\n\n\t}\n\n\n", "file_path": "ipc_bench.cc", "rank": 41, "score": 7.54540194392043 }, { "content": "\tint t[] = {(cerr << forward<T>(a), 0)...}; (void)t;\n\n\tcerr << endl;\n\n}\n\n\n\ntemplate<class A, class B> void mini(A& a, B&& b) { if (b < a) a = b; }\n\ntemplate<class A, class B> void maxi(A& a, B&& b) { if (b > a) a = b; }\n\nint ceil2(int x) { return 1 << (sizeof(x) * 8 - __builtin_clz(x - 1)); }\n\n\n\n#ifdef DEBUG\n\n# define D(...) __VA_ARGS__\n\n#else\n\n# define D(...)\n\n#endif\n\n\n\n#define LOG(x) D(cerr << #x \": \" << x)\n\n#define LOGN(x) D(LOG(x) << endl)\n\n#define DUMP(x) D(cerr << #x \": \", __d(cerr, ALL(x)) << endl)\n\n#define E(...) 
D(__e(__VA_ARGS__))\n\n#define endl '\\n'\n\nconstexpr char nl = '\\n';\n\n// End of templates\n\n\n\n#include <sys/time.h>\n\n\n", "file_path": "buff_vis_string_concat.cc", "rank": 43, "score": 7.322656441695642 }, { "content": "\t\tmemcpy(buff, x->buff, x->size);\n\n\t\tgot += x->size;\n\n\t\t// cerr << x->size << endl;\n\n\t\tbinary_semaphore_oper(sem, 2);\n\n\t}\n\n\n\n\t(void)binary_semaphore_deallocate(sem);\n\n\n\n\tif (got < data_length)\n\n\t\treturn -1;\n\n\n\n\treturn sw.time();\n\n}\n\n\n\n#include <sys/socket.h>\n\n#include <sys/un.h>\n\n\n\ndouble testSocket(long long data_length) {\n\n\tconstexpr int BUFF_SIZE = 1 << 16;\n\n\tchar buff[BUFF_SIZE];\n", "file_path": "ipc_bench.cc", "rank": 44, "score": 7.027333017545116 }, { "content": "\tvector<function<void()>> v;\n\n\tvector<function<void()>> v1;\n\n\tvector<Polimorph*> w;\n\n\tmap<int, void(*)()> l;\n\n\tmap<int, function<void()>> m;\n\n\tmap<int, function<void()>> m1;\n\n\tmap<int, Polimorph*> n;\n\n\n\n\tconstexpr int N = 300;\n\n\tauto fillu = [&] {\n\n\t\tu = {};\n\n\t\tu.resize(N);\n\n\t\tREP (i, N)\n\n\t\t\tu[i] = get_foo();\n\n\t};\n\n\n\n\tauto fillv = [&] {\n\n\t\tv = {};\n\n\t\tv.resize(N);\n\n\t\tREP (i, N)\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 45, "score": 6.931821724390009 }, { "content": "\t\t}\n\n\t};\n\n\n\n\tauto usev = [&] {\n\n\t\tREP (i, N / 3) {\n\n\t\t\tint k = rand() % N;\n\n\t\t\tif (k < N) // Real life check\n\n\t\t\t\tv[k]();\n\n\t\t}\n\n\t};\n\n\n\n\tauto usev1 = [&] {\n\n\t\tREP (i, N / 3) {\n\n\t\t\tint k = rand() % N;\n\n\t\t\tif (k < N) // Real life check\n\n\t\t\t\tv1[k]();\n\n\t\t}\n\n\t};\n\n\n\n\tauto usew = [&] {\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 46, "score": 6.8666999516435325 }, { "content": "\t\tREP (i, N / 3) {\n\n\t\t\tint k = rand() % N;\n\n\t\t\tif (k < N) // Real life check\n\n\t\t\t\t(*w[k])();\n\n\t\t}\n\n\t};\n\n\n\n\tauto usel = [&] {\n\n\t\tREP (i, N / 3)\n\n\t\t\tl[rand() % N]();\n\n\t};\n\n\n\n\tauto usem = [&] {\n\n\t\tREP (i, N / 3)\n\n\t\t\tm[rand() % N]();\n\n\t};\n\n\n\n\tauto usem1 = [&] {\n\n\t\tREP (i, N / 3)\n\n\t\t\tm1[rand() % N]();\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 47, "score": 6.243206187755238 }, { "content": "\t\tif (addr_)\n\n\t\t\tshmdt(addr_);\n\n\t}\n\n\n\n\tint key() const { return id_; }\n\n\n\n\tvoid* addr() const { return addr_; }\n\n};\n\n\n\ndouble testPipe(long long data_length) {\n\n\tconstexpr int BUFF_SIZE = (1 << 12) - 1;\n\n\tchar buff[BUFF_SIZE];\n\n\tint fd[2];\n\n\tpipe(fd);\n\n\n\n\tint cpid = fork();\n\n\tif (cpid == -1)\n\n\t\tabort();\n\n\n\n\telse if (cpid == 0) {\n", "file_path": "ipc_bench.cc", "rank": 48, "score": 6.140439071074095 }, { "content": "\t\tlong long x;\n\n\t\tmemset(buff, '7', BUFF_SIZE);\n\n\n\n\t\twhile (data_length && (x = write(fd[1], buff, std::min<LL>(BUFF_SIZE, data_length))) >= 0)\n\n\t\t\tdata_length -= x;\n\n\n\n\t\t_exit(0);\n\n\t}\n\n\n\n\tStopwatch sw;\n\n\tlong long got = 0, x;\n\n\twhile (got < data_length && (x = read(fd[0], buff, std::min<LL>(data_length - got, BUFF_SIZE))) >= 0)\n\n\t\tgot += x;\n\n\n\n\tif (got < data_length)\n\n\t\treturn -1;\n\n\n\n\treturn sw.time();\n\n}\n\n\n", "file_path": "ipc_bench.cc", "rank": 49, "score": 5.986070309207019 }, { "content": "\n\nconstexpr char nl = '\\n';\n\n#define endl nl\n\n\n\n#define ris return *this\n\n#define tem template<class T\n\n\n\ntem, class B> inline void mini(T&& a, B&& b) { if (b < a) a = b; 
}\n\ntem, class B> inline void maxi(T&& a, B&& b) { if (b > a) a = b; }\n\nint ceil2(int x) { return x < 2 ? 1 : 1 << (sizeof(x) * 8 - __builtin_clz(x - 1)); }\n\n\n\ntem> struct Dump { T a, b; };\n\ntem> auto dump(T&& x) -> Dump<decltype(x.begin())> { return {ALL(x)}; }\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 50, "score": 5.838778591736904 }, { "content": "\tfor (char& c : s)\n\n\t\tc = getRandom('A', 'z');\n\n\tcout << s << '\\n' << flush;\n\n}\n\n\n\nvoid benchCoutCstring(int n) {\n\n\tunique_ptr<char[]> s(new char[n + 1]);\n\n\ts[n] = '\\0';\n\n\tREP (i, n - 1)\n\n\t\ts[i] = getRandom('A', 'z');\n\n\tcout << s.get() << '\\n' << flush;\n\n}\n\n\n\nvoid benchPrintfInt(int n) {\n\n\tVI t(n);\n\n\tfor (int& i : t)\n\n\t\ti = _r_gen();\n\n\tfor (int i : t)\n\n\t\tprintf(\"%i\\n\", i);\n\n}\n", "file_path": "iostream_vis_cstdio.cc", "rank": 52, "score": 5.592049205418062 }, { "content": "\tcout << x << endl;\n\n}\n\n\n\nvoid benchScanfCstring(int n) {\n\n\tunique_ptr<char[]> s(new char[n + 1]);\n\n\tscanf(\"%s\", s.get());\n\n\tcout << strlen(s.get()) << endl;\n\n}\n\n\n\nvoid benchCoutInt(int n) {\n\n\tVI t(n);\n\n\tfor (int& i : t)\n\n\t\ti = _r_gen();\n\n\tfor (int i : t)\n\n\t\tcout << i << '\\n';\n\n\tcout << flush;\n\n}\n\n\n\nvoid benchCoutString(int n) {\n\n\tstring s(n, ' ');\n", "file_path": "iostream_vis_cstdio.cc", "rank": 53, "score": 5.580813310588878 }, { "content": "\tassert(pid != -1);\n\n\tif (pid == 0) {\n\n\t\tdup2(STDOUT_FILENO, STDERR_FILENO);\n\n\t\tdup2(fd, STDIN_FILENO);\n\n\t\tfreopen(\"/tmp/bench-test.txt\", \"w\", stdout);\n\n\t\tStopwatch st;\n\n\t\tfunc(n);\n\n\t\tcerr << bench_name << \":\\t\" << setprecision(6) << fixed << st.time()\n\n\t\t\t<< endl;\n\n\t\tfflush(stdout);\n\n\t\t_exit(0);\n\n\t}\n\n\n\n\twaitpid(pid, nullptr, 0);\n\n\n\n\tfclose(f);\n\n}\n\n\n\nvoid genInts(FILE *f, int n) {\n\n\twhile (n--)\n", "file_path": "iostream_vis_cstdio.cc", "rank": 54, "score": 5.228573907635395 }, { "content": "\t\t\t\tcase 0: m1[i] = []{ cout << \"foo1\" << endl; }; break;\n\n\t\t\t\tcase 1: m1[i] = []{ cout << \"foo2\" << endl; }; break;\n\n\t\t\t\tcase 2: m1[i] = []{ cout << \"foo3\" << endl; }; break;\n\n\t\t\t\tcase 3: m1[i] = []{ cout << \"foo4\" << endl; }; break;\n\n\t\t\t\tcase 4: m1[i] = []{ cout << \"foo5\" << endl; }; break;\n\n\t\t\t\tcase 5: m1[i] = []{ cout << \"foo6\" << endl; }; break;\n\n\t\t\t}\n\n\t};\n\n\n\n\tauto filln = [&] {\n\n\t\tn = {};\n\n\t\tREP (i, N)\n\n\t\t\tn[i] = get_poli();\n\n\t};\n\n\n\n\tauto useu = [&] {\n\n\t\tREP (i, N / 3) {\n\n\t\t\tint k = rand() % N;\n\n\t\t\tif (k < N) // Real life check\n\n\t\t\t\tu[k]();\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 55, "score": 4.953367588224274 }, { "content": "\tsize_t sz = a.size() + b.size() + c.size();\n\n\tassert(sz < N);\n\n\tarray<char, N> res;\n\n\tmemcpy(res.data(), a.data(), a.size());\n\n\tmemcpy(res.data() + a.size(), b.data(), b.size());\n\n\tmemcpy(res.data() + a.size() + b.size(), c.data(), c.size());\n\n\tres[sz] = '\\0';\n\n\treturn res;\n\n}\n\n\n\nint main() {\n\n\tios::sync_with_stdio(false);\n\n\tcin.tie(nullptr);\n\n\n\n\tint n = 10000000;\n\n\tstring a = \"219489840124\";\n\n\tstring b = \"rdhfhoisdfusd9f97\";\n\n\tstring c = \";[;];];[][;;];;\";\n\n\t// cin >> a >> b >> c;\n\n\n", "file_path": "buff_vis_string_concat.cc", "rank": 56, "score": 4.8729617512584245 }, { "content": "\trun(genString, benchCinString, n, \"cin >> string\");\n\n\t// run(genString, benchFinString, n, 
\"fin >> string\");\n\n\trun(genString, benchCinCstring, n, \"cin >> char*\");\n\n\t// run(genString, benchFinCstring, n, \"fin >> char*\");\n\n\trun(genString, benchScanfCstring, n, \"scanf(char*)\");\n\n\tcout << endl;\n\n\trun(genNothing, benchCoutInt, n, \"cout << int\");\n\n\trun(genNothing, benchPrintfInt, n, \"printf(int)\");\n\n\tcout << endl;\n\n\trun(genNothing, benchCoutString, n, \"cout << string\");\n\n\trun(genNothing, benchCoutCstring, n, \"cout << char*\");\n\n\trun(genNothing, benchPrintfCstring, n, \"printf(char*)\");\n\n}\n\n\n\nint main() {\n\n\tconstexpr int N = 2e7;\n\n\tbenchmark(N);\n\n\treturn 0;\n\n}\n", "file_path": "iostream_vis_cstdio.cc", "rank": 57, "score": 4.832443035661284 }, { "content": "\n\ninline int ceil2(int x) { return 1 << (sizeof(x) * 8 -__builtin_clz(x - 1)); }\n\n\n\n#undef T\n\n#undef CL\n\n#ifdef DEBUG\n\n# define D(...) __VA_ARGS__\n\n#else\n\n# define D(...)\n\n#endif\n\n\n\n#define E(...) D(eprintf(__VA_ARGS__))\n\n#define OUT(a,b) D(cerr << #a \": \", __out(cerr, a, b), E(\"\\n\"))\n\n#define DUMP(x) D(cerr << #x \": \", __dump(cerr, ALL(x)), E(\"\\n\"))\n\n#define LOG(x) D(cerr << #x \": \" << (x))\n\n#define LOG2(x, y) D(cerr << x << \": \" << (y))\n\n#define LOGN(x) D(LOG(x) << endl)\n\n#define LOGN2(x, y) D(LOG2(x, y) << endl)\n\n/// End of templates\n\n\n\ntemplate<class A, class B>\n\ninline int fastMod(A x, B mod) { return x < mod ? x : x % mod; }\n\n\n", "file_path": "iostream_vis_cstdio.cc", "rank": 58, "score": 4.686877724214813 }, { "content": "\tunion semun ignored_argument;\n\n\treturn semctl(semid, 1, IPC_RMID, ignored_argument);\n\n}\n\n\n\n/* Initialize a binary semaphore with a value of 1. */\n\nint binary_semaphore_initialize(int semid, int x = 1) {\n\n\tunion semun argument;\n\n\tunsigned short values[1];\n\n\tvalues[0] = x;\n\n\targument.array = values;\n\n\treturn semctl(semid, 0, SETALL, argument);\n\n}\n\n\n\n/* Wait on a binary semaphore. Block until the semaphore value is positive, then\n\ndecrement it by 1. */\n\nint binary_semaphore_wait(int semid) {\n\n\tstruct sembuf operations[1];\n\n\t/* Use the first (and only) semaphore. */\n\n\toperations[0].sem_num = 0;\n\n\t/* Decrement by 1. 
*/\n", "file_path": "ipc_bench.cc", "rank": 59, "score": 4.628732167700832 }, { "content": "#include <sys/wait.h>\n\n\n\nint getUnlinkedTmpFile(int flags = 0) noexcept {\n\n\tint fd;\n\n#ifdef O_TMPFILE\n\n\tfd = open(\"/tmp\", O_TMPFILE | O_RDWR | flags, S_0600);\n\n\tif (fd != -1)\n\n\t\treturn fd;\n\n\n\n\tif (errno != EINVAL)\n\n\t\treturn -1;\n\n#endif\n\n\n\n\tchar name[] = \"/tmp/tmp_unlinked_file.XXXXXX\";\n\n\tumask(077); // Only owner can access this temporary file\n\n\tfd = mkostemp(name, flags);\n\n\tif (fd == -1)\n\n\t\treturn -1;\n\n\n\n\t(void)unlink(name);\n", "file_path": "iostream_vis_cstdio.cc", "rank": 60, "score": 4.4054358625028325 }, { "content": "\tbenchmark(usen, \"use n\", 1);\n\n\tbenchmark(useif, \"use if\", 1);\n\n\tcerr << nl;\n\n\tbenchmark(useu, \"use u\", 10000);\n\n\tbenchmark(usev, \"use v\", 10000);\n\n\tbenchmark(usev1, \"use v1\", 10000);\n\n\tbenchmark(usew, \"use w\", 10000);\n\n\tbenchmark(usel, \"use l\", 10000);\n\n\tbenchmark(usem, \"use m\", 10000);\n\n\tbenchmark(usem1, \"use m1\", 10000);\n\n\tbenchmark(usen, \"use n\", 10000);\n\n\tbenchmark(useif, \"use if\", 10000);\n\n\n\n\treturn 0;\n\n}\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 61, "score": 4.233876247267462 }, { "content": "\tbenchmark(fillm1, \"fill m1\", 100);\n\n\tbenchmark(filln, \"fill n\", 100);\n\n\tcerr << nl;\n\n\tbenchmark(useu, \"use u\", 10);\n\n\tbenchmark(usev, \"use v\", 10);\n\n\tbenchmark(usev1, \"use v1\", 10);\n\n\tbenchmark(usew, \"use w\", 10);\n\n\tbenchmark(usel, \"use l\", 10);\n\n\tbenchmark(usem, \"use m\", 10);\n\n\tbenchmark(usem1, \"use m1\", 10);\n\n\tbenchmark(usen, \"use n\", 10);\n\n\tbenchmark(useif, \"use if\", 10);\n\n\tcerr << nl;\n\n\tbenchmark(useu, \"use u\", 1);\n\n\tbenchmark(usev, \"use v\", 1);\n\n\tbenchmark(usev1, \"use v1\", 1);\n\n\tbenchmark(usew, \"use w\", 1);\n\n\tbenchmark(usel, \"use l\", 1);\n\n\tbenchmark(usem, \"use m\", 1);\n\n\tbenchmark(usem1, \"use m1\", 1);\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 62, "score": 4.203512319492669 }, { "content": "\toperations[0].sem_op = -1;\n\n\t/* Permit undoing. */\n\n\toperations[0].sem_flg = SEM_UNDO;\n\n\treturn semop(semid, operations, 1);\n\n}\n\n\n\n/* Post to a binary semaphore: increment its value by 1.\n\nThis returns immediately. */\n\nint binary_semaphore_post(int semid) {\n\n\tstruct sembuf operations[1];\n\n\t/* Use the first (and only) semaphore. */\n\n\toperations[0].sem_num = 0;\n\n\t/* Increment by 1. */\n\n\toperations[0].sem_op = 1;\n\n\t/* Permit undoing. 
*/\n\n\toperations[0].sem_flg = SEM_UNDO;\n\n\treturn semop(semid, operations, 1);\n\n}\n\n\n\n/* Post to a binary semaphore: increment its value by 1.\n", "file_path": "ipc_bench.cc", "rank": 63, "score": 3.919252703430023 }, { "content": "OO(vector<A>)\n\n\n\nT<CL A>\n\nOO(deque<A>)\n\n\n\nT<CL A>\n\nOO(list<A>)\n\n\n\nT<CL A, CL B>\n\nOO(set<A, B>)\n\n\n\nT<CL A, CL B, CL C>\n\nOO(map<A, B, C>)\n\n\n\nT<CL A, CL B>\n\nOO(multiset<A, B>)\n\n\n\nT<CL A, CL B, CL C>\n\nOO(multimap<A, B, C>)\n\n\n", "file_path": "copying_memory.cc", "rank": 64, "score": 3.9115381694528573 }, { "content": "OO(vector<A>)\n\n\n\nT<CL A>\n\nOO(deque<A>)\n\n\n\nT<CL A>\n\nOO(list<A>)\n\n\n\nT<CL A, CL B>\n\nOO(set<A, B>)\n\n\n\nT<CL A, CL B, CL C>\n\nOO(map<A, B, C>)\n\n\n\nT<CL A, CL B>\n\nOO(multiset<A, B>)\n\n\n\nT<CL A, CL B, CL C>\n\nOO(multimap<A, B, C>)\n\n\n", "file_path": "text_bech.cc", "rank": 65, "score": 3.9115381694528573 }, { "content": "\t\tREP (i, N)\n\n\t\t\tw[i] = get_poli();\n\n\t};\n\n\n\n\tauto filll = [&] {\n\n\t\tl = {};\n\n\t\tREP (i, N)\n\n\t\t\tl[i] = get_foo();\n\n\t};\n\n\n\n\tauto fillm = [&] {\n\n\t\tm = {};\n\n\t\tREP (i, N)\n\n\t\t\tm[i] = get_foo();\n\n\t};\n\n\n\n\tauto fillm1 = [&] {\n\n\t\tm1 = {};\n\n\t\tREP (i, N)\n\n\t\t\tswitch (rand() % 6) {\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 66, "score": 3.8351189941470945 }, { "content": "\t\tfprintf(f, \"%i\\n\", (int)_r_gen());\n\n}\n\n\n\nvoid genString(FILE *f, int n) {\n\n\twhile (n--)\n\n\t\tfputc(getRandom('A', 'z'), f);\n\n\tfputc('\\n', f);\n\n}\n\n\n\nvoid genNothing(FILE*, int) {}\n\n\n\nvoid benchmark(int n) {\n\n\tios::sync_with_stdio(false);\n\n\tcin.tie(nullptr);\n\n\tcout << \"n: \" << n << \" (\" << (double)n << \")\" << endl;\n\n\n\n\trun(genInts, benchCinInt, n, \"cin >> int\");\n\n\t// run(genInts, benchFinInt, n, \"fin >> int\");\n\n\trun(genInts, benchScanfInt, n, \"scanf(int)\");\n\n\tcout << endl;\n", "file_path": "iostream_vis_cstdio.cc", "rank": 67, "score": 3.7098370783440333 }, { "content": "\treturn fd;\n\n}\n\n\n\ninline int sclose(int fd) noexcept {\n\n\twhile (close(fd) == -1)\n\n\t\tif (errno != EINTR)\n\n\t\t\treturn -1;\n\n\n\n\treturn 0;\n\n}\n\n\n\nmt19937 _r_gen(chrono::system_clock::now().time_since_epoch().count());\n\nint getRandom(int a, int b) {\n\n\treturn uniform_int_distribution<int>(a, b)(_r_gen);\n\n}\n\n\n\n#define BENCHMARK(...)\n\n\n\nvoid benchCinInt(int n) {\n\n\tint x = 0;\n", "file_path": "iostream_vis_cstdio.cc", "rank": 68, "score": 3.7040265250960034 }, { "content": "\n\n\tT<CL A, CL B, CL C>\n\n\tvoid operator()(A& a, B& b, C& c) { operator()(a, b), operator()(c); }\n\n\n\n\tT<CL A, CL B, CL C, CL D>\n\n\tvoid operator()(A& a, B& b, C& c, D& d) {\n\n\t\toperator()(a, b, c);\n\n\t\toperator()(d);\n\n\t}\n\n\n\n\tT<CL A, CL B, CL C, CL D, CL E>\n\n\tvoid operator()(A& a, B& b, C& c, D& d, E& e) {\n\n\t\toperator()(a, b, c, d);\n\n\t\toperator()(e);\n\n\t}\n\n\n\n\tT<CL A, CL B, CL C, CL D, CL E, CL F>\n\n\tvoid operator()(A& a, B& b, C& c, D& d, E& e, F& f) {\n\n\t\toperator()(a, b, c, d, e);\n\n\t\toperator()(f);\n", "file_path": "text_bech.cc", "rank": 69, "score": 3.6671387978306997 }, { "content": "\t};\n\n\n\n\tauto usen = [&] {\n\n\t\tREP (i, N / 3)\n\n\t\t\t(*n[rand() % N])();\n\n\t};\n\n\n\n\tauto useif = [&] {\n\n\t\tstatic_assert(N <= 300, \"\");\n\n\t\tREP (i, N / 3)\n\n\t\t\tswitch (rand() % N) {\n\n\t\t\t\tcase 0: foo4(); break;\n\n\t\t\t\tcase 1: foo4(); break;\n\n\t\t\t\tcase 2: foo6(); break;\n\n\t\t\t\tcase 3: 
foo6(); break;\n\n\t\t\t\tcase 4: foo1(); break;\n\n\t\t\t\tcase 5: foo2(); break;\n\n\t\t\t\tcase 6: foo5(); break;\n\n\t\t\t\tcase 7: foo1(); break;\n\n\t\t\t\tcase 8: foo4(); break;\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 70, "score": 3.636904072294988 }, { "content": "\tvoid skipWhiteSpaces() {\n\n\t\twhile (isspace(peek()))\n\n\t\t\t++pos;\n\n\t}\n\n\n\n\tT<CL A>\n\n\tA get();\n\n\n\n\tT<CL A>\n\n\tvoid operator()(A& x) { x = get<A>(); }\n\n\n\n\tT<CL A, CL B>\n\n\tvoid operator()(A& a, B& b) { operator()(a), operator()(b); }\n\n\n\n\tT<CL A, CL B, CL C>\n\n\tvoid operator()(A& a, B& b, C& c) { operator()(a, b), operator()(c); }\n\n\n\n\tT<CL A, CL B, CL C, CL D>\n\n\tvoid operator()(A& a, B& b, C& c, D& d) {\n\n\t\toperator()(a, b, c);\n", "file_path": "copying_memory.cc", "rank": 71, "score": 3.624789643886392 }, { "content": "\t}\n\n\n\n\tsize_t size() { return len_; }\n\n};\n\n\n\ninline std::string const& to_string(std::string const& s) { return s; }\n\n\n\ninline size_t size(const std::string& str) { return str.size(); }\n\n\n\ninline size_t size(const char* str) { return strlen(str); }\n\n\n\nconstexpr inline size_t size(char) { return 1; }\n\n\n\ntemplate<typename... Args>\n\ninline std::string stringer(Args const&... args)\n\n{\n\n\tstd::string result;\n\n\tsize_t len = 0;\n\n\tbool t[] = { (len += size(args), false)... };\n\n\t(void)t;\n", "file_path": "text_bech.cc", "rank": 72, "score": 3.52986873379424 }, { "content": "\tREP (i, n - 1)\n\n\t\tcin >> x;\n\n\tcout << x << endl;\n\n}\n\n\n\nvoid benchFinInt(int n) {\n\n\tint x = 0;\n\n\tREP (i, n - 1)\n\n\t\tfin >> x;\n\n\tcout << x << endl;\n\n}\n\n\n\nvoid benchCinString(int) {\n\n\tstring s;\n\n\tcin >> s;\n\n\tcout << s.size() << endl;\n\n}\n\n\n\nvoid benchFinString(int) {\n\n\tstring s;\n", "file_path": "iostream_vis_cstdio.cc", "rank": 73, "score": 3.4968892386738832 }, { "content": "\tSharedMemorySegment sms(sizeof(xxx));\n\n\tchar buff[BUFF_SIZE];\n\n\tmemset(sms.addr(), 0, sizeof(xxx));\n\n\n\n\tint sem = binary_semaphore_allocation(IPC_PRIVATE, IPC_CREAT | S_IRWXU);\n\n\t// if (sem < 0)\n\n\t// \teprintf(\"Error: semget() - %s\\n\", strerror(errno));\n\n\tbinary_semaphore_initialize(sem, 0);\n\n\n\n\tint cpid = fork();\n\n\tif (cpid == -1)\n\n\t\tabort();\n\n\telse if (cpid == 0) {\n\n\t\tmemset(buff, '7', BUFF_SIZE);\n\n\t\txxx *x = (xxx*)sms.addr();\n\n\n\n\t\twhile (data_length) {\n\n\t\t\tx->size = std::min<LL>(data_length, BUFF_SIZE);\n\n\t\t\t// copy(buff, buff+BUFF_SIZE, x->buff);\n\n\t\t\tmemcpy(x->buff, buff, x->size);\n", "file_path": "ipc_bench.cc", "rank": 74, "score": 3.4058096475076516 }, { "content": "\n\n\tREP (i, 20000000) {\n\n#if WHICH == 1\n\n\t\t// std::string::append()\n\n\t\ts.clear();\n\n\t\ts.append(\"12312948\").append(\": + \").append(\"askfhkas\").append(\" - \").append(\"iuqwruiuwiq\");\n\n\n\n#elif WHICH == 2\n\n\t\t// std::string operator<<()\n\n\t\ts.clear();\n\n\t\ts << \"12312948\" << \": + \" << \"askfhkas\" << \" - \" << \"iuqwruiuwiq\";\n\n\n\n#elif WHICH == 3\n\n\t\t// std::string + stringer\n\n\t\ts = stringer(\"12312948\", \": + \", \"askfhkas\", \" - \", \"iuqwruiuwiq\");\n\n\n\n#elif WHICH == 4\n\n\t\t// snprintf\n\n\t\tsnprintf(t, 200, \"%s: + %s - %s\", \"12312948\", \"askfhkas\", \"iuqwruiuwiq\");\n\n\n", "file_path": "text_bech.cc", "rank": 75, "score": 3.3899821133793235 }, { "content": "\tfin >> s;\n\n\tcout << s.size() << endl;\n\n}\n\n\n\nvoid benchCinCstring(int n) {\n\n\tunique_ptr<char[]> s(new char[n + 1]);\n\n\tcin 
>> s.get();\n\n\tcout << strlen(s.get()) << endl;\n\n}\n\n\n\nvoid benchFinCstring(int n) {\n\n\tunique_ptr<char[]> s(new char[n + 1]);\n\n\tfin >> s.get();\n\n\tcout << strlen(s.get()) << endl;\n\n}\n\n\n\nvoid benchScanfInt(int n) {\n\n\tint x = 0;\n\n\tREP (i, n - 1)\n\n\t\tscanf(\"%i\", &x);\n", "file_path": "iostream_vis_cstdio.cc", "rank": 76, "score": 3.2918324199808557 }, { "content": "\t\toperator()(d);\n\n\t}\n\n\n\n\tT<CL A, CL B, CL C, CL D, CL E>\n\n\tvoid operator()(A& a, B& b, C& c, D& d, E& e) {\n\n\t\toperator()(a, b, c, d);\n\n\t\toperator()(e);\n\n\t}\n\n\n\n\tT<CL A, CL B, CL C, CL D, CL E, CL F>\n\n\tvoid operator()(A& a, B& b, C& c, D& d, E& e, F& f) {\n\n\t\toperator()(a, b, c, d, e);\n\n\t\toperator()(f);\n\n\t}\n\n} input;\n\n\n\n\n\nT<> uint Input::get<uint>() {\n\n\tskipWhiteSpaces();\n\n\tuint x = 0;\n", "file_path": "copying_memory.cc", "rank": 77, "score": 3.2569806623970337 }, { "content": "CL Input {\n\n\tstatic const int BUFF_SIZE = 1 << 16;\n\n\tconst int fd;\n\n\tunsigned char buff[BUFF_SIZE], *pos, *end;\n\n\n\n\tvoid grabBuffer() { end = (pos = buff) + read(fd, buff, BUFF_SIZE); }\n\n\n\npublic:\n\n\texplicit Input(int x) : fd(x), pos(buff), end(buff) {}\n\n\n\n\tint peek() {\n\n\t\tif (UNLIKELY(pos == end))\n\n\t\t\tgrabBuffer();\n\n\t\treturn LIKELY(pos != end) ? *pos : -1;\n\n\t}\n\n\n\n\tint getChar() {\n\n\t\tif (UNLIKELY(pos == end))\n\n\t\t\tgrabBuffer();\n\n\t\treturn LIKELY(pos != end) ? *pos++ : -1;\n", "file_path": "iostream_vis_cstdio.cc", "rank": 78, "score": 3.136218656690107 }, { "content": "\t\t\t\tcase 289: foo6(); break;\n\n\t\t\t\tcase 290: foo3(); break;\n\n\t\t\t\tcase 291: foo6(); break;\n\n\t\t\t\tcase 292: foo5(); break;\n\n\t\t\t\tcase 293: foo4(); break;\n\n\t\t\t\tcase 294: foo4(); break;\n\n\t\t\t\tcase 295: foo4(); break;\n\n\t\t\t\tcase 296: foo3(); break;\n\n\t\t\t\tcase 297: foo4(); break;\n\n\t\t\t\tcase 298: foo5(); break;\n\n\t\t\t\tcase 299: foo1(); break;\n\n\t\t\tdefault: cerr << \"N is too big!\" << nl; abort();\n\n\t\t\t}\n\n\t};\n\n\n\n\tauto benchmark = [](auto func, auto name, size_t iterations = 1) {\n\n\t\tsrand(19472974);\n\n\t\t// cerr << \"### \" << name << \" ###\\n\" << flush;\n\n\t\tTimer tm;\n\n\t\tREP (i, (int)iterations)\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 79, "score": 3.1070540029446696 }, { "content": "\t\t\tv[i] = get_foo();\n\n\t};\n\n\n\n\tauto fillv1 = [&] {\n\n\t\tv1 = {};\n\n\t\tv1.resize(N);\n\n\t\tREP (i, N)\n\n\t\t\tswitch (rand() % 6) {\n\n\t\t\t\tcase 0: v1[i] = []{ cout << \"foo1\" << endl; }; break;\n\n\t\t\t\tcase 1: v1[i] = []{ cout << \"foo2\" << endl; }; break;\n\n\t\t\t\tcase 2: v1[i] = []{ cout << \"foo3\" << endl; }; break;\n\n\t\t\t\tcase 3: v1[i] = []{ cout << \"foo4\" << endl; }; break;\n\n\t\t\t\tcase 4: v1[i] = []{ cout << \"foo5\" << endl; }; break;\n\n\t\t\t\tcase 5: v1[i] = []{ cout << \"foo6\" << endl; }; break;\n\n\t\t\t}\n\n\t};\n\n\n\n\tauto fillw = [&] {\n\n\t\tw = {};\n\n\t\tw.resize(N);\n", "file_path": "stdfunction_vis_function_pointer_vis_raw_call_in_if.cpp", "rank": 81, "score": 3.082862115551836 }, { "content": "#elif WHICH == 5\n\n\t\t// raw copy function calls\n\n\t\tint a = strlen(\"12312948\");\n\n\t\tint b = a + strlen(\": + \");\n\n\t\tint c = b + strlen(\"askfhkas\");\n\n\t\tint d = c + strlen(\" - \");\n\n\t\tstrcpy(t, \"12312948\");\n\n\t\tstrcpy(t + a, \": + \");\n\n\t\tstrcpy(t + b, \"askfhkas\");\n\n\t\tstrcpy(t + c, \" - \");\n\n\t\tstrcpy(t + d, \"iuqwruiuwiq\");\n\n\n\n#elif WHICH == 6\n\n\t\t// 
sample class to operate on raw copy function calls\n\n\t\tx.clear(); x << \"12312948\" << \": + \" << \"askfhkas\" << \" - \" << \"iuqwruiuwiq\";\n\n\n\n#elif WHICH == 6\n\n\t\t// Cstring\n\n\t\tstr.clear(); str << \"12312948\" << \": + \" << \"askfhkas\" << \" - \" << \"iuqwruiuwiq\";\n\n#endif\n\n\t}\n\n\n\n\tprintf(\"%s\\n\", t);\n\n\tprintf(\"%s\\n\", s.c_str());\n\n\tprintf(\"%s\\n\", x.t);\n\n\tprintf(\"%s\\n\", str.p);\n\n\treturn 0;\n\n}\n", "file_path": "text_bech.cc", "rank": 82, "score": 3.0372121249873105 }, { "content": "\tvoid skipWhiteSpaces() {\n\n\t\twhile (isspace(peek()))\n\n\t\t\t++pos;\n\n\t}\n\n\n\n\tT<CL A>\n\n\tA get();\n\n\n\n\tvoid operator()(char* s) {\n\n\t\tskipWhiteSpaces();\n\n\t\twhile (!isspace(peek()))\n\n\t\t\t*s++ = *pos++;\n\n\t\t*s = '\\0';\n\n\t}\n\n\n\n\tT<CL A>\n\n\tvoid operator()(A& x) { x = get<A>(); }\n\n\n\n\tT<CL A, CL B>\n\n\tvoid operator()(A& a, B& b) { operator()(a), operator()(b); }\n", "file_path": "text_bech.cc", "rank": 83, "score": 2.874939596504283 }, { "content": "\n\nvoid benchPrintfCstring(int n) {\n\n\tunique_ptr<char[]> s(new char[n + 1]);\n\n\ts[n] = '\\0';\n\n\tREP (i, n - 1)\n\n\t\ts[i] = getRandom('A', 'z');\n\n\tprintf(\"%s\\n\", s.get());\n\n}\n\n\n\ntemplate<class Gen, class Func>\n\nvoid run(Gen gen, Func func, int n, const string& bench_name) {\n\n\tint fd = getUnlinkedTmpFile();\n\n\tassert(fd != -1);\n\n\tFILE *f = fdopen(fd, \"rw\");\n\n\tassert(f);\n\n\n\n\tgen(f, n);\n\n\tlseek(fd, 0, SEEK_SET);\n\n\n\n\tpid_t pid = fork();\n", "file_path": "iostream_vis_cstdio.cc", "rank": 85, "score": 2.723853618867315 }, { "content": "\tvoid grabBuffer() {\n\n\t\tpos = buff;\n\n\t\tend = buff + read(0, buff, BUFF_SIZE);\n\n\t}\n\n\n\npublic:\n\n\tInput() : pos(buff), end(buff) {}\n\n\n\n\tint peek() {\n\n\t\tif (pos == end)\n\n\t\t\tgrabBuffer();\n\n\t\treturn pos != end ? *pos : -1;\n\n\t}\n\n\n\n\tint getChar() {\n\n\t\tif (pos == end)\n\n\t\t\tgrabBuffer();\n\n\t\treturn pos != end ? *pos++ : -1;\n\n\t}\n\n\n", "file_path": "copying_memory.cc", "rank": 86, "score": 2.6958383245174327 }, { "content": "\tvoid grabBuffer() {\n\n\t\tpos = buff;\n\n\t\tend = buff + read(0, buff, BUFF_SIZE);\n\n\t}\n\n\n\npublic:\n\n\tInput() : pos(buff), end(buff) {}\n\n\n\n\tint peek() {\n\n\t\tif (pos == end)\n\n\t\t\tgrabBuffer();\n\n\t\treturn pos != end ? *pos : -1;\n\n\t}\n\n\n\n\tint getChar() {\n\n\t\tif (pos == end)\n\n\t\t\tgrabBuffer();\n\n\t\treturn pos != end ? 
*pos++ : -1;\n\n\t}\n\n\n", "file_path": "text_bech.cc", "rank": 87, "score": 2.6958383245174327 }, { "content": "\n\n\t\tstrcpy(p, str);\n\n\t}\n\n\n\n\tCstring(const Cstring& x): len_(x.len_), real_len_(len_ + 1),\n\n\t\t\tp((char*)malloc(real_len_)) {\n\n\t\tif (p == NULL)\n\n\t\t\tthrow std::bad_alloc();\n\n\n\n\t\tstrcpy(p, x.p);\n\n\t}\n\n\n\n\t~Cstring() { free(p); }\n\n\n\n\tvoid clear() { len_ = 0; }\n\n\n\n\tvoid reserve(size_t len) {\n\n\t\tif (len + 1 > real_len_) {\n\n\t\t\tsize_t new_real_len = std::max(len + 1, real_len_ << 1);\n\n\t\t\tif (NULL == realloc(p, new_real_len))\n", "file_path": "text_bech.cc", "rank": 88, "score": 2.6535793755242976 }, { "content": "\t\t\tdata_length -= x->size;\n\n\t\t\tbinary_semaphore_oper(sem, 1);\n\n\t\t\tbinary_semaphore_oper(sem, -2);\n\n\t\t}\n\n\t\tx->size = BUFF_SIZE-1;\n\n\n\n\t\t(void)binary_semaphore_deallocate(sem);\n\n\t\t_exit(0);\n\n\t}\n\n\n\n\t// binary_semaphore_initialize(sem, 2);\n\n\tStopwatch sw;\n\n\tlong long got = 0;\n\n\txxx *x = (xxx*)sms.addr();\n\n\twhile (got < data_length) {\n\n\t\tbinary_semaphore_oper(sem, -1);\n\n\t\tif (x->size <= 0) {\n\n\t\t\tcerr << \"x->size: error!\\n\";\n\n\t\t\tbreak;\n\n\t\t}\n", "file_path": "ipc_bench.cc", "rank": 89, "score": 2.637040068083747 }, { "content": "# define D(...)\n\n# define E(...)\n\n# define OUT(...)\n\n# define LOG(...)\n\n# define LOG2(...)\n\n# define LOGN(...)\n\n# define LOGN2(...)\n\n#endif\n\n/// End of templates\n\n\n\n#include <sys/time.h>\n\n\n", "file_path": "copying_memory.cc", "rank": 90, "score": 2.6136096131846402 }, { "content": "\tskipWhiteSpaces();\n\n\tuint x = 0;\n\n\twhile (isdigit(peek()))\n\n\t\tx = x * 10 + *pos++ - '0';\n\n\treturn x;\n\n}\n\n\n\nT<> int Input::get<int>() {\n\n\tskipWhiteSpaces();\n\n\treturn peek() == '-' ? (++pos, -get<uint>()) : get<uint>();\n\n}\n\n\n\nT<> ULL Input::get<ULL>() {\n\n\tskipWhiteSpaces();\n\n\tULL x = 0;\n\n\twhile (isdigit(peek()))\n\n\t\tx = x * 10 + *pos++ - '0';\n\n\treturn x;\n\n}\n\n\n", "file_path": "text_bech.cc", "rank": 91, "score": 2.5997998984857893 }, { "content": "\tskipWhiteSpaces();\n\n\tx = *pos++;\n\n\treturn *this;\n\n}\n\n\n\nT<> Input& Input::operator>> <uint>(uint& x) {\n\n\tskipWhiteSpaces();\n\n\tx = 0;\n\n\twhile (isdigit(peek()))\n\n\t\tx = x * 10 + *pos++ - '0';\n\n\treturn *this;\n\n}\n\n\n\nT<> Input& Input::operator>> <int>(int& x) {\n\n\tskipWhiteSpaces();\n\n\tif (peek() != '-')\n\n\t\treturn operator>> <uint>((uint&)x);\n\n\t++pos; operator>> <uint>((uint&)x); x = -x;\n\n\treturn *this;\n\n}\n", "file_path": "iostream_vis_cstdio.cc", "rank": 92, "score": 2.5997998984857893 }, { "content": "\twhile (isdigit(peek()))\n\n\t\tx = x * 10 + *pos++ - '0';\n\n\treturn x;\n\n}\n\n\n\nT<> int Input::get<int>() {\n\n\tskipWhiteSpaces();\n\n\treturn peek() == '-' ? 
(++pos, -get<uint>()) : get<uint>();\n\n}\n\n\n\nT<> ULL Input::get<ULL>() {\n\n\tskipWhiteSpaces();\n\n\tULL x = 0;\n\n\twhile (isdigit(peek()))\n\n\t\tx = x * 10 + *pos++ - '0';\n\n\treturn x;\n\n}\n\n\n\nT<> LL Input::get<LL>() {\n\n\tskipWhiteSpaces();\n", "file_path": "copying_memory.cc", "rank": 93, "score": 2.5103687693363037 }, { "content": "}\n\n\n\nT<> Input& Input::operator>> <int>(int& x) {\n\n\tskipWhiteSpaces();\n\n\tif (peek() != '-')\n\n\t\treturn operator>> <uint>((uint&)x);\n\n\t++pos; operator>> <uint>((uint&)x); x = -x;\n\n\treturn *this;\n\n}\n\n\n\nT<> Input& Input::operator>> <ULL>(ULL& x) {\n\n\tskipWhiteSpaces();\n\n\tx = 0;\n\n\twhile (isdigit(peek()))\n\n\t\tx = x * 10 + *pos++ - '0';\n\n\treturn *this;\n\n}\n\n\n\nT<> Input& Input::operator>> <LL>(LL& x) {\n\n\tskipWhiteSpaces();\n", "file_path": "ipc_bench.cc", "rank": 94, "score": 2.488964153276263 }, { "content": "\tif (peek() != '-')\n\n\t\treturn operator>> <ULL>((ULL&)x);\n\n\t++pos; operator>> <ULL>((ULL&)x); x = -x;\n\n\treturn *this;\n\n}\n\n\n\nT<> Input& Input::operator>> <string>(string& x) {\n\n\tskipWhiteSpaces();\n\n\tx.clear();\n\n\twhile (!isspace(peek()))\n\n\t\tx += *pos++;\n\n\treturn *this;\n\n}\n\n\n\ninline int ceil2(int x) { return 1 << (sizeof(x) * 8 -__builtin_clz(x - 1)); }\n\n\n\n#undef T\n\n#undef CL\n\n#ifdef DEBUG\n\n# define D(...) __VA_ARGS__\n", "file_path": "ipc_bench.cc", "rank": 95, "score": 2.488964153276263 }, { "content": "\t\t\t\tthrow std::bad_alloc();\n\n\n\n\t\t\treal_len_ = new_real_len;\n\n\t\t}\n\n\t}\n\n\n\n\tvoid append(const char* str, size_t len) {\n\n\t\treserve(len_ + len);\n\n\t\tstrncpy(p + len_, str, len);\n\n\t\tp[len_ += len] = '\\0';\n\n\t}\n\n\n\n\tCstring& operator<<(const Cstring& x) {\n\n\t\tappend(x.p, x.len_);\n\n\t\treturn *this;\n\n\t}\n\n\n\n\tCstring& operator<<(const char* str) {\n\n\t\tappend(str, strlen(str));\n\n\t\treturn *this;\n", "file_path": "text_bech.cc", "rank": 96, "score": 2.0240818572338717 }, { "content": "\t\tif (pos == end)\n\n\t\t\tgrabBuffer();\n\n\t\treturn pos != end ? *pos : -1;\n\n\t}\n\n\n\n\tint getChar() {\n\n\t\tif (pos == end)\n\n\t\t\tgrabBuffer();\n\n\t\treturn pos != end ? *pos++ : -1;\n\n\t}\n\n\n\n\tvoid skipWhiteSpaces() {\n\n\t\twhile (isspace(peek()))\n\n\t\t\t++pos;\n\n\t}\n\n\n\n\tInput& operator>>(char *s) {\n\n\t\tskipWhiteSpaces();\n\n\t\twhile (!isspace(peek()))\n\n\t\t\t*s++ = *pos++;\n", "file_path": "ipc_bench.cc", "rank": 97, "score": 1.9774042532627867 }, { "content": "\tcout << a << endl;\n\n\n\n\tsrand(10101029);\n\n\n\n\tconst int LEN = 10e3 + 1, OPER = 1e6;\n\n\tprintf(\"LEN: %i\\n\", LEN);\n\n\tprintf(\"OPER: %i\\n\", OPER);\n\n\tchar t[LEN];\n\n\tchar t2[LEN];\n\n\t// Fill t\n\n\tREP (i, LEN - 1)\n\n\t\tt[i] = rand() % 26 + 'a';\n\n\tt[LEN - 1] = '\\0';\n\n\n\n\t// Benchmark\n\n\tTimer timer;\n\n/*\n\n\t// iterate\n\n\ttimer.start();\n\n\tREP (i, OPER) {\n", "file_path": "copying_memory.cc", "rank": 98, "score": 1.9252378231859129 }, { "content": "\tresult.reserve(len);\n\n\tbool x[100] = {(result += args, false)...};\n\n\t(void)x;\n\n\treturn result;\n\n}\n\n\n\ntemplate<class T>\n\ninline string& operator<<(const string& s, const T& x) {\n\n\treturn const_cast<string&>(s) += x;\n\n}\n\n\n\nint main() {\n\n\tchar t[200];\n\n\tstring s;\n\n\ts.reserve(100);\n\n\txxx x;\n\n\tCstring str;\n\n\tstr.reserve(100);\n\n\n\n#define WHICH 1 // SET THIS TO CHOOSE WHICH TEST WILL BE RUN\n", "file_path": "text_bech.cc", "rank": 99, "score": 1.8519525408016886 } ]
C++
src/lib/drishti/eye/EyeModelEyelids.cpp
ZJCRT/drishti
7c0da7e71cd4cff838b0b8ef195855cb68951839
#include "drishti/eye/EyeModelEstimatorImpl.h" #include "drishti/core/drishti_stdlib_string.h" #include "drishti/eye/EyeIO.h" #define DRISHTI_EYE_DEBUG_INITS 0 #if DRISHTI_EYE_DEBUG_INITS # include <opencv2/highgui.hpp> #endif DRISHTI_EYE_NAMESPACE_BEGIN using PointVec = std::vector<cv::Point2f>; static void jitter(const EyeModel& eye, const geometry::UniformSimilarityParams& params, std::vector<EyeModel>& poses, int n); static void jitter(const cv::Rect& roi, const geometry::UniformSimilarityParams& params, std::vector<cv::Rect>& poses, int n); static PointVec getMedianOfPoses(const std::vector<PointVec>& poses); #if DRISHTI_EYE_DEBUG_INITS static std::vector<cv::Point2f> operator*(const cv::Matx33f& H, const std::vector<cv::Point2f>& points); static void drawEyes(const cv::Mat& I, const std::vector<EyeModel>& eyes, const std::string& name = "eyes"); static std::vector<EyeModel> shapesToEyes(const std::vector<PointVec>& shapes, const EyeModelSpecification& spec, const cv::Matx33f& S); #endif void EyeModelEstimator::Impl::segmentEyelids(const cv::Mat& I, EyeModel& eye) const { PointVec mu = m_eyeEstimator->getMeanShape(); cv::Rect roi({ 0, 0 }, I.size()); std::vector<cv::Rect> rois = { roi }; if (m_eyelidInits > 1) { jitter(roi, m_jitterEyelidParams, rois, m_eyelidInits - 1); } std::vector<PointVec> poses(rois.size(), mu); std::vector<bool> mask; for (int i = 0; i < rois.size(); i++) { (*m_eyeEstimator)(I(rois[i]), poses[i], mask); cv::Point2f shift = rois[i].tl(); for (auto& p : poses[i]) { p += shift; } } PointVec pose = (poses.size() > 1) ? getMedianOfPoses(poses) : poses[0]; eye = shapeToEye(poses[0], m_eyeSpec); #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::eye()), "poses-out"); #endif } void EyeModelEstimator::Impl::segmentEyelids_(const cv::Mat& I, EyeModel& eye) const { std::vector<PointVec> poses{ m_eyeEstimator->getMeanShape() }; if (m_eyelidInits > 1) { auto toShape = [&](const EyeModel& e) { return eyeToShape(e, m_eyeSpec); }; std::vector<EyeModel> jittered; jitter(shapeToEye(m_eyeEstimator->getMeanShape(), m_eyeSpec), m_jitterEyelidParams, jittered, m_eyelidInits - 1); std::transform(jittered.begin(), jittered.end(), std::back_inserter(poses), toShape); } #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::diag({ I.cols, I.cols, 1.f })), "poses-in"); #endif std::vector<bool> mask; for (auto & pose : poses) { (*m_eyeEstimator)(I, pose, mask); } PointVec pose = (poses.size() > 1) ? 
getMedianOfPoses(poses) : poses[0]; eye = shapeToEye(poses[0], m_eyeSpec); #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::eye()), "poses-out"); #endif } #if DRISHTI_EYE_DEBUG_INITS static std::vector<EyeModel> shapesToEyes(const std::vector<PointVec>& shapes, const EyeModelSpecification& spec, const cv::Matx33f& S) { auto toEye = [&](const PointVec& shape) { return S * shapeToEye(shape, spec); }; std::vector<EyeModel> eyes; std::transform(shapes.begin(), shapes.end(), std::back_inserter(eyes), toEye); return eyes; }; std::vector<cv::Point2f> operator*(const cv::Matx33f& H, const std::vector<cv::Point2f>& points) { std::vector<cv::Point2f> points_ = points; for (auto& p : points_) { cv::Point3f q = H * cv::Point3f(p.x, p.y, 1.f); p = { q.x / q.z, q.y / q.z }; } return points_; } static float getMaxSeparation(const PointVec& points) { float maxSeparation = 0.f; for (int i = 0; i < points.size(); i++) { for (int j = i + 1; j < points.size(); j++) { float separation = cv::norm(points[i] - points[j]); if (separation > maxSeparation) { maxSeparation = separation; } } } return maxSeparation; } static void drawEyes(const cv::Mat& I, const std::vector<EyeModel>& eyes, const std::string& name) { cv::Mat canvas; cv::cvtColor(I, canvas, cv::COLOR_GRAY2BGR); for (const auto& v : eyes) { cv::Matx41d color = cv::Scalar::randu(100, 255); v.draw(canvas, 0, 0, { color(0), color(1), color(2) }, 1); } cv::imshow(name, canvas); cv::waitKey(0); } #endif static void jitter(const cv::Rect& roi, const geometry::UniformSimilarityParams& params, std::vector<cv::Rect>& poses, int n) { cv::Point2f center = drishti::geometry::centroid<int, float>(roi); const geometry::UniformSimilarityParams& params_ = params; for (int i = 0; i < n; i++) { bool hasRoi = false; cv::Rect roi2; for (int j = 0; j < 100; j++) { cv::Matx33f H = geometry::randomSimilarity(params_, cv::theRNG(), center, false); roi2 = H * roi; cv::Rect valid = roi2 & roi; if (roi.contains(valid.tl()) && roi.contains(valid.br())) { hasRoi = true; break; } } if (hasRoi) { poses.push_back(roi2); } } } static void jitter(const EyeModel& eye, const geometry::UniformSimilarityParams& params, std::vector<EyeModel>& poses, int n) { cv::Point2f center = drishti::core::centroid(eye.eyelids); const geometry::UniformSimilarityParams& params_ = params; for (int i = 0; i < n; i++) { cv::Matx33f H = geometry::randomSimilarity(params_, cv::theRNG(), center); poses.push_back(H * eye); } } static PointVec getMedianOfPoses(const std::vector<PointVec>& poses) { std::vector<std::vector<float>> params[2]; params[0].resize(poses[0].size()); params[1].resize(poses[0].size()); for (const auto & pose : poses) { for (int j = 0; j < pose.size(); j++) { params[0][j].push_back(pose[j].x); params[1][j].push_back(pose[j].y); } } std::vector<cv::Point2f> pose(poses[0].size()); for (int i = 0; i < params[0].size(); i++) { pose[i] = { median(params[0][i]), median(params[1][i]) }; } return pose; } DRISHTI_EYE_NAMESPACE_END
#include "drishti/eye/EyeModelEstimatorImpl.h" #include "drishti/core/drishti_stdlib_string.h" #include "drishti/eye/EyeIO.h" #define DRISHTI_EYE_DEBUG_INITS 0 #if DRISHTI_EYE_DEBUG_INITS # include <opencv2/highgui.hpp> #endif DRISHTI_EYE_NAMESPACE_BEGIN using PointVec = std::vector<cv::Point2f>; static void jitter(const EyeModel& eye, const geometry::UniformSimilarityParams& params, std::vector<EyeModel>& poses, int n); static void jitter(const cv::Rect& roi, const geometry::UniformSimilarityParams& params, std::vector<cv::Rect>& poses, int n); static PointVec getMedianOfPoses(const std::vector<PointVec>& poses); #if DRISHTI_EYE_DEBUG_INITS static std::vector<cv::Point2f> operator*(const cv::Matx33f& H, const std::vector<cv::Point2f>& points); static void drawEyes(const cv::Mat& I, const std::vector<EyeModel>& eyes, const std::string& name = "eyes"); static std::vector<EyeModel> shapesToEyes(const std::vector<PointVec>& shapes, const EyeModelSpecification& spec, const cv::Matx33f& S); #endif void EyeModelEstimator::Impl::segmentEyelids(const cv::Mat& I, EyeModel& eye) const { PointVec mu = m_eyeEstimator->getMeanShape(); cv::Rect roi({ 0, 0 }, I.size()); std::vector<cv::Rect> rois = { roi }; if (m_eyelidInits > 1) { jitter(roi, m_jitterEyelidParams, rois, m_eyelidInits - 1); } std::vector<PointVec> poses(rois.size(), mu); std::vector<bool> mask; for (int i = 0; i < rois.size(); i++) { (*m_eyeEstimator)(I(rois[i]), poses[i], mask); cv::Point2f shift = rois[i].tl(); for (auto& p : poses[i]) { p += shift; } } PointVec pose = (poses.size() > 1) ? getMedianOfPoses(poses) : poses[0]; eye = shapeToEye(poses[0], m_eyeSpec); #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::eye()), "poses-out"); #endif } void EyeModelEstimator::Impl::segmentEyelids_(const cv::Mat& I, EyeModel& eye) const { std::vector<PointVec> poses{ m_eyeEstimator->getMeanShape() }; if (m_eyelidInits > 1) { auto toShape = [&](const EyeModel& e) { return eyeToShape(e, m_eyeSpec); }; std::vector<EyeModel> jittered; jitter(shapeToEye(m_eyeEstimator->getMeanShape(), m_eyeSpec), m_jitterEyelidParams, jittered, m_eyelidInits - 1); std::transform(jittered.begin(), jittered.end(), std::back_inserter(poses), toShape); } #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::diag({ I.cols, I.cols, 1.f })), "poses-in"); #endif std::vector<bool> mask; for (auto & pose : poses) { (*m_eyeEstimator)(I, pose, mask); } PointVec pose = (poses.size() > 1) ? 
getMedianOfPoses(poses) : poses[0]; eye = shapeToEye(poses[0], m_eyeSpec); #if DRISHTI_EYE_DEBUG_INITS drawEyes(I, shapesToEyes(poses, m_eyeSpec, cv::Matx33f::eye()), "poses-out"); #endif } #if DRISHTI_EYE_DEBUG_INITS static std::vector<EyeModel> shapesToEyes(const std::vector<PointVec>& shapes, const EyeModelSpecification& spec, const cv::Matx33f& S) { auto toEye = [&](const PointVec& shape) { return S * shapeToEye(shape, spec); }; std::vector<EyeModel> eyes; std::transform(shapes.begin(), shapes.end(), std::back_inserter(eyes), toEye); return eyes; }; std::vector<cv::Point2f> operator*(const cv::Matx33f& H, const std::vector<cv::Point2f>& points) { std::vector<cv::Point2f> points_ = points; for (auto& p : points_) { cv::Point3f q = H * cv::Point3f(p.x, p.y, 1.f); p = { q.x / q.z, q.y / q.z }; } return points_; } static float getMaxSeparation(const PointVec& points) { float maxSeparation = 0.f; for (int i = 0; i < points.size(); i++) { for (int j = i + 1; j < points.size(); j++) { float separation = cv::norm(points[i] - points[j]); if (separation > maxSeparation) { maxSeparation = separation; } } } return maxSeparation; } static void drawEyes(const cv::Mat& I, const std::vector<EyeModel>& eyes, const std::string& name) { cv::Mat canvas; cv::cvtColor(I, canvas, cv::COLOR_GRAY2BGR); for (const auto& v : eyes) { cv::Matx41d color = cv::Scalar::randu(100, 255); v.draw(canvas, 0, 0, { color(0), color(1), color(2) }, 1); } cv::imshow(name, canvas); cv::waitKey(0); } #endif static void jitter(const cv::Rect& roi, const geometry::UniformSimilarityParams& params, std::vector<cv::Rect>& poses, int n) { cv::Point2f center = drishti::geometry::centroid<int, float>(roi); const geometry::UniformSimilarityParams& params_ = params; for (int i = 0; i < n; i++) { bool hasRoi = false; cv::Rect roi2; for (int j = 0; j < 100; j++) { cv::Matx33f H = geometry::randomSimilarity(params_, cv::theRNG(), center, false); roi2 = H * roi; cv::Rect valid = roi2 & roi; if (roi.contains(valid.tl()) && roi.contains(valid.br())) { hasRoi = true; break; } } if (hasRoi) { poses.push_back(roi2); } } }
static PointVec getMedianOfPoses(const std::vector<PointVec>& poses) { std::vector<std::vector<float>> params[2]; params[0].resize(poses[0].size()); params[1].resize(poses[0].size()); for (const auto & pose : poses) { for (int j = 0; j < pose.size(); j++) { params[0][j].push_back(pose[j].x); params[1][j].push_back(pose[j].y); } } std::vector<cv::Point2f> pose(poses[0].size()); for (int i = 0; i < params[0].size(); i++) { pose[i] = { median(params[0][i]), median(params[1][i]) }; } return pose; } DRISHTI_EYE_NAMESPACE_END
static void jitter(const EyeModel& eye, const geometry::UniformSimilarityParams& params, std::vector<EyeModel>& poses, int n) { cv::Point2f center = drishti::core::centroid(eye.eyelids); const geometry::UniformSimilarityParams& params_ = params; for (int i = 0; i < n; i++) { cv::Matx33f H = geometry::randomSimilarity(params_, cv::theRNG(), center); poses.push_back(H * eye); } }
function_block-full_function
[]
C++
Development/OrignalDev/Util/EventTimeLine.cc
isuhao/ravl2
317e0ae1cb51e320b877c3bad6a362447b5e52ec
#include "Jack/EventTimeLine.hh" #include "Ravl/GUI/Manager.hh" #include "Ravl/Image/Font.hh" #include <gdk/gdk.h> #define DODEBUG 0 #if DODEBUG #define ONDEBUG(x) x #else #define ONDEBUG(x) #endif namespace RavlGUIN { using namespace RavlAudioN; EventTimeLineBodyC::EventTimeLineBodyC() : RawCanvasBodyC(15,15), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), atMarker(0), markerLeft(0), markerRight(0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0) {} EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng,const TranscriptionBaseListC &_events) : RawCanvasBodyC(srow,scol), displayRange(rng), events(_events), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), atMarker(0), markerLeft(0), markerRight(0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0) { } EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng,const TranscriptionBaseListC &_events) : RawCanvasBodyC(15,15), displayRange(rng), events(_events), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0), label("label") {} EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng) : RawCanvasBodyC(srow,scol), displayRange(rng), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0), label("label") {} EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng) : RawCanvasBodyC(15,15), displayRange(rng), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), label("label") {} static bool DestroyGc(GdkGC *gc) { g_object_unref(gc); return true; } bool EventTimeLineBodyC::SetMarkers(RealT time,RealT left, RealT right){ markerRight = right; markerLeft = left; SetMarker(time); return true; } EventTimeLineBodyC::~EventTimeLineBodyC() { if(markerGc != 0) { Manager.Queue(Trigger(DestroyGc,markerGc)); markerGc = 0; } } bool EventTimeLineBodyC::Create() { ConnectRef(Signal("expose_event"),*this,&EventTimeLineBodyC::EventExpose); ConnectRef(Signal("configure_event"),*this,&EventTimeLineBodyC::EventConfigure); ConnectRef(Signal("button_press_event"),*this,&EventTimeLineBodyC::EventMousePress); if(!RawCanvasBodyC::Create()) return false; return true; } bool EventTimeLineBodyC::SetDisplayRange(RealRangeC &rng) { Manager.QueueOnGUI(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplayRange,rng)); return true; } bool EventTimeLineBodyC::GUISetDisplayRange(RealRangeC &rng) { displayRange = rng; ONDEBUG(cerr << "EventTimeLineBodyC::GUISetDisplayRange(). Range=" << displayRange << " \n"); GUIDraw(); return true; } bool EventTimeLineBodyC::SetMarker(RealT time) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetMarker,time)); return true; } bool EventTimeLineBodyC::GUISetMarker(RealT time) { atMarker = time; GUIDraw(); return true; } bool EventTimeLineBodyC::Goto(RealT &time) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUIGoto,time)); return true; } bool EventTimeLineBodyC::GUIGoto(RealT &time) { ONDEBUG(cerr << "EventTimeLineBodyC::GUIGotot(). 
Time=" << time << " \n"); RealT size = displayRange.Size()/2; displayRange = RealRangeC(time - size ,time + size); GUIDraw(); return true; } bool EventTimeLineBodyC::SetDisplaySpan(RealT &size) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplaySpan,size)); return true; } bool EventTimeLineBodyC::GUISetDisplaySpan(RealT &size) { RealT time = displayRange.Center(); RealT val = size / 2; displayRange = RealRangeC(time - val,time + val); GUIDraw(); return true; } bool EventTimeLineBodyC::SetEvents(TranscriptionBaseListC &_events) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetEvents,_events)); return true; } bool EventTimeLineBodyC::GUISetEvents(TranscriptionBaseListC &_events) { events = _events; GUIDraw(); return true; } bool EventTimeLineBodyC::EventConfigure(GdkEvent* &event) { ONDEBUG(cerr << "EventTimeLineBodyC::EventConfigure(). \n"); IndexRange2dC newRec; TranslateConfigureEvent(event,newRec); if(newRec == displayArea) return true; displayArea = newRec; if(markerGc == 0) { markerGc = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGc,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 255 * 255; colour.green = 0; colour.blue = 0; gdk_gc_set_rgb_fg_color (markerGc,&colour); } if(markerGcL == 0) { markerGcL = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGcL,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 0; colour.green = 255 * 255; colour.blue = 0; gdk_gc_set_rgb_fg_color (markerGcL,&colour); } if(markerGcR == 0) { markerGcR = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGcR,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 0; colour.green = 0; colour.blue = 255 * 255; gdk_gc_set_rgb_fg_color (markerGcR,&colour); } if(text == 0) { text = gdk_gc_new(DrawArea()); gdk_gc_copy(text,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 255 * 255; colour.green = 200 * 255; colour.blue = 100 * 255; gdk_gc_set_rgb_fg_color (text,&colour); } return true; } bool EventTimeLineBodyC::EventExpose(GdkEvent* &event) { ONDEBUG(cerr << "EventTimeLineBodyC::EventExpose(). \n"); IntT toFollow; IndexRange2dC newRec; TranslateExposeEvent(event,newRec,toFollow); GUIDraw(); return true; } bool EventTimeLineBodyC::EventMousePress(MouseEventC &mousePress) { ONDEBUG(cerr << "EventTimeLineBodyC::EventMousePress(). \n"); RealT scale = (RealT)displayArea.Range2().Size() / displayRange.Size(); if(mousePress.HasChanged(1) && mousePress.IsCntrl()){ DeleteEvent(); timeSelected1(atMarker,markerLeft,markerRight); } else if(mousePress.HasChanged(1)&&mousePress.IsShift()) SetEventVal(mousePress.At()[1] / scale + displayRange.Min()); else if(mousePress.HasChanged(1)){ atMarker = (static_cast<RealT>(mousePress.At()[1]) / scale) + displayRange.Min(); #if 0 for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) { if(it->Data2().Contains(time)) cerr << "Time " << time << " in " << it->Data2() << "\n"; } #endif timeSelected(atMarker); } else if(mousePress.HasChanged(0)&&mousePress.IsShift()) AddEvent(); else if(mousePress.HasChanged(0)){ markerLeft = mousePress.At()[1] / scale + displayRange.Min(); timeSelected1(atMarker,markerLeft,markerRight); } else if(mousePress.HasChanged(2)){ markerRight = mousePress.At()[1] / scale + displayRange.Min(); timeSelected1(atMarker,markerLeft,markerRight); } GUIDraw(); return true; } bool EventTimeLineBodyC::GUIDraw() { ONDEBUG(cerr << "EventTimeLineBodyC::GUIDraw(). 
Range=" << displayRange << " Events=" << events.Size() << "\n"); if(displayArea.Cols() < 1 || displayArea.Rows() < 1) return true; GUIDrawRectangle(GUIDrawGCWhite(),displayArea,true); RealT scale = (RealT)displayArea.Range2().Size() / displayRange.Size(); IndexRangeC vertRange = displayArea.Range1().Shrink(4); #if 1 if(markerGc != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((atMarker - displayRange.Min()) * scale, ((atMarker+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGc,markRange,true); } #endif #if 1 if(markerGcL != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((markerLeft - displayRange.Min()) * scale, ((markerLeft+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGcL,markRange,true); } #endif #if 1 if(markerGcR != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((markerRight - displayRange.Min()) * scale, ((markerRight+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGcR,markRange,true); } #endif DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events); ONDEBUG(cerr << "VertRange=" << vertRange << " Scale=" << scale << "\n"); IndexC midV = vertRange.Center(); GUIDrawLine(GUIDrawGCGrey(),Index2dC(midV,displayArea.Range2().Min()),Index2dC(midV,displayArea.Range2().Max())); for(;it;it++) { if(displayRange.IsOverlapping(it->Data2())){ IndexRangeC rng2((it->Data2().Min() - displayRange.Min()) * scale, (it->Data2().Max() - displayRange.Min()) * scale); IndexRange2dC box(vertRange,rng2); if(box.Range2().Size() == 0) box.Range2().Max()++; box.ClipBy(displayArea); GdkGC* drawContext = gdk_gc_new(DrawArea()); gdk_gc_copy(drawContext,GUIDrawGC()); GdkColor colour; if(it->Data1() == 0){ colour.pixel = 0; colour.red = 0; colour.green = 255 * 255; colour.blue = 0; } else if(it->Data1() == 4){ colour.pixel = 0; colour.red = 255 * 255; colour.green = 0; colour.blue = 0; } else{ colour.pixel = 0; colour.red = -255 * 255*it->Data1()/3; colour.green = -255 * 255*it->Data1()/3; colour.blue = -255 * 255*it->Data1()/3; } gdk_gc_set_rgb_fg_color (drawContext,&colour); GUIDrawRectangle(drawContext,box,true); } } GUIDrawText(text,GUIDrawFont(),Point2dC(10,10),label); #if 0 IndexC midH = displayArea.Range2().Center(); GUIDrawLine(GUIDrawGCGrey(),Index2dC(displayArea.Range1().Min(),midH),Index2dC(displayArea.Range1().Max(),midH)); #endif return true; } RealT EventTimeLineBodyC::GetMarkerRight(){return markerRight;} RealT EventTimeLineBodyC::GetMarkerLeft(){return markerLeft;} RealT EventTimeLineBodyC::GetMarkerTime(){return atMarker;} bool EventTimeLineBodyC::AddEvent(){ if(markerLeft < markerRight) AddEvent(RealRangeC(markerLeft,markerRight)); else AddEvent(RealRangeC(markerRight,markerLeft)); return true; } bool EventTimeLineBodyC::AddEvent(RealRangeC data){ events+=Tuple3C<IntT,RealRangeC,StringC>(0,data,""); return true; } bool EventTimeLineBodyC::DeleteEvent(){ if(markerLeft < markerRight) for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Min() > markerLeft && it.Data().Data2().Min() < markerRight) it.Del(); else for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Min() > markerRight && it.Data().Data2().Min() < markerLeft) it.Del(); return true; } 
TranscriptionBaseListC &EventTimeLineBodyC::GetEvents(){return events;} bool EventTimeLineBodyC::SetEventVal(RealT time){ for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Contains(time)){ it.Data().Data1() = (it.Data().Data1()+1) % 5; GUIDraw(); } return true; } }
#include "Jack/EventTimeLine.hh" #include "Ravl/GUI/Manager.hh" #include "Ravl/Image/Font.hh" #include <gdk/gdk.h> #define DODEBUG 0 #if DODEBUG #define ONDEBUG(x) x #else #define ONDEBUG(x) #endif namespace RavlGUIN { using namespace RavlAudioN; EventTimeLineBodyC::EventTimeLineBodyC() : RawCanvasBodyC(15,15), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), atMarker(0), markerLeft(0), markerRight(0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0) {} EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng,const TranscriptionBaseListC &_events) : RawCanvasBodyC(srow,scol), displayRange(rng), events(_events), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), atMarker(0), markerLeft(0), markerRight(0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0) { } EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng,const TranscriptionBaseListC &_events) : RawCanvasBodyC(15,15), displayRange(rng), events(_events), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0), label("label") {} EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng) : RawCanvasBodyC(srow,scol), displayRange(rng), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), text(0), label("label") {} EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng) : RawCanvasBodyC(15,15), displayRange(rng), timeSelected1(0.0,0.0,0.0), timeSelected(0.0), updateId(0), markerGc(0), markerGcL(0), markerGcR(0), label("label") {} static bool DestroyGc(GdkGC *gc) { g_object_unref(gc); return true; } bool EventTimeLineBodyC::SetMarkers(RealT time,RealT left, RealT right){ markerRight = right; markerLeft = left; SetMarker(time); return true; } EventTimeLineBodyC::~EventTimeLineBodyC() { if(markerGc != 0) { Manager.Queue(Trigger(DestroyGc,markerGc)); markerGc = 0; } } bool EventTimeLineBodyC::Create() { ConnectRef(Signal("expose_event"),*this,&EventTimeLineBodyC::EventExpose); ConnectRef(Signal("configure_event"),*this,&EventTimeLineBodyC::EventConfigure); ConnectRef(Signal("button_press_event"),*this,&EventTimeLineBodyC::EventMousePress); if(!RawCanvasBodyC::Create()) return false; return true; } bool EventTimeLineBodyC::SetDisplayRange(RealRangeC &rng) { Manager.QueueOnGUI(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplayRange,rng)); return true; } bool EventTimeLineBodyC::GUISetDisplayRange(RealRangeC &rng) { displayRange = rng; ONDEBUG(cerr << "EventTimeLineBodyC::GUISetDisplayRange(). Range=" << displayRange << " \n"); GUIDraw(); return true; } bool EventTimeLineBodyC::SetMarker(RealT time) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetMarker,time)); return true; } bool EventTimeLineBodyC::GUISetMarker(RealT time) { atMarker = time; GUIDraw(); return true; } bool EventTimeLineBodyC::Goto(RealT &time) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUIGoto,time)); return true; } bool EventTimeLineBodyC::GUIGoto(RealT &time) { ONDEBUG(cerr << "EventTimeLineBodyC::GUIGotot(). 
Time=" << time << " \n"); RealT size = displayRange.Size()/2; displayRange = RealRangeC(time - size ,time + size); GUIDraw(); return true; } bool EventTimeLineBodyC::SetDisplaySpan(RealT &size) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplaySpan,size)); return true; } bool EventTimeLineBodyC::GUISetDisplaySpan(RealT &size) { RealT time = displayRange.Center(); RealT val = size / 2; displayRange = RealRangeC(time - val,time + val); GUIDraw(); return true; } bool EventTimeLineBodyC::SetEvents(TranscriptionBaseListC &_events) { Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetEvents,_events)); return true; } bool EventTimeLineBodyC::GUISetEvents(TranscriptionBaseListC &_events) { events = _events; GUIDraw(); return true; } bool EventTimeLineBodyC::EventConfigure(GdkEvent* &event) { ONDEBUG(cerr << "EventTimeLineBodyC::EventConfigure(). \n"); IndexRange2dC newRec; TranslateConfigureEvent(event,newRec); if(newRec == displayArea) return true; displayArea = newRec; if(markerGc == 0) { markerGc = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGc,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 255 * 255; colour.green = 0; colour.blue = 0; gdk_gc_set_rgb_fg_color (markerGc,&colour); } if(markerGcL == 0) { markerGcL = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGcL,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 0; colour.green = 255 * 255; colour.blue = 0; gdk_gc_set_rgb_fg_color (markerGcL,&colour); } if(markerGcR == 0) { markerGcR = gdk_gc_new(DrawArea()); gdk_gc_copy(markerGcR,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 0; colour.green = 0; colour.blue = 255 * 255; gdk_gc_set_rgb_fg_color (markerGcR,&colour); } if(text == 0) { text = gdk_gc_new(DrawArea()); gdk_gc_copy(text,GUIDrawGC()); GdkColor colour; colour.pixel = 0; colour.red = 255 * 255; colour.green = 200 * 255; colour.blue = 100 * 255; gdk_gc_set_rgb_fg_color (text,&colour); } return true; } bool EventTimeLineBodyC::EventExpose(GdkEvent* &event) { ONDEBUG(cerr << "EventTimeLineBodyC::EventExpose(). \n"); IntT toFollow; IndexRange2dC newRec; TranslateExposeEvent(event,newRec,toFollow); GUIDraw(); return true; } bool EventTimeLineBodyC::EventMousePress(MouseEventC &mousePress) { ONDEBUG(cerr << "EventTimeLineBodyC::EventMousePress(). \n"); RealT scale = (RealT)displayArea.Range2().Size() / displayRange.Size(); if(mousePress.HasChanged(1) && mousePress.IsCntrl()){ DeleteEvent(); timeSelected1(atMarker,markerLeft,markerRight); } else if(mousePress.HasChanged(1)&&mousePress.IsShift()) SetEventVal(mousePress.At()[1] / scale + displayRange.Min()); else if(mousePress.HasChanged(1)){ atMarker = (static_cast<RealT>(mousePress.At()[1]) / scale) + displayRange.Min(); #if 0 for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) { if(it->Data2().Contains(time)) cerr << "Time " << time << " in " << it->Data2() << "\n"; } #endif timeSelected(atMarker); } else if(mousePress.HasChanged(0)&&mousePress.IsShift()) AddEvent(); else if(mousePress.HasChanged(0)){ markerLeft = mousePress.At()[1] / scale + displayRange.Min(); timeSelected1(atMarker,markerLeft,markerRight); } else if(mousePress.HasChanged(2)){ markerRight = mousePress.At()[1] / scale + displayRange.Min(); timeSelected1(atMarker,markerLeft,markerRight); } GUIDraw(); return true; } bool EventTimeLineBodyC::GUIDraw() { ONDEBUG(cerr << "EventTimeLineBodyC::GUIDraw(). 
Range=" << displayRange << " Events=" << events.Size() << "\n"); if(displayArea.Cols() < 1 || displayArea.Rows() < 1) return true; GUIDrawRectangle(GUIDrawGCWhite(),displayArea,true); RealT scale = (RealT)displayArea.Range2().Size() / displayRange.Size(); IndexRangeC vertRange = displayArea.Range1().Shrink(4); #if 1 if(markerGc != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((atMarker - displayRange.Min()) * scale, ((atMarker+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGc,markRange,true); } #endif #if 1
#endif #if 1 if(markerGcR != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((markerRight - displayRange.Min()) * scale, ((markerRight+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGcR,markRange,true); } #endif DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events); ONDEBUG(cerr << "VertRange=" << vertRange << " Scale=" << scale << "\n"); IndexC midV = vertRange.Center(); GUIDrawLine(GUIDrawGCGrey(),Index2dC(midV,displayArea.Range2().Min()),Index2dC(midV,displayArea.Range2().Max())); for(;it;it++) { if(displayRange.IsOverlapping(it->Data2())){ IndexRangeC rng2((it->Data2().Min() - displayRange.Min()) * scale, (it->Data2().Max() - displayRange.Min()) * scale); IndexRange2dC box(vertRange,rng2); if(box.Range2().Size() == 0) box.Range2().Max()++; box.ClipBy(displayArea); GdkGC* drawContext = gdk_gc_new(DrawArea()); gdk_gc_copy(drawContext,GUIDrawGC()); GdkColor colour; if(it->Data1() == 0){ colour.pixel = 0; colour.red = 0; colour.green = 255 * 255; colour.blue = 0; } else if(it->Data1() == 4){ colour.pixel = 0; colour.red = 255 * 255; colour.green = 0; colour.blue = 0; } else{ colour.pixel = 0; colour.red = -255 * 255*it->Data1()/3; colour.green = -255 * 255*it->Data1()/3; colour.blue = -255 * 255*it->Data1()/3; } gdk_gc_set_rgb_fg_color (drawContext,&colour); GUIDrawRectangle(drawContext,box,true); } } GUIDrawText(text,GUIDrawFont(),Point2dC(10,10),label); #if 0 IndexC midH = displayArea.Range2().Center(); GUIDrawLine(GUIDrawGCGrey(),Index2dC(displayArea.Range1().Min(),midH),Index2dC(displayArea.Range1().Max(),midH)); #endif return true; } RealT EventTimeLineBodyC::GetMarkerRight(){return markerRight;} RealT EventTimeLineBodyC::GetMarkerLeft(){return markerLeft;} RealT EventTimeLineBodyC::GetMarkerTime(){return atMarker;} bool EventTimeLineBodyC::AddEvent(){ if(markerLeft < markerRight) AddEvent(RealRangeC(markerLeft,markerRight)); else AddEvent(RealRangeC(markerRight,markerLeft)); return true; } bool EventTimeLineBodyC::AddEvent(RealRangeC data){ events+=Tuple3C<IntT,RealRangeC,StringC>(0,data,""); return true; } bool EventTimeLineBodyC::DeleteEvent(){ if(markerLeft < markerRight) for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Min() > markerLeft && it.Data().Data2().Min() < markerRight) it.Del(); else for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Min() > markerRight && it.Data().Data2().Min() < markerLeft) it.Del(); return true; } TranscriptionBaseListC &EventTimeLineBodyC::GetEvents(){return events;} bool EventTimeLineBodyC::SetEventVal(RealT time){ for(DLIterC<Tuple3C<IntT,RealRangeC,StringC> > it(events);it;it++) if(it.Data().Data2().Contains(time)){ it.Data().Data1() = (it.Data().Data1()+1) % 5; GUIDraw(); } return true; } }
if(markerGcL != 0) { IndexRange2dC markRange(displayArea.Range1(), IndexRangeC((markerLeft - displayRange.Min()) * scale, ((markerLeft+1) - displayRange.Min()) * scale)); if(markRange.Range2().Size() < 3) { markRange.Range2().Expand((3 - markRange.Range2().Size())/2); } GUIDrawRectangle(markerGcL,markRange,true); }
if_condition
[ { "content": "\n\n#include \"../.././GUI/Util/EventTimeLine.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/GUI/EventTimeLine.hh", "rank": 0, "score": 176136.96804029416 }, { "content": " class EventTimeLineC;\n\n \n\n //! userlevel=Develop\n\n //: Event time line.\n\n \n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 1, "score": 144942.75875159682 }, { "content": " class EventTimeLineC \n\n : public RawCanvasC \n\n {\n\n public:\n\n EventTimeLineC()\n\n {}\n\n //: Default constructor.\n\n // Creates an invalid handle.\n\n\n\n EventTimeLineC(const RealRangeC &rng,const DListC<Tuple2C<IntT,RealRangeC> > &events) \n\n : RawCanvasC(*new EventTimeLineBodyC(rng,events))\n\n {}\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n\n\n EventTimeLineC(const RealRangeC &rng) \n\n : RawCanvasC(*new EventTimeLineBodyC(rng))\n\n {}\n\n //: Constructor.\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 2, "score": 144942.75875159682 }, { "content": " class EventTimeLineBodyC \n\n : public RawCanvasBodyC \n\n {\n\n public:\n\n EventTimeLineBodyC();\n\n //: Default constructor.\n\n \n\n EventTimeLineBodyC(const RealRangeC &rng,const DListC<Tuple2C<IntT,RealRangeC> > &events);\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n\n\n EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng,const DListC<Tuple2C<IntT,RealRangeC> > &events);\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n EventTimeLineBodyC(const RealRangeC &rng);\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 3, "score": 142662.84010799776 }, { "content": " class EventTimeLineC \n\n : public RawCanvasC \n\n {\n\n public:\n\n EventTimeLineC()\n\n {}\n\n //: Default constructor.\n\n // Creates an invalid handle.\n\n\n\n EventTimeLineC(const RealRangeC &rng,const TranscriptionBaseListC &events) \n\n : RawCanvasC(*new EventTimeLineBodyC(rng,events))\n\n {}\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n\n\n EventTimeLineC(const RealRangeC &rng) \n\n : RawCanvasC(*new EventTimeLineBodyC(rng))\n\n {}\n\n //: Constructor.\n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 4, "score": 142662.84010799776 }, { "content": " class EventTimeLineC;\n\n \n\n //! userlevel=Develop\n\n //: Event time line.\n\n // Display a set of events as black marks on a white backgrouond. 
A red marker indicates current position.\n\n \n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 5, "score": 142662.84010799776 }, { "content": " class EventTimeLineBodyC \n\n : public RawCanvasBodyC \n\n {\n\n public:\n\n EventTimeLineBodyC();\n\n //: Default constructor.\n\n \n\n EventTimeLineBodyC(const RealRangeC &rng,const TranscriptionBaseListC &events);\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n\n\n EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng,const TranscriptionBaseListC &events);\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n\n\n EventTimeLineBodyC(const RealRangeC &rng);\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 6, "score": 140453.53597995295 }, { "content": "\n\n#include \"../.././OS/Text/TextFile.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/Text/TextFile.hh", "rank": 7, "score": 136263.15219810946 }, { "content": "\n\n#include \"../.././OS/Text/TextCursor.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/Text/TextCursor.hh", "rank": 8, "score": 136263.15219810946 }, { "content": "\n\n#include \"../.././OS/Text/TextBuffer.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/Text/TextBuffer.hh", "rank": 9, "score": 136263.15219810946 }, { "content": "\n\n#include \"../.././OS/Text/TextFragment.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/Text/TextFragment.hh", "rank": 10, "score": 136263.15219810946 }, { "content": "\n\n#include \"../.././OS/Text/TextFileLine.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/Text/TextFileLine.hh", "rank": 11, "score": 133447.90935017043 }, { "content": " Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplaySpan,size, redraw));\n\n return true;\n\n }\n\n \n\n //: Set the length of time to display.\n\n \n\n bool EventTimeLineBodyC::GUISetDisplaySpan(RealT &size, bool redraw) {\n\n ONDEBUG(cerr << \"bool EventTimeLineBodyC::GUISetDisplaySpan(RealT &size=\" << size << \")\\n\");\n\n RealT time = displayRange.Center();\n\n RealT val = size / 2;\n\n displayRange = RealRangeC(time - val,time + val);\n\n if (redraw)\n\n GUIDraw();\n\n return true;\n\n }\n\n\n\n\n\n //: Set event list.\n\n \n\n bool EventTimeLineBodyC::SetEvents(DListC<Tuple2C<IntT,RealRangeC> > &_events, bool redraw) {\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 12, "score": 124008.57257237071 }, { "content": " }\n\n\n\n\n\n //: Draw widget on screen.\n\n \n\n bool EventTimeLineBodyC::GUIDraw() {\n\n ONDEBUG(cerr << \"EventTimeLineBodyC::GUIDraw(). 
Range=\" << displayRange << \" Events=\" << events.Size() << \"\\n\");\n\n\n\n if(displayArea.Cols() < 1 || displayArea.Rows() < 1)\n\n return true; // No display area.\n\n \n\n // Clear the box\n\n GUIDrawRectangle(GUIDrawGCGrey(), displayArea, true);\n\n \n\n RealT scale = static_cast<RealT>(displayArea.Range2().Size()) / displayRange.Size();\n\n \n\n // Render visible part.\n\n IndexC midV = displayArea.Range1().Center();\n\n IntT arrowWidth = Floor(displayArea.Range2().Size() * m_arrowWidth);\n\n IntT arrowBorder = Floor(static_cast<RealT>(arrowWidth) * m_arrowBorder);\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 13, "score": 124007.7938513362 }, { "content": " }\n\n \n\n return true;\n\n }\n\n\n\n // Get the time\n\n RealT scale = static_cast<RealT>(displayArea.Range2().Size()) / displayRange.Size();\n\n RealT time = (static_cast<RealT>(mouseCol.V()) / scale) + displayRange.Min();\n\n\n\n#if 0\n\n // Dump range click is in.\n\n for(DLIterC<Tuple2C<IntT,RealRangeC> > it(events);it;it++) {\n\n if(it->Data2().Contains(time))\n\n cerr << \"Time \" << time << \" in \" << it->Data2() << \"\\n\";\n\n }\n\n#endif\n\n \n\n //cerr << \"Press time=\" << time << \"\\n\";\n\n timeSelected(time);\n\n return true;\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 14, "score": 124007.69553512847 }, { "content": " segmentGc(0)\n\n {}\n\n \n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng) \n\n : RawCanvasBodyC(15,15),\n\n displayRange(rng),\n\n m_localSegment(3,-8),\n\n timeSelected(0.0),\n\n atMarker(0),\n\n m_atSpan(0),\n\n updateId(0),\n\n markerGc(0),\n\n segmentGc(0)\n\n {}\n\n\n\n static bool DestroyGc(GdkGC *gc) {\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 15, "score": 124003.38122998895 }, { "content": " bool EventTimeLineBodyC::Goto(RealT &time, bool redraw) {\n\n Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUIGoto,time, redraw));\n\n return true;\n\n }\n\n \n\n //: Centre on a specific time.\n\n \n\n bool EventTimeLineBodyC::GUIGoto(RealT &time, bool redraw) {\n\n ONDEBUG(cerr << \"EventTimeLineBodyC::GUIGoto(). Time=\" << time << \" \\n\");\n\n RealT size = displayRange.Size()/2;\n\n displayRange = RealRangeC(time - size ,time + size);\n\n //atMarker = time;\n\n if (redraw)\n\n GUIDraw();\n\n return true;\n\n }\n\n \n\n //: Set the length of time to display.\n\n \n\n bool EventTimeLineBodyC::SetDisplaySpan(RealT &size, bool redraw) {\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 16, "score": 124002.25848349542 }, { "content": "\n\n //: Set range of times to display\n\n \n\n bool EventTimeLineBodyC::GUISetDisplayRange(RealRangeC &rng, bool redraw) {\n\n displayRange = rng; \n\n ONDEBUG(cerr << \"EventTimeLineBodyC::GUISetDisplayRange(). 
Range=\" << displayRange << \" \\n\");\n\n if (redraw)\n\n GUIDraw();\n\n return true;\n\n }\n\n\n\n\n\n \n\n //: Set the local segment.\n\n \n\n bool EventTimeLineBodyC::GUISetLocalSegment(const RealRangeC &segRange, bool redraw) {\n\n ONDEBUG(cerr << \"EventTimeLineBodyC::GUISetLocalSegment(const RealRangeC &segRange=\"<<segRange<<\")\\n\");\n\n if(m_localSegment != segRange) {\n\n m_localSegment = segRange;\n\n if (redraw)\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 17, "score": 124000.62328912235 }, { "content": " colour.green = 128 * 255;\n\n colour.blue = 255 * 255;\n\n gdk_gc_set_rgb_fg_color (segmentGc,&colour); \n\n }\n\n return true;\n\n }\n\n \n\n //: Expose of area.\n\n \n\n bool EventTimeLineBodyC::EventExpose(GdkEvent* &event) {\n\n ONDEBUG(cerr << \"EventTimeLineBodyC::EventExpose(). \\n\");\n\n IntT toFollow;\n\n IndexRange2dC newRec;\n\n TranslateExposeEvent(event,newRec,toFollow);\n\n GUIDraw();\n\n return true;\n\n }\n\n \n\n //: Event mouse press.\n\n \n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 18, "score": 123999.32241632036 }, { "content": " //!param: events - List of events.\n\n\n\n EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng);\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n ~EventTimeLineBodyC();\n\n //: Destructor.\n\n \n\n virtual bool Create()\n\n { return CommonCreate(); }\n\n //: Create the widget.\n\n \n\n virtual bool Create(GtkWidget *_widget)\n\n { return CommonCreate(_widget); }\n\n //: Create the widget.\n\n \n\n bool SetDisplayRange(RealRangeC &rng, bool redraw=true);\n\n //: Set range of times to display\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 19, "score": 123996.77841156429 }, { "content": "\n\n //: Set the inactive segment list\n\n bool EventTimeLineBodyC::GUISetActiveSegments(DListC<RealRangeC> & _segments, bool redraw)\n\n {\n\n m_activeSegments = _segments;\n\n if (redraw)\n\n GUIDraw();\n\n return true;\n\n }\n\n\n\n\n\n\n\n //: Handle configure event.\n\n \n\n bool EventTimeLineBodyC::EventConfigure(GdkEvent* &event) {\n\n ONDEBUG(cerr << \"EventTimeLineBodyC::EventConfigure(). 
\\n\");\n\n IndexRange2dC newRec;\n\n TranslateConfigureEvent(event,newRec);\n\n if(newRec == displayArea)\n\n return true; // Same size, nothing to do!\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 20, "score": 123996.09117910305 }, { "content": " \n\n bool GUISetEvents(DListC<Tuple2C<IntT,RealRangeC> > &events, bool redraw=true)\n\n { return Body().GUISetEvents(events, redraw); }\n\n //: Set event list.\n\n\n\n bool SetActiveSegments(DListC<RealRangeC> & segments, bool redraw=true)\n\n { return Body().SetActiveSegments(segments, redraw); }\n\n //: Set list of inactive segments\n\n\n\n bool GUISetActiveSegments(DListC<RealRangeC> & segments, bool redraw=true)\n\n { return Body().GUISetActiveSegments(segments, redraw); }\n\n //: Set a list of inactive segments.\n\n\n\n \n\n bool SetDisplayRange(RealRangeC &rng, bool redraw=true)\n\n { return Body().SetDisplayRange(rng, redraw); }\n\n //: Set range of times to display\n\n \n\n bool GUISetDisplayRange(RealRangeC &rng, bool redraw=true)\n\n { return Body().GUISetDisplayRange(rng, redraw); }\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 21, "score": 123995.15036803245 }, { "content": " \n\n bool GUISetDisplayRange(RealRangeC &rng, bool redraw=true);\n\n //: Set range of times to display\n\n \n\n bool GUISetLocalSegment(const RealRangeC &segRange, bool redraw=true);\n\n //: Set the local segment.\n\n // Segments with a negative size will not be displayed\n\n \n\n inline Signal1C<RealT> &SigTimeSelected()\n\n { return timeSelected; }\n\n //: Frame selected signal\n\n \n\n bool Goto(RealT &time, bool redraw=true);\n\n //: Centre on a specific time.\n\n \n\n bool GUIGoto(RealT &time, bool redraw=true);\n\n //: Centre on a specific time.\n\n \n\n bool SetMarker(RealT time, bool redraw=true);\n\n //: Set marker position.\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 22, "score": 123994.1803388055 }, { "content": " ConnectRef(Signal(\"expose_event\"),*this,&EventTimeLineBodyC::EventExpose);\n\n ConnectRef(Signal(\"configure_event\"),*this,&EventTimeLineBodyC::EventConfigure);\n\n ConnectRef(Signal(\"button_press_event\"),*this,&EventTimeLineBodyC::EventMousePress);\n\n \n\n bool ret = false;\n\n if (_widget == NULL)\n\n ret = RawCanvasBodyC::Create();\n\n else\n\n ret = RawCanvasBodyC::Create(_widget);\n\n \n\n return ret;\n\n }\n\n\n\n //: Set range of times to display\n\n \n\n bool EventTimeLineBodyC::SetDisplayRange(RealRangeC &rng, bool redraw)\n\n {\n\n Manager.QueueOnGUI(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetDisplayRange,rng, redraw));\n\n return true; \n\n }\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 23, "score": 123993.76736768062 }, { "content": " protected:\n\n\n\n IndexRange2dC displayArea; // Area to use for displaying images.\n\n RealRangeC displayRange; // Range of times to display.\n\n DListC<Tuple2C<IntT,RealRangeC> > events;\n\n DListC<RealRangeC > m_activeSegments;\n\n RealRangeC m_localSegment; // Local segment.\n\n Signal1C<RealT> timeSelected; // Frame selected signal\n\n RealT atMarker; // Marker for where you are in the sequence.\n\n RealT m_atSpan;\n\n UIntT updateId;\n\n GdkGC *markerGc;\n\n GdkGC *segmentGc;\n\n \n\n const static RealT m_arrowWidth;\n\n const static RealT m_arrowBorder;\n\n \n\n friend class EventTimeLineC;\n\n };\n\n\n\n\n\n\n\n //! 
userlevel=Normal\n\n //: Event time line.\n\n \n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 24, "score": 123992.34809601131 }, { "content": " displayRange(rng),\n\n events(_events),\n\n m_localSegment(3,-8),\n\n timeSelected(0.0),\n\n atMarker(0),\n\n m_atSpan(0),\n\n updateId(0),\n\n markerGc(0),\n\n segmentGc(0)\n\n {}\n\n\n\n EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng)\n\n : RawCanvasBodyC(srow,scol),\n\n displayRange(rng),\n\n m_localSegment(3,-8),\n\n timeSelected(0.0),\n\n atMarker(0),\n\n m_atSpan(0),\n\n updateId(0),\n\n markerGc(0),\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 25, "score": 123991.4964483443 }, { "content": " \n\n EventTimeLineBodyC::EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng,const DListC<Tuple2C<IntT,RealRangeC> > &_events)\n\n : RawCanvasBodyC(srow,scol),\n\n displayRange(rng),\n\n events(_events),\n\n m_localSegment(3,-8),\n\n timeSelected(0.0),\n\n atMarker(0),\n\n m_atSpan(0),\n\n updateId(0),\n\n markerGc(0),\n\n segmentGc(0)\n\n {}\n\n \n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n EventTimeLineBodyC::EventTimeLineBodyC(const RealRangeC &rng,const DListC<Tuple2C<IntT,RealRangeC> > &_events) \n\n : RawCanvasBodyC(15,15),\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 26, "score": 123990.92163598991 }, { "content": " bool EventTimeLineBodyC::EventMousePress(MouseEventC &mousePress) {\n\n ONDEBUG(cerr << \"EventTimeLineBodyC::EventMousePress(). \\n\");\n\n IndexC mouseCol = mousePress.At()[1];\n\n IntT arrowWidth = Floor(displayArea.Range2().Size() * m_arrowWidth);\n\n\n\n ONDEBUG(cerr << \"mouseCol:\" << mouseCol << \"\\tarrowWidth: \" << arrowWidth);\n\n if (mouseCol < displayArea.Range2().Min() + arrowWidth) {\n\n // Left arrow clicked\n\n DLIterC< Tuple2C<IntT, RealRangeC> > it(events);\n\n for (it.Last(); it; it--) {\n\n if (it->Data2().Min() < atMarker)\n\n break;\n\n }\n\n \n\n if (it) {\n\n if (it->Data2().Contains(atMarker))\n\n atMarker--;\n\n else\n\n atMarker = it->Data2().Max() - 1;\n\n timeSelected(atMarker);\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 27, "score": 123990.48577413846 }, { "content": " //: Set range of times to display\n\n \n\n bool GUISetLocalSegment(const RealRangeC &segRange, bool redraw=true)\n\n { return Body().GUISetLocalSegment(segRange, redraw); }\n\n //: Set the local segment.\n\n // Segments with a negative size will not be displayed\n\n \n\n bool SetDisplaySpan(RealT &size, bool redraw=true)\n\n { return Body().SetDisplaySpan(size, redraw); }\n\n //: Set the length of time to display.\n\n \n\n bool GUISetDisplaySpan(RealT &size, bool redraw=true)\n\n { return Body().GUISetDisplaySpan(size, redraw); }\n\n //: Set the length of time to display.\n\n \n\n Signal1C<RealT> &SigTimeSelected()\n\n { return Body().SigTimeSelected(); }\n\n //: Frame selected signal\n\n \n\n const RealRangeC & LocalSegment() const\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 28, "score": 123990.419556089 }, { "content": " Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetEvents,_events, redraw));\n\n return true;\n\n }\n\n \n\n //: Set event list.\n\n \n\n bool EventTimeLineBodyC::GUISetEvents(DListC<Tuple2C<IntT,RealRangeC> > &_events, bool redraw) {\n\n events = _events;\n\n if (redraw)\n\n GUIDraw();\n\n return true;\n\n }\n\n\n\n //: Set the inactive segment list\n\n bool EventTimeLineBodyC::SetActiveSegments(DListC<RealRangeC> & 
_segments, bool redraw)\n\n {\n\n Manager.Queue(Trigger(EventTimeLineC(*this), &EventTimeLineC::GUISetActiveSegments, _segments, redraw));\n\n return true;\n\n }\n\n\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 29, "score": 123989.88743433222 }, { "content": " : RawCanvasC(bod)\n\n {}\n\n \n\n EventTimeLineBodyC& Body()\n\n { return static_cast<EventTimeLineBodyC&>(WidgetC::Body()); }\n\n //: Access body.\n\n\n\n const EventTimeLineBodyC& Body() const\n\n { return static_cast<const EventTimeLineBodyC&>(WidgetC::Body()); }\n\n //: Access body.\n\n \n\n public:\n\n\n\n bool Goto(RealT &time, bool redraw=true)\n\n { return Body().Goto(time, redraw); }\n\n //: Centre on a specific time.\n\n \n\n bool GUIGoto(RealT &time, bool redraw=true)\n\n { return Body().GUIGoto(time, redraw); }\n\n //: Centre on a specific time.\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 30, "score": 123989.42014114263 }, { "content": " \n\n bool SetMarker(RealT time, bool redraw=true)\n\n { return Body().SetMarker(time, redraw); }\n\n //: Set marker position.\n\n \n\n bool GUISetMarker(RealT time, bool redraw=true)\n\n { return Body().GUISetMarker(time, redraw); }\n\n //: Set marker position.\n\n \n\n bool SetMarkerSpan(RealT span, bool redraw=true)\n\n { return Body().SetMarkerSpan(span, redraw); }\n\n //: Set the span of the marker\n\n\n\n bool GUISetMarkerSpan(RealT span, bool redraw=true)\n\n { return Body().GUISetMarkerSpan(span, redraw); }\n\n //: Set the span\n\n\n\n bool SetEvents(DListC<Tuple2C<IntT,RealRangeC> > &events, bool redraw=true)\n\n { return Body().SetEvents(events, redraw); }\n\n //: Set event list.\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 31, "score": 123987.63592122095 }, { "content": " g_object_unref(gc);\n\n return true;\n\n }\n\n \n\n //: Destructor.\n\n \n\n EventTimeLineBodyC::~EventTimeLineBodyC() {\n\n if(markerGc != 0) {\n\n Manager.Queue(Trigger(DestroyGc,markerGc));\n\n markerGc = 0;\n\n }\n\n if(segmentGc != 0) {\n\n Manager.Queue(Trigger(DestroyGc,segmentGc));\n\n segmentGc = 0; \n\n }\n\n }\n\n \n\n //: Create the widget.\n\n \n\n bool EventTimeLineBodyC::CommonCreate(GtkWidget *_widget) {\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 32, "score": 123987.44341745586 }, { "content": " GUIDraw();\n\n }\n\n return true;\n\n }\n\n \n\n //: Set marker position.\n\n \n\n bool EventTimeLineBodyC::SetMarker(RealT time, bool redraw) {\n\n Manager.Queue(Trigger(EventTimeLineC(*this),&EventTimeLineC::GUISetMarker,time, redraw));\n\n return true;\n\n }\n\n \n\n //: Set marker position.\n\n \n\n bool EventTimeLineBodyC::GUISetMarker(RealT time, bool redraw) {\n\n ONDEBUG(cerr << \"EventTimeLineBodyC::GUISetMarker(RealT time=\" << time << \")\\n\");\n\n RavlAssertMsg(Manager.IsGUIThread(), \"GUI Thread only\");\n\n atMarker = time;\n\n if (redraw)\n\n GUIDraw();\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 33, "score": 123987.40827864764 }, { "content": " \n\n bool GUISetMarker(RealT time, bool redraw=true);\n\n //: Set marker position.\n\n\n\n bool SetMarkerSpan(RealT span, bool redraw=true);\n\n //: Set the span of the marker \n\n\n\n bool GUISetMarkerSpan(RealT span, bool redraw=true);\n\n //: Set the span\n\n \n\n bool SetEvents(DListC<Tuple2C<IntT,RealRangeC> > &events, bool redraw);\n\n //: Set event list.\n\n \n\n bool GUISetEvents(DListC<Tuple2C<IntT,RealRangeC> > &events, bool redraw);\n\n //: Set event list.\n\n\n\n bool SetActiveSegments(DListC<RealRangeC> & segments, bool redraw);\n\n //: Set list of inactive 
segments\n\n\n\n bool GUISetActiveSegments(DListC<RealRangeC> & segments, bool redraw);\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 34, "score": 123986.61168479823 }, { "content": " IndexRangeC rng2((clipped.Min() - displayRange.Min()) * scale,\n\n (clipped.Max() - displayRange.Min()) * scale);\n\n IndexRangeC rng1 = displayArea.Range1();\n\n IndexRange2dC box(rng1, rng2);\n\n box.Range2().ClipBy(arrowBox.Range2());\n\n GUIDrawRectangle(GUIDrawGCWhite(),box,true);\n\n }\n\n\n\n\n\n if(markerGc != 0) {\n\n IndexRangeC horizRange ((atMarker - displayRange.Min()) * scale,\n\n (atMarker+ (m_atSpan) - displayRange.Min()) * scale);\n\n IndexRange2dC markRange(displayArea.Range1(),\n\n horizRange);\n\n if(markRange.Range2().Size() < 3) {// Make tiny bars are big enough to see.\n\n markRange.Range2().Expand((3 - markRange.Range2().Size())/2);\n\n }\n\n //markRange.Range2().ClipBy(arrowBox.Range2());\n\n GUIDrawRectangle(markerGc,markRange,true);\n\n }\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 35, "score": 123986.55656388245 }, { "content": " //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n EventTimeLineC(IntT srow,IntT scol,const RealRangeC &rng ,const DListC<Tuple2C<IntT,RealRangeC> > &events) \n\n : RawCanvasC(*new EventTimeLineBodyC(srow,scol,rng,events))\n\n {}\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n EventTimeLineC(IntT srow,IntT scol,const RealRangeC &rng) \n\n : RawCanvasC(*new EventTimeLineBodyC(srow,scol,rng))\n\n {}\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n \n\n protected:\n\n EventTimeLineC(EventTimeLineBodyC &bod)\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 36, "score": 123986.34823607298 }, { "content": "\n\n // Truncate the clip to the display range.\n\n RealRangeC clipped(it->Data2());\n\n clipped.ClipBy(displayRange);\n\n\n\n\n\n IndexRangeC rng2((clipped.Min() - displayRange.Min()) * scale,\n\n (clipped.Max() - displayRange.Min()) * scale);\n\n // ONDEBUG(cerr << \"Elm=\" << rng2 << \"\\n\");\n\n IndexRange2dC box(vertRange, rng2);\n\n if(box.Range2().Size() == 0) // Make tiny bars at least 1 pixel wide.\n\n box.Range2().Max()++; \n\n box.ClipBy(arrowBox);\n\n GUIDrawRectangle(GUIDrawGCBlack(),box,true);\n\n }\n\n#if 0\n\n IndexC midH = displayArea.Range2().Center();\n\n GUIDrawLine(GUIDrawGCGrey(),Index2dC(displayArea.Range1().Min(),midH),Index2dC(displayArea.Range1().Max(),midH));\n\n#endif\n\n return true;\n\n }\n\n\n\n \n\n}\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 37, "score": 123986.18168181273 }, { "content": "\n\n\n\n\n\n // Draw segment.\n\n// if(segmentGc != 0 && m_localSegment.Size() > 0 && m_localSegment.IsOverlapping(displayRange)) {\n\n RealRangeC clippedLocalSegment(m_localSegment);\n\n clippedLocalSegment.ClipBy(displayRange);\n\n if(segmentGc != 0 && clippedLocalSegment.Size() > 0 && clippedLocalSegment.IsOverlapping(displayRange)) {\n\n RealRangeC dispSegment((clippedLocalSegment.Min() - displayRange.Min()) * scale,\n\n (clippedLocalSegment.Max() - displayRange.Min()) * scale);\n\n \n\n if(dispSegment.Size() == 0) // Make tiny bars at least 1 pixel wide.\n\n dispSegment.Max()++;\n\n IndexRange2dC markRange(displayArea.Range1().Shrink(3),\n\n dispSegment.IndexRange());\n\n \n\n markRange.Range2().ClipBy(arrowBox.Range2());\n\n if(markRange.Range2().Size() > 0)\n\n 
GUIDrawRectangle(segmentGc,markRange,true);\n\n }\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 38, "score": 123984.77045616817 }, { "content": "// This file is part of RAVL, Recognition And Vision Library \n\n// Copyright (C) 2006, OmniPerception Ltd.\n\n// This code may be redistributed under the terms of the GNU Lesser\n\n// General Public License (LGPL). See the lgpl.licence file for details or\n\n// see http://www.gnu.org/copyleft/lesser.html\n\n// file-header-ends-here\n\n#ifndef RAVL_EVENTTIMELINE_HEADER\n\n#define RAVL_EVENTTIMELINE_HEADER 1\n\n//! author=\"Charles Galambos\"\n\n//! rcsid=\"$Id: EventTimeLine.hh 7501 2010-02-09 18:33:20Z cyberplug $\"\n\n//! lib=RavlGUIUtil\n\n\n\n#include \"Ravl/GUI/RawCanvas.hh\"\n\n#include \"Ravl/RealRange1d.hh\"\n\n#include \"Ravl/GUI/MouseEvent.hh\"\n\n#include \"Ravl/Tuple2.hh\"\n\n\n\nnamespace RavlGUIN {\n\n \n\n using namespace RavlImageN;\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 39, "score": 123984.46896658928 }, { "content": " return true;\n\n }\n\n \n\n //: Centre on a specific time.\n\n\n\n\n\n bool EventTimeLineBodyC::SetMarkerSpan(RealT span, bool redraw){\n\n Manager.Queue(Trigger(EventTimeLineC(*this), &EventTimeLineC::GUISetMarkerSpan, span, redraw));\n\n return true;\n\n }\n\n\n\n\n\n bool EventTimeLineBodyC::GUISetMarkerSpan(RealT span, bool redraw){\n\n RavlAssertMsg(Manager.IsGUIThread(), \"GUI Thread only\");\n\n m_atSpan = span;\n\n if (redraw)\n\n GUIDraw();\n\n return true;\n\n }\n\n\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 40, "score": 123983.55803763261 }, { "content": " //: Set a list of inactive segments. \n\n \n\n bool SetDisplaySpan(RealT &size, bool redraw);\n\n //: Set the length of time to display.\n\n \n\n bool GUISetDisplaySpan(RealT &size, bool redraw);\n\n //: Set the length of time to display.\n\n\n\n inline const RealRangeC & LocalSegment() const\n\n { return m_localSegment; }\n\n //: Access local segment.\n\n\n\n inline const RealRangeC & DisplayRange() const\n\n { return displayRange; }\n\n //: Access display range\n\n\n\n inline const RealT & At() const\n\n { return atMarker; }\n\n //: Access at position.\n\n\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 41, "score": 123981.82365680797 }, { "content": "#define ONDEBUG(x)\n\n#endif\n\n\n\nnamespace RavlGUIN {\n\n\n\n const RealT EventTimeLineBodyC::m_arrowWidth = 0.025;\n\n const RealT EventTimeLineBodyC::m_arrowBorder = 0.25;\n\n \n\n //: Default constructor.\n\n \n\n EventTimeLineBodyC::EventTimeLineBodyC()\n\n : RawCanvasBodyC(15,15),\n\n m_localSegment(3,-8),\n\n timeSelected(0.0),\n\n atMarker(0),\n\n m_atSpan(0),\n\n updateId(0),\n\n markerGc(0),\n\n segmentGc(0)\n\n {}\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 42, "score": 123981.22175983588 }, { "content": " }\n\n \n\n return true;\n\n }\n\n \n\n if (mouseCol > displayArea.Range2().Max() - arrowWidth) {\n\n // Right arrow clicked\n\n DLIterC< Tuple2C<IntT, RealRangeC> > it(events);\n\n for(; it; it++) {\n\n if (it->Data2().Min() > atMarker)\n\n break;\n\n \n\n }\n\n \n\n if (it) {\n\n if (it->Data2().Contains(atMarker))\n\n atMarker++;\n\n else\n\n atMarker = it->Data2().Min();\n\n timeSelected(atMarker);\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 43, "score": 123981.14044210056 }, { "content": "// This file is part of RAVL, Recognition And Vision Library \n\n// Copyright (C) 2006, OmniPerception Ltd.\n\n// This code may be redistributed under the terms of the GNU Lesser\n\n// General Public 
License (LGPL). See the lgpl.licence file for details or\n\n// see http://www.gnu.org/copyleft/lesser.html\n\n// file-header-ends-here\n\n\n\n//! author=\"Charles Galambos\"\n\n//! rcsid=\"$Id: EventTimeLine.cc 7668 2010-03-26 11:53:03Z robowaz $\"\n\n//! lib=RavlGUIUtil\n\n\n\n#include \"Ravl/GUI/EventTimeLine.hh\"\n\n#include \"Ravl/GUI/Manager.hh\"\n\n#include \"EventTimeLine.hh\"\n\n#include <gdk/gdk.h>\n\n\n\n#define DODEBUG 0\n\n#if DODEBUG\n\n#define ONDEBUG(x) x\n\n#else\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 44, "score": 123980.57953333581 }, { "content": " \n\n // Draw the centre line\n\n GUIDrawLine(GUIDrawGCBlack(),Index2dC(midV,minCol),Index2dC(midV,maxCol));\n\n\n\n // Draw the arrow (left)\n\n GUIDrawLine(GUIDrawGCBlack(), Index2dC(midV, displayArea.Range2().Min() + arrowBorder), Index2dC(vertRange.Min(), minCol - arrowBorder));\n\n GUIDrawLine(GUIDrawGCBlack(), Index2dC(midV, displayArea.Range2().Min() + arrowBorder), Index2dC(vertRange.Max(), minCol - arrowBorder));\n\n GUIDrawLine(GUIDrawGCBlack(), Index2dC(vertRange.Min(), minCol - arrowBorder), Index2dC(vertRange.Max(), minCol - arrowBorder));\n\n // Draw the arrow (right)\n\n GUIDrawLine(GUIDrawGCBlack(), Index2dC(midV, displayArea.Range2().Max() - arrowBorder), Index2dC(vertRange.Min(), maxCol + arrowBorder));\n\n GUIDrawLine(GUIDrawGCBlack(), Index2dC(midV, displayArea.Range2().Max() - arrowBorder), Index2dC(vertRange.Max(), maxCol + arrowBorder));\n\n GUIDrawLine(GUIDrawGCBlack(), Index2dC(vertRange.Min(), maxCol + arrowBorder), Index2dC(vertRange.Max(), maxCol + arrowBorder));\n\n \n\n \n\n // Draw segment withing the displayed time\n\n for( DLIterC< Tuple2C<IntT, RealRangeC> > it(events); it ; it++)\n\n {\n\n // skip segments that start after our max, or end before our min.\n\n if ((it->Data2().Max() < minLimit) || (it->Data2().Min() > maxLimit))\n\n continue;\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 45, "score": 123977.67265258421 }, { "content": " { return Body().LocalSegment(); }\n\n //: Access local segment.\n\n\n\n const RealRangeC & DisplayRange() const\n\n { return Body().DisplayRange(); }\n\n //: Access display range\n\n\n\n const RealT & At() const\n\n { return Body().At(); }\n\n //: Access at position.\n\n\n\n const DListC<Tuple2C<IntT,RealRangeC> > & Events() const\n\n { return Body().Events() ; }\n\n //: Access to events.\n\n\n\n friend class EventTimeLineBodyC;\n\n }; \n\n}\n\n#endif\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 46, "score": 123974.8298298203 }, { "content": " inline const DListC<Tuple2C<IntT,RealRangeC> > & Events() const\n\n { return events; }\n\n //: Access to the events\n\n \n\n protected:\n\n bool CommonCreate(GtkWidget *_widget = NULL);\n\n //: Common GUI creation\n\n\n\n bool EventConfigure(GdkEvent* &event);\n\n //: Configure.\n\n \n\n bool EventExpose(GdkEvent* &event);\n\n //: Expose of area.\n\n \n\n bool EventMousePress(MouseEventC &mousePress);\n\n //: Event mouse press.\n\n \n\n bool GUIDraw();\n\n //: Draw widget on screen.\n\n\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.hh", "rank": 47, "score": 123974.77295716452 }, { "content": " IndexC minCol = displayArea.Range2().Min() + arrowWidth;\n\n IndexC maxCol = displayArea.Range2().Max() - arrowWidth;\n\n IndexRangeC vertRange(displayArea.Range1().Shrink(4));\n\n \n\n // How much time do the arrow obscure?\n\n RealT arrowTime = m_arrowWidth * displayRange.Size();\n\n IndexRange2dC arrowBox(vertRange, displayArea.Range2().Shrink(arrowWidth));\n\n const RealT minLimit 
= displayRange.Min() + arrowTime;\n\n const RealT maxLimit = displayRange.Max() - arrowTime;\n\n\n\n\n\n // draw active segments\n\n for (DLIterC<RealRangeC> iter(m_activeSegments) ; iter.IsElm() ; iter.Next())\n\n {\n\n // skip any out of range.\n\n if ((iter->Max()<minLimit) || (iter->Min()>maxLimit))\n\n continue;\n\n\n\n RealRangeC clipped(iter.Data());\n\n clipped.ClipBy(displayRange);\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 48, "score": 123971.86330008018 }, { "content": " displayArea = newRec;\n\n // Other config...\n\n if(markerGc == 0) {\n\n markerGc = gdk_gc_new(DrawArea()); \n\n gdk_gc_copy(markerGc,GUIDrawGC());\n\n \n\n GdkColor colour;\n\n colour.pixel = 0;\n\n colour.red = 255 * 255;\n\n colour.green = 0;\n\n colour.blue = 0;\n\n gdk_gc_set_rgb_fg_color (markerGc,&colour);\n\n }\n\n if(segmentGc == 0) {\n\n segmentGc = gdk_gc_new(DrawArea()); \n\n gdk_gc_copy(segmentGc,GUIDrawGC());\n\n \n\n GdkColor colour;\n\n colour.pixel = 0;\n\n colour.red = 128 * 255;\n", "file_path": "RAVL2/GUI/Util/EventTimeLine.cc", "rank": 49, "score": 123970.58246594263 }, { "content": "\n\n#include \"../.././Core/IO/Event.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/DP/Event.hh", "rank": 50, "score": 123792.83810341278 }, { "content": "\n\n#include \"../.././Core/System/TimeCode.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/TimeCode.hh", "rank": 51, "score": 123770.12300055586 }, { "content": " bool AddEvent();\n\n //: Add Event between left and right markers\n\n \n\n bool AddEvent(RealRangeC data);\n\n //: Add Event with range data\n\n \n\n bool DeleteEvent();\n\n //: Delete Event whose position starts between left and right markers\n\n // may want to change\n\n \n\n bool SetEventVal(RealT time);\n\n //: Change value of events at time. Cycles through 0 to 5\n\n // may want to change\n\n \n\n bool SetMarkers(RealT time, RealT left,RealT right);\n\n //: Set marker positions\n\n \n\n TranscriptionBaseListC &GetEvents();\n\n //: Get Event List\n\n \n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 60, "score": 120704.42406712878 }, { "content": " IndexRange2dC displayArea; // Area to use for displaying images.\n\n \n\n RealRangeC displayRange; // Range of times to display.\n\n TranscriptionBaseListC events; // list of events\n\n \n\n Signal3C<RealT,RealT,RealT> timeSelected1; // Frame selected signal\n\n Signal1C<RealT> timeSelected; // Frame selected signal\n\n \n\n RealT atMarker; // Marker for where you are in the sequence.\n\n RealT markerLeft; // Left marker\n\n RealT markerRight; // Right marker\n\n\n\n UIntT updateId;\n\n \n\n GdkGC *markerGc;\n\n GdkGC *markerGcL;\n\n GdkGC *markerGcR;\n\n GdkGC *text;\n\n \n\n StringC label; //label\n\n \n\n \n\n friend class EventTimeLineC;\n\n };\n\n \n\n //! userlevel=Normal\n\n //: Event time line.\n\n // Display a set of events as black marks on a white backgrouond. 
A red marker indicates current position.\n\n \n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 65, "score": 120703.3102258494 }, { "content": " \n\n bool DeleteEvent() \n\n { return Body().DeleteEvent(); }\n\n //: Delete Event whose position starts between left and right markers\n\n // may want to change\n\n \n\n bool SetEventVal(RealT time) \n\n { return Body().SetEventVal(time); }\n\n //: Delete Event whose position starts between left and right markers\n\n // may want to change\n\n \n\n bool SetMarkers(RealT time,RealT left, RealT right){return Body().SetMarkers(time,left,right);}\n\n //: Set markers in position\n\n \n\n TranscriptionBaseListC &GetEvents() \n\n { return Body().GetEvents(); }\n\n //: Access event lis\n\n \n\n friend class EventTimeLineBodyC;\n\n };\n\n \n\n \n\n}\n\n\n\n\n\n\n\n#endif\n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 67, "score": 120700.11212544971 }, { "content": " Signal1C<RealT> &SigTimeSelected()\n\n { return Body().SigTimeSelected(); }\n\n //: Frame selected signal\n\n \n\n RealT GetMarkerRight(){return Body().GetMarkerRight();}\n\n //: Get marker right\n\n \n\n RealT GetMarkerLeft(){return Body().GetMarkerLeft();}\n\n //: Get marker left\n\n \n\n RealT GetMarkerTime(){return Body().GetMarkerTime();}\n\n //: Get marker time\n\n \n\n bool AddEvent() \n\n { return Body().AddEvent(); }\n\n //: Add Event between left and right markers\n\n \n\n bool AddEvent(RealRangeC data) \n\n { return Body().AddEvent(data); }\n\n //: Add Event with range data\n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 70, "score": 120698.93141803914 }, { "content": " protected:\n\n virtual bool Create();\n\n //: Create the widget.\n\n \n\n bool EventConfigure(GdkEvent* &event);\n\n //: Configure.\n\n \n\n bool EventExpose(GdkEvent* &event);\n\n //: Expose of area.\n\n \n\n bool EventMousePress(MouseEventC &mousePress);\n\n //: Event mouse press.\n\n // Middle mouse button selects time, left positions left marker, right right marker\n\n // Left plus shift adds label\n\n // Middle plus ctrl deletes label whose range lies between l+r markers\n\n // Middle plus shift changes label value\n\n \n\n bool GUIDraw();\n\n //: Draw widget on screen.\n\n \n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 73, "score": 120694.62336887096 }, { "content": " //: Set event list.\n\n \n\n bool SetDisplaySpan(RealT &size);\n\n //: Set the length of time to display.\n\n \n\n bool GUISetDisplaySpan(RealT &size);\n\n //: Set the length of time to display.\n\n \n\n StringC &Label(){return label;}\n\n //: Access timeline a label\n\n \n\n RealT GetMarkerRight();\n\n //: Get right marker\n\n \n\n RealT GetMarkerLeft();\n\n //: Get left marker\n\n \n\n RealT GetMarkerTime();\n\n //: Get time selected\n\n \n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 74, "score": 120694.467244746 }, { "content": " //!param: events - List of events.\n\n\n\n EventTimeLineBodyC(IntT srow,IntT scol,const RealRangeC &rng);\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n ~EventTimeLineBodyC();\n\n //: Destructor.\n\n \n\n bool SetDisplayRange(RealRangeC &rng);\n\n //: Set range of times to display\n\n \n\n bool GUISetDisplayRange(RealRangeC &rng);\n\n //: Set range of times to display\n\n \n\n Signal3C<RealT,RealT,RealT> &SigTimeSelected1()\n\n { return timeSelected1; }\n\n //: Get marker position signal\n\n \n", "file_path": 
"Development/OrignalDev/Util/EventTimeLine.hh", "rank": 75, "score": 120694.00245551305 }, { "content": " bool SetMarker(RealT time)\n\n { return Body().SetMarker(time); }\n\n //: Set marker position.\n\n \n\n bool GUISetMarker(RealT time)\n\n { return Body().GUISetMarker(time); }\n\n //: Set marker position.\n\n \n\n bool SetEvents(TranscriptionBaseListC &events)\n\n { return Body().SetEvents(events); }\n\n //: Set event list.\n\n \n\n bool GUISetEvents(TranscriptionBaseListC &events)\n\n { return Body().GUISetEvents(events); }\n\n //: Set event list.\n\n \n\n bool SetDisplayRange(RealRangeC &rng)\n\n { return Body().SetDisplayRange(rng); }\n\n //: Set range of times to display\n\n \n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 76, "score": 120693.42824242622 }, { "content": " bool GUISetDisplayRange(RealRangeC &rng)\n\n { return Body().GUISetDisplayRange(rng); }\n\n //: Set range of times to display\n\n \n\n bool SetDisplaySpan(RealT &size)\n\n { return Body().SetDisplaySpan(size); }\n\n //: Set the length of time to display.\n\n \n\n bool GUISetDisplaySpan(RealT &size)\n\n { return Body().GUISetDisplaySpan(size); }\n\n //: Set the length of time to display.\n\n \n\n StringC & Label() \n\n { return Body().Label(); }\n\n //: Access label\n\n \n\n Signal1C<RealT> &SigTimeSelected1()\n\n { return Body().SigTimeSelected1(); }\n\n //: Frame selected signal\n\n \n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 79, "score": 120691.41043540982 }, { "content": "// This file is part of RAVL, Recognition And Vision Library \n\n// Copyright (C) 2006, OmniPerception Ltd.\n\n// This code may be redistributed under the terms of the GNU Lesser\n\n// General Public License (LGPL). See the lgpl.licence file for details or\n\n// see http://www.gnu.org/copyleft/lesser.html\n\n// file-header-ends-here\n\n#ifndef RAVL_GUIEVENTTIMELINE_HEADER\n\n#define RAVL_GUIEVENTTIMELINE_HEADER 1\n\n//! author=\"Charles Galambos\"\n\n//! rcsid=\"$Id: EventTimeLine.hh 5371 2006-02-28 08:18:33Z craftit $\"\n\n//! 
docentry=\"Ravl.API.GUI.Util\"\n\n\n\n#include \"Ravl/GUI/RawCanvas.hh\"\n\n#include \"Ravl/RealRange1d.hh\"\n\n#include \"Ravl/GUI/MouseEvent.hh\"\n\n#include \"Ravl/Tuple2.hh\"\n\n#include \"Ravl/Threads/Signal3.hh\"\n\n#include \"Ravl/Threads/Signal.hh\"\n\n#include \"Jack/Transcription.hh\"\n\n\n\nnamespace RavlGUIN {\n\n using namespace RavlAudioN;\n\n using namespace RavlGUIN;\n\n using namespace RavlImageN;\n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 80, "score": 120690.84575979492 }, { "content": " //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n EventTimeLineC(IntT srow,IntT scol,const RealRangeC &rng ,const TranscriptionBaseListC &events) \n\n : RawCanvasC(*new EventTimeLineBodyC(srow,scol,rng,events))\n\n {}\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n EventTimeLineC(IntT srow,IntT scol,const RealRangeC &rng) \n\n : RawCanvasC(*new EventTimeLineBodyC(srow,scol,rng))\n\n {}\n\n //: Constructor.\n\n //!param: rng - Range of times to display.\n\n //!param: events - List of events.\n\n \n\n \n\n protected:\n\n EventTimeLineC(EventTimeLineBodyC &bod)\n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 82, "score": 120688.38838411238 }, { "content": " : RawCanvasC(bod)\n\n {}\n\n \n\n EventTimeLineBodyC& Body()\n\n { return static_cast<EventTimeLineBodyC&>(WidgetC::Body()); }\n\n //: Access body.\n\n\n\n const EventTimeLineBodyC& Body() const\n\n { return static_cast<const EventTimeLineBodyC&>(WidgetC::Body()); }\n\n //: Access body.\n\n \n\n public:\n\n bool Goto(RealT &time)\n\n { return Body().Goto(time); }\n\n //: Centre on a specific time.\n\n \n\n bool GUIGoto(RealT &time)\n\n { return Body().GUIGoto(time); }\n\n //: Centre on a specific time.\n\n \n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 85, "score": 120684.40872502331 }, { "content": " Signal1C<RealT> &SigTimeSelected()\n\n { return timeSelected; }\n\n //: Frame selected signal\n\n \n\n bool Goto(RealT &time);\n\n //: Centre on a specific time.\n\n \n\n bool GUIGoto(RealT &time);\n\n //: Centre on a specific time.\n\n \n\n bool SetMarker(RealT time);\n\n //: Set marker position.\n\n \n\n bool GUISetMarker(RealT time);\n\n //: Set marker position.\n\n \n\n bool SetEvents(TranscriptionBaseListC &events);\n\n //: Set event list.\n\n \n\n bool GUISetEvents(TranscriptionBaseListC &events);\n", "file_path": "Development/OrignalDev/Util/EventTimeLine.hh", "rank": 86, "score": 120679.71320296772 }, { "content": "\n\n#include \"../.././GUI/GTK/ColourSelector.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/GUI/ColourSelector.hh", "rank": 89, "score": 120528.10467058569 }, { "content": "\n\n#include \"../.././Core/IO/OffsetScale.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/DP/OffsetScale.hh", "rank": 90, "score": 120516.4941098501 }, { "content": "\n\n#include \"../.././Image/Base/ScaleValues.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/Image/ScaleValues.hh", "rank": 91, "score": 120516.4941098501 }, { "content": "\n\n#include \"../.././Image/Processing/Filters/Warp/WarpScale.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/Image/WarpScale.hh", "rank": 92, "score": 120516.24361505262 }, { "content": "\n\n#include \"../.././GUI/GTK/MouseEvent.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/GUI/MouseEvent.hh", "rank": 93, "score": 120499.43693760851 }, { "content": "\n\n#include \"../.././Core/IO/EventSet.hh\"\n\n\n", 
"file_path": "RAVL2/MSVC/include/Ravl/DP/EventSet.hh", "rank": 94, "score": 120499.43693760851 }, { "content": "\n\n#include \"../.././GUI/GTK/EventBox.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/GUI/EventBox.hh", "rank": 95, "score": 120499.43693760851 }, { "content": "\n\n#include \"../.././OS/Threads/Posix/ThreadEvent.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/Threads/ThreadEvent.hh", "rank": 96, "score": 120499.31061653135 }, { "content": "\n\n#include \"../.././GUI/GTK/TextBox.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/GUI/TextBox.hh", "rank": 97, "score": 120472.38539892128 }, { "content": "\n\n#include \"../.././GUI/GTK/TextView.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/GUI/TextView.hh", "rank": 98, "score": 120472.38539892128 }, { "content": "\n\n#include \"../.././GUI/GTK/TextEntry.hh\"\n\n\n", "file_path": "RAVL2/MSVC/include/Ravl/GUI/TextEntry.hh", "rank": 99, "score": 120472.38539892128 } ]
C++
printscan/print/drivers/usermode/tools/uni/minidev.new/tips.cpp
npocmaka/Windows-Server-2003
5c6fe3db626b63a384230a1aa6b92ac416b0765f
#include "StdAfx.H" #include "Resource.H" #include <WinReg.H> #include <Sys\Stat.H> #include <Sys\Types.H> #include "tips.h" #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif #define MAX_BUFLEN 1000 static const TCHAR szSection[] = _T("Tip"); static const TCHAR szIntFilePos[] = _T("FilePos"); static const TCHAR szTimeStamp[] = _T("TimeStamp"); static const TCHAR szIntStartup[] = _T("StartUp"); CTipOfTheDay::CTipOfTheDay(CWnd* pParent ) : CDialog(IDD_TIP, pParent) { m_bStartup = TRUE; CWinApp* pApp = AfxGetApp(); m_bStartup = !pApp->GetProfileInt(szSection, szIntStartup, 0); UINT iFilePos = pApp->GetProfileInt(szSection, szIntFilePos, 0); CString csTipFile = pApp->m_pszHelpFilePath; csTipFile = csTipFile.Left(csTipFile.ReverseFind(_T('\\'))); csTipFile = csTipFile + _T("\\tips.txt"); m_pStream = fopen(csTipFile, "r"); if (m_pStream == NULL) { m_strTip.LoadString(CG_IDS_FILE_ABSENT); return; } struct _stat buf; _fstat(_fileno(m_pStream), &buf); CString strCurrentTime = ctime(&buf.st_ctime); strCurrentTime.TrimRight(); CString strStoredTime = pApp->GetProfileString(szSection, szTimeStamp, NULL); if (strCurrentTime != strStoredTime) { iFilePos = 0; pApp->WriteProfileString(szSection, szTimeStamp, strCurrentTime); } if (fseek(m_pStream, iFilePos, SEEK_SET) != 0) { AfxMessageBox(CG_IDP_FILE_CORRUPT); } else { GetNextTipString(m_strTip); } } CTipOfTheDay::~CTipOfTheDay() { if (m_pStream != NULL) { CWinApp* pApp = AfxGetApp(); pApp->WriteProfileInt(szSection, szIntFilePos, ftell(m_pStream)); fclose(m_pStream); } } void CTipOfTheDay::DoDataExchange(CDataExchange* pDX) { CDialog::DoDataExchange(pDX); DDX_Check(pDX, IDC_STARTUP, m_bStartup); DDX_Text(pDX, IDC_TIPSTRING, m_strTip); } BEGIN_MESSAGE_MAP(CTipOfTheDay, CDialog) ON_BN_CLICKED(IDC_NEXTTIP, OnNextTip) ON_WM_CTLCOLOR() ON_WM_PAINT() END_MESSAGE_MAP() void CTipOfTheDay::OnNextTip() { GetNextTipString(m_strTip); UpdateData(FALSE); } void CTipOfTheDay::GetNextTipString(CString& strNext) { LPTSTR lpsz = strNext.GetBuffer(MAX_BUFLEN); BOOL bStop = FALSE; while (!bStop) { if (_fgetts(lpsz, MAX_BUFLEN, m_pStream) == NULL) { if (fseek(m_pStream, 0, SEEK_SET) != 0) AfxMessageBox(CG_IDP_FILE_CORRUPT); } else { if (*lpsz != ' ' && *lpsz != '\t' && *lpsz != '\n' && *lpsz != ';' && *lpsz != '*') { bStop = TRUE; } } } strNext.ReleaseBuffer(); } HBRUSH CTipOfTheDay::OnCtlColor(CDC* pDC, CWnd* pWnd, UINT nCtlColor) { if (pWnd->GetDlgCtrlID() == IDC_TIPSTRING) return (HBRUSH)GetStockObject(WHITE_BRUSH); return CDialog::OnCtlColor(pDC, pWnd, nCtlColor); } void CTipOfTheDay::OnOK() { CDialog::OnOK(); CWinApp* pApp = AfxGetApp(); pApp->WriteProfileInt(szSection, szIntStartup, !m_bStartup); } BOOL CTipOfTheDay::OnInitDialog() { CDialog::OnInitDialog(); if (m_pStream == NULL) GetDlgItem(IDC_NEXTTIP)->EnableWindow(FALSE); return TRUE; } void CTipOfTheDay::OnPaint() { CPaintDC dc(this); CWnd* pStatic = GetDlgItem(IDC_BULB); CRect rect; pStatic->GetWindowRect(&rect); ScreenToClient(&rect); CBrush brush; brush.CreateStockObject(WHITE_BRUSH); dc.FillRect(rect, &brush); CBitmap bmp; bmp.LoadBitmap(IDB_LIGHTBULB); BITMAP bmpInfo; bmp.GetBitmap(&bmpInfo); CDC dcTmp; dcTmp.CreateCompatibleDC(&dc); dcTmp.SelectObject(&bmp); rect.bottom = bmpInfo.bmHeight + rect.top; dc.BitBlt(rect.left, rect.top, rect.Width(), rect.Height(), &dcTmp, 0, 0, SRCCOPY); CString strMessage; strMessage.LoadString(CG_IDS_DIDYOUKNOW); rect.left += bmpInfo.bmWidth; dc.DrawText(strMessage, rect, DT_VCENTER | DT_SINGLELINE); }
#include "StdAfx.H" #include "Resource.H" #include <WinReg.H> #include <Sys\Stat.H> #include <Sys\Types.H> #include "tips.h" #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif #define MAX_BUFLEN 1000 static const TCHAR szSection[] = _T("Tip"); static const TCHAR szIntFilePos[] = _T("FilePos"); static const TCHAR szTimeStamp[] = _T("TimeStamp"); static const TCHAR szIntStartup[] = _T("StartUp"); CTipOfTheDay::CTipOfTheDay(CWnd* pParent ) : CDialog(IDD_TIP, pParent) { m_bStartup = TRUE; CWinApp* pApp = AfxGetApp(); m_bStartup = !pApp->GetProfileInt(szSection, szIntStartup, 0); UINT iFilePos = pApp->GetProfileInt(szSection, szIntFilePos, 0); CString csTipFile = pApp->m_pszHelpFilePath; csTipFile = csTipFile.Left(csTipFile.ReverseFind(_T('\\'))); csTipFile = csTipFile + _T("\\tips.txt"); m_pStream = fopen(csTipFile, "r"); if (m_pStream == NULL) { m_strTip.LoadString(CG_IDS_FILE_ABSENT); return; } struct _stat buf; _fstat(_fileno(m_pStream), &buf); CString strCurrentTime = ctime(&buf.st_ctime); strCurrentTime.TrimRight(); CString strStoredTime = pApp->GetProfileString(szSection, szTimeStamp, NULL); if (strCurrentTime != strStoredTime) { iFilePos = 0; pApp->WriteProfileString(szSection, szTimeStamp, strCurrentTime); } if (fseek(m_pStream, iFilePos, SEEK_SET) != 0) { AfxMessageBox(CG_IDP_FILE_CORRUPT); } else { GetNextTipString(m_strTip); } } CTipOfTheDay::~CTipOfTheDay() {
} } void CTipOfTheDay::DoDataExchange(CDataExchange* pDX) { CDialog::DoDataExchange(pDX); DDX_Check(pDX, IDC_STARTUP, m_bStartup); DDX_Text(pDX, IDC_TIPSTRING, m_strTip); } BEGIN_MESSAGE_MAP(CTipOfTheDay, CDialog) ON_BN_CLICKED(IDC_NEXTTIP, OnNextTip) ON_WM_CTLCOLOR() ON_WM_PAINT() END_MESSAGE_MAP() void CTipOfTheDay::OnNextTip() { GetNextTipString(m_strTip); UpdateData(FALSE); } void CTipOfTheDay::GetNextTipString(CString& strNext) { LPTSTR lpsz = strNext.GetBuffer(MAX_BUFLEN); BOOL bStop = FALSE; while (!bStop) { if (_fgetts(lpsz, MAX_BUFLEN, m_pStream) == NULL) { if (fseek(m_pStream, 0, SEEK_SET) != 0) AfxMessageBox(CG_IDP_FILE_CORRUPT); } else { if (*lpsz != ' ' && *lpsz != '\t' && *lpsz != '\n' && *lpsz != ';' && *lpsz != '*') { bStop = TRUE; } } } strNext.ReleaseBuffer(); } HBRUSH CTipOfTheDay::OnCtlColor(CDC* pDC, CWnd* pWnd, UINT nCtlColor) { if (pWnd->GetDlgCtrlID() == IDC_TIPSTRING) return (HBRUSH)GetStockObject(WHITE_BRUSH); return CDialog::OnCtlColor(pDC, pWnd, nCtlColor); } void CTipOfTheDay::OnOK() { CDialog::OnOK(); CWinApp* pApp = AfxGetApp(); pApp->WriteProfileInt(szSection, szIntStartup, !m_bStartup); } BOOL CTipOfTheDay::OnInitDialog() { CDialog::OnInitDialog(); if (m_pStream == NULL) GetDlgItem(IDC_NEXTTIP)->EnableWindow(FALSE); return TRUE; } void CTipOfTheDay::OnPaint() { CPaintDC dc(this); CWnd* pStatic = GetDlgItem(IDC_BULB); CRect rect; pStatic->GetWindowRect(&rect); ScreenToClient(&rect); CBrush brush; brush.CreateStockObject(WHITE_BRUSH); dc.FillRect(rect, &brush); CBitmap bmp; bmp.LoadBitmap(IDB_LIGHTBULB); BITMAP bmpInfo; bmp.GetBitmap(&bmpInfo); CDC dcTmp; dcTmp.CreateCompatibleDC(&dc); dcTmp.SelectObject(&bmp); rect.bottom = bmpInfo.bmHeight + rect.top; dc.BitBlt(rect.left, rect.top, rect.Width(), rect.Height(), &dcTmp, 0, 0, SRCCOPY); CString strMessage; strMessage.LoadString(CG_IDS_DIDYOUKNOW); rect.left += bmpInfo.bmWidth; dc.DrawText(strMessage, rect, DT_VCENTER | DT_SINGLELINE); }
if (m_pStream != NULL) { CWinApp* pApp = AfxGetApp(); pApp->WriteProfileInt(szSection, szIntFilePos, ftell(m_pStream)); fclose(m_pStream);
function_block-random_span
[]
C++
src/kdtree.cpp
pillowsofwind/mini_renderer
a02b16c4a96d21fc5920479ea496b31f462a6407
#include "kdtree.hpp" #include <climits> #include <iostream> #include <algorithm> using namespace std; void KDTree::load(int _size, HitPoint *_data) { m_nNode = 0; m_size = _size; m_data = _data; m_index = new int[m_size]; m_memory = new Node[m_size]; for (int i = 0; i < m_size; ++i) m_index[i] = i; } void KDTree::medianPartition(int l, int r, int dim, int k) { int mid = (l + r) >> 1, temp = m_index[mid]; int i = l, j = r; while (i < j) { while (m_data[m_index[i]].position[dim] < m_data[temp].position[dim]) ++i; while (m_data[m_index[j]].position[dim] > m_data[temp].position[dim]) --j; if (i <= j) { int t = m_index[i]; m_index[i] = m_index[j]; m_index[j] = t; ++i; --j; } } if (l < j && l <= k && k <= j) medianPartition(l, j, dim, k); if (i < r && i <= k && k <= r) medianPartition(i, r, dim, k); } KDTree::Node *KDTree::build(int l, int r, double *min, double *max) { if (r <= l) return nullptr; float temp = -1; int split; for (int i = 0; i < K; ++i) if (max[i] - min[i] > temp) { temp = max[i] - min[i]; split = i; } int mid = (l + r) >> 1; medianPartition(l, r - 1, split, mid); m_memory[m_nNode].value = m_index[mid]; m_memory[m_nNode].split = split; Node *node = &m_memory[m_nNode++]; temp = max[split]; max[split] = m_data[m_index[mid]].position[split]; node->left = build(l, mid, min, max); max[split] = temp; temp = min[split]; min[split] = m_data[m_index[mid]].position[split]; node->right = build(mid + 1, r, min, max); min[split] = temp; m_data[node->value].maxRadius2 = m_data[node->value].radius2; if (node->left && m_data[node->left->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->left->value].maxRadius2; if (node->right && m_data[node->right->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->right->value].maxRadius2; return node; } void KDTree::build() { m_nNode = 0; double *min = new double[K], *max = new double[K]; for (int i = 0; i < K; ++i) min[i] = LONG_MAX, max[i] = LONG_MIN; for (int i = 0; i < m_size; ++i) { Vector3f temp = m_data[i].position;; for (int j = 0; j < K; ++j) { if (m_data[i].position[j] < min[j]) min[j] = m_data[i].position[j]; if (m_data[i].position[j] > max[j]) max[j] = m_data[i].position[j]; } } m_root = build(0, m_size, min, max); } void KDTree::insertPhoton(Node *node, const Photon &photon) { if (node == NULL) return; int pos = node->value; if (Vector3f::dot(m_data[pos].position - photon.P, m_data[pos].position - photon.P) < m_data[pos].radius2) if (photon.object == m_data[pos].object) { m_data[pos].nNew += 1; m_data[pos].phi += photon.color; } int split = node->split; Node *another; if (photon.P[split] < m_data[pos].position[split]) { another = node->right; insertPhoton(node->left, photon); } else { another = node->left; insertPhoton(node->right, photon); } if ((another) && (m_data[pos].position[split] - photon.P[split]) * (m_data[pos].position[split] - photon.P[split]) < m_data[another->value].maxRadius2 + 1e6) insertPhoton(another, photon); } void KDTree::insertPhoton(const Photon &photon) { insertPhoton(m_root, photon); } void KDTree::update(Node *node) { if (node->left) update(node->left); if (node->right) update(node->right); m_data[node->value].maxRadius2 = m_data[node->value].radius2; if (node->left && m_data[node->left->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->left->value].maxRadius2; if (node->right && m_data[node->right->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 
= m_data[node->right->value].maxRadius2; } void KDTree::update() { update(m_root); } KDTree::~KDTree() { }
#include "kdtree.hpp" #include <climits> #include <iostream> #include <algorithm> using namespace std; void KDTree::load(int _size, HitPoint *_data) { m_nNode = 0; m_size = _size; m_data = _data; m_index = new int[m_size]; m_memory = new Node[m_size]; for (int i = 0; i < m_size; ++i) m_index[i] = i; } void KDTree::medianPartition(int l, int r, int dim, int k) { int mid = (l + r) >> 1, temp = m_index[mid]; int i = l, j = r; while (i < j) { while (m_data[m_index[i]].position[dim] < m_data[temp].position[dim]) ++i; while (m_data[m_index[j]].position[dim] > m_data[temp].position[dim]) --j; if (i <= j) { int t = m_index[i]; m_index[i] = m_index[j]; m_index[j] = t; ++i; --j; } } if (l < j && l <= k && k <= j) medianPartition(l, j, dim, k); if (i < r && i <= k && k <= r) medianPartition(i, r, dim, k); } KDTree::Node *KDTree::build(int l, int r, double *min, double *max) { if (r <= l) return nullptr; float temp = -1; int split; for (int i = 0; i < K; ++i) if (max[i] - min[i] > temp) { temp = max[i] - min[i]; split = i; } int mid = (l + r) >> 1; medianPartition(l, r - 1, split, mid); m_memory[m_nNode].value = m_index[mid]; m_memory[m_nNode].split = split; Node *node = &m_memory[m_nNode++]; temp = max[split]; max[split] = m_data[m_index[mid]].position[split]; node->left = build(l, mid, min, max); max[split] = temp; temp = min[split]; min[split] = m_data[m_index[mid]].position[split]; node->right = build(mid + 1, r, min, max); min[split] = temp; m_data[node->value].maxRadius2 = m_data[node->value].radius2; if (node->left && m_data[node->left->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->left->value].maxRadius2; if (node->right && m_data[node->right->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->right->value].maxRadius2; return node; } void KDTree::build() { m_nNode = 0; double *min = new double[K], *max = new double[K]; for (int i = 0; i < K; ++i) min[i] = LONG_MAX, m
e->left) update(node->left); if (node->right) update(node->right); m_data[node->value].maxRadius2 = m_data[node->value].radius2; if (node->left && m_data[node->left->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->left->value].maxRadius2; if (node->right && m_data[node->right->value].maxRadius2 > m_data[node->value].maxRadius2) m_data[node->value].maxRadius2 = m_data[node->right->value].maxRadius2; } void KDTree::update() { update(m_root); } KDTree::~KDTree() { }
ax[i] = LONG_MIN; for (int i = 0; i < m_size; ++i) { Vector3f temp = m_data[i].position;; for (int j = 0; j < K; ++j) { if (m_data[i].position[j] < min[j]) min[j] = m_data[i].position[j]; if (m_data[i].position[j] > max[j]) max[j] = m_data[i].position[j]; } } m_root = build(0, m_size, min, max); } void KDTree::insertPhoton(Node *node, const Photon &photon) { if (node == NULL) return; int pos = node->value; if (Vector3f::dot(m_data[pos].position - photon.P, m_data[pos].position - photon.P) < m_data[pos].radius2) if (photon.object == m_data[pos].object) { m_data[pos].nNew += 1; m_data[pos].phi += photon.color; } int split = node->split; Node *another; if (photon.P[split] < m_data[pos].position[split]) { another = node->right; insertPhoton(node->left, photon); } else { another = node->left; insertPhoton(node->right, photon); } if ((another) && (m_data[pos].position[split] - photon.P[split]) * (m_data[pos].position[split] - photon.P[split]) < m_data[another->value].maxRadius2 + 1e6) insertPhoton(another, photon); } void KDTree::insertPhoton(const Photon &photon) { insertPhoton(m_root, photon); } void KDTree::update(Node *node) { if (nod
random
[ { "content": "struct OctNode {\n\n OctNode *child[8];\n\n\n\n OctNode() {\n\n child[0] = 0;\n\n }\n\n\n\n bool isTerm() { return child[0] == 0; }\n\n\n\n std::vector<int> obj;\n\n};\n\n\n", "file_path": "include/octree.hpp", "rank": 0, "score": 62447.16502680025 }, { "content": "class HitPoint : public Hit {\n\npublic:\n\n\n\n // some other features, mainly according to Henrik's paper\n\n int row, col; // pixel location\n\n Vector3f weight; // pixel weight\n\n Vector3f phi; // accumulative reflected flux\n\n float radius2, maxRadius2; // current radius , max radius from the tree\n\n float nAccum, nNew; // accumulative count , new from last collision\n\n\n\n HitPoint() {}\n\n\n\n HitPoint(int r, int c, const Vector3f &w) : row(r), col(c), weight(w), phi(Vector3f::ZERO), nAccum(0), nNew(0) {}\n\n\n\n void update(float alpha) { // radius reduction\n\n if (nAccum <= 0 || nNew <= 0)\n\n return;\n\n float k = (nAccum + alpha * nNew) / (nAccum + nNew);\n\n radius2 *= k; // reduce\n\n phi *= k;\n\n nAccum += alpha * nNew; // update\n\n nNew = 0;\n\n }\n\n};\n\n\n\n#endif // HIT_H\n", "file_path": "include/hit.hpp", "rank": 1, "score": 44639.32806576601 }, { "content": "#ifndef TRIANGLE_H\n\n#define TRIANGLE_H\n\n\n\n#include \"object3d.hpp\"\n\n#include <vecmath.h>\n\n#include <cmath>\n\n#include <iostream>\n\n\n\nusing namespace std;\n\n\n", "file_path": "include/triangle.hpp", "rank": 2, "score": 33426.65906747227 }, { "content": "\n\n ///@brief indexing\n\n unsigned char aa;\n\n\n\n void proc_subtree(float tx0, float ty0, float tz0, float tx1, float ty1, float tz1, OctNode *node);\n\n\n\n void intersect(const Ray &ray);\n\n\n\n void **arg;\n\n\n\n void (*termFunc)(int idx, void **arg);\n\n};\n\n\n\nOctree buildOctree(const Mesh &m, int maxLevel = 7);\n\n\n\n#endif //RAY_TRACER_OCTREE_HPP\n", "file_path": "include/octree.hpp", "rank": 3, "score": 33425.39261660181 }, { "content": "#include <vector>\n\n#include <ctime>\n\n#include <algorithm>\n\n#include <cstdlib>\n\n#include <iostream>\n\n//#include <omp.h>\n\n\n\n// one for all max bounces/depth\n\n#define MAX_BOUNCES 8\n\n\n\n// PPM parameters\n\n#define MAX_PPM_ITER 8000\n\n#define MAX_PHOTON_NUM 500000\n\n#define INIT_RADIUS 2\n\n#define ALPHA 0.5\n\n\n\n// depth of field features, set this according to the scene\n\n#define FOCUS_DIST 8\n\n#define LEN_RADIUS 0.012\n\n\n\n// volumetric light switch:1 on\n\n#define VOLUMETRIC 0\n\n\n\n// path tracing features\n\n#define SAMPLES 200 // samples per pixels = samples*9 (in path tracing)\n\n\n\n#define EPS 1e-4\n\n\n\n\n", "file_path": "include/render.hpp", "rank": 4, "score": 33421.95628005128 }, { "content": " //update\n\n if (texture) {// add texture\n\n float theta = acos(-Vector3f::dot(n, texV));\n\n float phi = acos(std::min(std::max(Vector3f::dot(n, texU) / sin(theta), -1.f), 1.f));\n\n float u = theta / M_PI, v = phi / (2 * M_PI);\n\n v = (Vector3f::dot(n, Vector3f::cross(texU, texV)) < 0) ? (1 - v) : v;\n\n h.texture = true;\n\n h.texture_coordinate = Vector2f(u, v);\n\n }\n\n h.set(t, material, n, r.getOrigin() + t * dir, id);\n\n return in ? 
INSIDE : OUTSIDE;\n\n }\n\n }\n\n }\n\n }\n\n\n\n // get the nearest distance and also the farther one\n\n float intersect_0(const Ray &r, float *tin = nullptr, float *tout = nullptr) const {\n\n\n\n // returns distance, 0 if nohit\n", "file_path": "include/sphere.hpp", "rank": 5, "score": 33421.45030035006 }, { "content": "\n\n void insertPhoton(Node *node, const Photon &photon);\n\n\n\n void update(Node *node);\n\n\n\npublic:\n\n ~KDTree();\n\n\n\n HitPoint *data(int &size) const {\n\n size = m_size;\n\n return m_data;\n\n }\n\n\n\n void build();\n\n\n\n void load(int size, HitPoint *data);\n\n\n\n void insertPhoton(const Photon &photon);\n\n\n\n void update();\n", "file_path": "include/kdtree.hpp", "rank": 6, "score": 33421.08425161069 }, { "content": "inline void Texture::load(const char *filename) {\n\n bimg = new bitmap_image(filename);\n\n height = bimg->height();\n\n width = bimg->width();\n\n}\n\n\n\ninline void Texture::operator()(int x, int y, unsigned char *color) {\n\n x = clamp(x, 0, width - 1);\n\n y = clamp(y, 0, height - 1);\n\n bimg->get_pixel(x, y, color[0], color[1], color[2]);\n\n}\n\n\n\ninline bool Texture::valid() {\n\n return bimg != 0;\n\n}\n\n\n\ninline Vector3f Texture::operator()(float x, float y) {\n\n Vector3f color;\n\n int ix, iy;\n\n x = x * width;\n", "file_path": "include/texture.hpp", "rank": 7, "score": 33419.70399070518 }, { "content": "//\n\n// Created by XRW's notebook on 2020/6/16.\n\n//\n\n\n\n#ifndef RAY_TRACER_KDTREE_HPP\n\n#define RAY_TRACER_KDTREE_HPP\n\n\n\n#include \"Vector3f.h\"\n\n#include \"object3d.hpp\"\n\n\n\n#include <algorithm>\n\n#include <climits>\n\n\n", "file_path": "include/kdtree.hpp", "rank": 8, "score": 33419.678749748804 }, { "content": "//\n\n// return img.GetPixel(x, y);\n\n// }\n\n\n\n void render(int type = 0);\n\n\n\n // basic ray tracing\n\n Vector3f trace(const Ray &ray, HitPoint hit, float tmin, int bounces);\n\n\n\n // monte carlo path tracing\n\n Vector3f path_trace(const Ray &ray, float tmin, int bounces, unsigned short *Xi, int E = 1);\n\n\n\n // monte carlo path tracing with volumetric effect\n\n Vector3f path_trace_plus(const Ray &ray, float tmin, int bounces, unsigned short *Xi);\n\n\n\n // progressive photon mapping\n\n void trace_photon(Photon &photon, int depth);\n\n\n\n // update kdtree\n\n void update_kdtree();\n", "file_path": "include/render.hpp", "rank": 9, "score": 33418.70957272076 }, { "content": "\n\n // update bounding box features\n\n data[i].width = 0;\n\n data[i].t0 = std::max(0.0, c - r);\n\n data[i].t1 = std::min(1.0, c + r);\n\n for (double t = data[i].t0; t <= data[i].t1; t += 0.00001) {\n\n Vector2f pos = get_pos(t);\n\n if (data[i].width < pos.x())\n\n data[i].width = pos.x();\n\n }\n\n if (max < data[i].width)\n\n max = data[i].width;\n\n data[i].width += EPS;\n\n }\n\n max += EPS;\n\n height = get_pos(1).y();\n\n }\n\n\n\n Vector2f get_pos(double t) {\n\n\n", "file_path": "include/bezier.hpp", "rank": 10, "score": 33418.09025453547 }, { "content": " // destructor\n\n ~Hit(){};\n\n\n\n float getT() const {\n\n return t;\n\n }\n\n\n\n Material *getMaterial() const {\n\n return material;\n\n }\n\n\n\n const Vector3f &getNormal() const {\n\n return normal;\n\n }\n\n\n\n void set(float _t, Material *m, const Vector3f &n, const Vector3f &p, int i) {\n\n t = _t;\n\n material = m;\n\n normal = n;\n\n position = p;\n", "file_path": "include/hit.hpp", "rank": 11, "score": 33417.98933327614 }, { "content": " }\n\n\n\n Intersection intersect(const Ray &r, Hit &h, float tmin) override {\n\n // 
calculate the nearest intersection among all objects in group\n\n Intersection res = MISS;\n\n for (int i = 0; i < data.size(); i++) {\n\n if (data[i] == nullptr)\n\n continue;\n\n else {\n\n Intersection tmp= data[i]->intersect(r, h, tmin);// calculate intersection as possible for all objects\n\n if(tmp!=MISS)\n\n res=tmp;\n\n }\n\n }\n\n// printf(\" %d\\n\",res);\n\n return res;\n\n }\n\n\n\n void addObject(int index, Object3D *obj) {\n\n data[index] = obj;\n", "file_path": "include/group.hpp", "rank": 12, "score": 33417.8156095468 }, { "content": "#ifndef GROUP_H\n\n#define GROUP_H\n\n\n\n\n\n#include \"object3d.hpp\"\n\n#include \"ray.hpp\"\n\n#include \"hit.hpp\"\n\n#include <iostream>\n\n#include <vector>\n\n\n", "file_path": "include/group.hpp", "rank": 13, "score": 33417.09673786929 }, { "content": " Hit(float _t, Material *m, const Vector3f &n, int i) {\n\n t = _t;\n\n material = m;\n\n normal = n;\n\n texture = false;\n\n object = nullptr;\n\n id = i;\n\n }\n\n\n\n Hit(const Hit &h) {\n\n t = h.t;\n\n material = h.material;\n\n normal = h.normal;\n\n position = h.position;\n\n texture = h.texture;\n\n texture_coordinate = h.texture_coordinate;\n\n object = h.object;\n\n id = h.id;\n\n }\n\n\n", "file_path": "include/hit.hpp", "rank": 14, "score": 33416.65813319623 }, { "content": "#ifndef MATERIAL_H\n\n#define MATERIAL_H\n\n\n\n#include <iostream>\n\n#include <cassert>\n\n\n\n#include \"vecmath.h\"\n\n#include \"ray.hpp\"\n\n#include \"texture.hpp\"\n\n#include \"hit.hpp\"\n\n#include \"noise.hpp\"\n\n#include \"simple_method.hpp\"\n\n\n\n#define SHININESS 20\n\n\n", "file_path": "include/material.hpp", "rank": 15, "score": 33416.61422580829 }, { "content": "#ifndef SPHERE_H\n\n#define SPHERE_H\n\n\n\n#include \"object3d.hpp\"\n\n#include <vecmath.h>\n\n#include <cmath>\n\n#include <algorithm>\n\n\n\n#define DEFAULT_TEXU Vector3f(0,-1,0)\n\n#define DEFAULT_TEXV Vector3f(1,0,0)\n\n\n", "file_path": "include/sphere.hpp", "rank": 16, "score": 33416.56974786957 }, { "content": "#ifndef RAY_H\n\n#define RAY_H\n\n\n\n#include <cassert>\n\n#include <iostream>\n\n#include <Vector3f.h>\n\n\n\n// Ray class mostly copied from Peter Shirley and Keith Morley\n", "file_path": "include/ray.hpp", "rank": 17, "score": 33416.53072177002 }, { "content": " }\n\n\n\n void SetPixel(int x, int y, const Vector3f& color)\n\n {\n\n assert( x >= 0 && x < width );\n\n assert( y >= 0 && y < height );\n\n data[ y * width + x ] = color;\n\n }\n\n\n\n static Image* LoadPPM( const char* filename );\n\n void SavePPM( const char* filename ) const;\n\n\n\n static Image* LoadTGA( const char* filename );\n\n void SaveTGA( const char* filename ) const;\n\n int SaveBMP(const char *filename);\n\n void SaveImage(const char *filename);\n\n // extension for image comparison\n\n static Image* compare( Image* img1, Image* img2 );\n\n\n\nprivate:\n\n\n\n int width;\n\n int height;\n\n Vector3f* data;\n\n\n\n};\n\n\n\n#endif // IMAGE_H", "file_path": "include/image.hpp", "rank": 18, "score": 33416.30059163893 }, { "content": "\n\n // evaluate radience\n\n void evaluate(int iter);\n\n\n\n // save bmp image\n\n void save_image();\n\n};\n\n\n\n\n\n#endif //FINAL_RENDER_HPP\n", "file_path": "include/render.hpp", "rank": 19, "score": 33416.18667091668 }, { "content": " }\n\n\n\n int Height() const\n\n {\n\n return height;\n\n }\n\n\n\n const Vector3f& GetPixel( int x, int y ) const\n\n {\n\n assert( x >= 0 && x < width );\n\n assert( y >= 0 && y < height );\n\n return data[ y * width + x ];\n\n }\n\n\n\n void SetAllPixels( const 
Vector3f& color )\n\n {\n\n for( int i = 0; i < width * height; ++i )\n\n {\n\n data[i] = color;\n\n }\n", "file_path": "include/image.hpp", "rank": 20, "score": 33416.09307622049 }, { "content": "\n\n explicit Render(SceneParser *p, char *out) : parser(p), output(out), max_bounces(MAX_BOUNCES),\n\n len_radius(LEN_RADIUS), focus_dist(FOCUS_DIST),\n\n init_radius(INIT_RADIUS), alpha(ALPHA), volumetic(VOLUMETRIC) {\n\n group = parser->getGroup();\n\n // set the parameters according to the scene\n\n Vector3f center(2, 2, 3);\n\n Material *material=new Material;\n\n material->type = DIFF;\n\n media = Sphere(center, 10, material);\n\n delete material;\n\n }\n\n\n\n ~Render() {}\n\n\n\n// static Vector3f get_pixel(const Image &img, int x, int y) {\n\n// if (x < 0) x = 0;\n\n// if (x >= img.Width()) x = img.Width() - 1;\n\n// if (y < 0) y = 0;\n\n// if (y >= img.Height()) y = img.Height() - 1;\n", "file_path": "include/render.hpp", "rank": 21, "score": 33415.70316358485 }, { "content": "//\n\n// Created by XRW's notebook on 2020/6/17.\n\n//\n\n\n\n#ifndef RAY_TRACER_BEZIER_HPP\n\n#define RAY_TRACER_BEZIER_HPP\n\n\n\n#include \"object3d.hpp\"\n\n#include \"vecmath.h\"\n\n\n\n#include <algorithm>\n\n\n\n#define EPS 1e-4 // adjust this for accuracy\n\n\n\n#define DEFAULT_TEXU Vector3f(0,-1,0)\n\n#define DEFAULT_TEXV Vector3f(1,0,0)\n\n\n", "file_path": "include/bezier.hpp", "rank": 22, "score": 33415.56679519693 }, { "content": " center_ += point_at_len;\n\n }\n\n\n\n Matrix3f R(horizontal, up, direction);\n\n dir = R * dir;\n\n dir.normalize();\n\n // generate a ray from the center\n\n return Ray(center_, dir);\n\n }\n\n\n\nprotected:\n\n float angle;\n\n};\n\n\n\n#endif //CAMERA_H\n\n\n", "file_path": "include/camera.hpp", "rank": 23, "score": 33415.54128706473 }, { "content": " }\n\n\n\n virtual Vector3f get_random_point() const {\n\n return position;\n\n }\n\n\n\n\n\nprivate:\n\n\n\n PointLight(); // don't use\n\n float falloff;\n\n Vector3f position;\n\n\n\n};\n\n\n", "file_path": "include/light.hpp", "rank": 24, "score": 33415.4797209485 }, { "content": "\n\n void init(int size, HitPoint *data) {\n\n load(size, data);\n\n build();\n\n }\n\n\n\n};\n\n\n\n#endif //RAY_TRACER_KDTREE_HPP\n", "file_path": "include/kdtree.hpp", "rank": 25, "score": 33415.25331839544 }, { "content": "#ifndef TRANSFORM_H\n\n#define TRANSFORM_H\n\n\n\n#include <vecmath.h>\n\n#include \"object3d.hpp\"\n\n\n\n// transforms a 3D point using a matrix, returning a 3D point\n\nstatic Vector3f transformPoint(const Matrix4f &mat, const Vector3f &point) {\n\n return (mat * Vector4f(point, 1)).xyz();\n\n}\n\n\n\n// transform a 3D direction using a matrix, returning a direction\n\nstatic Vector3f transformDirection(const Matrix4f &mat, const Vector3f &dir) {\n\n return (mat * Vector4f(dir, 0)).xyz();\n\n}\n\n\n", "file_path": "include/transform.hpp", "rank": 26, "score": 33414.922328499015 }, { "content": " }\n\n\n\n const Vector3f &getDirection() const {\n\n return direction;\n\n }\n\n\n\n Vector3f pointAtParameter(float t) const {\n\n return origin + direction * t;\n\n }\n\n\n\nprivate:\n\n\n\n Vector3f origin;\n\n Vector3f direction;\n\n\n\n};\n\n\n\ninline std::ostream &operator<<(std::ostream &os, const Ray &r) {\n\n os << \"Ray <\" << r.getOrigin() << \", \" << r.getDirection() << \">\";\n\n return os;\n\n}\n\n\n\n#endif // RAY_H\n\n\n", "file_path": "include/ray.hpp", "rank": 27, "score": 33414.77900939069 }, { "content": " y = (1 - y) * height;\n\n ix = (int) x;\n\n iy = (int) y;\n\n unsigned char pixels[4][3];\n\n float 
alpha = x - ix;\n\n float beta = y - iy;\n\n operator()(ix, iy, pixels[0]);\n\n operator()(ix + 1, iy, pixels[1]);\n\n operator()(ix, iy + 1, pixels[2]);\n\n operator()(ix + 1, iy + 1, pixels[3]);\n\n for (int ii = 0; ii < 3; ii++) {\n\n color[ii] = (1 - alpha) * (1 - beta) * pixels[0][ii]\n\n + alpha * (1 - beta) * pixels[1][ii]\n\n + (1 - alpha) * beta * pixels[2][ii]\n\n + alpha * beta * pixels[3][ii];\n\n }\n\n return color / 255;\n\n}\n\n\n\ninline Texture::~Texture() {\n", "file_path": "include/texture.hpp", "rank": 28, "score": 33414.767605820714 }, { "content": "\n\n float beta = betaCramer / ACramer;\n\n float gamma = gammaCramer / ACramer;\n\n float t = tCramer / ACramer;\n\n\n\n if (t >= tmin && t < hit.getT() && beta + gamma <= 1.0 && beta >= 0.0 && gamma >= 0.0) {\n\n float alpha = 1 - beta - gamma;\n\n Vector3f normal = alpha * normals[0] + beta * normals[1] + gamma * normals[2];\n\n\n\n if (texture) {\n\n hit.texture_coordinate =\n\n alpha * texture_coordinates[0] + beta * texture_coordinates[1] + gamma * texture_coordinates[2];\n\n std::cout << hit.texture_coordinate[0] << hit.texture_coordinate[1] << std::endl;\n\n }\n\n\n\n Vector3f P=ray.pointAtParameter(t);\n\n hit.set(t, material, normal.normalized(),P, id);\n\n\n\n return OUTSIDE;\n\n }\n\n return MISS;\n\n }\n\n\n\n};\n\n\n\n#endif //TRIANGLE_H\n", "file_path": "include/triangle.hpp", "rank": 29, "score": 33414.68688811851 }, { "content": " px[j] = px[j + 1] - px[j];\n\n py[j] = py[j + 1] - py[j];\n\n }\n\n }\n\n\n\n double down = 1;\n\n double next = n;\n\n double factor = 1;\n\n for (int i = 0; i <= n; ++i, --next) {\n\n if (i != 0)\n\n factor *= i;\n\n dx[i] = dx[i] * down / factor;\n\n dy[i] = dy[i] * down / factor;\n\n down *= next;\n\n }\n\n\n\n max = 0;\n\n double interval = 1. / (num - 1);\n\n double c = 0;\n\n for (int i = 0; i <= num; c += interval, ++i) {\n", "file_path": "include/bezier.hpp", "rank": 30, "score": 33414.6617858244 }, { "content": "\n\n float getRefractionIndex() { return refractionIndex; }\n\n\n\n Vector3f getDiffuseColor() { return diffuseColor; }\n\n\n\n Vector3f getType() { return type; }\n\n\n\n Vector3f getEmissionColor() { return emissionColor; }\n\n\n\n void setNoise(const Noise &n) { noise = n; }\n\n\n\n};\n\n\n\ninline float ReLU(float a) {\n\n return (a > 0) ? 
a : 0;\n\n}\n\n\n\ninline Vector4f get_weight(material_type type) {\n\n\n\n // Diff,Spec,Refl,Refr,Total=1\n", "file_path": "include/material.hpp", "rank": 31, "score": 33414.50111814604 }, { "content": " bool init;\n\n};\n\n\n\ninline Vector3f Noise::getColor(const Vector3f &pos) {\n\n\n\n float N = PerlinNoise::octaveNoise(pos, octaves);\n\n float M = sin(frequency * pos.x() + amplitude * N);\n\n\n\n // sin is between -1 and 1, normalize it for interpolation.\n\n M = (M + 1) / 2.0f;\n\n\n\n return M * color[0] + (1 - M) * color[1];\n\n}\n\n\n\ninline Noise::Noise(int _octaves, const Vector3f &color1,\n\n const Vector3f &color2, float freq, float amp) :\n\n octaves(_octaves), frequency(freq), amplitude(amp) {\n\n color[0] = color1;\n\n color[1] = color2;\n\n init = true;\n", "file_path": "include/noise.hpp", "rank": 32, "score": 33414.11261883575 }, { "content": "//\n\n// Created by XRW's notebook on 2020/6/13.\n\n//\n\n\n\n#ifndef RENDER_HPP\n\n#define RENDER_HPP\n\n\n\n#include \"group.hpp\"\n\n#include \"image.hpp\"\n\n#include \"scene_parser.hpp\"\n\n#include \"camera.hpp\"\n\n#include \"scene_parser.hpp\"\n\n#include \"ray.hpp\"\n\n#include \"hit.hpp\"\n\n#include \"material.hpp\"\n\n#include \"light.hpp\"\n\n#include \"simple_method.hpp\"\n\n#include \"kdtree.hpp\"\n\n\n\n#include <cassert>\n", "file_path": "include/render.hpp", "rank": 33, "score": 33414.02333769637 }, { "content": "#ifndef MESH_H\n\n#define MESH_H\n\n\n\n#include \"object3d.hpp\"\n\n#include \"triangle.hpp\"\n\n#include \"Vector2f.h\"\n\n#include \"Vector3f.h\"\n\n#include \"octree.hpp\"\n\n\n\n#include <vector>\n\n\n\n//by default counterclockwise winding is front face\n", "file_path": "include/mesh.hpp", "rank": 34, "score": 33413.878160563014 }, { "content": "#ifndef HIT_H\n\n#define HIT_H\n\n\n\n#include <vecmath.h>\n\n#include \"ray.hpp\"\n\n#include \"object3d.hpp\"\n\n\n", "file_path": "include/hit.hpp", "rank": 35, "score": 33413.654059530345 }, { "content": "#ifndef LIGHT_H\n\n#define LIGHT_H\n\n\n\n\n\n#include \"vecmath.h\"\n\n#include \"object3d.hpp\"\n\n#include \"simple_method.hpp\"\n\n\n", "file_path": "include/light.hpp", "rank": 36, "score": 33413.610980955724 }, { "content": "#ifndef OBJECT3D_H\n\n#define OBJECT3D_H\n\n\n\n#include \"ray.hpp\"\n\n#include \"hit.hpp\"\n\n#include \"material.hpp\"\n\n\n", "file_path": "include/object3d.hpp", "rank": 37, "score": 33413.56866919396 }, { "content": "//\n\n// Created by XRW's notebook on 2020/6/13.\n\n//\n\n\n\n#ifndef RAY_TRACER_NOISE_HPP\n\n#define RAY_TRACER_NOISE_HPP\n\n\n\n#include <cmath>\n\n#include <cstdio>\n\n\n\n#include \"vecmath.h\"\n\n#include \"perlin_noise.hpp\"\n\n\n", "file_path": "include/noise.hpp", "rank": 38, "score": 33413.56710426035 }, { "content": "#ifndef CAMERA_H\n\n#define CAMERA_H\n\n\n\n#include \"ray.hpp\"\n\n#include <vecmath.h>\n\n#include <cmath>\n\n\n\n#define M_PI 3.14159265358979323846\n\n#define DegreesToRadians(x) ((M_PI * x) / 180.0f)\n\n\n", "file_path": "include/camera.hpp", "rank": 39, "score": 33413.329813257034 }, { "content": "#ifndef IMAGE_H\n\n#define IMAGE_H\n\n\n\n#include <cassert>\n\n#include <vecmath.h>\n\n\n\n// Simple image class\n", "file_path": "include/image.hpp", "rank": 40, "score": 33413.27658888748 }, { "content": "#ifndef PLANE_H\n\n#define PLANE_H\n\n\n\n#include \"object3d.hpp\"\n\n#include <vecmath.h>\n\n#include <cmath>\n\n\n\n// TODO: Implement Plane representing an infinite plane\n\n// function: ax+by+cz=d\n\n// choose your representation , add more fields and fill in the 
functions\n\n\n", "file_path": "include/plane.hpp", "rank": 41, "score": 33413.11400596636 }, { "content": " id = i;\n\n }\n\n\n\n};\n\n\n\ninline std::ostream &operator<<(std::ostream &os, const Hit &h) {\n\n os << \"Hit <\" << h.getT() << \", \" << h.getNormal() << \">\";\n\n return os;\n\n}\n\n\n", "file_path": "include/hit.hpp", "rank": 42, "score": 33413.08158277517 }, { "content": "//\n\n// Created by XRW's notebook on 2020/6/13.\n\n//\n\n\n\n#ifndef RAY_TRACER_TEXTURE_HPP\n\n#define RAY_TRACER_TEXTURE_HPP\n\n\n\n#include \"bitmap_image.hpp\"\n\n#include \"Vector3f.h\"\n\n\n", "file_path": "include/texture.hpp", "rank": 43, "score": 33412.92039201512 }, { "content": " virtual Ray generateRay(const Vector2f &point) = 0;\n\n\n\n virtual ~Camera() = default;\n\n\n\n int getWidth() const { return width; }\n\n\n\n int getHeight() const { return height; }\n\n\n\nprotected:\n\n // Extrinsic parameters\n\n Vector3f center;\n\n Vector3f direction;\n\n Vector3f up;\n\n Vector3f horizontal;\n\n // Intrinsic parameters\n\n int width;\n\n int height;\n\n};\n\n\n\ninline const Vector3f random_unit_disk() //find a random point in unit_disk\n", "file_path": "include/camera.hpp", "rank": 44, "score": 33412.84308337083 }, { "content": " obj->id = index;\n\n }\n\n\n\n int getGroupSize() {\n\n return size;\n\n }\n\n\n\n};\n\n\n\n#endif\n\n\n\n\n", "file_path": "include/group.hpp", "rank": 45, "score": 33412.487233178006 }, { "content": " }\n\n\n\n virtual Vector3f get_random_point() const {\n\n return -direction * 1e38;\n\n }\n\n\n\nprivate:\n\n\n\n DirectionalLight(); // don't use\n\n\n\n Vector3f direction;\n\n\n\n};\n\n\n", "file_path": "include/light.hpp", "rank": 46, "score": 33412.39039951567 }, { "content": " return MISS;\n\n else {\n\n // calculate t\n\n float lph = sqrt(radius * radius - lch * lch);\n\n float t = loh - lph;\n\n float t_ = loh + lph;\n\n if (t > h.getT() || (t < tmin && t_ > h.getT()))// not the smallest t among all tests\n\n return MISS;\n\n else {\n\n if (t_ < tmin) // make no sense\n\n return MISS;\n\n else {\n\n bool in = false;\n\n if (t < tmin) {\n\n in = true;\n\n t = t_;\n\n }\n\n // calculate n at the intersection\n\n Vector3f CP = r.getOrigin() + t * dir - center;\n\n Vector3f n = CP.normalized();\n", "file_path": "include/sphere.hpp", "rank": 47, "score": 33412.348230631615 }, { "content": " Vector3f OC = center - r.getOrigin();\n\n Vector3f dir = r.getDirection();\n\n dir.normalize();\n\n float t, eps = 1e-4;\n\n float b = Vector3f::dot(OC, dir);\n\n float det = b * b - Vector3f::dot(OC, OC) + radius * radius;\n\n if (det < 0)\n\n return 0;\n\n else\n\n det = sqrt(det);\n\n if (tin && tout) {\n\n *tin = (b - det <= 0) ? 0 : b - det;\n\n *tout = b + det;\n\n }\n\n return (t = b - det) > eps ? t : ((t = b + det) > eps ? 
t : 0);\n\n }\n\n\n\n Vector4f get_light_info() override {\n\n return Vector4f(center.x(), center.y(), center.z(), radius);\n\n }\n\n};\n\n\n\n\n\n#endif\n\n\n", "file_path": "include/sphere.hpp", "rank": 48, "score": 33412.18124875673 }, { "content": "//\n\n// Created by XRW's notebook on 2020/6/14.\n\n//\n\n\n\n#ifndef RAY_TRACER_OCTREE_HPP\n\n#define RAY_TRACER_OCTREE_HPP\n\n\n\n#include <vector>\n\n\n", "file_path": "include/octree.hpp", "rank": 49, "score": 33412.16446079436 }, { "content": " Vector3f dir = r.getDirection();\n\n float t = (d - Vector3f::dot(normal, O)) / Vector3f::dot(normal, dir);\n\n if (t > h.getT())// not the smallest among all the tests\n\n return MISS;\n\n else {\n\n if (t < tmin)// make no sense\n\n return MISS;\n\n else {\n\n // update\n\n Vector3f P = r.pointAtParameter(t);\n\n h.set(t, material, normal, P, id);\n\n return OUTSIDE;\n\n }\n\n }\n\n }\n\n\n\nprotected:\n\n Vector3f normal;\n\n float d;\n\n};\n\n\n\n#endif //PLANE_H\n\n\t\t\n\n\n", "file_path": "include/plane.hpp", "rank": 50, "score": 33411.98011775823 }, { "content": " center = c;\n\n radius = ra;\n\n if (t) {\n\n texU = DEFAULT_TEXU;\n\n texV = DEFAULT_TEXV;\n\n }\n\n }\n\n\n\n ~Sphere() override = default;\n\n\n\n Intersection intersect(const Ray &r, Hit &h, float tmin) override {\n\n // decide whether the ray intersects the ball\n\n // calculate vector OC and Ray direction(nomal)\n\n Vector3f OC = center - r.getOrigin();\n\n Vector3f dir = r.getDirection();\n\n dir.normalize();\n\n // calculate |CH|\n\n float loh = Vector3f::dot(dir, OC);\n\n float lch = sqrt(OC.length() * OC.length() - loh * loh);\n\n if (lch > radius)\n", "file_path": "include/sphere.hpp", "rank": 51, "score": 33411.92933943637 }, { "content": " // get position on bezier curve at t\n\n double ans_x = 0, ans_y = 0, t_pow = 1;\n\n for (int i = 0; i <= n; ++i) {\n\n ans_x += dx[i] * t_pow;\n\n ans_y += dy[i] * t_pow;\n\n t_pow *= t;\n\n }\n\n return Vector2f(ans_x, ans_y);\n\n }\n\n\n\n Vector2f get_dir(double t) {\n\n\n\n // get derivative on bezier curve at t\n\n double ans_x = 0, ans_y = 0, t_pow = 1;\n\n for (int i = 1; i <= n; ++i) {\n\n ans_x += dx[i] * i * t_pow;\n\n ans_y += dy[i] * i * t_pow;\n\n t_pow *= t;\n\n }\n\n return Vector2f(ans_x, ans_y);\n\n }\n\n\n\n};\n\n\n", "file_path": "include/bezier.hpp", "rank": 52, "score": 33411.92051366331 }, { "content": " Vector3f direction = ray.getDirection();\n\n\n\n float ACramer = Matrix3f(vertices[0].x() - vertices[1].x(), vertices[0].x() - vertices[2].x(), direction.x(),\n\n vertices[0].y() - vertices[1].y(), vertices[0].y() - vertices[2].y(), direction.y(),\n\n vertices[0].z() - vertices[1].z(), vertices[0].z() - vertices[2].z(),\n\n direction.z()).determinant();\n\n float betaCramer = Matrix3f(vertices[0].x() - origin.x(), vertices[0].x() - vertices[2].x(), direction.x(),\n\n vertices[0].y() - origin.y(), vertices[0].y() - vertices[2].y(), direction.y(),\n\n vertices[0].z() - origin.z(), vertices[0].z() - vertices[2].z(),\n\n direction.z()).determinant();\n\n float gammaCramer = Matrix3f(vertices[0].x() - vertices[1].x(), vertices[0].x() - origin.x(), direction.x(),\n\n vertices[0].y() - vertices[1].y(), vertices[0].y() - origin.y(), direction.y(),\n\n vertices[0].z() - vertices[1].z(), vertices[0].z() - origin.z(),\n\n direction.z()).determinant();\n\n float tCramer = Matrix3f(vertices[0].x() - vertices[1].x(), vertices[0].x() - vertices[2].x(),\n\n vertices[0].x() - origin.x(),\n\n vertices[0].y() - vertices[1].y(), vertices[0].y() - vertices[2].y(),\n\n 
vertices[0].y() - origin.y(),\n\n vertices[0].z() - vertices[1].z(), vertices[0].z() - vertices[2].z(),\n\n vertices[0].z() - origin.z()).determinant();\n", "file_path": "include/triangle.hpp", "rank": 53, "score": 33411.904564468394 }, { "content": "{\n\n // produce 2 random numbers in [-1,1]\n\n float rand_x, rand_y;\n\n do {\n\n rand_x = 2.0f * (rand() % 1000) / 1000.f - 1.0f;\n\n rand_y = 2.0f * (rand() % 1000) / 1000.f - 1.0f;\n\n } while (rand_x * rand_x + rand_y * rand_y <= 1);\n\n\n\n return Vector3f(rand_x, rand_y, 0);\n\n}\n\n\n", "file_path": "include/camera.hpp", "rank": 54, "score": 33411.786849826545 }, { "content": " }\n\n\n\n virtual Vector3f get_random_point() const {\n\n\n\n // get a random point on the sphere light\n\n Vector3f unit_random = random_sphere();\n\n unit_random *= radius; // gives a vector to its surface\n\n return position + unit_random;\n\n }\n\n\n\n\n\nprivate:\n\n\n\n float radius;\n\n Vector3f position;\n\n\n\n};\n\n\n\n#endif // LIGHT_H", "file_path": "include/light.hpp", "rank": 55, "score": 33411.22961198075 }, { "content": " }\n\n return inter;\n\n }\n\n\n\nprotected:\n\n Object3D *o; //un-transformed object\n\n Matrix4f transform;\n\n};\n\n\n\n#endif //TRANSFORM_H\n", "file_path": "include/transform.hpp", "rank": 56, "score": 33408.85693861922 }, { "content": " Vector3f n = hit.getNormal().normalized();\n\n\n\n if (noise.valid()) {\n\n kd = noise.getColor(ray.getOrigin() + ray.getDirection() * hit.getT());\n\n }\n\n\n\n //Diffuse Shading\n\n Vector3f color = (lightColor * kd) * ReLU(Vector3f::dot(dirToLight, n));\n\n\n\n Vector3f ks = get_spec(type);\n\n Vector3f Rx = 2.f * Vector3f::dot(dirToLight, n) * n - dirToLight;\n\n\n\n //Specular Shading\n\n color += (lightColor * ks) * pow(ReLU(Vector3f::dot(dirToLight, n)), SHININESS);\n\n\n\n return color;\n\n}\n\n\n\n#endif // MATERIAL_H\n", "file_path": "include/material.hpp", "rank": 57, "score": 33408.85693861922 }, { "content": " if (bimg != 0) {\n\n delete bimg;\n\n }\n\n}\n\n\n\ninline Texture::Texture() : bimg(0), width(0), height(0) {\n\n}\n\n\n\n#endif //RAY_TRACER_TEXTURE_HPP\n", "file_path": "include/texture.hpp", "rank": 58, "score": 33408.85693861922 }, { "content": "}\n\n\n\ninline Noise::Noise(const Noise &n) :\n\n octaves(n.octaves), frequency(n.frequency),\n\n amplitude(n.amplitude), init(n.init) {\n\n color[0] = n.color[0];\n\n color[1] = n.color[1];\n\n}\n\n\n\n#endif //RAY_TRACER_NOISE_HPP\n", "file_path": "include/noise.hpp", "rank": 59, "score": 33408.85693861922 }, { "content": " switch (type) {\n\n case DIFF:\n\n return Vector4f(1, 0.2, 0.1, 0);\n\n case SPEC:\n\n return Vector4f(0.2, 1, 1, 0);\n\n case REFR:\n\n return Vector4f(0.2, 0.2, 0.6, 1);\n\n default:\n\n return Vector4f(1, 0, 0, 0);\n\n }\n\n}\n\n\n\ninline Vector3f get_spec(material_type type) {\n\n\n\n switch (type) {\n\n case DIFF:\n\n return Vector3f(0.02, 0.02, 0.02); // do reflect a little\n\n case SPEC:\n\n return Vector3f(0.99, 0.99, 0.99);\n\n case REFR:\n", "file_path": "include/material.hpp", "rank": 60, "score": 33408.85693861922 }, { "content": " return Vector3f(0.6, 0.6, 0.6);\n\n default:\n\n return Vector3f::ZERO;\n\n }\n\n}\n\n\n\n\n\ninline Vector3f Material::Shade(const Ray &ray, const Hit &hit,\n\n const Vector3f &dirToLight, const Vector3f &lightColor) {\n\n\n\n // the function here is based on the Phong shading model\n\n Vector3f kd;\n\n\n\n if (t.valid() && hit.texture) {\n\n Vector2f texCoord = hit.texture_coordinate;\n\n Vector3f texColor = t(texCoord[0], texCoord[1]);\n\n kd = texColor;\n\n } 
else {\n\n kd = this->diffuseColor;\n\n }\n", "file_path": "include/material.hpp", "rank": 61, "score": 33408.85693861922 }, { "content": "\tVector2f getRow( int i ) const;\n\n\tvoid setRow( int i, const Vector2f& v );\n\n\n\n\tVector2f getCol( int j ) const;\n\n\tvoid setCol( int j, const Vector2f& v );\n\n\n\n\tfloat determinant();\n\n\tMatrix2f inverse( bool* pbIsSingular = NULL, float epsilon = 0.f );\n\n\n\n\tvoid transpose();\n\n\tMatrix2f transposed() const;\n\n\n\n\t// ---- Utility ----\n\n\toperator float* (); // automatic type conversion for GL\n\n\tvoid print();\n\n\n\n\tstatic float determinant2x2( float m00, float m01,\n\n\t\tfloat m10, float m11 );\n\n\n\n\tstatic Matrix2f ones();\n", "file_path": "deps/vecmath/include/Matrix2f.h", "rank": 62, "score": 31543.77855250256 }, { "content": "\tQuat4f( const Vector4f& v );\n\n\n\n\t// returns the ith element\n\n\tconst float& operator [] ( int i ) const;\n\n\tfloat& operator [] ( int i );\n\n\n\n\tfloat w() const;\n\n\tfloat x() const;\n\n\tfloat y() const;\n\n\tfloat z() const;\n\n\tVector3f xyz() const;\n\n\tVector4f wxyz() const;\n\n\n\n\tfloat abs() const;\n\n\tfloat absSquared() const;\n\n\tvoid normalize();\n\n\tQuat4f normalized() const;\n\n\n\n\tvoid conjugate();\n\n\tQuat4f conjugated() const;\n", "file_path": "deps/vecmath/include/Quat4f.h", "rank": 63, "score": 31543.49720907912 }, { "content": " const float& operator [] ( int i ) const;\n\n\tfloat& operator [] ( int i );\n\n\n\n float& x();\n\n\tfloat& y();\n\n\n\n\tfloat x() const;\n\n\tfloat y() const;\n\n\n\n Vector2f xy() const;\n\n\tVector2f yx() const;\n\n\tVector2f xx() const;\n\n\tVector2f yy() const;\n\n\n\n\t// returns ( -y, x )\n\n Vector2f normal() const;\n\n\n\n float abs() const;\n\n float absSquared() const;\n\n void normalize();\n", "file_path": "deps/vecmath/include/Vector2f.h", "rank": 64, "score": 31542.8571115968 }, { "content": "\tvoid setSubmatrix2x2( int i0, int j0, const Matrix2f& m );\n\n\n\n\t// sets a 3x3 submatrix of this matrix to m\n\n\t// starting with upper left corner at (i0, j0)\n\n\tvoid setSubmatrix3x3( int i0, int j0, const Matrix3f& m );\n\n\n\n\tfloat determinant() const;\n\n\tMatrix4f inverse( bool* pbIsSingular = NULL, float epsilon = 0.f ) const;\n\n\n\n\tvoid transpose();\n\n\tMatrix4f transposed() const;\n\n\n\n\t// ---- Utility ----\n\n\toperator float* (); // automatic type conversion for GL\n\n\toperator const float* () const; // automatic type conversion for GL\n\n\t\n\n\tvoid print();\n\n\n\n\tstatic Matrix4f ones();\n\n\tstatic Matrix4f identity();\n", "file_path": "deps/vecmath/include/Matrix4f.h", "rank": 65, "score": 31542.724714998014 }, { "content": "//\n\n// Created by XRW's notebook on 2020/6/13.\n\n//\n\n\n\n#ifndef RAY_TRACER_SIMPLE_METHOD_HPP\n\n#define RAY_TRACER_SIMPLE_METHOD_HPP\n\n\n\n#define _USE_MATH_DEFINES\n\n\n\n#include <cmath>\n\n#include <cstdio>\n\n#include <algorithm>\n\n\n\ninline double erand48(unsigned short xsubi[3]) {\n\n\n\n // genarate a random number in [0,1]\n\n return (double) rand() / (double) RAND_MAX;\n\n}\n\n\n\n\n", "file_path": "include/simple_method.hpp", "rank": 66, "score": 31542.666831327108 }, { "content": "\n\n\tVector3f getRow( int i ) const;\n\n\tvoid setRow( int i, const Vector3f& v );\n\n\n\n\tVector3f getCol( int j ) const;\n\n\tvoid setCol( int j, const Vector3f& v );\n\n\n\n\t// gets the 2x2 submatrix of this matrix to m\n\n\t// starting with upper left corner at (i0, j0)\n\n\tMatrix2f getSubmatrix2x2( int i0, int j0 ) const;\n\n\n\n\t// sets a 2x2 submatrix of this 
matrix to m\n\n\t// starting with upper left corner at (i0, j0)\n\n\tvoid setSubmatrix2x2( int i0, int j0, const Matrix2f& m );\n\n\n\n\tfloat determinant() const;\n\n\tMatrix3f inverse( bool* pbIsSingular = NULL, float epsilon = 0.f ) const; // TODO: invert in place as well\n\n\n\n\tvoid transpose();\n\n\tMatrix3f transposed() const;\n", "file_path": "deps/vecmath/include/Matrix3f.h", "rank": 67, "score": 31541.984853557642 }, { "content": " {\n\n delete[] data_;\n\n }\n\n data_ = new unsigned char[length_];\n\n valid_ = true;\n\n }\n\n inline unsigned char* row(unsigned int row_index) const\n\n {\n\n return data_ + (row_index * row_increment_);\n\n }\n\n void load_bitmap()\n\n {\n\n std::ifstream stream(file_name_.c_str(),std::ios::binary);\n\n if (!stream)\n\n {\n\n std::cerr << \"bitmap_image::load_bitmap() ERROR: bitmap_image - file \" << file_name_ << \" not found!\" << std::endl;\n\n return;\n\n }\n\n\n\n bitmap_file_header bfh;\n", "file_path": "include/bitmap_image.hpp", "rank": 68, "score": 31541.507648538776 }, { "content": "\tconst float& operator () ( int i, int j ) const;\n\n\tfloat& operator () ( int i, int j );\n\n\n\n\tVector4f getRow( int i ) const;\n\n\tvoid setRow( int i, const Vector4f& v );\n\n\n\n\t// get column j (mod 4)\n\n\tVector4f getCol( int j ) const;\n\n\tvoid setCol( int j, const Vector4f& v );\n\n\n\n\t// gets the 2x2 submatrix of this matrix to m\n\n\t// starting with upper left corner at (i0, j0)\n\n\tMatrix2f getSubmatrix2x2( int i0, int j0 ) const;\n\n\n\n\t// gets the 3x3 submatrix of this matrix to m\n\n\t// starting with upper left corner at (i0, j0)\n\n\tMatrix3f getSubmatrix3x3( int i0, int j0 ) const;\n\n\n\n\t// sets a 2x2 submatrix of this matrix to m\n\n\t// starting with upper left corner at (i0, j0)\n", "file_path": "deps/vecmath/include/Matrix4f.h", "rank": 69, "score": 31541.281560055 }, { "content": "\n\n static float fade(float t) {\n\n return t * t * t * (t * (t * 6 - 15) + 10);\n\n }\n\n\n\n static float lerp(float t, float a, float b) {\n\n return a + t * (b - a);\n\n }\n\n\n\n static float grad(int hash, float x, float y, float z) {\n\n int h = hash & 15; // CONVERT LO 4 BITS OF HASH CODE\n\n float u = h < 8 ? x : y; // INTO 12 GRADIENT DIRECTIONS.\n\n float v = h < 4 ? y : h == 12 || h == 14 ? x : z;\n\n return ((h & 1) == 0 ? u : -u) + ((h & 2) == 0 ? 
v : -v);\n\n\n\n }\n\n\n\n // permutation\n\n static int p[512];\n\n\n\n};\n\n\n\n#endif\n", "file_path": "include/perlin_noise.hpp", "rank": 70, "score": 31540.720813742886 }, { "content": " return ((v >> 8) | (v << 8));\n\n}\n\n\n\ninline unsigned int flip(const unsigned int& v)\n\n{\n\n return (((v & 0xFF000000) >> 0x18) |\n\n ((v & 0x000000FF) << 0x18) |\n\n ((v & 0x00FF0000) >> 0x08) |\n\n ((v & 0x0000FF00) << 0x08));\n\n}\n\ntemplate<typename T>\n\ninline void read_from_stream(std::ifstream& stream,T& t)\n\n{\n\n stream.read(reinterpret_cast<char*>(&t),sizeof(T));\n\n}\n\n\n\ntemplate<typename T>\n\ninline void write_to_stream(std::ofstream& stream,const T& t)\n\n{\n\n stream.write(reinterpret_cast<const char*>(&t),sizeof(T));\n", "file_path": "include/bitmap_image.hpp", "rank": 71, "score": 31540.468548503293 }, { "content": "\tVector4f& operator = ( const Vector4f& rv );\n\n\n\n\t// no destructor necessary\n\n\n\n\t// returns the ith element\n\n\tconst float& operator [] ( int i ) const;\n\n\tfloat& operator [] ( int i );\n\n\n\n\tfloat& x();\n\n\tfloat& y();\n\n\tfloat& z();\n\n\tfloat& w();\n\n\n\n\tfloat x() const;\n\n\tfloat y() const;\n\n\tfloat z() const;\n\n\tfloat w() const;\n\n\n\n\tVector2f xy() const;\n\n\tVector2f yz() const;\n", "file_path": "deps/vecmath/include/Vector4f.h", "rank": 72, "score": 31540.392020273717 }, { "content": "\n\n\t// no destructor necessary\n\n\n\n\t// returns the ith element\n\n const float& operator [] ( int i ) const;\n\n float& operator [] ( int i );\n\n\n\n float& x();\n\n\tfloat& y();\n\n\tfloat& z();\n\n\n\n\tfloat x() const;\n\n\tfloat y() const;\n\n\tfloat z() const;\n\n\n\n\tVector2f xy() const;\n\n\tVector2f xz() const;\n\n\tVector2f yz() const;\n\n\n\n\tVector3f xyz() const;\n", "file_path": "deps/vecmath/include/Vector3f.h", "rank": 73, "score": 31540.37921459961 }, { "content": " sizeof(bit_count) +\n\n sizeof(compression) +\n\n sizeof(size_image) +\n\n sizeof(x_pels_per_meter) +\n\n sizeof(y_pels_per_meter) +\n\n sizeof(clr_used) +\n\n sizeof(clr_important);\n\n }\n\n};\n\n\n\n\n\ninline void read_bih(std::ifstream& stream,bitmap_information_header& bih);\n\ninline void read_bfh(std::ifstream& stream, bitmap_file_header& bfh);\n\ninline void write_bih(std::ofstream& stream, const bitmap_information_header& bih);\n\ninline void write_bfh(std::ofstream& stream, const bitmap_file_header& bfh);\n\ntemplate<typename T>\n\nT clamp(const T& v, const T& lower_range, const T& upper_range)\n\n{\n\n if (v < lower_range)\n\n return lower_range;\n\n else if (v > upper_range)\n\n return upper_range;\n\n else\n\n return v;\n\n}\n", "file_path": "include/bitmap_image.hpp", "rank": 74, "score": 31539.899896928764 }, { "content": " BezierObject *parseBezier();\n\n\n\n Mesh *parseTriangleMesh();\n\n\n\n Transform *parseTransform();\n\n\n\n CubeMap *parseCubeMap();\n\n\n\n int getToken(char token[MAX_PARSER_TOKEN_LENGTH]);\n\n\n\n Vector3f readVector3f();\n\n\n\n Vector2f readVec2f();\n\n\n\n float readfloat();\n\n\n\n int readInt();\n\n\n\n FILE *file;\n\n Camera *camera;\n", "file_path": "include/scene_parser.hpp", "rank": 75, "score": 31539.882296527136 }, { "content": "\tVector3f yzx() const;\n\n\tVector3f zxy() const;\n\n\n\n\tfloat length() const;\n\n float squaredLength() const;\n\n\n\n\tvoid normalize();\n\n\tVector3f normalized() const;\n\n\n\n\tVector2f homogenized() const;\n\n\n\n\tvoid negate();\n\n\n\n\t// ---- Utility ----\n\n operator const float* () const; // automatic type conversion for OpenGL\n\n operator float* (); // automatic 
type conversion for OpenGL\n\n\tvoid print() const;\t\n\n\n\n\tVector3f& operator += ( const Vector3f& v );\n\n\tVector3f& operator -= ( const Vector3f& v );\n", "file_path": "deps/vecmath/include/Vector3f.h", "rank": 76, "score": 31539.658823277052 }, { "content": "* is permitted under the guidelines and in accordance with the most *\n\n* current version of the Common Public License. *\n\n* http://www.opensource.org/licenses/cpl1.0.php *\n\n* *\n\n***************************************************************************\n\n*/\n\n\n\n#include <iostream>\n\n#include <string>\n\n#include <fstream>\n\n#include <iterator>\n\n#include <limits>\n\n#include <cmath>\n\n#include <cstdlib>\n\n\n\n\n", "file_path": "include/bitmap_image.hpp", "rank": 77, "score": 31539.604926982534 }, { "content": "\n\n\t// ---- Utility ----\n\n\toperator float * (); // automatic type conversion for GL\n\n\tvoid print();\n\n\n\n\tstatic float determinant3x3( float m00, float m01, float m02,\n\n\t\tfloat m10, float m11, float m12,\n\n\t\tfloat m20, float m21, float m22 );\n\n\n\n\tstatic Matrix3f ones();\n\n\tstatic Matrix3f identity();\n\n\tstatic Matrix3f rotateX( float radians );\n\n\tstatic Matrix3f rotateY( float radians );\n\n\tstatic Matrix3f rotateZ( float radians );\n\n\tstatic Matrix3f scaling( float sx, float sy, float sz );\n\n\tstatic Matrix3f uniformScaling( float s );\n\n\tstatic Matrix3f rotation( const Vector3f& rDirection, float radians );\n\n\n\n\t// Returns the rotation matrix represented by a unit quaternion\n\n\t// if q is not normalized, it it normalized first\n", "file_path": "deps/vecmath/include/Matrix3f.h", "rank": 78, "score": 31539.518945319294 }, { "content": " channel_mode_ = image.channel_mode_;\n\n create_bitmap();\n\n std::copy(image.data_, image.data_ + image.length_, data_);\n\n }\n\n return *this;\n\n }\n\n inline void get_pixel(const unsigned int x, const unsigned int y,\n\n unsigned char& red,\n\n unsigned char& green,\n\n unsigned char& blue)\n\n {\n\n blue = data_[(y * row_increment_) + (x * bytes_per_pixel_ + 0)];\n\n green = data_[(y * row_increment_) + (x * bytes_per_pixel_ + 1)];\n\n red = data_[(y * row_increment_) + (x * bytes_per_pixel_ + 2)];\n\n }\n\n\n\n inline void set_pixel(const unsigned int x, const unsigned int y,\n\n const unsigned char red,\n\n const unsigned char green,\n\n const unsigned char blue)\n", "file_path": "include/bitmap_image.hpp", "rank": 79, "score": 31539.403139975977 }, { "content": "#ifndef SCENE_PARSER_H\n\n#define SCENE_PARSER_H\n\n\n\n#include <cassert>\n\n#include <vecmath.h>\n\n\n\n#include \"camera.hpp\"\n\n#include \"light.hpp\"\n\n#include \"object3d.hpp\"\n\n#include \"group.hpp\"\n\n#include \"sphere.hpp\"\n\n#include \"plane.hpp\"\n\n#include \"triangle.hpp\"\n\n#include \"material.hpp\"\n\n#include \"mesh.hpp\"\n\n#include \"transform.hpp\"\n\n#include \"cube_map.hpp\"\n\n#include \"bezier.hpp\"\n\n\n\n#define MAX_PARSER_TOKEN_LENGTH 100\n\n\n\n/*\n", "file_path": "include/scene_parser.hpp", "rank": 80, "score": 31539.01975591877 }, { "content": "\tVector4f homogenized() const;\n\n\n\n\tvoid negate();\n\n\n\n\t// ---- Utility ----\n\n\toperator const float* () const; // automatic type conversion for OpenGL\n\n\toperator float* (); // automatic type conversion for OpenG\n\n\tvoid print() const; \n\n\n\n\tstatic float dot( const Vector4f& v0, const Vector4f& v1 );\n\n\tstatic Vector4f lerp( const Vector4f& v0, const Vector4f& v1, float alpha );\n\n\n\nprivate:\n\n\n\n\tfloat m_elements[ 4 ];\n\n\n\n};\n\n\n\n// component-wise 
operators\n\nVector4f operator + ( const Vector4f& v0, const Vector4f& v1 );\n", "file_path": "deps/vecmath/include/Vector4f.h", "rank": 81, "score": 31538.98254150903 }, { "content": "\n\n\n\n\n\ninline float sample_segment(float eps, float sigma, float s_max) {\n\n return -log(1.0 - eps * (1.0 - exp(-sigma * s_max))) / sigma;\n\n}\n\n\n\n\n\ninline Vector3f sample_sphere(float e1, float e2) {\n\n float z = 1.0 - 2.0 * e1, sint = sqrt(1.0 - z * z);\n\n return Vector3f(cos(2.0 * M_PI * e2) * sint, sin(2.0 * M_PI * e2) * sint, z);\n\n}\n\n\n\n\n\ninline Vector3f sampleHG(float g, float e1, float e2) {\n\n float s = 1.0 - 2.0 * e1;\n\n float cost = (s + 2.f * g * g * g * (-1.0 + e1) * e1 + g * g * s + 2.f * g * (1.0 - e1 + e1 * e1)) /\n\n ((1.0 + g * s) * (1.0 + g * s));\n\n float sint = sqrt(1.0 - cost * cost);\n\n return Vector3f(cos(2.0 * M_PI * e2) * sint, sin(2.0 * M_PI * e2) * sint, cost);\n", "file_path": "include/simple_method.hpp", "rank": 82, "score": 31538.893085459298 }, { "content": "\tVector2f zw() const;\n\n\tVector2f wx() const;\n\n\n\n\tVector3f xyz() const;\n\n\tVector3f yzw() const;\n\n\tVector3f zwx() const;\n\n\tVector3f wxy() const;\n\n\n\n\tVector3f xyw() const;\n\n\tVector3f yzx() const;\n\n\tVector3f zwy() const;\n\n\tVector3f wxz() const;\n\n\n\n\tfloat abs() const;\n\n\tfloat absSquared() const;\n\n\tvoid normalize();\n\n\tVector4f normalized() const;\n\n\n\n\t// if v.z != 0, v = v / v.w\n\n\tvoid homogenize();\n", "file_path": "deps/vecmath/include/Vector4f.h", "rank": 83, "score": 31538.670802224082 }, { "content": " Vector2f normalized() const;\n\n\n\n void negate();\n\n\n\n\t// ---- Utility ----\n\n operator const float* () const; // automatic type conversion for OpenGL\n\n operator float* (); // automatic type conversion for OpenGL\n\n\tvoid print() const;\n\n\n\n\tVector2f& operator += ( const Vector2f& v );\n\n\tVector2f& operator -= ( const Vector2f& v );\n\n\tVector2f& operator *= ( float f );\n\n\n\n static float dot( const Vector2f& v0, const Vector2f& v1 );\n\n\n\n\tstatic Vector3f cross( const Vector2f& v0, const Vector2f& v1 );\n\n\n\n\t// returns v0 * ( 1 - alpha ) * v1 * alpha\n\n\tstatic Vector2f lerp( const Vector2f& v0, const Vector2f& v1, float alpha );\n\n\n", "file_path": "deps/vecmath/include/Vector2f.h", "rank": 84, "score": 31538.58382247662 }, { "content": " grad(p[BA], x - 1, y, z)), // BLENDED\n\n lerp(u, grad(p[AB], x, y - 1, z), // RESULTS\n\n grad(p[BB], x - 1, y - 1, z))),// FROM 8\n\n lerp(v, lerp(u, grad(p[AA + 1], x, y, z - 1), // CORNERS\n\n grad(p[BA + 1], x - 1, y, z - 1)), // OF CUBE\n\n lerp(u, grad(p[AB + 1], x, y - 1, z - 1),\n\n grad(p[BB + 1], x - 1, y - 1, z - 1))));\n\n }\n\n\n\n\n\n static float octaveNoise(const Vector3f &pt, int octaves) {\n\n float answer = 0;\n\n for (int i = 0; i < octaves; i++) {\n\n float tmp = pow(2.0f, i);\n\n answer += noise(tmp * pt[0], tmp * pt[1], tmp * pt[2]) / float(tmp);\n\n }\n\n return answer;\n\n }\n\n\n\nprivate:\n", "file_path": "include/perlin_noise.hpp", "rank": 85, "score": 31538.50461543154 }, { "content": "\n\n\tvoid invert();\n\n\tQuat4f inverse() const;\n\n\n\n\t// log and exponential maps\n\n\tQuat4f log() const;\n\n\tQuat4f exp() const;\n\n\t\n\n\t// returns unit vector for rotation and radians about the unit vector\n\n\tVector3f getAxisAngle( float* radiansOut );\n\n\n\n\t// sets this quaternion to be a rotation of fRadians about v = < fx, fy, fz >, v need not necessarily be unit length\n\n\tvoid setAxisAngle( float radians, const Vector3f& axis );\n\n\n\n\t// ---- 
Utility ----\n\n\tvoid print();\n\n \n\n\t // quaternion dot product (a la vector)\n\n\tstatic float dot( const Quat4f& q0, const Quat4f& q1 );\n\n\t\n", "file_path": "deps/vecmath/include/Quat4f.h", "rank": 86, "score": 31538.489449800963 }, { "content": "\tstatic Matrix4f translation( float x, float y, float z );\n\n\tstatic Matrix4f translation( const Vector3f& rTranslation );\n\n\tstatic Matrix4f rotateX( float radians );\n\n\tstatic Matrix4f rotateY( float radians );\n\n\tstatic Matrix4f rotateZ( float radians );\n\n\tstatic Matrix4f rotation( const Vector3f& rDirection, float radians );\n\n\tstatic Matrix4f scaling( float sx, float sy, float sz );\n\n\tstatic Matrix4f uniformScaling( float s );\n\n\tstatic Matrix4f lookAt( const Vector3f& eye, const Vector3f& center, const Vector3f& up );\n\n\tstatic Matrix4f orthographicProjection( float width, float height, float zNear, float zFar, bool directX );\n\n\tstatic Matrix4f orthographicProjection( float left, float right, float bottom, float top, float zNear, float zFar, bool directX );\n\n\tstatic Matrix4f perspectiveProjection( float fLeft, float fRight, float fBottom, float fTop, float fZNear, float fZFar, bool directX );\n\n\tstatic Matrix4f perspectiveProjection( float fovYRadians, float aspect, float zNear, float zFar, bool directX );\n\n\tstatic Matrix4f infinitePerspectiveProjection( float fLeft, float fRight, float fBottom, float fTop, float fZNear, bool directX );\n\n\n\n\t// Returns the rotation matrix represented by a quaternion\n\n\t// uses a normalized version of q\n\n\tstatic Matrix4f rotation( const Quat4f& q );\n\n\n\n\t// returns an orthogonal matrix that's a uniformly distributed rotation\n", "file_path": "deps/vecmath/include/Matrix4f.h", "rank": 87, "score": 31538.359665775635 }, { "content": "\n\n bool valid_;\n\n std::string file_name_;\n\n unsigned char* data_;\n\n unsigned int bytes_per_pixel_;\n\n unsigned int length_;\n\n unsigned int width_;\n\n unsigned int height_;\n\n unsigned int row_increment_;\n\n channel_mode channel_mode_;\n\n};\n\n\n\ninline bool big_endian()\n\n{\n\n unsigned int v = 0x01;\n\n return (1 != reinterpret_cast<char*>(&v)[0]);\n\n}\n\n\n\ninline unsigned short flip(const unsigned short& v)\n\n{\n", "file_path": "include/bitmap_image.hpp", "rank": 88, "score": 31538.300811717087 }, { "content": " flip(bih.y_pels_per_meter);\n\n flip(bih.clr_used);\n\n flip(bih.clr_important);\n\n }\n\n}\n\n\n\ninline void write_bih(std::ofstream& stream, const bitmap_information_header& bih)\n\n{\n\n if (big_endian())\n\n {\n\n flip(bih.size);\n\n flip(bih.width);\n\n flip(bih.height);\n\n flip(bih.planes);\n\n flip(bih.bit_count);\n\n flip(bih.compression);\n\n flip(bih.size_image);\n\n flip(bih.x_pels_per_meter);\n\n flip(bih.y_pels_per_meter);\n\n flip(bih.clr_used);\n", "file_path": "include/bitmap_image.hpp", "rank": 89, "score": 31538.063579748057 }, { "content": " }\n\n stream.close();\n\n }\n\n\n\n inline const unsigned char* data()\n\n {\n\n return data_;\n\n }\n\n\n\n int width(){return width_;}\n\n int height(){return height_;}\n\n\n\n\n\nprivate:\n\n\n\n void create_bitmap()\n\n {\n\n length_ = width_ * height_ * bytes_per_pixel_;\n\n row_increment_ = width_ * bytes_per_pixel_;\n\n if (0 != data_)\n", "file_path": "include/bitmap_image.hpp", "rank": 90, "score": 31537.932966395 }, { "content": "\t// linear (stupid) interpolation\n\n\tstatic Quat4f lerp( const Quat4f& q0, const Quat4f& q1, float alpha );\n\n\n\n\t// spherical linear interpolation\n\n\tstatic Quat4f slerp( const Quat4f& a, const 
Quat4f& b, float t, bool allowFlip = true );\n\n\t\n\n\t// spherical quadratic interoplation between a and b at point t\n\n\t// given quaternion tangents tanA and tanB (can be computed using squadTangent)\t\n\n\tstatic Quat4f squad( const Quat4f& a, const Quat4f& tanA, const Quat4f& tanB, const Quat4f& b, float t );\n\n\n\n\tstatic Quat4f cubicInterpolate( const Quat4f& q0, const Quat4f& q1, const Quat4f& q2, const Quat4f& q3, float t );\n\n\n\n\t// Log-difference between a and b, used for squadTangent\n\n\t// returns log( a^-1 b )\t\n\n\tstatic Quat4f logDifference( const Quat4f& a, const Quat4f& b );\n\n\n\n\t// Computes a tangent at center, defined by the before and after quaternions\n\n\t// Useful for squad()\n\n\tstatic Quat4f squadTangent( const Quat4f& before, const Quat4f& center, const Quat4f& after );\t\t\n\n\n", "file_path": "deps/vecmath/include/Quat4f.h", "rank": 91, "score": 31537.701216624875 }, { "content": "}\n\n\n\n\n\ninline void generate_cartesian(Vector3f &u, Vector3f &v, Vector3f w) {\n\n\n\n // build cartesian with w(normal)\n\n Vector3f coVec = w;\n\n if (fabs(w.x()) <= fabs(w.y()))\n\n if (fabs(w.x()) <= fabs(w.z())) coVec = Vector3f(0, -w.z(), w.y());\n\n else coVec = Vector3f(-w.y(), w.x(), 0);\n\n else if (fabs(w.y()) <= fabs(w.z())) coVec = Vector3f(-w.z(), 0, w.x());\n\n else coVec = Vector3f(-w.y(), w.x(), 0);\n\n coVec.normalize();\n\n u = Vector3f::cross(w, coVec), v = Vector3f::cross(w, u);\n\n}\n\n\n\n\n\ninline float scatter(const Ray &r, Ray *scattered, float tin, float tout, float &s, float sigma_s) {\n\n\n\n // scatter effect\n", "file_path": "include/simple_method.hpp", "rank": 92, "score": 31537.579383466706 }, { "content": " {\n\n std::ofstream stream(file_name.c_str(),std::ios::binary);\n\n\n\n if (!stream)\n\n {\n\n std::cout << \"bitmap_image::save_image(): Error - Could not open file \" << file_name << \" for writing!\" << std::endl;\n\n return;\n\n }\n\n\n\n bitmap_file_header bfh;\n\n bitmap_information_header bih;\n\n\n\n bih.width = width_;\n\n bih.height = height_;\n\n bih.bit_count = static_cast<unsigned short>(bytes_per_pixel_ << 3);\n\n bih.clr_important = 0;\n\n bih.clr_used = 0;\n\n bih.compression = 0;\n\n bih.planes = 1;\n\n bih.size = 40;\n", "file_path": "include/bitmap_image.hpp", "rank": 93, "score": 31537.52945582936 }, { "content": "inline void write_bfh(std::ofstream& stream, const bitmap_file_header& bfh)\n\n{\n\n if (big_endian())\n\n {\n\n flip(bfh.type);\n\n flip(bfh.size);\n\n flip(bfh.reserved1);\n\n flip(bfh.reserved2);\n\n flip(bfh.off_bits);\n\n }\n\n\n\n write_to_stream(stream,bfh.type);\n\n write_to_stream(stream,bfh.size);\n\n write_to_stream(stream,bfh.reserved1);\n\n write_to_stream(stream,bfh.reserved2);\n\n write_to_stream(stream,bfh.off_bits);\n\n}\n\ninline void read_bih(std::ifstream& stream,bitmap_information_header& bih)\n\n{\n\n read_from_stream(stream,bih.size);\n", "file_path": "include/bitmap_image.hpp", "rank": 94, "score": 31537.082305223543 }, { "content": " {\n\n data_[(y * row_increment_) + (x * bytes_per_pixel_ + 0)] = blue;\n\n data_[(y * row_increment_) + (x * bytes_per_pixel_ + 1)] = green;\n\n data_[(y * row_increment_) + (x * bytes_per_pixel_ + 2)] = red;\n\n }\n\n\n\n inline bool copy_from(const bitmap_image& image)\n\n {\n\n if ((image.height_ != height_) ||\n\n (image.width_ != width_))\n\n {\n\n return false;\n\n }\n\n std::copy(image.data_,image.data_ + image.length_,data_);\n\n return true;\n\n }\n\n\n\n\n\n\n\n void save_image(const std::string& file_name)\n", "file_path": 
"include/bitmap_image.hpp", "rank": 95, "score": 31536.806400054436 }, { "content": " Group *getGroup() const {\n\n return group;\n\n }\n\n\n\nprivate:\n\n\n\n SceneParser() {\n\n assert(false);\n\n }\n\n\n\n void parseFile();\n\n\n\n void parsePerspectiveCamera();\n\n\n\n void parseBackground();\n\n\n\n void parseLights();\n\n\n\n Light *parseDirectionalLight();\n\n\n", "file_path": "include/scene_parser.hpp", "rank": 96, "score": 31536.44636793831 }, { "content": " Light *parsePointLight();\n\n\n\n Light *parseSphereLight();\n\n\n\n void parseMaterials();\n\n\n\n Material *parseMaterial();\n\n\n\n Noise *parseNoise();\n\n\n\n Object3D *parseObject(char token[MAX_PARSER_TOKEN_LENGTH]);\n\n\n\n Group *parseGroup();\n\n\n\n Sphere *parseSphere();\n\n\n\n Plane *parsePlane();\n\n\n\n Triangle *parseTriangle();\n\n\n", "file_path": "include/scene_parser.hpp", "rank": 97, "score": 31536.402618672084 }, { "content": "//\n\n// Created by XRW's notebook on 2020/6/13.\n\n//\n\n\n\n#ifndef RAY_TRACER_CUBE_MAP_HPP\n\n#define RAY_TRACER_CUBE_MAP_HPP\n\n\n\n#include \"texture.hpp\"\n\n#include \"vecmath.h\"\n\n\n\n#include <string>\n\n\n", "file_path": "include/cube_map.hpp", "rank": 98, "score": 31536.32517233531 }, { "content": "}\n\n\n\n\n\n//inline Vector3f refract(const Vector3f &normal, const Vector3f &in, float n) {\n\n//\n\n// float cosI = -Vector3f::dot(in, normal);\n\n// float cosR2 = 1 - (1 - pow(cosI, 2)) * pow(n, 2);\n\n// return (cosR2 > 1e-6) ? // reflect?\n\n// in * n + normal * (n * cosI - sqrt(cosR2))\n\n// : reflect(normal, in);\n\n//}\n\n\n\n\n\ninline float schlick_fresnel(float cosine, float nt, float n) {\n\n\n\n // an approximation for fresnal\n\n float r0 = (nt - n) / (nt + n);\n\n r0 = r0 * r0;\n\n return r0 + (1 - r0) * pow((1 - cosine), 5.0);\n\n}\n", "file_path": "include/simple_method.hpp", "rank": 99, "score": 31536.304782400242 } ]
C++
src/WebInterface/CController.hpp
Fabio3rs/cppapiframework
14f1b1b42b77edbbf72d9d7f949ea6c9fcfa06a5
#pragma once #ifndef CController_hpp #define CController_hpp #include "../stdafx.hpp" #include "pistache.hpp" struct httpStreamPack { const Pistache::Rest::Request &request; Pistache::Http::ResponseWriter &response; httpStreamPack(const Pistache::Rest::Request &req, Pistache::Http::ResponseWriter &resp) : request(req), response(resp) {} }; class CController { size_t min_json_body_size{0}, max_json_body_size{4 * 1024}; protected: std::string defaulthashfield = "_hash"; bool habilita_hash_input_json = false; public: using msg_pair_t = std::pair<bool, std::string>; const static Pistache::Http::Mime::MediaType JSON_RETURN; static void returnPocoJson(Pistache::Http::Code code, const Poco::JSON::Object::Ptr &json, Pistache::Http::ResponseWriter &response); static auto get_ip_host_from_header(const Pistache::Rest::Request &request) -> std::pair<std::string, std::string>; static auto get_ip_host_from_request(const Pistache::Rest::Request &request) -> std::pair<std::string, std::string>; auto input_json(httpStreamPack httpdata) -> Poco::JSON::Object::Ptr; static auto get_auth(httpStreamPack httpdata) -> std::optional<std::string>; static auto default_json_return(bool success, const std::string &msg) -> Poco::JSON::Object::Ptr; static auto default_json_return(bool success, const std::string &msg, const Poco::UUID &uuid) -> Poco::JSON::Object::Ptr; static auto default_json_return_as_str(bool success, const std::string &msg) -> std::string; static void throw_json_http_exception [[noreturn]] (Pistache::Http::Code code, bool success, const std::string &msg, Pistache::Http::ResponseWriter &response); static void throw_http_exception [[noreturn]] (Pistache::Http::Code code, const std::string &fullreturndata); auto valida_hash_request(const Poco::JSON::Object::Ptr &param, Pistache::Http::ResponseWriter &response) -> bool; static auto hash_json(const Poco::JSON::Object::Ptr &param, const std::string &ignorefield) -> Poco::DigestEngine::Digest; static auto hash_json_cmp(const Poco::JSON::Object::Ptr &param, const std::string &hashfield) -> bool; static auto response_file(const std::string &fullpath, Pistache::Http::ResponseWriter &response) -> bool; static auto should_response_html(const Pistache::Rest::Request &request) -> bool; template <class stream_t> auto response_stream(const stream_t &inputstream, Pistache::Http::ResponseWriter &response) -> bool { inputstream.seekg(0, std::ios::end); auto filesize = inputstream.tellg(); inputstream.seekg(0, std::ios::beg); response.setMime(Pistache::Http::Mime::MediaType( Pistache::Http::Mime::Type::Application, Pistache::Http::Mime::Subtype::Ext, Pistache::Http::Mime::Suffix::Zip)); auto stream = response.stream(Pistache::Http::Code::Ok, static_cast<size_t>(filesize)); for (auto it = std::istreambuf_iterator<char>(inputstream), end = std::istreambuf_iterator<char>(); it != end; it++) { char ch = *it; stream.write(&ch, 1); } stream << Pistache::Http::ends; return true; } virtual void register_routes(const std::string & , Pistache::Rest::Router & ); auto operator=(const CController &) -> CController & = delete; auto operator=(CController &&) -> CController & = delete; void enableInputHashCheck(bool e) { habilita_hash_input_json = e; } template <class T> void route_get(Pistache::Rest::Router &router, const std::string &routepath, T routefun) { Pistache::Rest::Routes::Get(router, routepath, routefun); } template <class T> void route_post(Pistache::Rest::Router &router, const std::string &routepath, T routefun) { Pistache::Rest::Routes::Post(router, routepath, routefun); 
} CController() = default; CController(const CController &) = default; CController(CController &&) = default; virtual ~CController() = default; }; #endif
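The CController header in the file_code above exposes route_get/route_post helpers, a virtual register_routes hook, default_json_return_as_str, and the JSON_RETURN mime constant. A minimal sketch of how a derived controller might wire a route with those members; PingController and the /ping path are hypothetical and assume only the declarations visible in this header plus standard Pistache routing:

#include "CController.hpp"

// Hypothetical derived controller; only CController members declared above are used.
class PingController : public CController {
  public:
    void register_routes(const std::string &base,
                         Pistache::Rest::Router &router) override {
        // Registers GET <base>/ping and returns the default JSON payload.
        route_get(router, base + "/ping",
                  [](const Pistache::Rest::Request & /*request*/,
                     Pistache::Http::ResponseWriter response) {
                      response.send(Pistache::Http::Code::Ok,
                                    default_json_return_as_str(true, "pong"),
                                    JSON_RETURN);
                      return Pistache::Rest::Route::Result::Ok;
                  });
    }
};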
#pragma once #ifndef CController_hpp #define CController_hpp #include "../stdafx.hpp" #include "pistache.hpp" struct httpStreamPack { const Pistache::Rest::Request &request; Pistache::Http::ResponseWriter &response; httpStreamPack(const Pistache::Rest::Request &req, Pistache::Http::ResponseWriter &resp) : request(req), response(resp) {} }; class CController { size_t min_json_body_size{0}, max_json_body_size{4 * 1024}; protected: std::string defaulthashfield = "_hash"; bool habilita_hash_input_json = false; public: using msg_pair_t = std::pair<bool, std::string>; const static Pistache::Http::Mime::MediaType JSON_RETURN; static void returnPocoJson(Pistache::Http::Code code, const Poco::JSON::Object::Ptr &json, Pistache::Http::ResponseWriter &response); static auto get_ip_host_from_header(const Pistache::Rest::Request &request) -> std::pair<std::string, std::string>; static auto get_ip_host_from_request(const Pistache::Rest::Request &request) -> std::pair<std::string, std::string>; auto input_json(httpStreamPack httpdata) -> Poco::JSON::Object::Ptr; static auto get_auth(httpStreamPack httpdata) -> std::optional<std::string>; static auto default_js
); inputstream.seekg(0, std::ios::beg); response.setMime(Pistache::Http::Mime::MediaType( Pistache::Http::Mime::Type::Application, Pistache::Http::Mime::Subtype::Ext, Pistache::Http::Mime::Suffix::Zip)); auto stream = response.stream(Pistache::Http::Code::Ok, static_cast<size_t>(filesize)); for (auto it = std::istreambuf_iterator<char>(inputstream), end = std::istreambuf_iterator<char>(); it != end; it++) { char ch = *it; stream.write(&ch, 1); } stream << Pistache::Http::ends; return true; } virtual void register_routes(const std::string & , Pistache::Rest::Router & ); auto operator=(const CController &) -> CController & = delete; auto operator=(CController &&) -> CController & = delete; void enableInputHashCheck(bool e) { habilita_hash_input_json = e; } template <class T> void route_get(Pistache::Rest::Router &router, const std::string &routepath, T routefun) { Pistache::Rest::Routes::Get(router, routepath, routefun); } template <class T> void route_post(Pistache::Rest::Router &router, const std::string &routepath, T routefun) { Pistache::Rest::Routes::Post(router, routepath, routefun); } CController() = default; CController(const CController &) = default; CController(CController &&) = default; virtual ~CController() = default; }; #endif
on_return(bool success, const std::string &msg) -> Poco::JSON::Object::Ptr; static auto default_json_return(bool success, const std::string &msg, const Poco::UUID &uuid) -> Poco::JSON::Object::Ptr; static auto default_json_return_as_str(bool success, const std::string &msg) -> std::string; static void throw_json_http_exception [[noreturn]] (Pistache::Http::Code code, bool success, const std::string &msg, Pistache::Http::ResponseWriter &response); static void throw_http_exception [[noreturn]] (Pistache::Http::Code code, const std::string &fullreturndata); auto valida_hash_request(const Poco::JSON::Object::Ptr &param, Pistache::Http::ResponseWriter &response) -> bool; static auto hash_json(const Poco::JSON::Object::Ptr &param, const std::string &ignorefield) -> Poco::DigestEngine::Digest; static auto hash_json_cmp(const Poco::JSON::Object::Ptr &param, const std::string &hashfield) -> bool; static auto response_file(const std::string &fullpath, Pistache::Http::ResponseWriter &response) -> bool; static auto should_response_html(const Pistache::Rest::Request &request) -> bool; template <class stream_t> auto response_stream(const stream_t &inputstream, Pistache::Http::ResponseWriter &response) -> bool { inputstream.seekg(0, std::ios::end); auto filesize = inputstream.tellg(
random
[ { "content": "class JsonResponse : public ResponseViaReturn {\n\n\n\n public:\n\n JsonResponse(const JsonResponse &) = default;\n\n JsonResponse(JsonResponse &&) = default;\n\n\n\n auto operator=(const JsonResponse &) -> JsonResponse & = default;\n\n auto operator=(JsonResponse &&) -> JsonResponse & = default;\n\n\n\n explicit JsonResponse(const Poco::JSON::Object::Ptr &response,\n\n Code rCode = Code::Ok)\n\n : respData(response), retCode(rCode) {}\n\n\n\n void sendResponse(Req /*ununsed*/, Resp resp) override {\n\n if (respData.isNull()) {\n\n resp.send(retCode, \"{}\", jsonMimeType());\n\n } else {\n\n std::stringstream sstr;\n\n try {\n\n respData->stringify(sstr);\n", "file_path": "src/WebInterface/JsonResponse.hpp", "rank": 0, "score": 117052.28136896525 }, { "content": "class Req {\n\n friend class Session;\n\n friend class WebApp;\n\n\n\n public:\n\n ReqRaw raw;\n\n Resp *resp = nullptr;\n\n\n\n explicit Req(ReqRaw requestRaw) : raw(requestRaw) {}\n\n};\n\n\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 1, "score": 104771.44086046447 }, { "content": "class Resp;\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 2, "score": 104771.44086046447 }, { "content": "class Req;\n\n\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 3, "score": 104771.44086046447 }, { "content": "class Resp {\n\n friend class Session;\n\n friend class WebApp;\n\n\n\n public:\n\n Code RouteStatus = Code::Ok;\n\n Pistache::Http::ResponseWriter &response;\n\n\n\n auto status(Code val) -> Resp & {\n\n RouteStatus = val;\n\n return *this;\n\n }\n\n\n\n auto send(const std::string &body, const MediaType &mime = MediaType())\n\n -> Resp & {\n\n response.send(RouteStatus, body, mime);\n\n return *this;\n\n }\n\n\n\n auto send(Code status, const std::string &body,\n\n const MediaType &mime = MediaType()) -> Resp & {\n\n RouteStatus = status;\n\n response.send(status, body, mime);\n\n return *this;\n\n }\n\n\n\n explicit Resp(Pistache::Http::ResponseWriter &res) : response(res) {}\n\n};\n\n\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 4, "score": 104771.44086046447 }, { "content": "class IntegerValidator : public InputValidator {\n\n\n\n public:\n\n auto validate(std::string_view fieldname, const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override {\n\n if (s.isEmpty()) {\n\n return Poco::Dynamic::Var();\n\n }\n\n\n\n try {\n\n std::stoll(s.toString());\n\n } catch (const std::exception &) {\n\n return fail_message(fieldname);\n\n }\n\n\n\n return Poco::Dynamic::Var();\n\n }\n\n\n\n auto validate_and_modify(Poco::JSON::Object::Ptr /*jsondata*/,\n\n std::string_view fieldname,\n", "file_path": "src/utils/InputValidators.hpp", "rank": 5, "score": 85016.76059296328 }, { "content": "class RequiredValidator : public InputValidator {\n\n\n\n public:\n\n auto validate(std::string_view fieldname, const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override {\n\n if (s.isEmpty()) {\n\n return fail_message(fieldname);\n\n }\n\n\n\n return Poco::Dynamic::Var();\n\n }\n\n\n\n auto validate_and_modify(Poco::JSON::Object::Ptr /*jsondata*/,\n\n std::string_view fieldname,\n\n const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override {\n\n return validate(fieldname, s);\n\n }\n\n\n\n [[nodiscard]] auto fail_message(std::string_view fieldname) const\n", "file_path": "src/utils/InputValidators.hpp", "rank": 6, "score": 85016.76059296328 }, { "content": "class ArrayValidator : public InputValidator {\n\n\n\n using callback_t = Poco::Dynamic::Var(std::string_view, 
size_t,\n\n Poco::Dynamic::Var);\n\n std::function<callback_t> validatefn;\n\n\n\n public:\n\n auto validate(std::string_view fieldname, const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override;\n\n\n\n auto validate_and_modify(Poco::JSON::Object::Ptr /*jsondata*/,\n\n std::string_view fieldname,\n\n const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override {\n\n return validate(fieldname, s);\n\n }\n\n\n\n [[nodiscard]] auto fail_message(std::string_view fieldname) const\n\n -> std::string override {\n\n return fieldname.data() + std::string(\" deve ser um array\");\n", "file_path": "src/utils/InputValidators.hpp", "rank": 7, "score": 85016.76059296328 }, { "content": "class ObjectValidator : public InputValidator {\n\n typedef void(callback_t)(std::string_view, ControllerInputValidator &);\n\n\n\n std::function<callback_t> validatefn;\n\n\n\n public:\n\n auto validate(std::string_view fieldname, const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override;\n\n\n\n auto validate_and_modify(Poco::JSON::Object::Ptr /*jsondata*/,\n\n std::string_view fieldname,\n\n const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override {\n\n return validate(fieldname, s);\n\n }\n\n\n\n [[nodiscard]] auto fail_message(std::string_view fieldname) const\n\n -> std::string override {\n\n return fieldname.data() + std::string(\" deve ser um objeto\");\n\n }\n", "file_path": "src/utils/InputValidators.hpp", "rank": 8, "score": 85016.76059296328 }, { "content": "class StdQueue : public GenericQueue {\n\n std::unordered_map<std::string, std::queue<std::string>> queue_map;\n\n std::unordered_map<std::string,\n\n std::unordered_map<std::string, std::string>>\n\n persistentdata;\n\n\n\n public:\n\n void push(const std::string &queue, const std::string &data) override;\n\n void pushToLater(const std::string &queue, const std::string &data,\n\n std::chrono::system_clock::time_point tp) override;\n\n\n\n auto pop(const std::string &queue, int timeout)\n\n -> std::optional<std::string> override;\n\n\n\n auto getName() const -> std::string override;\n\n void setName(const std::string &name) override;\n\n\n\n auto getNumQueues() const -> size_t { return queue_map.size(); }\n\n auto getQueueSize(const std::string &queue) const -> size_t {\n\n return queue_map.at(queue).size();\n", "file_path": "src/queues/StdQueue.hpp", "rank": 9, "score": 85016.76059296328 }, { "content": "class RedisQueue : public GenericQueue {\n\n std::string aliasname;\n\n\n\n public:\n\n void push(const std::string &queue, const std::string &data) override;\n\n void pushToLater(const std::string &queue, const std::string &data,\n\n std::chrono::system_clock::time_point tp) override;\n\n\n\n auto pop(const std::string &queue, int timeout)\n\n -> std::optional<std::string> override;\n\n\n\n auto getName() const -> std::string override;\n\n void setName(const std::string &name) override;\n\n\n\n auto getPersistentData(const std::string &name) const\n\n -> std::unordered_map<std::string, std::string> override;\n\n\n\n void setPersistentData(\n\n const std::string &name,\n\n const std::unordered_map<std::string, std::string> &data) override;\n", "file_path": "src/queues/RedisQueue.hpp", "rank": 10, "score": 85016.76059296328 }, { "content": "class EmailValidator : public InputValidator {\n\n\n\n public:\n\n auto validate(std::string_view fieldname, const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override;\n\n\n\n auto validate_and_modify(Poco::JSON::Object::Ptr /*jsondata*/,\n\n std::string_view fieldname,\n\n const 
Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override {\n\n return validate(fieldname, s);\n\n }\n\n\n\n [[nodiscard]] auto fail_message(std::string_view fieldname) const\n\n -> std::string override {\n\n return fieldname.data() + std::string(\" deve ser um email válido\");\n\n }\n\n\n\n EmailValidator(const EmailValidator &) = default;\n\n\n\n EmailValidator();\n\n\n\n ~EmailValidator() override;\n\n};\n\n\n\n/**\n\n *@brief Validador de inteiro ou conversível válido\n\n *@todo colocar parâmetros para tratar dados específicos\n\n */\n", "file_path": "src/utils/InputValidators.hpp", "rank": 11, "score": 85016.76059296328 }, { "content": "class PocoJsonStringify {\n\n\n\n public:\n\n std::string str;\n\n int indent = 0, step = -1;\n\n bool strictJSON = true;\n\n bool escapeAllUnicode = false;\n\n\n\n /**\n\n * @brief função de stringify Poco::JSON::Object\n\n * https://github.com/pocoproject/poco/blob/master/JSON/include/Poco/JSON/Object.h\n\n *\n\n //\n\n // Object.h\n\n //\n\n // Library: JSON\n\n // Package: JSON\n\n // Module: Object\n\n //\n\n // Definition of the Object class.\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 12, "score": 84048.47589756957 }, { "content": "class ValidatorException : public std::exception {\n\n std::string msg, fieldname;\n\n\n\n public:\n\n [[nodiscard]] auto what() const noexcept -> const char * override;\n\n\n\n inline ValidatorException(std::string w, std::string fname) noexcept\n\n : msg(std::move(w)), fieldname(std::move(fname)) {}\n\n\n\n [[nodiscard]] auto to_json() const -> Poco::JSON::Object::Ptr;\n\n\n\n ~ValidatorException() override = default;\n\n};\n\n\n", "file_path": "src/utils/Validator.hpp", "rank": 13, "score": 83914.4860414991 }, { "content": "class StringLengthValidator : public InputValidator {\n\n size_t min;\n\n size_t max;\n\n\n\n public:\n\n auto validate(std::string_view fieldname, const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var override {\n\n\n\n if (s.isEmpty()) {\n\n return fail_message(fieldname);\n\n }\n\n\n\n std::string str = s.toString();\n\n if (str.empty()) {\n\n return fail_message_empty(fieldname);\n\n }\n\n if (str.size() >= min && str.size() <= max) {\n\n return Poco::Dynamic::Var();\n\n }\n\n return fail_message(fieldname);\n", "file_path": "src/utils/InputValidators.hpp", "rank": 14, "score": 83093.13626801304 }, { "content": "class DefaultIfNotPresentValidator : public InputValidator {\n\n const Poco::Dynamic::Var default_value;\n\n\n\n public:\n\n /**\n\n *@brief Does nothing\n\n *\n\n * @return std::optional<std::string> std::nullopt\n\n */\n\n auto validate(std::string_view /*fieldname*/, const Poco::Dynamic::Var &\n\n /*s*/) -> Poco::Dynamic::Var override {\n\n return Poco::Dynamic::Var();\n\n }\n\n\n\n /**\n\n *@brief Modifica o input se não houver o valor padrão\n\n *\n\n * @param jsondata json da requisição\n\n * @param fieldname nome do campo\n\n * @param s valor do campo\n", "file_path": "src/utils/InputValidators.hpp", "rank": 15, "score": 83093.13626801304 }, { "content": "class OtherPrintJob : public job::QueueableJob {\n\n Poco::JSON::Object::Ptr data;\n\n\n\n public:\n\n QUEUEABLE_SERIALIZE(data, shouldfail)\n\n\n\n [[nodiscard]] auto getName() const -> std::string override {\n\n return getTypeNameByInst(*this);\n\n }\n\n\n\n bool shouldfail{false};\n\n\n\n void handle() override;\n\n\n\n OtherPrintJob() = default;\n\n explicit OtherPrintJob(const Poco::JSON::Object::Ptr &inputdata);\n\n};\n\n\n\nOtherPrintJob::OtherPrintJob(const Poco::JSON::Object::Ptr &inputdata)\n\n : 
data(inputdata) {}\n", "file_path": "tests/test_jobs.cpp", "rank": 16, "score": 81844.63512700488 }, { "content": "class MockJob : public job::QueueableJob {\n\n Poco::JSON::Object::Ptr jsondata;\n\n std::string strdata;\n\n int integerdata{0};\n\n\n\n std::string jobresult;\n\n\n\n public:\n\n QUEUEABLE_SERIALIZE(jsondata, strdata, integerdata)\n\n\n\n [[nodiscard]] auto getName() const -> std::string override {\n\n return getTypeNameByInst(*this);\n\n }\n\n\n\n void handle() override;\n\n\n\n [[nodiscard]] auto getJobResult() const noexcept -> const std::string & {\n\n return jobresult;\n\n }\n\n\n", "file_path": "tests/test_jobs.cpp", "rank": 17, "score": 81844.63512700488 }, { "content": "class RawStringResponse : public ResponseViaReturn {\n\n\n\n public:\n\n RawStringResponse(const RawStringResponse &) = default;\n\n RawStringResponse(RawStringResponse &&) = default;\n\n\n\n auto operator=(const RawStringResponse &) -> RawStringResponse & = default;\n\n auto operator=(RawStringResponse &&) -> RawStringResponse & = default;\n\n\n\n explicit RawStringResponse(std::string_view response, Code rCode = Code::Ok,\n\n MediaType mime = {})\n\n : text(response), retCode(rCode), mimeType(std::move(mime)) {}\n\n\n\n void sendResponse(Req /*ununsed*/, Resp resp) override {\n\n resp.send(retCode, text, mimeType);\n\n }\n\n\n\n ~RawStringResponse() override;\n\n\n\n private:\n\n std::string text;\n\n Code retCode;\n\n MediaType mimeType;\n\n};\n\n\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 18, "score": 81300.77172957022 }, { "content": "class ExceptionResponseViaReturn : public ResponseViaReturn {\n\n\n\n public:\n\n ExceptionResponseViaReturn(const ExceptionResponseViaReturn &) = default;\n\n ExceptionResponseViaReturn(ExceptionResponseViaReturn &&) = default;\n\n\n\n auto operator=(const ExceptionResponseViaReturn &)\n\n -> ExceptionResponseViaReturn & = default;\n\n auto operator=(ExceptionResponseViaReturn &&)\n\n -> ExceptionResponseViaReturn & = default;\n\n\n\n explicit ExceptionResponseViaReturn(const std::exception &except) {\n\n text = except.what();\n\n }\n\n\n\n void sendResponse(Req req, Resp resp) override {\n\n if (auto Accept = req.raw.headers().get(\"Accept\")) {\n\n std::cout << Accept << std::endl;\n\n\n\n resp.send(Code::Internal_Server_Error,\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 19, "score": 79626.67532624383 }, { "content": "struct primitivepairhash {\n\n public:\n\n template <typename T, typename U>\n\n auto operator()(const std::pair<T, U> &pair) const -> std::size_t {\n\n return std::hash<T>()(pair.first) ^ std::hash<U>()(pair.second);\n\n }\n\n};\n", "file_path": "src/utils/primitivepairhash.hpp", "rank": 21, "score": 56721.094337314695 }, { "content": "struct DatabaseAddress {\n\n std::string host;\n\n std::string db;\n\n std::string user;\n\n std::string pwd;\n\n};\n\n\n", "file_path": "src/Database/DBMigrate.hpp", "rank": 22, "score": 53945.588280297496 }, { "content": "struct RedisServiceAddress {\n\n std::string host;\n\n int serverport{0};\n\n};\n\n\n", "file_path": "src/utils/RedisService.hpp", "rank": 23, "score": 52722.32472727337 }, { "content": "struct logOutputInfo {\n\n std::string filename;\n\n std::ostream *stream{nullptr};\n\n};\n\n\n", "file_path": "src/utils/CLog.hpp", "rank": 24, "score": 52722.32472727337 }, { "content": "class Strutils {\n\n public:\n\n static inline void to_upper(std::string &str) {\n\n std::transform(str.begin(), str.end(), str.begin(),\n\n [](unsigned char c) -> unsigned char {\n\n return 
static_cast<unsigned char>(std::toupper(c));\n\n });\n\n }\n\n\n\n static inline void to_lower(std::string &str) {\n\n std::transform(str.begin(), str.end(), str.begin(),\n\n [](unsigned char c) -> unsigned char {\n\n return static_cast<unsigned char>(std::tolower(c));\n\n });\n\n }\n\n\n\n static inline void replace_chr(std::string &str, char chin, char chout) {\n\n for (auto &ch : str) {\n\n if (ch == chin) {\n\n ch = chout;\n", "file_path": "src/utils/Strutils.hpp", "rank": 25, "score": 50561.97493791795 }, { "content": "class Validator {\n\n public:\n\n static inline auto parse_json_from_string(const std::string &str)\n\n -> Poco::JSON::Object::Ptr {\n\n Poco::JSON::Parser parser;\n\n Poco::Dynamic::Var jsonvar = parser.parse(str);\n\n return jsonvar.extract<Poco::JSON::Object::Ptr>();\n\n }\n\n\n\n static inline auto\n\n request_to_json(const Pistache::Rest::Request &request, size_t minsize = 0,\n\n size_t maxsize = std::numeric_limits<size_t>::max())\n\n -> Poco::JSON::Object::Ptr {\n\n if (request.body().size() < minsize) {\n\n throw ValidatorException(\"Input data is small than expected\",\n\n \"input\");\n\n }\n\n\n\n if (request.body().size() > maxsize) {\n\n throw ValidatorException(\"Input data is larger than expected\",\n", "file_path": "src/utils/Validator.hpp", "rank": 26, "score": 50561.97493791795 }, { "content": "class AllocationCount{\n\n\n\npublic:\n\n static auto getAllocationCount() -> std::atomic<std::size_t>&;\n\n static auto getDeallocationCount() -> std::atomic<std::size_t>&;\n\n\n\n};\n", "file_path": "tests/allocation_count.hpp", "rank": 27, "score": 49143.9669688578 }, { "content": "class QueueWorker {\n\n protected:\n\n std::shared_ptr<JobsHandler> jobhandler;\n\n std::shared_ptr<GenericQueue> queueServiceInst;\n\n std::shared_ptr<ProcessHelper> processHelperInst;\n\n\n\n int queueTimeout{1}, retryInTimeout{0};\n\n int64_t jobLogExpireSeconds{3600};\n\n\n\n std::atomic<bool> running{true};\n\n std::atomic<bool> forkToHandle{false};\n\n std::atomic<bool> cleanSuccessfulJobsLogs{true};\n\n\n\n static auto allocateJobOutputStream(const Poco::JSON::Object::Ptr &json)\n\n -> std::pair<std::fstream, std::fstream>;\n\n\n\n public:\n\n void setProcessHelper(std::shared_ptr<ProcessHelper> pHelper) {\n\n processHelperInst = std::move(pHelper);\n\n }\n", "file_path": "src/jobhandler/QueueWorker.hpp", "rank": 28, "score": 47842.61857752266 }, { "content": "class RouterWrapper {\n\n\n\n public:\n\n using callbackDecl_t = std::unique_ptr<ResponseViaReturn>(Req, Resp);\n\n\n\n auto operator()(const Pistache::Rest::Request &request,\n\n Pistache::Http::ResponseWriter response)\n\n -> Pistache::Rest::Route::Result {\n\n Resp resp(response);\n\n Req req(request);\n\n\n\n prepareReqResp(req, resp);\n\n\n\n std::unique_ptr<ResponseViaReturn> responseWrapper;\n\n\n\n try {\n\n responseWrapper = func(req, resp);\n\n responseWrapper->sendResponse(req, resp);\n\n } catch (const std::exception &e) {\n\n ExceptionResponseViaReturn except(e);\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 29, "score": 47842.61857752266 }, { "content": "class CConfig {\n\n std::unordered_map<std::string, std::string> data;\n\n\n\n public:\n\n /**\n\n *@brief Configuration singleton instance\n\n *\n\n * @return CConfig&\n\n */\n\n static auto config() -> CConfig &;\n\n\n\n /**\n\n *@brief Searches a variable in the config and if it is not exists return a\n\n *empty string\n\n *\n\n * @param key The variable name\n\n * @return std::string\n\n */\n\n auto operator[](const std::string &key) 
const noexcept -> std::string;\n\n\n", "file_path": "src/utils/CConfig.hpp", "rank": 30, "score": 47842.61857752266 }, { "content": "class GenericQueue {\n\n private:\n\n /* data */\n\n public:\n\n using datamap_t = std::unordered_map<std::string, std::string>;\n\n \n\n virtual void push(const std::string &queue, const std::string &data) = 0;\n\n virtual void pushToLater(const std::string &queue, const std::string &data,\n\n std::chrono::system_clock::time_point tp) = 0;\n\n\n\n virtual auto pop(const std::string &queue, int timeout)\n\n -> std::optional<std::string> = 0;\n\n\n\n virtual auto getName() const -> std::string = 0;\n\n virtual void setName(const std::string &name) = 0;\n\n\n\n virtual auto getPersistentData(const std::string &name) const\n\n -> std::unordered_map<std::string, std::string> = 0;\n\n\n\n virtual void setPersistentData(\n", "file_path": "src/queues/GenericQueue.hpp", "rank": 31, "score": 47842.61857752266 }, { "content": "class InputValidator {\n\n\n\n public:\n\n /**\n\n *@brief Retorna uma mensagem em caso de falha de validação\n\n *\n\n * @param fieldname nome do campo\n\n * @param s valor do campo\n\n * @return std::optional<std::string> retorno, std::nullopt se estiver tudo\n\n *certo, std::string para mensagem de erro\n\n */\n\n virtual auto validate(std::string_view fieldname,\n\n const Poco::Dynamic::Var &s)\n\n -> Poco::Dynamic::Var = 0;\n\n\n\n /**\n\n *@brief Retorna uma mensagem em caso de falha de validação/Pode modificar o\n\n *conteúdo do campo com um versão do valor corrigida\n\n *\n\n * @param fieldname nome do campo\n", "file_path": "src/utils/InputValidators.hpp", "rank": 32, "score": 47842.61857752266 }, { "content": " class argToString {\n\n const std::string str;\n\n\n\n public:\n\n [[nodiscard]] inline auto getStr() const -> const std::string & {\n\n return str;\n\n }\n\n\n\n // NOLINTNEXTLINE(google-explicit-constructor, hicpp-explicit-conversions)\n\n inline argToString(bool value) : str(value ? 
\"true\" : \"false\") {}\n\n\n\n // NOLINTNEXTLINE(google-explicit-constructor, hicpp-explicit-conversions)\n\n inline argToString(const char *s) : str(s) {}\n\n\n\n // NOLINTNEXTLINE(google-explicit-constructor, hicpp-explicit-conversions)\n\n inline argToString(const std::exception &e) : str(e.what()) {}\n\n\n\n // NOLINTNEXTLINE(google-explicit-constructor, hicpp-explicit-conversions)\n\n inline argToString(std::string s) : str(std::move(s)) {}\n\n\n", "file_path": "src/utils/RedisService.hpp", "rank": 33, "score": 47842.61857752266 }, { "content": "class RedisService {\n\n using pool_t = BorrowPool<Poco::Redis::Client>;\n\n pool_t pool;\n\n\n\n std::vector<RedisServiceAddress> replicaList;\n\n\n\n std::string password;\n\n\n\n public:\n\n auto get_connection() {\n\n auto borrowed = pool.borrow();\n\n if (!borrowed) {\n\n return borrowed;\n\n }\n\n\n\n connect(*borrowed, borrowed.getId());\n\n return borrowed;\n\n }\n\n\n\n static auto rpush(Poco::Redis::Client &inst,\n", "file_path": "src/utils/RedisService.hpp", "rank": 34, "score": 47842.61857752266 }, { "content": "class DocAPI {\n\n std::fstream log;\n\n std::string lastroute, lastmethod;\n\n\n\n Poco::JSON::Object::Ptr lastroutedata;\n\n\n\n Poco::JSON::Object::Ptr mainobj;\n\n Poco::JSON::Object::Ptr paths;\n\n\n\n DocAPI();\n\n\n\n public:\n\n struct securitySchemaStruct {\n\n std::string type;\n\n std::string scheme;\n\n std::string bearerFormat;\n\n };\n\n\n\n static auto json_to_swagger(const Poco::Dynamic::Var &obj)\n\n -> Poco::JSON::Object::Ptr;\n", "file_path": "src/utils/DocAPI.hpp", "rank": 35, "score": 47842.61857752266 }, { "content": " class argToString {\n\n const std::string str;\n\n\n\n public:\n\n [[nodiscard]] auto getStr() const -> std::string_view { return str; }\n\n\n\n [[nodiscard]] auto size() const { return str.size(); }\n\n\n\n explicit argToString(bool value) : str(value ? 
\"true\" : \"false\") {}\n\n\n\n explicit argToString(const char *s) : str(s) {}\n\n\n\n explicit argToString(const std::exception &e) : str(e.what()) {}\n\n\n\n explicit argToString(std::string s) : str(std::move(s)) {}\n\n\n\n template <class T,\n\n typename = std::enable_if_t<std::is_arithmetic<T>::value>>\n\n explicit argToString(T value) : str(std::to_string(value)) {}\n\n\n", "file_path": "src/Database/CSql.hpp", "rank": 36, "score": 47842.61857752266 }, { "content": "class ChronoUtils {\n\n public:\n\n static auto\n\n GetDateAndTime(std::chrono::high_resolution_clock::time_point now =\n\n std::chrono::high_resolution_clock::now())\n\n -> std::string {\n\n std::time_t tt = std::chrono::high_resolution_clock::to_time_t(now);\n\n\n\n auto mksec = std::chrono::duration_cast<std::chrono::microseconds>(\n\n now.time_since_epoch())\n\n .count();\n\n mksec %= 1000000;\n\n\n\n std::string str;\n\n\n\n {\n\n std::array<char, 32> buf{};\n\n\n\n size_t strft_res_sz =\n\n strftime(buf.data(), buf.size(), \"%Y/%m/%d %H:%M:%S.\",\n", "file_path": "src/utils/ChronoUtils.hpp", "rank": 37, "score": 47842.61857752266 }, { "content": "class LuaScripts {\n\n public:\n\n /**\n\n * Get the Lua script for computing the size of queue.\n\n *\n\n * KEYS[1] - The name of the primary queue\n\n * KEYS[2] - The name of the \"delayed\" queue\n\n * KEYS[3] - The name of the \"reserved\" queue\n\n *\n\n * @return string\n\n */\n\n static std::string_view size() {\n\n return R\"(\n\nreturn redis.call('llen', KEYS[1]) + redis.call('zcard', KEYS[2]) + redis.call('zcard', KEYS[3])\n\n)\";\n\n }\n\n\n\n /**\n\n * Get the Lua script for pushing jobs onto the queue.\n\n *\n", "file_path": "src/queues/LuaScripts.hpp", "rank": 38, "score": 47842.61857752266 }, { "content": "class DBMigrate {\n\n std::map<std::string, std::string_view> migration_list;\n\n DatabaseAddress databaseInfo;\n\n\n\n DBMigrate() = default;\n\n\n\n public:\n\n /**\n\n *@brief Set the connection info object\n\n *\n\n * @param conn_info dados da conexão com o banco de dados\n\n */\n\n void set_connection_info(const DatabaseAddress &conn_info) {\n\n databaseInfo = conn_info;\n\n }\n\n\n\n /**\n\n *@brief Cria a tabela migrations se não existir\n\n *\n\n */\n", "file_path": "src/Database/DBMigrate.hpp", "rank": 39, "score": 47842.61857752266 }, { "content": "class QueueableJob {\n\n friend class QueueWorker;\n\n\n\n protected:\n\n size_t tries{0}, maxtries{3};\n\n\n\n bool failed{false};\n\n\n\n public:\n\n static constexpr std::string_view jobVersionStr\n\n\n\n#ifdef CUR_JOB_FRAMEWORK_VERSION\n\n {CUR_JOB_FRAMEWORK_VERSION};\n\n#else\n\n {\"\"};\n\n#endif\n\n\n\n static auto getJobSystemVersion() -> std::string {\n\n return std::string(jobVersionStr);\n\n }\n", "file_path": "src/jobhandler/QueueableJob.hpp", "rank": 40, "score": 47842.61857752266 }, { "content": "class JobsHandler {\n\n public:\n\n using datamap_t = std::unordered_map<std::string, std::string>;\n\n\n\n /**\n\n * @brief Get the Type Name object. 
Uses typeid name to get the typename in\n\n * compile time\n\n *\n\n * @tparam T the type\n\n * @return constexpr std::string_view the name of the type\n\n */\n\n template <class T> static auto getTypeName() -> std::string {\n\n return QueueableJob::concatJobSystemVersion(typeid(T).name());\n\n }\n\n\n\n template <class T>\n\n static auto getTypeNameByInst(const T & /*ununsed*/) -> std::string {\n\n return QueueableJob::concatJobSystemVersion(typeid(T).name());\n\n }\n\n\n", "file_path": "src/jobhandler/JobsHandler.hpp", "rank": 41, "score": 47842.61857752266 }, { "content": "class ProcessHelper {\n\n private:\n\n /* data */\n\n public:\n\n enum waitStatuses { exited, signaled, stopped, continued, unknown };\n\n\n\n virtual auto fork() -> pid_t;\n\n virtual auto wait(pid_t pid, int flags = 0) -> std::pair<waitStatuses, int>;\n\n\n\n virtual auto operator=(const ProcessHelper &) -> ProcessHelper & = default;\n\n virtual auto operator=(ProcessHelper &&) -> ProcessHelper & = default;\n\n\n\n ProcessHelper(const ProcessHelper &) = default;\n\n ProcessHelper(ProcessHelper &&) = default;\n\n\n\n ProcessHelper(/* args */);\n\n virtual ~ProcessHelper();\n\n};\n", "file_path": "src/utils/ProcessHelper.hpp", "rank": 42, "score": 47842.61857752266 }, { "content": "class argToString {\n\n std::string str;\n\n\n\n public:\n\n [[nodiscard]] auto getStr() const -> const std::string & { return str; }\n\n\n\n // NOLINTNEXTLINE(hicpp-explicit-conversions)\n\n argToString(bool value) : str(value ? \"true\" : \"false\") {}\n\n\n\n // NOLINTNEXTLINE(hicpp-explicit-conversions)\n\n argToString(const char *s) : str(s) {}\n\n\n\n // NOLINTNEXTLINE(hicpp-explicit-conversions)\n\n argToString(const std::exception &e) : str(e.what()) {}\n\n\n\n // NOLINTNEXTLINE(hicpp-explicit-conversions)\n\n argToString(std::string s) : str(std::move(s)) {}\n\n\n\n // NOLINTNEXTLINE(hicpp-explicit-conversions)\n\n argToString(const Poco::JSON::Object::Ptr &jsonobj) {\n", "file_path": "src/utils/StrFormat.hpp", "rank": 43, "score": 47842.61857752266 }, { "content": "class CLog {\n\n\n\n public:\n\n ~CLog() noexcept;\n\n void AddToLog(const std::string &Text, const std::string &extraid = \"\");\n\n\n\n template <class... Types>\n\n auto multiRegister(std::string_view format, Types &&...args)\n\n -> std::string {\n\n std::string printbuf =\n\n StrFormat::multiRegister(format, std::forward<Types>(args)...);\n\n\n\n AddToLog(printbuf);\n\n return printbuf;\n\n }\n\n\n\n template <class... 
Types>\n\n auto multiRegisterLN(std::string_view file, unsigned int line,\n\n std::string_view level, std::string_view format,\n\n Types &&...args) -> std::string {\n", "file_path": "src/utils/CLog.hpp", "rank": 44, "score": 47842.61857752266 }, { "content": "class CSql {\n\n std::mutex sqldrvmtx;\n\n\n\n CSql() = default;\n\n ~CSql() = default;\n\n\n\n public:\n\n CSql(CSql &&) = delete;\n\n auto operator=(CSql &&) -> CSql & = delete;\n\n auto operator=(const CSql &) -> CSql & = delete;\n\n\n\n CSql(const CSql &) = delete;\n\n\n\n static inline auto\n\n high_precision_time_to_str(std::chrono::high_resolution_clock::time_point t)\n\n -> std::string {\n\n std::time_t tt = std::chrono::high_resolution_clock::to_time_t(t);\n\n\n\n auto mksec = std::chrono::duration_cast<std::chrono::microseconds>(\n\n t.time_since_epoch())\n", "file_path": "src/Database/CSql.hpp", "rank": 45, "score": 47842.61857752266 }, { "content": "class ResponseViaReturn {\n\n\n\n public:\n\n virtual void sendResponse(Req req, Resp resp) = 0;\n\n\n\n ResponseViaReturn(const ResponseViaReturn &) = default;\n\n ResponseViaReturn(ResponseViaReturn &&) = default;\n\n\n\n auto operator=(const ResponseViaReturn &) -> ResponseViaReturn & = default;\n\n auto operator=(ResponseViaReturn &&) -> ResponseViaReturn & = default;\n\n\n\n ResponseViaReturn();\n\n virtual ~ResponseViaReturn();\n\n};\n\n\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 46, "score": 46644.1021790642 }, { "content": "class ControllerInputValidator;\n\n\n", "file_path": "src/utils/InputValidators.hpp", "rank": 48, "score": 46644.1021790642 }, { "content": "class WebApp {\n\n public:\n\n auto getPort() const -> Pistache::Port {\n\n return httpEndpoint ? httpEndpoint->getPort() : Pistache::Port{0};\n\n }\n\n\n\n void init(Address addr, size_t thr) {\n\n httpEndpoint = std::make_shared<Pistache::Http::Endpoint>(addr);\n\n\n\n auto opts =\n\n Pistache::Http::Endpoint::options().threads(static_cast<int>(thr));\n\n opts.flags(Pistache::Tcp::Options::ReuseAddr);\n\n\n\n httpEndpoint->init(opts);\n\n }\n\n\n\n void startAsync() {\n\n httpEndpoint->setHandler(router.handler());\n\n httpEndpoint->serveThreaded();\n\n }\n", "file_path": "src/WebInterface/WebApp.hpp", "rank": 49, "score": 46644.1021790642 }, { "content": "class CHttpPool {\n\n using inst_t = std::shared_ptr<Poco::Net::HTTPClientSession>;\n\n using pool_t = BorrowPool<inst_t>;\n\n using sessionid_t = std::pair<std::string, unsigned int>;\n\n\n\n std::mutex sessionmtx;\n\n std::unordered_map<sessionid_t, pool_t, primitivepairhash> sessions;\n\n\n\n public:\n\n auto setupSession(const Poco::URI &uri) -> BorrowedObject<inst_t>;\n\n\n\n static auto default_inst() -> CHttpPool &;\n\n\n\n CHttpPool() = default;\n\n};\n", "file_path": "src/utils/CHttpPool.hpp", "rank": 50, "score": 45536.6924292304 }, { "content": "class ControllerInputModifier {\n\n Poco::JSON::Object::Ptr parameters;\n\n Poco::JSON::Object::Ptr resultadofinal;\n\n Poco::JSON::Object::Ptr resobj;\n\n bool validation_failed;\n\n\n\n public:\n\n static void push_validation_msg(const std::optional<std::string> &str,\n\n Poco::JSON::Array &data) {\n\n if (!str.has_value()) {\n\n return;\n\n }\n\n\n\n data.add(str.value());\n\n }\n\n\n\n void push_val_list(std::string_view fieldname, Poco::JSON::Array arr) {\n\n if (arr.size() == 0)\n\n return;\n\n\n", "file_path": "src/utils/ControllerInputModifier.hpp", "rank": 51, "score": 45536.6924292304 }, { "content": "class GenericDBConnection;\n", "file_path": 
"src/Database/GenericDBConnection.hpp", "rank": 52, "score": 45536.6924292304 }, { "content": "class ScopedStreamRedirect {\n\n std::ostream &originalStream;\n\n std::streambuf *originalBuffer;\n\n\n\n public:\n\n ScopedStreamRedirect(const ScopedStreamRedirect &) = delete;\n\n auto operator=(const ScopedStreamRedirect &) -> ScopedStreamRedirect & = delete;\n\n ScopedStreamRedirect(ScopedStreamRedirect &&) = delete;\n\n auto operator=(ScopedStreamRedirect &&) -> ScopedStreamRedirect & = delete;\n\n\n\n inline ScopedStreamRedirect(std::ostream &srcStream,\n\n std::ostream &destStream)\n\n : originalStream(srcStream),\n\n originalBuffer(srcStream.rdbuf(destStream.rdbuf())) {}\n\n\n\n inline ~ScopedStreamRedirect() { originalStream.rdbuf(originalBuffer); }\n\n};\n", "file_path": "src/utils/ScopedStreamRedirect.hpp", "rank": 53, "score": 45536.6924292304 }, { "content": "class ControllerInputValidator {\n\n const Poco::JSON::Object::Ptr parameters;\n\n Poco::JSON::Object::Ptr resultadofinal;\n\n Poco::JSON::Object::Ptr resobj;\n\n bool validation_failed;\n\n\n\n public:\n\n /**\n\n *@brief pequeno helper que recebe um pair e guarda o first como\n\n *std::string_view, criado para ser usado em conjunto com\n\n *find_values_not_in_list para testar se um campo json está fora da lista de\n\n *validação\n\n *\n\n */\n\n struct pair_first {\n\n std::string_view str;\n\n\n\n auto operator==(const std::string &strtocmp) const -> bool {\n\n return strtocmp == str;\n\n }\n", "file_path": "src/utils/ControllerInputValidator.hpp", "rank": 54, "score": 45536.6924292304 }, { "content": "class WebInputValidator {\n\n ControllerInputValidator inputValidator;\n\n Pistache::Http::ResponseWriter &response;\n\n std::function<void(ControllerInputValidator &)> fn_obj;\n\n\n\n public:\n\n /**\n\n *@brief Efetura a validação dos campos do json e seta o retorno em caso de\n\n *falha\n\n *\n\n * @return true validado com sucesso\n\n * @return false falha de validação. Response setado com os erros de\n\n *validação. Rota deve retornar\n\n */\n\n auto validate() -> bool;\n\n\n\n /**\n\n *@brief Construct a new Web Input Validator object\n\n *\n\n * @param parameters dados que chegaram no body da rota parseado para objeto\n", "file_path": "src/WebInterface/WebInputValidator.hpp", "rank": 55, "score": 44510.381342208646 }, { "content": "class CPistacheEndpoint {\n\n /**\n\n *@brief Attached WebControllers vector\n\n * Maybe a deque or other non contiguous container will be a better option?\n\n */\n\n std::vector<std::unique_ptr<CController>> WebControllers;\n\n\n\n public:\n\n /**\n\n *@brief Initialize server options\n\n *\n\n * @param addr The address and port of the server\n\n * @param thr The number of threads the server is allowed to use\n\n */\n\n void init(Pistache::Address addr, size_t thr = 2);\n\n\n\n auto getPort() const -> Pistache::Port {\n\n return httpEndpoint ? 
httpEndpoint->getPort() : Pistache::Port{0};\n\n }\n\n\n", "file_path": "src/WebInterface/CPistacheEndpoint.hpp", "rank": 56, "score": 44510.381342208646 }, { "content": "struct __attribute__((aligned(64))) LogLine {\n\n std::string line;\n\n std::thread::id thrid;\n\n std::chrono::high_resolution_clock::time_point when;\n\n};\n\n} // namespace\n\n\n\nusing logCircleIo_t = CircleMTIO<1024, LogLine>;\n\nstatic std::unique_ptr<logCircleIo_t>\n\n logLinesBuffer(std::make_unique<logCircleIo_t>());\n\n\n\nstatic std::unique_ptr<CLog> logInstance;\n\n\n\nauto CLog::addLinesToLog(CLog &logInst) -> bool {\n\n bool continueRunning = true;\n\n bool shouldFlush = false;\n\n while (continueRunning) {\n\n auto nextLine = logLinesBuffer->next();\n\n\n\n if (nextLine.first != nullptr) {\n", "file_path": "src/utils/CLog.cpp", "rank": 57, "score": 44434.33439032427 }, { "content": "#pragma once\n\n\n\n#include \"../stdafx.hpp\"\n\n\n\n#define STDLOGINFO() std::cout << __FILE__ << \":\" << __LINE__ << \" \"\n\n#define STDLOGERR() std::cerr << __FILE__ << \":\" << __LINE__ << \" \"\n", "file_path": "src/utils/LogDefines.hpp", "rank": 58, "score": 33816.31129604707 }, { "content": " } catch (...) {\n\n sstr.str(\"{}\");\n\n }\n\n resp.send(retCode, sstr.str(), jsonMimeType());\n\n }\n\n }\n\n\n\n ~JsonResponse() override;\n\n\n\n private:\n\n Poco::JSON::Object::Ptr respData;\n\n Code retCode;\n\n};\n\n} // namespace httpwrappers\n", "file_path": "src/WebInterface/JsonResponse.hpp", "rank": 59, "score": 32111.190554919365 }, { "content": " * https://github.com/pocoproject/poco/blob/master/JSON/src/Stringifier.cpp\n\n *\n\n //\n\n // Stringifier.cpp\n\n //\n\n // Library: JSON\n\n // Package: JSON\n\n // Module: Stringifier\n\n //\n\n // Copyright (c) 2012, Applied Informatics Software Engineering GmbH.\n\n // and Contributors.\n\n //\n\n // SPDX-License-Identifier:\tBSL-1.0\n\n //\n\n */\n\n void stringify(const Poco::Dynamic::Var &any) {\n\n using Object = Poco::JSON::Object;\n\n using Array = Poco::JSON::Array;\n\n\n\n const auto &type = any.type();\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 60, "score": 32110.725989085793 }, { "content": "#pragma once\n\n\n\n#include \"httpwrappers.hpp\"\n\n\n\n#include <Poco/JSON/Object.h>\n\n#include <sstream>\n\n\n\nnamespace httpwrappers {\n", "file_path": "src/WebInterface/JsonResponse.hpp", "rank": 61, "score": 32110.199321217027 }, { "content": " // SPDX-License-Identifier:\tBSL-1.0\n\n //\n\n */\n\n void stringify(const Poco::JSON::Array &arr) {\n\n append('[');\n\n bool first = true;\n\n for (const auto &item : arr) {\n\n if (!first) {\n\n append(',');\n\n }\n\n first = false;\n\n stringify(item);\n\n }\n\n append(']');\n\n }\n\n\n\n void stringify(const Poco::JSON::Array::Ptr &arr) { stringify(*arr); }\n\n\n\n /**\n\n * @brief função de stringify baseada em Poco::Dynamic::Var\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 62, "score": 32110.152625248877 }, { "content": "\n\n This enables machine processing of license information based on the SPDX\n\n License Identifiers that are here available: http://spdx.org/licenses/\n\n *\n\n */\n\n#pragma once\n\n#include <Poco/JSON/Array.h>\n\n#include <Poco/JSON/Object.h>\n\n#include <array>\n\n#include <ostream>\n\n#include <string_view>\n\n\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 63, "score": 32109.661898160208 }, { "content": " //\n\n // Copyright (c) 2012, Applied Informatics Software Engineering GmbH.\n\n // and Contributors.\n\n //\n\n // 
SPDX-License-Identifier:\tBSL-1.0\n\n //\n\n */\n\n void stringify(const Poco::JSON::Object &obj) {\n\n append('{');\n\n bool first = true;\n\n for (const auto &item : obj) {\n\n if (!first) {\n\n append(',');\n\n }\n\n first = false;\n\n formatString(item.first);\n\n append(\":\");\n\n stringify(item.second);\n\n }\n\n append('}');\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 64, "score": 32108.57469103125 }, { "content": " }\n\n\n\n void stringify(const Poco::JSON::Object::Ptr &arr) { stringify(*arr); }\n\n\n\n /**\n\n * @brief função de stringify baseada em Poco::JSON::Array\n\n * https://github.com/pocoproject/poco/blob/master/JSON/include/Poco/JSON/Array.h\n\n *\n\n //\n\n // Array.h\n\n //\n\n // Library: JSON\n\n // Package: JSON\n\n // Module: Array\n\n //\n\n // Definition of the Array class.\n\n //\n\n // Copyright (c) 2012, Applied Informatics Software Engineering GmbH.\n\n // and Contributors.\n\n //\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 65, "score": 32107.668131686267 }, { "content": "#include \"JsonResponse.hpp\"\n\n\n\nnamespace httpwrappers {\n\nJsonResponse::~JsonResponse() = default;\n\n} // namespace httpwrappers\n", "file_path": "src/WebInterface/JsonResponse.cpp", "rank": 66, "score": 32106.406282714473 }, { "content": " Poco::NumberFormatter::appendHex(\n\n tmp, static_cast<unsigned char>(ch & 0x03ffU) + 0xdc00, 4);\n\n append(tmp);\n\n } else if (ch >= 0x80 && ch <= 0xFFFF) {\n\n append(\"\\\\u\");\n\n std::string tmp;\n\n Poco::NumberFormatter::appendHex(\n\n tmp, static_cast<unsigned char>(ch), 4);\n\n append(tmp);\n\n } else {\n\n append(static_cast<char>(ch));\n\n }\n\n }\n\n }\n\n\n\n void append(unsigned char in) noexcept { str += static_cast<char>(in); }\n\n\n\n void append(char in) noexcept { str += in; }\n\n\n\n void append(std::string_view in) noexcept { str += in; }\n\n\n\n template <class input_t>\n\n void append(std::ostream &out, const input_t &in) noexcept {\n\n out << in;\n\n }\n\n};\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 67, "score": 32105.920367906187 }, { "content": " The copyright notices in the Software and this entire statement, including\n\n the above license grant, this restriction and the following disclaimer,\n\n must be included in all copies of the Software, in whole or in part, and\n\n all derivative works of the Software, unless such copies or derivative\n\n works are solely in the form of machine-executable object code generated by\n\n a source language processor.\n\n\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. 
IN NO EVENT\n\n SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE\n\n FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,\n\n ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\n DEALINGS IN THE SOFTWARE.\n\n\n\n ---------------------------------------------------------------------------\n\n Note:\n\n Individual files contain the following tag instead of the full license text.\n\n\n\n SPDX-License-Identifier:\tBSL-1.0\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 68, "score": 32105.34146465262 }, { "content": " void escapeJSONUTF8(const std::string::const_iterator &begin,\n\n const std::string::const_iterator &end) noexcept {\n\n constexpr std::array<uint32_t, 6> offsetsFromUTF8{\n\n 0x00000000UL, 0x00003080UL, 0x000E2080UL,\n\n 0x03C82080UL, 0xFA082080UL, 0x82082080UL};\n\n\n\n std::string::const_iterator it = begin;\n\n\n\n while (it != end) {\n\n uint32_t ch = 0;\n\n unsigned int sz = 0;\n\n\n\n do {\n\n ch <<= 6U;\n\n ch += static_cast<unsigned char>(*it++);\n\n sz++;\n\n } while (it != end &&\n\n (static_cast<unsigned char>(*it) & 0xC0U) == 0x80U &&\n\n sz < 6);\n\n ch -= offsetsFromUTF8[sz - 1];\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 69, "score": 32103.523380940976 }, { "content": "/**\n\n * @file PocoJsonStringify.hpp\n\n * @brief Objetivo desse arquivo é prover funções de stringify de JSON da Poco\n\n * Library usando pouca ou nenhuma alocação em heap. Funções desse arquivo NÃO\n\n possuem funcionamento idêntico às originais da Poco Library.\n\n * This file objective is to provide stringify functions with low or maybe none\n\n heap allocations, this functions don't have the exactly behavior of the Poco\n\n Library original functions, some things are missing. In case of problem use the\n\n original functions.\n\n * @date 2022-01-30\n\n *\n\n Boost Software License - Version 1.0 - August 17th, 2003\n\n\n\n Permission is hereby granted, free of charge, to any person or organization\n\n obtaining a copy of the software and accompanying documentation covered by\n\n this license (the \"Software\") to use, reproduce, display, distribute,\n\n execute, and transmit the Software, and to prepare derivative works of the\n\n Software, and to permit third-parties to whom the Software is furnished to\n\n do so, all subject to the following:\n\n\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 70, "score": 32103.307749883665 }, { "content": "\n\n if (ch == '\\n') {\n\n append(\"\\\\n\");\n\n } else if (ch == '\\t') {\n\n append(\"\\\\t\");\n\n } else if (ch == '\\r') {\n\n append(\"\\\\r\");\n\n } else if (ch == '\\b') {\n\n append(\"\\\\b\");\n\n } else if (ch == '\\f') {\n\n append(\"\\\\f\");\n\n } else if (ch == '\\v') {\n\n append((strictJSON ? \"\\\\u000B\" : \"\\\\v\"));\n\n } else if (ch == '\\a') {\n\n append((strictJSON ? 
\"\\\\u0007\" : \"\\\\a\"));\n\n } else if (ch == '\\\\') {\n\n append(\"\\\\\\\\\");\n\n } else if (ch == '\\\"') {\n\n append(\"\\\\\\\"\");\n\n } else if (ch == '/') {\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 71, "score": 32102.089675574945 }, { "content": " append(\"\\\\/\");\n\n } else if (ch == '\\0') {\n\n append(\"\\\\u0000\");\n\n } else if (ch < 32 || ch == 0x7f) {\n\n append(\"\\\\u\");\n\n std::string tmp;\n\n Poco::NumberFormatter::appendHex(\n\n tmp, static_cast<unsigned char>(ch), 4);\n\n append(tmp);\n\n } else if (ch > 0xFFFF) {\n\n ch -= 0x10000;\n\n append(\"\\\\u\");\n\n std::string tmp;\n\n Poco::NumberFormatter::appendHex(\n\n tmp,\n\n static_cast<unsigned char>((ch >> 10U) & 0x03ffU) + 0xd800U,\n\n 4);\n\n append(tmp);\n\n append(\"\\\\u\");\n\n tmp.clear();\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 72, "score": 32101.499587095095 }, { "content": " // SPDX-License-Identifier:\tBSL-1.0\n\n //\n\n */\n\n void formatString(const std::string &value) noexcept {\n\n append(\"\\\"\");\n\n\n\n if (escapeAllUnicode) {\n\n escapeJSONUTF8(value.begin(), value.end());\n\n } else {\n\n for (std::string::const_iterator it = value.begin(),\n\n end = value.end();\n\n it != end; ++it) {\n\n if ((*it >= 0 && *it <= 31) || (*it == '\"') || (*it == '\\\\')) {\n\n escapeJSONUTF8(it, it + 1);\n\n } else {\n\n append(*it);\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 73, "score": 32101.190194086073 }, { "content": " any.isTime()) {\n\n formatString(any.convert<std::string>());\n\n } else {\n\n formatString(any.convert<std::string>());\n\n }\n\n }\n\n\n\n /**\n\n * @brief Format string para JSON, fonte base: Poco Library\n\n *\n\n //\n\n // String.h\n\n //\n\n // Library: Foundation\n\n // Package: Core\n\n // Module: String\n\n //\n\n // Copyright (c) 2004-2006, Applied Informatics Software Engineering GmbH.\n\n // and Contributors.\n\n //\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 74, "score": 32100.927049992624 }, { "content": " append(\"\\\"\");\n\n }\n\n\n\n /**\n\n * @brief Escape string UTF8 JSON. 
Fonte: Poco Library\n\n * https://github.com/pocoproject/poco/blob/fac2437fabf24ff56df7ded9f468d3d855058221/Foundation/src/UTF8String.cpp#L181\n\n *\n\n //\n\n // UTF8String.cpp\n\n //\n\n // Library: Foundation\n\n // Package: Text\n\n // Module: UTF8String\n\n //\n\n // Copyright (c) 2007, Applied Informatics Software Engineering GmbH.\n\n // and Contributors.\n\n //\n\n // SPDX-License-Identifier:\tBSL-1.0\n\n //\n\n */\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 75, "score": 32100.697237177093 }, { "content": " if (type == typeid(Object)) {\n\n stringify(any.extract<Object>());\n\n } else if (type == typeid(Array)) {\n\n stringify(any.extract<Array>());\n\n } else if (type == typeid(Object::Ptr)) {\n\n stringify(*any.extract<Object::Ptr>());\n\n } else if (type == typeid(Array::Ptr)) {\n\n stringify(*any.extract<Array::Ptr>());\n\n } else if (any.isEmpty()) {\n\n append(\"null\");\n\n } else if (any.isNumeric() || any.isBoolean()) {\n\n auto value = any.convert<std::string>();\n\n if (type == typeid(char)) {\n\n formatString(value);\n\n } else {\n\n append(value);\n\n }\n\n } else if (any.isString()) {\n\n formatString(any.extract<std::string>());\n\n } else if (any.isString() || any.isDateTime() || any.isDate() ||\n", "file_path": "src/utils/PocoJsonStringify.hpp", "rank": 76, "score": 32100.37451965083 }, { "content": "#pragma once\n\n\n\n#include <pistache/net.h>\n\n#include <utility>\n\n\n\n#include \"pistache.hpp\"\n\n\n\nnamespace httpwrappers {\n\n\n\nusing Ipv4 = Pistache::Ipv4;\n\nusing ReqRaw = const Pistache::Rest::Request &;\n\nusing RespRaw = Pistache::Http::ResponseWriter;\n\nusing Code = Pistache::Http::Code;\n\nusing Address = Pistache::Address;\n\nusing MediaType = Pistache::Http::Mime::MediaType;\n\nusing Cookie = Pistache::Http::Cookie;\n\nusing cstringref = const std::string &;\n\n\n\ninline auto jsonMimeType() {\n\n return MediaType(Pistache::Http::Mime::Type::Application,\n\n Pistache::Http::Mime::Subtype::Json,\n\n Pistache::Http::Mime::Suffix::None);\n\n}\n\n\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 78, "score": 23.502865470904315 }, { "content": " }\n\n\n\n return inputdata;\n\n } catch (const ValidatorException &e) {\n\n returnPocoJson(Pistache::Http::Code::Bad_Request, e.to_json(),\n\n httpdata.response);\n\n } catch (const std::exception &) {\n\n throw_json_http_exception(Pistache::Http::Code::Bad_Request, false,\n\n \"Falha de leitura do Json\",\n\n httpdata.response);\n\n }\n\n\n\n return nullptr;\n\n}\n\n\n\nauto CController::valida_hash_request(const Poco::JSON::Object::Ptr &param,\n\n Pistache::Http::ResponseWriter &response)\n\n -> bool {\n\n if (!param->has(defaulthashfield)) {\n\n throw_json_http_exception(Pistache::Http::Code::Bad_Request, false,\n", "file_path": "src/WebInterface/CController.cpp", "rank": 80, "score": 19.800965348107233 }, { "content": "\n\n static auto json_return_example_to_swagger(Poco::Dynamic::Var obj)\n\n -> Poco::JSON::Object::Ptr;\n\n\n\n static auto content_schema_json(const Poco::JSON::Object::Ptr &json)\n\n -> Poco::JSON::Object::Ptr;\n\n\n\n void register_route_in(const Pistache::Rest::Request &request);\n\n void register_route_body(const Poco::JSON::Object::Ptr &json);\n\n void register_route_param(const std::string &field);\n\n void register_route_resp_json(Pistache::Http::Code code,\n\n const Poco::JSON::Object::Ptr &json);\n\n void register_route_security(const std::string &securityName);\n\n void register_route_description(const std::vector<std::string> &tags,\n\n const 
std::string &description);\n\n\n\n void set_contact(const std::string &name, const std::string &email,\n\n const std::string &url);\n\n void add_server(const std::string &description, const std::string &url);\n\n void set_api_info_basic(const std::string &title,\n", "file_path": "src/utils/DocAPI.hpp", "rank": 81, "score": 19.608502054538363 }, { "content": "}\n\n\n\nvoid CController::throw_http_exception(Pistache::Http::Code code,\n\n const std::string &fullreturndata) {\n\n throw Pistache::Http::HttpError(code, fullreturndata);\n\n}\n\n\n\nauto CController::input_json(httpStreamPack httpdata)\n\n -> Poco::JSON::Object::Ptr {\n\n try {\n\n Poco::JSON::Object::Ptr inputdata = Validator::request_to_json(\n\n httpdata.request, min_json_body_size, max_json_body_size);\n\n\n\n /**\n\n *@brief Checagem da hash dos dados do json\n\n *\n\n */\n\n if (habilita_hash_input_json &&\n\n !valida_hash_request(inputdata, httpdata.response)) {\n\n return nullptr;\n", "file_path": "src/WebInterface/CController.cpp", "rank": 82, "score": 19.541468778959565 }, { "content": "#include \"WebInterface/JsonResponse.hpp\"\n\n#include \"WebInterface/WebApp.hpp\"\n\n#include \"WebInterface/httpwrappers.hpp\"\n\n#include <Poco/JSON/Object.h>\n\n#include <chrono>\n\n#include <gtest/gtest.h>\n\n#include <memory>\n\n#include <thread>\n\n\n\nusing namespace webapp;\n\n\n\n// NOLINTBEGIN(hicpp-special-member-functions)\n\n\n\nstatic auto somefn(Req /*ununsed*/, Resp /*ununsed*/)\n\n -> std::unique_ptr<ResponseViaReturn> {\n\n return std::make_unique<RawStringResponse>(\"pato voa\");\n\n}\n\n\n\nTEST(WebAppTest, Startup) {\n\n WebApp app({Ipv4::any(), 3000}, 2);\n", "file_path": "tests/test_httpwrappers.cpp", "rank": 83, "score": 17.841379206651176 }, { "content": "}\n\n\n\nauto CController::get_auth(httpStreamPack httpdata)\n\n -> std::optional<std::string> {\n\n const auto auth_header =\n\n httpdata.request.headers()\n\n .tryGet<Pistache::Http::Header::Authorization>();\n\n\n\n if (!auth_header) {\n\n returnPocoJson(Pistache::Http::Code::Bad_Request,\n\n default_json_return(\n\n false, \"Falta header Authorization com sua chave\"),\n\n httpdata.response);\n\n return std::nullopt;\n\n }\n\n\n\n std::optional<std::string> result = auth_header->value();\n\n\n\n if (result.value_or(std::string()).empty()) {\n\n returnPocoJson(Pistache::Http::Code::Bad_Request,\n", "file_path": "src/WebInterface/CController.cpp", "rank": 84, "score": 16.671594972279276 }, { "content": "}\n\n\n\nvoid DocAPI::register_route_param(const std::string & /*unused*/) {}\n\n\n\nvoid DocAPI::register_route_resp_json(Pistache::Http::Code code,\n\n const Poco::JSON::Object::Ptr &json) {\n\n if (lastroutedata.isNull()) {\n\n return;\n\n }\n\n\n\n auto responses =\n\n lastroutedata->get(\"responses\").extract<Poco::JSON::Object::Ptr>();\n\n\n\n if (responses.isNull()) {\n\n return;\n\n }\n\n\n\n Poco::JSON::Object::Ptr codejson = new Poco::JSON::Object;\n\n\n\n codejson->set(\"content\", content_schema_json(json));\n", "file_path": "src/utils/DocAPI.cpp", "rank": 85, "score": 15.575895907882682 }, { "content": " \"input\");\n\n }\n\n\n\n return parse_json_from_string(request.body());\n\n }\n\n\n\n static inline auto is_alphanum(int ch) -> bool { return isalnum(ch) != 0; }\n\n\n\n template <class T, class... Types>\n\n static inline constexpr auto custom_array(Types... 
args) {\n\n std::array<T, std::tuple_size<std::tuple<Types...>>::value> a = {\n\n std::forward<Types>(args)...};\n\n\n\n return a;\n\n }\n\n\n\n static auto CheckSQL(const std::string &sql) -> bool {\n\n auto key = custom_array<std::string_view>(\"%\", \"/\", \"union\", \"|\", \"&\",\n\n \"^\", \"#\", \"/*\", \"*/\");\n\n\n", "file_path": "src/utils/Validator.hpp", "rank": 86, "score": 15.165613241674073 }, { "content": " * @return false not failed\n\n */\n\n [[nodiscard]] virtual auto hasFailed() const -> bool { return failed; }\n\n\n\n virtual void handle() = 0;\n\n\n\n QueueableJob();\n\n virtual ~QueueableJob();\n\n\n\n template <class T = Poco::JSON::Object::Ptr>\n\n static void json_obj_get(const Poco::JSON::Object::Ptr &json,\n\n const std::string &key,\n\n Poco::JSON::Object::Ptr &out) {\n\n if (json->has(key) && !json->isNull(key)) {\n\n out = json->getObject(key);\n\n } else {\n\n out.reset();\n\n }\n\n }\n\n\n", "file_path": "src/jobhandler/QueueableJob.hpp", "rank": 87, "score": 14.975558594759951 }, { "content": " except.sendResponse(req, resp);\n\n }\n\n\n\n return Pistache::Rest::Route::Result::Ok;\n\n }\n\n\n\n static void prepareReqResp(Req &req, Resp &resp) {\n\n req.resp = &resp;\n\n // req.session.req = &req;\n\n // req.session.resp = &resp;\n\n }\n\n\n\n explicit RouterWrapper(std::function<callbackDecl_t> callback)\n\n : func(std::move(callback)) {}\n\n\n\n private:\n\n std::function<callbackDecl_t> func;\n\n};\n\n\n\n} // namespace httpwrappers\n", "file_path": "src/WebInterface/httpwrappers.hpp", "rank": 88, "score": 14.958121842845262 }, { "content": "#include \"WebInputValidator.hpp\"\n\n#include \"CController.hpp\"\n\n\n\nauto WebInputValidator::validate() -> bool {\n\n /**\n\n *@brief Chama o callback de validação\n\n *\n\n */\n\n if (fn_obj) {\n\n fn_obj(inputValidator);\n\n }\n\n\n\n /**\n\n *@brief Se houver erros de validação envia o retorno ao cliente, deve\n\n *retornar a rota a partir disso\n\n *\n\n */\n\n auto responsedata = inputValidator.get_response();\n\n if (!responsedata.isNull()) {\n\n CController::returnPocoJson(Pistache::Http::Code::Bad_Request,\n\n responsedata, response);\n\n return false;\n\n }\n\n\n\n return true;\n\n}\n", "file_path": "src/WebInterface/WebInputValidator.cpp", "rank": 89, "score": 14.768027025603851 }, { "content": " const std::string &description);\n\n\n\n void set_security_schema(const std::string &name,\n\n const securitySchemaStruct &description);\n\n\n\n void dump();\n\n\n\n static auto singleton() -> DocAPI &;\n\n};\n\n\n\n/**\n\n *@brief Macros para autodocumentação OpenAPI. 
Somente acessíveis em debug.\n\n *\n\n */\n\n#ifdef NDEBUG\n\n#define DOCAPI_REGISTER_ROUTE_IN(param)\n\n#define DOCAPI_REGISTER_ROUTE_BODY(param)\n\n#define DOCAPI_RESPONSE_JSON(code, json)\n\n#define DOCAPI_REGISTER_ROUTE_SECURITY(param)\n\n#define DOCAPI_REGISTER_ROUTE_DESCRIPTION(tags, desc)\n", "file_path": "src/utils/DocAPI.hpp", "rank": 91, "score": 14.488659902684233 }, { "content": " Pistache::Http::Mime::Suffix::None);\n\n\n\nvoid CController::returnPocoJson(Pistache::Http::Code code,\n\n const Poco::JSON::Object::Ptr &json,\n\n Pistache::Http::ResponseWriter &response) {\n\n DOCAPI_RESPONSE_JSON(code, json);\n\n\n\n std::stringstream out;\n\n json->stringify(out);\n\n\n\n response.send(code, out.str(), JSON_RETURN);\n\n}\n\n\n\nauto CController::default_json_return(bool success, const std::string &msg)\n\n -> Poco::JSON::Object::Ptr {\n\n Poco::JSON::Object::Ptr result(new Poco::JSON::Object);\n\n\n\n result->set(\"sucesso\", success);\n\n result->set(\"mensagem\", msg);\n\n\n", "file_path": "src/WebInterface/CController.cpp", "rank": 92, "score": 14.48226499858685 }, { "content": "#include \"allocation_count.hpp\"\n\n#include <cstddef>\n\n#include <cstdlib>\n\n\n\nvoid operator delete(void *ptr, size_t blksize) noexcept;\n\n\n\nstatic std::atomic<std::size_t> allocations{0};\n\nstatic std::atomic<std::size_t> deallocations{0};\n\n\n\nauto operator new(std::size_t n) -> void * {\n\n ++allocations;\n\n return malloc(n); // NOLINT(hicpp-no-malloc)\n\n}\n\n\n\nvoid operator delete(void *ptr) noexcept {\n\n ++deallocations;\n\n free(ptr); // NOLINT(hicpp-no-malloc)\n\n}\n\n\n\nvoid operator delete(void *ptr, size_t /*blksize*/) noexcept {\n", "file_path": "tests/allocation_count.cpp", "rank": 93, "score": 13.36600876996075 }, { "content": "#include \"QueueWorker.hpp\"\n\n#include \"../utils/LogDefines.hpp\"\n\n#include <sys/types.h>\n\n#include <sys/wait.h>\n\n#include <unistd.h>\n\n\n\nauto job::QueueWorker::fork_process() -> pid_t {\n\n if (forkToHandle) {\n\n return getProcessHelper()->fork();\n\n }\n\n\n\n return 0;\n\n}\n\n\n\nauto job::QueueWorker::allocateJobOutputStream(\n\n const Poco::JSON::Object::Ptr &json)\n\n -> std::pair<std::fstream, std::fstream> {\n\n auto tmpdir = std::filesystem::temp_directory_path();\n\n std::string tmpfilename = \"tmp\" + json->getValue<std::string>(\"uuid\");\n\n\n", "file_path": "src/jobhandler/QueueWorker.cpp", "rank": 94, "score": 13.313211325492242 }, { "content": " return result;\n\n}\n\n\n\nauto CController::default_json_return(bool success, const std::string &msg,\n\n const Poco::UUID &uuid)\n\n -> Poco::JSON::Object::Ptr {\n\n Poco::JSON::Object::Ptr result(new Poco::JSON::Object);\n\n\n\n result->set(\"sucesso\", success);\n\n result->set(\"mensagem\", msg);\n\n result->set(\"req_uuid\", uuid);\n\n\n\n return result;\n\n}\n\n\n\nauto CController::default_json_return_as_str(bool success,\n\n const std::string &msg)\n\n -> std::string {\n\n\n\n auto json = default_json_return(success, msg);\n", "file_path": "src/WebInterface/CController.cpp", "rank": 95, "score": 13.032223216802553 }, { "content": " router.addNotFoundHandler(RouterWrapper(std::move(func)));\n\n return *this;\n\n }\n\n\n\n WebApp(const WebApp &) = delete;\n\n auto operator=(const WebApp &) -> WebApp & = delete;\n\n\n\n WebApp(const WebApp &&) = delete;\n\n auto operator=(const WebApp &&) -> WebApp & = delete;\n\n\n\n WebApp() = default;\n\n WebApp(Address addr, size_t thr) { init(addr, thr); }\n\n WebApp(uint16_t port, size_t thr) { init({Ipv4::any(), port}, thr); }\n\n 
~WebApp();\n\n\n\n private:\n\n void prepareReqResp(Req &req, Resp &resp);\n\n\n\n std::shared_ptr<Pistache::Http::Endpoint> httpEndpoint;\n\n Pistache::Rest::Router router;\n\n std::atomic<bool> keepRunning{true};\n\n};\n\n\n\n} // namespace webapp\n", "file_path": "src/WebInterface/WebApp.hpp", "rank": 96, "score": 12.985028805011883 }, { "content": "#include \"../src/jobhandler/JobsHandler.hpp\"\n\n#include \"../src/jobhandler/QueueWorker.hpp\"\n\n#include \"../src/jobhandler/QueueableJob.hpp\"\n\n#include \"../src/queues/StdQueue.hpp\"\n\n\n\n#include <gtest/gtest.h>\n\n\n\n#include <utility>\n\n\n\nstatic constexpr int JSON_INDENT = 5;\n\nstatic bool jobrunned = false;\n\nstatic const std::string queue_name = \"test_queue_worker:queue:default\";\n\n\n\n/**\n\n * @brief First test job\n\n *\n\n */\n", "file_path": "tests/test_jobs.cpp", "rank": 97, "score": 12.961926691171792 }, { "content": " codejson->set(\"description\", \"\");\n\n\n\n responses->set(std::to_string(static_cast<int>(code)), codejson);\n\n}\n\n\n\nvoid DocAPI::dump() {\n\n\n\n if (!log.is_open()) {\n\n log.open(\"openapi.json\", std::ios::trunc | std::ios::out);\n\n }\n\n\n\n mainobj->set(\"paths\", paths);\n\n mainobj->stringify(log, 4);\n\n log << std::endl;\n\n\n\n log.close();\n\n}\n\n\n\nauto DocAPI::singleton() -> DocAPI & {\n\n static DocAPI docs;\n\n return docs;\n\n}\n", "file_path": "src/utils/DocAPI.cpp", "rank": 98, "score": 12.670651689851248 }, { "content": "\n\n if (arg.second.isNumeric()) {\n\n fullquery += arg.second.toString();\n\n } else {\n\n fullquery += esc_add_q(mscon, arg.second.toString());\n\n }\n\n firstList = false;\n\n }\n\n }\n\n\n\n static auto should_resize_string(size_t desired_size, std::string &str)\n\n -> bool {\n\n return (str.capacity() - str.size()) < desired_size;\n\n }\n\n\n\n static void\n\n build_generic_insert_query_json(std::string &fullquery,\n\n std::string_view table_name,\n\n const Poco::JSON::Object::Ptr &fielddata,\n\n sql::mysql::MySQL_Connection *mscon) {\n", "file_path": "src/Database/CSql.hpp", "rank": 99, "score": 12.58100065786343 } ]
C++
src/main.cpp
Laakeri/pace2020-treedepth-exact
1049abbe6fb4012f027c2fcfce15fff5fa8c169f
#include <iostream>
#include <vector>
#include <memory>
#include <fstream>
#include <iomanip>
#include <set>
#include <cassert>
#include <random>

#include <sys/resource.h>

#include "graph.hpp"
#include "io.hpp"
#include "utils.hpp"
#include "mcs.hpp"
#include "staticset.hpp"
#include "bitset.hpp"
#include "chordalsolve.hpp"
#include "best.hpp"
#include "preprocessor.hpp"
#include "ms_solve.hpp"

using namespace sms;

#define F first
#define S second

using std::vector;

std::mt19937 gen(1337);

void SetStackSize(int64_t sz) {
  struct rlimit rl;
  assert(getrlimit(RLIMIT_STACK, &rl) == 0);
  Log::Write(3, "Cur stack size ", rl.rlim_cur);
  if (rl.rlim_cur < sz) {
    rl.rlim_cur = sz;
    Log::Write(3, "Setting stack size ", sz);
    assert(setrlimit(RLIMIT_STACK, &rl) == 0);
  }
}

template<size_t chunks>
int HeurComp(const FGraph<chunks>& graph, int best, double time, const Preprocessor& pp) {
  Timer timer;
  timer.start();
  int it=0;
  std::set<uint64_t> gs;
  int vari = 0;
  int upd_cnt = 0;
  int last_add = 0;
  while (timer.get() < time) {
    double dupls = 0;
    if (it > 0) {
      dupls = (double)(it - (int)gs.size()) / (double)it;
    }
    if (dupls > 0.5 && upd_cnt == graph.n() && it - last_add > 10) {
      vari++;
      last_add = it;
    }
    it++;
    Timer triang_tmr;
    triang_tmr.start();
    FGraph<chunks> lol_g = graph;
    mcs::LbTriang(lol_g, gen, vari, upd_cnt);
    triang_tmr.stop();
    double est = (double)gs.size() * (double)it / ((double)it - (double)gs.size());
    Log::Write(10, "min tri ", triang_tmr.get(), " ", best, " ", est, " ", lol_g.m(), " ", dupls, " ", vari, " ", upd_cnt);
    upd_cnt = upd_cnt * 2 + 1;
    upd_cnt = std::min(upd_cnt, graph.n());
    if (gs.count(lol_g.Hash())) {
      Log::Write(10, "Same triang ", gs.size(), " ", it, " ", est);
      continue;
    }
    gs.insert(lol_g.Hash());
    {
      Timer td_tmr;
      td_tmr.start();
      ChordalSolve<chunks> cs(lol_g);
      int td = cs.Solve(best-1, vari, std::min(time - timer.get(), triang_tmr.get() + 0.01));
      if (td < best) {
        best = td;
        Log::Write(3, "Treedepth: ", best);
        auto resu = cs.Get(best);
        resu = pp.Reconstruct(resu);
        resu = ColToPar(pp.org_graph, resu);
        int got = best::SetBest(resu, true);
        assert(got <= td);
        best = got;
        Log::Write(3, "Got ", got);
      }
    }
  }
  return best;
}

template<size_t chunks>
int DoSolve2(const SparseGraph& graph, int best, const Preprocessor& pp) {
  assert(graph.n() <= chunks * BITS && graph.n() > (chunks-1) * BITS);
  FGraph<chunks> ppg(graph);
  Log::Write(3, "Solve2 n:", ppg.n(), " m:", ppg.m());
  {
    MSSolve<chunks> mss(ppg);
    mss.incorrect_msenum_ = true;
    int ans = mss.Solve(best-1, true);
    if (ans < best) {
      best = ans;
      Log::Write(3, "Heur ans ", ans);
      auto sol = mss.Get(ans);
      sol = pp.Reconstruct(sol);
      sol = ColToPar(pp.org_graph, sol);
      int got = best::SetBest(sol, true);
      assert(got <= ans);
      Log::Write(3, "Ans valid ", got, " ", ans);
      best = got;
      Log::Write(3, "Re preprocess");
      return best;
    }
  }
  MSSolve<chunks> mss2(ppg);
  int ans2 = mss2.Solve(best-1, false);
  if (ans2 < best) {
    best = ans2;
    Log::Write(3, "Exact ans ", ans2);
    auto sol = mss2.Get(ans2);
    sol = pp.Reconstruct(sol);
    sol = ColToPar(pp.org_graph, sol);
    int got = best::SetBest(sol, true);
    assert(got == ans2);
    Log::Write(3, "Ans valid ", ans2);
  }
  return -1;
}

template<size_t chunks>
void DoSolve1(const SparseGraph& graph, int best, const Preprocessor& pp) {
  assert(graph.n() <= chunks * BITS && graph.n() > (chunks-1) * BITS);
  const FGraph<chunks> ppg(graph);
  Log::Write(3, "Dosolve1 n:", ppg.n(), " m:", ppg.m());
  double pp_time = 40;
  if (ppg.n() <= 50) { pp_time = 1; }
  else if (ppg.n() <= 75) { pp_time = 5; }
  else if (ppg.n() <= 100) { pp_time = 20; }
  else if (ppg.n() <= 150) { pp_time = 30; }
  else if (ppg.n() <= 200) { pp_time = 40; }
  else if (ppg.n() <= 250) { pp_time = 50; }
  else { pp_time = 60; }
  best = HeurComp<chunks>(ppg, best, pp_time, pp);
  while (true) {
    Preprocessor pp2 = pp;
    SparseGraph pp_graph = pp2.TamakiRules(SparseGraph(ppg), best-1);
    int nbest = best;
    if (pp_graph.n() <= BITS) { nbest = DoSolve2<1>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 2*BITS) { nbest = DoSolve2<2>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 3*BITS) { nbest = DoSolve2<3>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 4*BITS) { nbest = DoSolve2<4>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 5*BITS) { nbest = DoSolve2<5>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 6*BITS) { nbest = DoSolve2<6>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 7*BITS) { nbest = DoSolve2<7>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 8*BITS) { nbest = DoSolve2<8>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 9*BITS) { nbest = DoSolve2<9>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 10*BITS) { nbest = DoSolve2<10>(pp_graph, best, pp2); }
    else { assert(0); }
    if (nbest == -1) return;
    assert(nbest >= 0 && nbest < best);
    best = nbest;
    Log::Write(3, "Re solve ", best);
  }
}

int main() {
  SetStackSize(8ll * 1024 * 1024);
  Log::SetLogLevel(3);
  Io io;
  SparseGraph graph = io.ReadGraph(std::cin);
  Log::Write(3, "Input n:", graph.n(), " m:", graph.m());
  best::InitBest(graph);
  assert(graph.IsConnected());
  int best = graph.n();
  Preprocessor pp;
  SparseGraph pp_graph = pp.Preprocess(graph);
  if (pp_graph.n() <= BITS) { DoSolve1<1>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 2*BITS) { DoSolve1<2>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 3*BITS) { DoSolve1<3>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 4*BITS) { DoSolve1<4>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 5*BITS) { DoSolve1<5>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 6*BITS) { DoSolve1<6>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 7*BITS) { DoSolve1<7>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 8*BITS) { DoSolve1<8>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 9*BITS) { DoSolve1<9>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 10*BITS) { DoSolve1<10>(pp_graph, best, pp); }
  else { assert(0); }
  best::PrintBest();
}
#include <iostream>
#include <vector>
#include <memory>
#include <fstream>
#include <iomanip>
#include <set>
#include <cassert>
#include <random>

#include <sys/resource.h>

#include "graph.hpp"
#include "io.hpp"
#include "utils.hpp"
#include "mcs.hpp"
#include "staticset.hpp"
#include "bitset.hpp"
#include "chordalsolve.hpp"
#include "best.hpp"
#include "preprocessor.hpp"
#include "ms_solve.hpp"

using namespace sms;

#define F first
#define S second

using std::vector;

std::mt19937 gen(1337);
template<size_t chunks>
int HeurComp(const FGraph<chunks>& graph, int best, double time, const Preprocessor& pp) {
  Timer timer;
  timer.start();
  int it=0;
  std::set<uint64_t> gs;
  int vari = 0;
  int upd_cnt = 0;
  int last_add = 0;
  while (timer.get() < time) {
    double dupls = 0;
    if (it > 0) {
      dupls = (double)(it - (int)gs.size()) / (double)it;
    }
    if (dupls > 0.5 && upd_cnt == graph.n() && it - last_add > 10) {
      vari++;
      last_add = it;
    }
    it++;
    Timer triang_tmr;
    triang_tmr.start();
    FGraph<chunks> lol_g = graph;
    mcs::LbTriang(lol_g, gen, vari, upd_cnt);
    triang_tmr.stop();
    double est = (double)gs.size() * (double)it / ((double)it - (double)gs.size());
    Log::Write(10, "min tri ", triang_tmr.get(), " ", best, " ", est, " ", lol_g.m(), " ", dupls, " ", vari, " ", upd_cnt);
    upd_cnt = upd_cnt * 2 + 1;
    upd_cnt = std::min(upd_cnt, graph.n());
    if (gs.count(lol_g.Hash())) {
      Log::Write(10, "Same triang ", gs.size(), " ", it, " ", est);
      continue;
    }
    gs.insert(lol_g.Hash());
    {
      Timer td_tmr;
      td_tmr.start();
      ChordalSolve<chunks> cs(lol_g);
      int td = cs.Solve(best-1, vari, std::min(time - timer.get(), triang_tmr.get() + 0.01));
      if (td < best) {
        best = td;
        Log::Write(3, "Treedepth: ", best);
        auto resu = cs.Get(best);
        resu = pp.Reconstruct(resu);
        resu = ColToPar(pp.org_graph, resu);
        int got = best::SetBest(resu, true);
        assert(got <= td);
        best = got;
        Log::Write(3, "Got ", got);
      }
    }
  }
  return best;
}

template<size_t chunks>
int DoSolve2(const SparseGraph& graph, int best, const Preprocessor& pp) {
  assert(graph.n() <= chunks * BITS && graph.n() > (chunks-1) * BITS);
  FGraph<chunks> ppg(graph);
  Log::Write(3, "Solve2 n:", ppg.n(), " m:", ppg.m());
  {
    MSSolve<chunks> mss(ppg);
    mss.incorrect_msenum_ = true;
    int ans = mss.Solve(best-1, true);
    if (ans < best) {
      best = ans;
      Log::Write(3, "Heur ans ", ans);
      auto sol = mss.Get(ans);
      sol = pp.Reconstruct(sol);
      sol = ColToPar(pp.org_graph, sol);
      int got = best::SetBest(sol, true);
      assert(got <= ans);
      Log::Write(3, "Ans valid ", got, " ", ans);
      best = got;
      Log::Write(3, "Re preprocess");
      return best;
    }
  }
  MSSolve<chunks> mss2(ppg);
  int ans2 = mss2.Solve(best-1, false);
  if (ans2 < best) {
    best = ans2;
    Log::Write(3, "Exact ans ", ans2);
    auto sol = mss2.Get(ans2);
    sol = pp.Reconstruct(sol);
    sol = ColToPar(pp.org_graph, sol);
    int got = best::SetBest(sol, true);
    assert(got == ans2);
    Log::Write(3, "Ans valid ", ans2);
  }
  return -1;
}

template<size_t chunks>
void DoSolve1(const SparseGraph& graph, int best, const Preprocessor& pp) {
  assert(graph.n() <= chunks * BITS && graph.n() > (chunks-1) * BITS);
  const FGraph<chunks> ppg(graph);
  Log::Write(3, "Dosolve1 n:", ppg.n(), " m:", ppg.m());
  double pp_time = 40;
  if (ppg.n() <= 50) { pp_time = 1; }
  else if (ppg.n() <= 75) { pp_time = 5; }
  else if (ppg.n() <= 100) { pp_time = 20; }
  else if (ppg.n() <= 150) { pp_time = 30; }
  else if (ppg.n() <= 200) { pp_time = 40; }
  else if (ppg.n() <= 250) { pp_time = 50; }
  else { pp_time = 60; }
  best = HeurComp<chunks>(ppg, best, pp_time, pp);
  while (true) {
    Preprocessor pp2 = pp;
    SparseGraph pp_graph = pp2.TamakiRules(SparseGraph(ppg), best-1);
    int nbest = best;
    if (pp_graph.n() <= BITS) { nbest = DoSolve2<1>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 2*BITS) { nbest = DoSolve2<2>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 3*BITS) { nbest = DoSolve2<3>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 4*BITS) { nbest = DoSolve2<4>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 5*BITS) { nbest = DoSolve2<5>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 6*BITS) { nbest = DoSolve2<6>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 7*BITS) { nbest = DoSolve2<7>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 8*BITS) { nbest = DoSolve2<8>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 9*BITS) { nbest = DoSolve2<9>(pp_graph, best, pp2); }
    else if (pp_graph.n() <= 10*BITS) { nbest = DoSolve2<10>(pp_graph, best, pp2); }
    else { assert(0); }
    if (nbest == -1) return;
    assert(nbest >= 0 && nbest < best);
    best = nbest;
    Log::Write(3, "Re solve ", best);
  }
}

int main() {
  SetStackSize(8ll * 1024 * 1024);
  Log::SetLogLevel(3);
  Io io;
  SparseGraph graph = io.ReadGraph(std::cin);
  Log::Write(3, "Input n:", graph.n(), " m:", graph.m());
  best::InitBest(graph);
  assert(graph.IsConnected());
  int best = graph.n();
  Preprocessor pp;
  SparseGraph pp_graph = pp.Preprocess(graph);
  if (pp_graph.n() <= BITS) { DoSolve1<1>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 2*BITS) { DoSolve1<2>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 3*BITS) { DoSolve1<3>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 4*BITS) { DoSolve1<4>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 5*BITS) { DoSolve1<5>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 6*BITS) { DoSolve1<6>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 7*BITS) { DoSolve1<7>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 8*BITS) { DoSolve1<8>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 9*BITS) { DoSolve1<9>(pp_graph, best, pp); }
  else if (pp_graph.n() <= 10*BITS) { DoSolve1<10>(pp_graph, best, pp); }
  else { assert(0); }
  best::PrintBest();
}
void SetStackSize(int64_t sz) { struct rlimit rl; assert(getrlimit(RLIMIT_STACK, &rl) == 0); Log::Write(3, "Cur stack size ", rl.rlim_cur); if (rl.rlim_cur < sz) { rl.rlim_cur = sz; Log::Write(3, "Setting stack size ", sz); assert(setrlimit(RLIMIT_STACK, &rl) == 0); } }
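A side note on the sampling loop in HeurComp above: `est` extrapolates a rough size of the pool of triangulations the randomized LbTriang sampler can reach, based on how often repeated hashes occur. The snippet below is a minimal standalone illustration of that arithmetic only; the counts are made-up placeholders, not measurements from the solver.

// Standalone sketch of the duplicate-rate bookkeeping used in HeurComp.
// The counts below are hypothetical placeholders.
#include <cstdio>

int main() {
  int it = 200;       // triangulations sampled so far (hypothetical)
  int distinct = 120; // distinct hashes among them (hypothetical)
  double dupls = (double)(it - distinct) / (double)it;                          // duplicate fraction: 0.40
  double est = (double)distinct * (double)it / ((double)it - (double)distinct); // estimated pool size: 300
  std::printf("duplicate rate %.2f, estimated distinct triangulations %.0f\n", dupls, est);
  return 0;
}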
function_block-full_function
[ { "content": "class FBitsetSet {\n\n public:\n\n FBitsetSet() {}\n\n FBitsetSet(size_t capacity, double load_factor) {\n\n load_factor_ = load_factor;\n\n assert(chunks > 0);\n\n assert(load_factor_ >= 1.1);\n\n capacity_ = NextPrime((capacity + 1) * load_factor_);\n\n assert((size_t)(capacity_ * load_factor_) > capacity_);\n\n container_.resize(capacity_ * chunks);\n\n }\n\n bool Insert(const FBitset<chunks>& bitset) {\n\n size_t ind = Hash(bitset.data_, capacity_);\n\n while (1) {\n\n if (Zero(IndToPtr(ind, container_))) break;\n\n else if (Equal(IndToPtr(ind, container_), bitset.data_)) return false;\n\n else {\n\n ind++;\n\n if (ind == capacity_) {\n\n ind = 0;\n", "file_path": "src/bitset.hpp", "rank": 0, "score": 50603.376006693186 }, { "content": "class StaticSet {\n\n public:\n\n StaticSet();\n\n explicit StaticSet(const std::vector<T>& values);\n\n explicit StaticSet(const std::vector<std::pair<T, T> >& values);\n\n void Init(const std::vector<T>& values);\n\n void Init(const std::vector<std::pair<T, T> >& values);\n\n int Rank(T value) const;\n\n T Kth(int k) const;\n\n int Size() const;\n\n std::vector<T> Values() const;\n\n private:\n\n std::vector<T> values_;\n\n};\n\n\n\ntemplate<typename T>\n\nStaticSet<T>::StaticSet(const std::vector<T>& values) {\n\n Init(values);\n\n}\n\n\n", "file_path": "src/staticset.hpp", "rank": 1, "score": 26858.099965197867 }, { "content": "class FBitset {\n\n public:\n\n uint64_t data_[chunks];\n\n void Clear() {\n\n for (size_t i=0;i<chunks;i++){\n\n data_[i] = 0;\n\n }\n\n }\n\n FBitset() {\n\n Clear();\n\n }\n\n bool operator<(const FBitset<chunks>& other) const {\n\n for (size_t i=0;i<chunks;i++){\n\n if (data_[i]<other.data_[i]) return true;\n\n else if(data_[i]>other.data_[i]) return false;\n\n }\n\n return false;\n\n }\n\n bool operator==(const FBitset<chunks>& other) const {\n\n for (size_t i=0;i<chunks;i++){\n", "file_path": "src/bitset.hpp", "rank": 2, "score": 26769.31293622844 }, { "content": "class FGraph;\n\n\n", "file_path": "src/graph.hpp", "rank": 3, "score": 26769.31293622844 }, { "content": "class FGraph {\n\n public:\n\n FGraph(int n);\n\n FGraph(std::vector<Edge> edges);\n\n FGraph(const SparseGraph& graph);\n\n int n() const;\n\n int m() const;\n\n bool HasEdge(int v, int u) const;\n\n bool HasEdge(Edge e) const;\n\n void AddEdge(int v, int u);\n\n void AddEdge(Edge e);\n\n const std::vector<int>& Neighbors(int v) const;\n\n std::vector<FBitset<chunks>> CompNeighsBit(const FBitset<chunks>& block) const;\n\n void Dfs2Bit(FBitset<chunks>& vis, FBitset<chunks>& ne) const;\n\n std::vector<FBitset<chunks>> SmallMinsepsHeuristic(int sz) const;\n\n void Dfs(int v, std::vector<char>& block, std::vector<int>& component) const;\n\n std::vector<int> FindComponentAndMark(int v, std::vector<char>& block) const;\n\n bool IsConnectedOrIsolated() const;\n\n std::vector<std::vector<int> > Components(const std::vector<int>& separator) const;\n\n uint64_t Hash() const;\n", "file_path": "src/graph.hpp", "rank": 4, "score": 26769.31293622844 }, { "content": "class FBitsetMap {\n\n public:\n\n FBitsetMap() {}\n\n FBitsetMap(size_t capacity, double load_factor) {\n\n load_factor_ = load_factor;\n\n assert(chunks > 0);\n\n assert(load_factor_ >= 1.1);\n\n capacity_ = NextPrime((capacity + 1) * load_factor_);\n\n assert((size_t)(capacity_ * load_factor_) > capacity_);\n\n container_.resize(capacity_ * chunks);\n\n values_.resize(capacity_);\n\n }\n\n std::pair<int, bool> Insert(const FBitset<chunks>& bitset, int value, bool replace=false) {\n\n 
assert(value > 0);\n\n size_t ind = Hash(bitset.data_, capacity_);\n\n while (1) {\n\n if (Zero(IndToPtr(ind, container_))) break;\n\n else if (Equal(IndToPtr(ind, container_), bitset.data_)) {\n\n assert(values_[ind] > 0);\n\n if (replace) {\n", "file_path": "src/bitset.hpp", "rank": 5, "score": 25259.797828449326 }, { "content": " class FBitsetIterator {\n\n private:\n\n const FBitset<chunks>* const bitset_;\n\n size_t pos_;\n\n uint64_t tb_;\n\n public:\n\n FBitsetIterator(const FBitset<chunks>* const bitset, size_t pos, uint64_t tb) : bitset_(bitset), pos_(pos), tb_(tb) { }\n\n bool operator!=(const FBitsetIterator& other) const {\n\n return pos_ != other.pos_ || tb_ != other.tb_;\n\n }\n\n const FBitsetIterator& operator++() {\n\n tb_ &= ~-tb_;\n\n while (tb_ == 0 && pos_ < chunks) {\n\n pos_++;\n\n if (pos_ < chunks) {\n\n tb_ = bitset_->data_[pos_];\n\n }\n\n }\n\n return *this;\n\n }\n", "file_path": "src/bitset.hpp", "rank": 6, "score": 25259.797828449326 }, { "content": "#pragma once\n\n\n\n#include <map>\n\n#include <random>\n\n\n\n#include \"graph.hpp\"\n\n#include \"bitset.hpp\"\n\n\n\n#define F first\n\n#define S second\n\n\n\nnamespace sms {\n\nusing std::max;\n\nusing std::vector;\n\n\n", "file_path": "src/ms_solve.hpp", "rank": 8, "score": 20.97395911137485 }, { "content": " #include \"best.hpp\"\n\n\n\n#include <iostream>\n\n#include <vector>\n\n#include <cassert>\n\n\n\n#include \"graph.hpp\"\n\n\n\nusing std::vector;\n\nusing std::cout;\n\nusing std::flush;\n\nusing std::max;\n\n\n\nnamespace sms {\n\nnamespace best {\n\nnamespace {\n\nint GetDepth(int v, const vector<vector<int>>& tr) {\n\n int d = 1;\n\n for (int nv : tr[v]) {\n\n d = max(d, GetDepth(nv, tr)+1);\n", "file_path": "src/best.cpp", "rank": 9, "score": 19.302629134860556 }, { "content": "#pragma once\n\n\n\n#include <map>\n\n\n\n#include \"graph.hpp\"\n\n#include \"bitset.hpp\"\n\n\n\n#define F first\n\n#define S second\n\n\n\nnamespace sms {\n\nusing std::max;\n\nusing std::vector;\n\n\n\ntemplate<size_t chunks>\n", "file_path": "src/chordalsolve.hpp", "rank": 10, "score": 17.761214227935334 }, { "content": "#pragma once\n\n\n\n#include <vector>\n\n#include <ostream>\n\n#include <set>\n\n#include <queue>\n\n\n\n#include \"utils.hpp\"\n\n#include \"staticset.hpp\"\n\n#include \"bitset.hpp\"\n\n\n\nnamespace sms {\n\nnamespace {\n\nusing std::vector;\n\nusing std::queue;\n\nusing std::min;\n\n\n\ntemplate<size_t chunks>\n", "file_path": "src/graph.hpp", "rank": 11, "score": 17.129936615672335 }, { "content": "#include \"graph.hpp\"\n\n\n\n#include <vector>\n\n#include <algorithm>\n\n#include <set>\n\n#include <cassert>\n\n#include <ostream>\n\n#include <iostream>\n\n#include <queue>\n\n\n\n#include \"utils.hpp\"\n\n#include \"bitset.hpp\"\n\n\n\nnamespace sms {\n\nSparseGraph::SparseGraph(int n)\n\n : n_(n), m_(0), adj_list_(n) {\n\n std::vector<int> identity(n);\n\n for (int i = 0; i < n; i++) {\n\n identity[i] = i;\n\n }\n", "file_path": "src/graph.cpp", "rank": 12, "score": 16.902616301912612 }, { "content": "#include \"preprocessor.hpp\"\n\n\n\n#include \"graph.hpp\"\n\n#include \"utils.hpp\"\n\n\n\n#define F first\n\n#define S second\n\n\n\nusing std::vector;\n\nusing std::queue;\n\nusing std::max;\n\nusing std::pair;\n\n\n\nnamespace sms {\n\nnamespace {\n\nint SubtreeSize(int v, const SparseGraph& graph, const vector<int>& parent) {\n\n int sz = 1;\n\n for (int nv : graph.Neighbors(v)) {\n\n if (parent[nv] == v) {\n\n sz += SubtreeSize(nv, graph, parent);\n", "file_path": "src/preprocessor.cpp", 
"rank": 13, "score": 16.33832451005415 }, { "content": "#pragma once\n\n\n\n#include <cstdint>\n\n#include <cstdlib>\n\n#include <vector>\n\n#include <cstring>\n\n#include <random>\n\n#include <limits>\n\n#include <cassert>\n\n\n\n#define BITS 64\n\n\n\nnamespace sms {\n\ntemplate <size_t chunks>\n", "file_path": "src/bitset.hpp", "rank": 14, "score": 15.813301958377831 }, { "content": "#pragma once\n\n\n\n#include <iostream>\n\n#include <vector>\n\n#include <algorithm>\n\n#include <cstdlib>\n\n#include <chrono>\n\n#include <queue>\n\n#include <random>\n\n\n\n#include \"bitset.hpp\"\n\n\n\nnamespace sms {\n\nnamespace utils {\n\ntemplate<typename T>\n\nvoid SortAndDedup(std::vector<T>& vec);\n\n\n\ntemplate<typename T>\n\nvoid InitZero(std::vector<T>& vec, size_t size);\n\n\n\ntemplate<typename T>\n\nstd::vector<T> PermInverse(const std::vector<T>& perm);\n\n\n\ntemplate<typename T>\n\nT GetRand(T a, T b, std::mt19937& gen);\n\n} // namespace utils\n\n\n", "file_path": "src/utils.hpp", "rank": 15, "score": 14.193625436179888 }, { "content": "#pragma once\n\n\n\n#include <vector>\n\n#include <random>\n\n\n\n#include \"graph.hpp\"\n\n\n\nnamespace sms {\n\nnamespace mcs {\n\ntemplate<size_t chunks>\n\nstd::vector<int> Mcs(const FGraph<chunks>& graph);\n\n\n\ntemplate<size_t chunks>\n\nvoid LbTriang(FGraph<chunks>& graph, std::mt19937& gen, int vari, int upd_cnt);\n\n\n\ntemplate<size_t chunks>\n\nint Treewidth(const FGraph<chunks>& graph);\n\n\n\ntemplate<size_t chunks>\n\nstd::vector<FBitset<chunks>> ChordalMinseps(const FGraph<chunks>& graph);\n", "file_path": "src/mcs.hpp", "rank": 16, "score": 14.029599441394978 }, { "content": "#include \"io.hpp\"\n\n\n\n#include <vector>\n\n#include <string>\n\n#include <istream>\n\n#include <sstream>\n\n#include <algorithm>\n\n#include <cassert>\n\n\n\n#include \"graph.hpp\"\n\n#include \"utils.hpp\"\n\n\n\nnamespace sms {\n\nstd::vector<std::string> GetTokens(std::string s) {\n\n std::stringstream ss;\n\n ss<<s;\n\n std::vector<std::string> tokens;\n\n while (ss>>s) {\n\n tokens.push_back(s);\n\n }\n", "file_path": "src/io.cpp", "rank": 18, "score": 12.158312993549366 }, { "content": "#pragma once\n\n\n\n#include \"graph.hpp\"\n\n\n\nnamespace sms {\n\nnamespace best {\n\nvoid InitBest(const SparseGraph& graph);\n\nint SetBest(const std::vector<int>& par, bool is_best);\n\nvoid PrintBest();\n\n} // namespace best\n\n} // namespace sms\n", "file_path": "src/best.hpp", "rank": 19, "score": 11.45250840417599 }, { "content": "#pragma once\n\n\n\n#include <vector>\n\n#include <string>\n\n#include <istream>\n\n#include <map>\n\n\n\n#include \"graph.hpp\"\n\n#include \"utils.hpp\"\n\n\n\nnamespace sms {\n", "file_path": "src/io.hpp", "rank": 20, "score": 10.428727823316311 }, { "content": "#pragma once\n\n\n\n#include <vector>\n\n\n\n#include \"utils.hpp\"\n\n\n\nnamespace sms {\n\ntemplate<typename T>\n", "file_path": "src/staticset.hpp", "rank": 21, "score": 10.362276670439858 }, { "content": "#pragma once\n\n\n\n#include <vector>\n\n\n\n#include \"graph.hpp\"\n\n\n\nnamespace sms {\n\nstd::vector<int> ColToPar(const SparseGraph& graph, const std::vector<int>& col);\n\n\n", "file_path": "src/preprocessor.hpp", "rank": 22, "score": 10.35365755260138 }, { "content": "#include \"utils.hpp\"\n\n\n\n#include <chrono>\n\n#include <cassert>\n\n\n\nnamespace sms {\n\nint Log::log_level_ = 10000;\n\nvoid Log::SetLogLevel(int lvl) {\n\n log_level_ = lvl;\n\n}\n\n\n\nTimer::Timer() {\n\n timing = false;\n\n elapsedTime = 
std::chrono::duration<double>(std::chrono::duration_values<double>::zero());\n\n}\n\n\n\nvoid Timer::start() {\n\n if (timing) return;\n\n timing = true;\n\n startTime = std::chrono::steady_clock::now();\n", "file_path": "src/utils.cpp", "rank": 23, "score": 9.457508084277858 }, { "content": "bool FGraph<chunks>::HasEdge(Edge e) const {\n\n return HasEdge(e.first, e.second);\n\n}\n\n\n\ntemplate <size_t chunks>\n\nvoid FGraph<chunks>::AddEdge(int v, int u) {\n\n if (HasEdge(v, u)) return;\n\n assert(v != u);\n\n m_++;\n\n adj_list_[v].push_back(u);\n\n adj_list_[u].push_back(v);\n\n adj_mat2_[v].SetTrue(u);\n\n adj_mat2_[u].SetTrue(v);\n\n}\n\n\n\ntemplate <size_t chunks>\n\nvoid FGraph<chunks>::AddEdge(Edge e) {\n\n AddEdge(e.first, e.second);\n\n}\n\n\n", "file_path": "src/graph.hpp", "rank": 24, "score": 8.769923963210896 }, { "content": " adj_mat2_[i].SetTrue(i);\n\n }\n\n for (auto edge : edges) {\n\n AddEdge(vertex_map_.Rank(edge.first), vertex_map_.Rank(edge.second));\n\n }\n\n}\n\n\n\ntemplate <size_t chunks>\n\nFGraph<chunks>::FGraph(const SparseGraph& graph) {\n\n n_ = graph.n();\n\n assert(BITS * chunks >= n_);\n\n m_ = 0;\n\n adj_list_.resize(n_);\n\n adj_mat2_.resize(n_);\n\n for (int i = 0; i < n_; i++) {\n\n adj_mat2_[i].SetTrue(i);\n\n }\n\n for (auto edge : graph.Edges()) {\n\n assert(0 <= edge.first && edge.first < edge.second && edge.second < n_);\n\n AddEdge(edge.first, edge.second);\n", "file_path": "src/graph.hpp", "rank": 25, "score": 8.561353830648605 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\ntemplate<size_t chunks>\n\nstd::vector<uint64_t> FGraph<chunks>::Labels(const FBitset<chunks>& vert) const {\n\n std::vector<uint64_t> vh(n_);\n\n std::vector<FBitset<chunks>> reach(n_);\n\n int tn = vert.Popcount();\n\n for (int v : vert) {\n\n reach[v].Clear();\n\n reach[v].SetTrue(v);\n\n vh[v] = 1;\n\n }\n\n for (int r = 0; r < tn; r++) {\n\n std::vector<uint64_t> vh_new(n_);\n\n bool fo = false;\n\n for (int v : vert) {\n\n if (reach[v] != vert) {\n", "file_path": "src/graph.hpp", "rank": 26, "score": 8.419534234565972 }, { "content": "template<typename T>\n\nStaticSet<T>::StaticSet() : StaticSet(std::vector<T>()) { }\n\n\n\ntemplate<typename T>\n\nStaticSet<T>::StaticSet(const std::vector<std::pair<T, T> >& values) {\n\n Init(values);\n\n}\n\n\n\ntemplate<typename T>\n\nvoid StaticSet<T>::Init(const std::vector<T>& values) {\n\n values_ = values;\n\n utils::SortAndDedup(values_);\n\n}\n\n\n\ntemplate<typename T>\n\nvoid StaticSet<T>::Init(const std::vector<std::pair<T, T> >& values) {\n\n values_.clear();\n\n for (const std::pair<T, T>& value : values) {\n\n values_.push_back(value.first);\n\n values_.push_back(value.second);\n", "file_path": "src/staticset.hpp", "rank": 27, "score": 8.388803544887942 }, { "content": " std::vector<Edge> Edges() const;\n\n StaticSet<int> VertexMap() const;\n\n std::vector<Edge> FillEdges(FBitset<chunks> bs) const;\n\n int FillSize(FBitset<chunks> bs) const;\n\n int Degree(int v) const;\n\n FBitset<chunks> Neighbors(const FBitset<chunks>& vs) const;\n\n uint64_t Hash2(const FBitset<chunks>& vert) const;\n\n std::vector<uint64_t> Labels(const FBitset<chunks>& vert) const;\n\n std::vector<uint64_t> RefinedLabels(const FBitset<chunks>& vert) const;\n\n std::vector<FBitset<chunks>> BitComps(FBitset<chunks> vis) const;\n\n std::vector<FBitset<chunks>> FullComponentsWithSep(const FBitset<chunks>& minsep) const;\n\n void ShuffleAdjList(std::mt19937& gen);\n\n\n\n int MaxCompSize(const FBitset<chunks>& minsep, const FBitset<chunks>& vert) 
const;\n\n\n\n bool IsStar(const FBitset<chunks>& vs) const;\n\n std::vector<FBitset<chunks>> StarMinsep(int sz) const;\n\n\n\n void Print(std::ostream& out) const;\n\n\n", "file_path": "src/graph.hpp", "rank": 28, "score": 8.150682129499685 }, { "content": " }\n\n }\n\n }\n\n return p.Value();\n\n}\n\n\n\ntemplate <size_t chunks>\n\nstd::vector<Edge> FGraph<chunks>::Edges() const {\n\n std::vector<Edge> ret;\n\n for (int i = 0; i < n_; i++) {\n\n for (int a : adj_list_[i]) {\n\n if (a > i) ret.push_back({i, a});\n\n }\n\n }\n\n return ret;\n\n}\n\n\n\ntemplate <size_t chunks>\n\nStaticSet<int> FGraph<chunks>::VertexMap() const {\n\n return vertex_map_;\n", "file_path": "src/graph.hpp", "rank": 29, "score": 8.129331729461896 }, { "content": "template <size_t chunks>\n\nconst std::vector<int>& FGraph<chunks>::Neighbors(int v) const {\n\n return adj_list_[v];\n\n}\n\n\n\ntemplate <size_t chunks>\n\nstd::vector<FBitset<chunks>> FGraph<chunks>::CompNeighsBit(const FBitset<chunks>& block) const {\n\n FBitset<chunks> vis = ~block;\n\n std::vector<FBitset<chunks>> ret;\n\n FBitset<chunks> ne;\n\n FBitset<chunks> sep;\n\n for (int i=0;i<n_;i++){\n\n if (vis.Get(i)) {\n\n ne = adj_mat2_[i];\n\n Dfs2Bit(vis, ne);\n\n sep.SetAnd(block, ne);\n\n if (sep.Popcount() > 0) {\n\n ret.push_back(sep);\n\n }\n\n }\n", "file_path": "src/graph.hpp", "rank": 30, "score": 8.12600846554292 }, { "content": " std::vector<FBitset<chunks>> adj_mat2_;\n\n private:\n\n int n_, m_;\n\n StaticSet<int> vertex_map_;\n\n std::vector<std::vector<int> > adj_list_;\n\n};\n\n\n\n\n\ntemplate<size_t chunks>\n\nSparseGraph::SparseGraph(const FGraph<chunks>& graph) {\n\n n_ = graph.n();\n\n m_ = graph.m();\n\n adj_list_.resize(n_);\n\n for (int i = 0; i < n_; i++) {\n\n adj_list_[i] = graph.Neighbors(i);\n\n }\n\n vertex_map_ = graph.VertexMap();\n\n}\n\n\n\n\n", "file_path": "src/graph.hpp", "rank": 31, "score": 7.918275936924308 }, { "content": " assert(space.Get(x) && x != paths[i].back() && graph.HasEdge(x, paths[i].back()));\n\n paths[i].push_back(x);\n\n space.SetFalse(x);\n\n } else {\n\n if (cantake) {\n\n cantake--;\n\n d += len;\n\n }\n\n std::swap(paths[i], paths.back());\n\n paths.pop_back();\n\n i--;\n\n }\n\n }\n\n }\n\n if (a_size + d > szthr) {\n\n return;\n\n }\n\n }\n\n int x = inter.First();\n\n F.SetTrue(x);\n", "file_path": "src/graph.hpp", "rank": 32, "score": 7.860393985855663 }, { "content": "}\n\n\n\ntemplate <size_t chunks>\n\nstd::vector<FBitset<chunks>> FGraph<chunks>::SmallMinsepsHeuristic(int sz) const {\n\n assert(IsConnectedOrIsolated());\n\n std::vector<FBitset<chunks>> minseps;\n\n FBitsetSet<chunks> ff(n_, 2);\n\n for (int i = 0; i < n_; i++) {\n\n if (Neighbors(i).empty()) continue;\n\n for (const FBitset<chunks>& nbs : CompNeighsBit(adj_mat2_[i])) {\n\n if (nbs.Popcount() <= sz && ff.Insert(nbs)) {\n\n minseps.push_back(nbs);\n\n }\n\n }\n\n }\n\n FBitset<chunks> vis;\n\n FBitset<chunks> sep;\n\n FBitset<chunks> ne;\n\n FBitset<chunks> mask;\n\n for (int i=0;i<n_;i++){\n", "file_path": "src/graph.hpp", "rank": 33, "score": 7.740332935955108 }, { "content": " }\n\n void SetFalse(size_t i) {\n\n data_[i/BITS] &= (~((uint64_t)1 << (uint64_t)(i%BITS)));\n\n }\n\n void SetTrue(const std::vector<size_t>& v) {\n\n for (size_t x : v) {\n\n SetTrue(x);\n\n }\n\n }\n\n void SetTrue(const std::vector<int>& v) {\n\n for (int x : v) {\n\n SetTrue(x);\n\n }\n\n }\n\n void SetFalse(const std::vector<int>& v) {\n\n for (int x : v) {\n\n SetFalse(x);\n\n }\n\n }\n\n void FillTrue() {\n", "file_path": 
"src/bitset.hpp", "rank": 34, "score": 7.593556716167076 }, { "content": " return false;\n\n } else {\n\n fb = true;\n\n }\n\n }\n\n }\n\n return true;\n\n}\n\n\n\ntemplate<size_t chunks>\n\nstd::vector<FBitset<chunks>> FGraph<chunks>::StarMinsep(int sz) const {\n\n assert(IsConnectedOrIsolated());\n\n FBitset<chunks> mask;\n\n for (int i=0;i<n_;i++){\n\n if (!Neighbors(i).empty()) mask.SetTrue(i);\n\n }\n\n for (int i = 0; i < n_; i++) {\n\n if (Degree(i) == 0) continue;\n\n FBitset<chunks> rch = mask;\n\n for (int v : adj_mat2_[i]) {\n", "file_path": "src/graph.hpp", "rank": 35, "score": 7.548919213675186 }, { "content": "template <size_t chunks>\n\nFGraph<chunks>::FGraph(int n) : n_(n), m_(0), adj_list_(n) {\n\n assert(BITS * chunks >= n_);\n\n adj_mat2_.resize(n_);\n\n std::vector<int> identity(n);\n\n for (int i = 0; i < n; i++) {\n\n identity[i] = i;\n\n adj_mat2_[i].SetTrue(i);\n\n }\n\n vertex_map_.Init(identity);\n\n}\n\n\n\ntemplate <size_t chunks>\n\nFGraph<chunks>::FGraph(std::vector<Edge> edges) : vertex_map_(edges) {\n\n n_ = vertex_map_.Size();\n\n assert(BITS * chunks >= n_);\n\n m_ = 0;\n\n adj_list_.resize(n_);\n\n adj_mat2_.resize(n_);\n\n for (int i = 0; i < n_; i++) {\n", "file_path": "src/graph.hpp", "rank": 36, "score": 7.517376662772332 }, { "content": " bool Reco(FBitset<chunks> vert, int k, const std::vector<Edge>& parent_edges);\n\n bool Isom(const FBitset<chunks>& v1, const FBitset<chunks>& v2) const;\n\n};\n\n\n\n\n\nnamespace {\n\nint PathLb(int length) {\n\n if (length <= 0) return 0;\n\n uint32_t x = length;\n\n return 32 - __builtin_clz(x);\n\n}\n\nint CycleLb(int length) {\n\n if (length <= 0) return PathLb(length);\n\n uint32_t x = length;\n\n return 33 - __builtin_clz(x-1);\n\n}\n\nstd::vector<int> path_cycle_found;\n\nvoid pclbdfs(const SparseGraph& graph, int x, std::vector<int>& d, int& lb, std::vector<int>& pt) {\n\n assert(d[x] > 0);\n\n if (PathLb(d[x]) > lb) {\n", "file_path": "src/ms_solve.hpp", "rank": 37, "score": 7.507258722115957 }, { "content": " int cnt = 0;\n\n for (int nx : graph.Neighbors(order[i])) {\n\n if (inv_order[nx] > i) {\n\n cnt++;\n\n }\n\n }\n\n if (cnt <= prev && cnt > 0) {\n\n FBitset<chunks> ms;\n\n for (int nx : graph.Neighbors(order[i])) {\n\n if (inv_order[nx] > i) {\n\n ms.SetTrue(nx);\n\n }\n\n }\n\n ret.push_back(ms);\n\n }\n\n prev = cnt;\n\n }\n\n return ret;\n\n}\n\n} // namespace mcs\n\n} // namespace sms\n", "file_path": "src/mcs.hpp", "rank": 38, "score": 7.285298837408815 }, { "content": " }\n\n utils::SortAndDedup(values_);\n\n}\n\n\n\ntemplate<typename T>\n\nint StaticSet<T>::Rank(T value) const {\n\n return std::lower_bound(values_.begin(), values_.end(), value) - values_.begin();\n\n}\n\n\n\ntemplate<typename T>\n\nT StaticSet<T>::Kth(int k) const {\n\n return values_[k];\n\n}\n\n\n\ntemplate<typename T>\n\nint StaticSet<T>::Size() const {\n\n return values_.size();\n\n}\n\n\n\ntemplate<typename T>\n\nstd::vector<T> StaticSet<T>::Values() const {\n\n return values_;\n\n}\n\n} // namespace sms\n", "file_path": "src/staticset.hpp", "rank": 39, "score": 7.235102495890212 }, { "content": " }\n\n }\n\n\n\n // Pruning\n\n if (a_size + 3*(inter.Popcount() - (sz - F.Popcount())) > szthr) {\n\n std::vector<std::vector<int>> paths;\n\n for (int v : inter) {\n\n paths.push_back({v});\n\n }\n\n FBitset<chunks> space = neB;\n\n space.TurnOff(F);\n\n space.TurnOff(inter);\n\n int d = 0;\n\n int cantake = (int)paths.size() - (sz - F.Popcount());\n\n for (int len = 1; !paths.empty() && cantake > 0; len++) {\n\n for (int i 
= 0; i < (int)paths.size(); i++) {\n\n assert((int)paths[i].size() == len);\n\n if (graph.adj_mat2_[paths[i].back()].Intersects(space)) {\n\n FBitset<chunks> lol = graph.adj_mat2_[paths[i].back()] & space;\n\n int x = lol.First();\n", "file_path": "src/graph.hpp", "rank": 40, "score": 7.152670848325205 }, { "content": " ne |= adj_mat2_[x];\n\n ret.back().SetTrue(x);\n\n gv &= ~-gv;\n\n }\n\n }\n\n }\n\n}\n\n\n\ntemplate<size_t chunks>\n\nstd::vector<FBitset<chunks>> FGraph<chunks>::FullComponentsWithSep(const FBitset<chunks>& minsep) const {\n\n FBitset<chunks> vis;\n\n vis.FillUpTo(n_);\n\n vis.TurnOff(minsep);\n\n auto comps = BitComps(vis);\n\n for (int i = 0; i < (int)comps.size(); i++) {\n\n if (Neighbors(comps[i]) != minsep) {\n\n std::swap(comps[i], comps.back());\n\n comps.pop_back();\n\n }\n\n }\n", "file_path": "src/graph.hpp", "rank": 41, "score": 6.86101468950423 }, { "content": " q.push(source);\n\n mf.u.SetFalse(source);\n\n while (!q.empty()) {\n\n int x = q.front();\n\n q.pop();\n\n assert(x != sink);\n\n for (size_t ch = 0; ch < chunks; ch++) {\n\n while (mf.u.data_[ch] & mf.g[x].data_[ch]) {\n\n int y = __builtin_ctzll(mf.u.data_[ch] & mf.g[x].data_[ch]) + ch*BITS;\n\n q.push(y);\n\n mf.u.SetFalse(y);\n\n }\n\n }\n\n }\n\n FBitset<chunks2> ret;\n\n for (int i = 0; i < n; i ++) {\n\n if (!mf.u.Get(2+i*2) && mf.u.Get(3+i*2)) {\n\n ret.SetTrue(i);\n\n }\n\n }\n\n return ret;\n\n}\n\n} // namespace\n\n\n\ntypedef std::pair<int, int> Edge;\n\n\n", "file_path": "src/graph.hpp", "rank": 42, "score": 6.775232209030603 }, { "content": " }\n\n void InvertAnd(const FBitset<chunks>& rhs) {\n\n for (size_t i=0;i<chunks;i++){\n\n data_[i] = (~data_[i]) & rhs.data_[i];\n\n }\n\n }\n\n void SetNeg(const FBitset<chunks>& rhs) {\n\n for (size_t i=0;i<chunks;i++){\n\n data_[i] = ~rhs.data_[i];\n\n }\n\n }\n\n void SetNegAnd(const FBitset<chunks>& rhs1, const FBitset<chunks>& rhs2) {\n\n for (size_t i=0;i<chunks;i++){\n\n data_[i] = (~rhs1.data_[i]) & rhs2.data_[i];\n\n }\n\n }\n\n void SetAnd(const FBitset<chunks>& rhs1, const FBitset<chunks>& rhs2) {\n\n for (size_t i=0;i<chunks;i++){\n\n data_[i] = rhs1.data_[i] & rhs2.data_[i];\n\n }\n", "file_path": "src/bitset.hpp", "rank": 43, "score": 6.740640068768947 }, { "content": " }\n\n return d;\n\n}\n\n} // namespace\n\nint best_comp[10101010];\n\nint v_map[10101010];\n\nint best_td;\n\nint n;\n\n\n\nvoid InitBest(const SparseGraph& graph) {\n\n best_td = graph.n();\n\n n = graph.n();\n\n for (int i=0;i<n;i++){\n\n best_comp[i] = i;\n\n v_map[i] = graph.MapBack(i);\n\n assert(v_map[i] == i+1);\n\n }\n\n}\n\n\n\nint SetBest(const vector<int>& par, bool is_best) {\n", "file_path": "src/best.cpp", "rank": 44, "score": 6.735212653320803 }, { "content": " rch.TurnOff(adj_mat2_[v]);\n\n }\n\n bool ok1 = true;\n\n for (const auto& comp : BitComps(rch)) {\n\n if (!IsStar(comp)) {\n\n ok1 = false;\n\n break;\n\n }\n\n }\n\n if (!ok1) continue;\n\n std::vector<FBitset<chunks>> minseps;\n\n FBitsetSet<chunks> ff(2, 2);\n\n for (const FBitset<chunks>& nbs : CompNeighsBit(adj_mat2_[i])) {\n\n if (ff.Insert(nbs)) {\n\n minseps.push_back(nbs);\n\n }\n\n }\n\n Timer tt;\n\n tt.start();\n\n for (int it = 0; it < (int)minseps.size(); it++) {\n", "file_path": "src/graph.hpp", "rank": 45, "score": 6.67049572616297 }, { "content": "}\n\n\n\ntemplate<size_t chunks>\n\nFBitset<chunks> FGraph<chunks>::Neighbors(const FBitset<chunks>& vs) const {\n\n FBitset<chunks> nbs;\n\n for (int v : vs) {\n\n nbs |= adj_mat2_[v];\n\n }\n\n nbs.TurnOff(vs);\n\n return 
nbs;\n\n}\n\n\n\ntemplate<size_t chunks>\n\nstd::vector<uint64_t> FGraph<chunks>::RefinedLabels(const FBitset<chunks>& vert) const {\n\n std::vector<uint64_t> vh(n_);\n\n std::vector<FBitset<chunks>> reach(n_);\n\n int tn = vert.Popcount();\n\n for (int v : vert) {\n\n vh[v] = 1;\n\n }\n", "file_path": "src/graph.hpp", "rank": 46, "score": 6.621321512719705 }, { "content": " dg[x] = -1;\n\n vert.SetFalse(x);\n\n t--;\n\n }\n\n return mt;\n\n}\n\n} // namespace\n\n\n\n\n\ntemplate<size_t chunks>\n\nMSSolve<chunks>::MSSolve(const FGraph<chunks>& graph) : graph_(graph) {\n\n assert(!bs_cac_.Inited());\n\n bs_cac_ = FBitsetMap<chunks>(2, 1.5);\n\n ms_bs_cac_ = FBitsetMap<chunks>(2, 1.5);\n\n assert(bs_cac_.Inited() && ms_bs_cac_.Inited());\n\n pcs_.push_back({});\n\n ms_pcs_.push_back({});\n\n lb_sieve_ = FLBSieve<chunks>(graph.n(), graph.n()+1);\n\n}\n\n\n", "file_path": "src/ms_solve.hpp", "rank": 47, "score": 6.596931808297469 }, { "content": "template <size_t chunks>\n\nstd::vector<int> FGraph<chunks>::FindComponentAndMark(int v, std::vector<char>& block) const {\n\n std::vector<int> component;\n\n Dfs(v, block, component);\n\n return component;\n\n}\n\n\n\ntemplate <size_t chunks>\n\nbool FGraph<chunks>::IsConnectedOrIsolated() const {\n\n auto cs = Components({});\n\n int f = 0;\n\n for (const auto& c : cs) {\n\n if ((int)c.size() > 1) f++;\n\n }\n\n return f <= 1;\n\n}\n\n\n\ntemplate <size_t chunks>\n\nstd::vector<std::vector<int> > FGraph<chunks>::Components(const std::vector<int>& separator) const {\n\n std::vector<char> blocked(n_);\n", "file_path": "src/graph.hpp", "rank": 48, "score": 6.563418438506021 }, { "content": " }\n\n }\n\n }\n\n }\n\n bool Inited() const {\n\n return capacity_ > 0;\n\n }\n\n size_t ContainerSize() const {\n\n return container_.size();\n\n }\n\n std::vector<FBitset<chunks>> Vector() const {\n\n std::vector<FBitset<chunks>> ret;\n\n ret.reserve(elements_);\n\n for (size_t i = 0; i < capacity_; i++) {\n\n if (Zero(IndToPtr(i, container_))) continue;\n\n ret.push_back(FBitset<chunks>());\n\n for (size_t j = 0; j < chunks; j++) {\n\n ret.back().data_[j] = container_[i*chunks + j];\n\n }\n\n }\n", "file_path": "src/bitset.hpp", "rank": 49, "score": 6.377654850994389 }, { "content": " size_t TotElements() const {\n\n size_t ret = 0;\n\n for (size_t e : elements_) {\n\n ret += e;\n\n }\n\n return ret;\n\n }\n\n private:\n\n int maxlb_ = 0;\n\n size_t len_ = 0;\n\n std::vector<size_t> elements_;\n\n std::vector<std::vector<FBitset<chunks>>> masks_;\n\n std::vector<std::vector<std::vector<uint64_t>>> containers_;\n\n int GetMask(int lb, const FBitset<chunks>& bs) {\n\n int mask = 0;\n\n for (int i=0;i<(int)masks_[lb].size();i++) {\n\n if (bs.Intersects(masks_[lb][i])) {\n\n mask |= (1<<i);\n\n }\n\n }\n", "file_path": "src/bitset.hpp", "rank": 50, "score": 6.208724878371099 }, { "content": " SepRec(graph, a, b, neA, neB, F, minseps, sz, n);\n\n F.SetFalse(x);\n\n\n\n FBitset<chunks> vis = neB;\n\n vis.TurnOff(neA);\n\n vis.TurnOff(graph.adj_mat2_[x]);\n\n neB = graph.adj_mat2_[b];\n\n graph.Dfs2Bit(vis, neB);\n\n if (!neB.Subsumes(F)) return;\n\n\n\n vis.FillTrue();\n\n vis.TurnOff(neB);\n\n graph.Dfs2Bit(vis, neA);\n\n SepRec(graph, a, b, neA, neB, F, minseps, sz, n);\n\n}\n\n\n\ntemplate<size_t chunks>\n\ninline std::vector<FBitset<chunks>> NibbleSmallMinseps(FGraph<chunks> graph, int sz) {\n\n assert(graph.IsConnectedOrIsolated());\n\n int mfi = graph.n() * graph.n();\n", "file_path": "src/graph.hpp", "rank": 51, "score": 6.1942534597216525 }, { "content": " 
}\n\n return vh;\n\n}\n\n\n\ntemplate<size_t chunks>\n\nuint64_t FGraph<chunks>::Hash2(const FBitset<chunks>& vert) const {\n\n auto vh = Labels(vert);\n\n std::sort(vh.begin(), vh.end());\n\n PolyHash p;\n\n for (uint64_t v : vh) {\n\n if (v > 0) p.Add(v);\n\n }\n\n return p.Value();\n\n}\n\n\n\ntemplate<size_t chunks>\n\nstd::vector<FBitset<chunks>> FGraph<chunks>::BitComps(FBitset<chunks> vis) const {\n\n FBitset<chunks> ne;\n\n std::vector<FBitset<chunks>> ret;\n\n bool fo = false;\n", "file_path": "src/graph.hpp", "rank": 52, "score": 6.18691909195694 }, { "content": " int x = order[i];\n\n int nb = 0;\n\n for (int nx : graph.Neighbors(x)) {\n\n if (inv_order[nx] > i) {\n\n nb++;\n\n }\n\n }\n\n treewidth = std::max(treewidth, nb);\n\n }\n\n return treewidth;\n\n}\n\n\n\ntemplate<size_t chunks>\n\nstd::vector<FBitset<chunks>> ChordalMinseps(const FGraph<chunks>& graph) {\n\n if (graph.m() == 0) return {};\n\n std::vector<int> order = Mcs(graph);\n\n std::vector<int> inv_order = utils::PermInverse(order);\n\n std::vector<FBitset<chunks>> ret;\n\n int prev = 0;\n\n for (int i=graph.n()-2;i>=0;i--){\n", "file_path": "src/mcs.hpp", "rank": 53, "score": 6.17761042593448 }, { "content": " int Degree(int v) const;\n\n std::vector<int> Distances(const std::vector<int>& start) const;\n\n\n\n void Print(std::ostream& out) const;\n\n\n\n std::vector<Edge> Edges() const;\n\n\n\n int MapBack(int v) const;\n\n\n\n bool IsConnected() const;\n\n std::vector<std::vector<int> > Components(const std::vector<int>& separator) const;\n\n std::vector<int> FindComponentAndMark(int v, std::vector<char>& block) const;\n\n\n\n StaticSet<int> VertexMap() const;\n\n\n\n bool IsClique(const std::vector<int>& vs) const;\n\n void ShuffleAdjList(std::mt19937& gen);\n\n\n\n int Mincut(int a, int b) const;\n\n\n\n private:\n\n int n_,m_;\n\n StaticSet<int> vertex_map_;\n\n std::vector<std::vector<int> > adj_list_;\n\n void Dfs(int v, std::vector<char>& blocked, std::vector<int>& component) const;\n\n};\n\n\n\ntemplate <size_t chunks>\n", "file_path": "src/graph.hpp", "rank": 54, "score": 6.077416342484204 }, { "content": "template<size_t chunks>\n\nvector<int> ChordalSolve<chunks>::Get(int goal) {\n\n resu_.resize(graph_.n());\n\n FBitset<chunks> vert;\n\n vert.FillUpTo(graph_.n());\n\n Timer recot;\n\n recot.start();\n\n assert(Reco(vert, goal, {}));\n\n Log::Write(5, \"recot \", recot.get());\n\n for (int i=0;i<graph_.n();i++) {\n\n assert(resu_[i] >= 1 && resu_[i] <= goal);\n\n resu_[i]--;\n\n }\n\n return resu_;\n\n}\n\n} // namespace sms", "file_path": "src/chordalsolve.hpp", "rank": 55, "score": 6.044314572635947 }, { "content": "\n\n // Minsep enum starts\n\n recs2++;\n\n std::vector<FBitset<chunks>> t_minseps;\n\n {\n\n std::vector<std::tuple<int, int, FBitset<chunks>>> tms_sort;\n\n int enum_sz = k-3;\n\n if (incorrect_msenum_) {\n\n if (k >= 17) {\n\n enum_sz = k-7;\n\n } else if (k >= 14) {\n\n enum_sz = k-6;\n\n } else if (k >= 11) {\n\n enum_sz = k-5;\n\n } else if (k >= 8) {\n\n enum_sz = k-4;\n\n }\n\n }\n\n bool do1 = false;\n\n if (n < graph_.n() && can_induce_seps) {\n", "file_path": "src/ms_solve.hpp", "rank": 56, "score": 5.906757837958006 }, { "content": " graph.ShuffleAdjList(gener);\n\n for (int i = 0; i < graph.n(); i++) {\n\n if (graph.Degree(i) > 0) {\n\n std::fill(d.begin(), d.end(), 0);\n\n d[i] = 1;\n\n pclbdfs(graph, i, d, lb, pt);\n\n if (lb >= goal) return lb;\n\n }\n\n }\n\n }\n\n return 0;\n\n}\n\n\n\ntemplate<size_t chunks>\n\nint MMDP(FGraph<chunks> graph) {\n\n 
std::vector<std::vector<int>> q(graph.n());\n\n std::vector<int> dg(graph.n());\n\n FBitset<chunks> vert;\n\n int vs = 0;\n\n for (int i=0;i<graph.n();i++) {\n", "file_path": "src/ms_solve.hpp", "rank": 57, "score": 5.80228232868651 }, { "content": " ret.data_[i] = data_[i] & other.data_[i];\n\n }\n\n return ret;\n\n }\n\n FBitset<chunks> operator~() const {\n\n FBitset<chunks> ret;\n\n for (size_t i=0;i<chunks;i++){\n\n ret.data_[i] = (~data_[i]);\n\n }\n\n return ret;\n\n }\n\n void Set(size_t i, bool v) {\n\n if (v) {\n\n data_[i/BITS] |= ((uint64_t)1 << (uint64_t)(i%BITS));\n\n } else {\n\n data_[i/BITS] &= (~((uint64_t)1 << (uint64_t)(i%BITS)));\n\n }\n\n }\n\n void SetTrue(size_t i) {\n\n data_[i/BITS] |= ((uint64_t)1 << (uint64_t)(i%BITS));\n", "file_path": "src/bitset.hpp", "rank": 58, "score": 5.782874197466107 }, { "content": " to_upd.push_back({score[i], i});\n\n }\n\n }\n\n std::sort(to_upd.rbegin(), to_upd.rend());\n\n for (int i=0;i<std::min((int)to_upd.size(), upd_cnt);i++) {\n\n int u = to_upd[i].second;\n\n assert(hs[u] >= 0 && score[u] > 0);\n\n hs[u] = HeurLbt(graph, u)*3 + utils::GetRand(0, vari, gen)*3 + utils::GetRand(0, 2, gen);\n\n score[u] = 0;\n\n }\n\n }\n\n}\n\n\n\ntemplate<size_t chunks>\n\nint Treewidth(const FGraph<chunks>& graph) {\n\n if (graph.m() == 0) return 0;\n\n std::vector<int> order = Mcs(graph);\n\n std::vector<int> inv_order = utils::PermInverse(order);\n\n int treewidth = 0;\n\n for (int i = 0; i < graph.n(); i++) {\n", "file_path": "src/mcs.hpp", "rank": 59, "score": 5.749534640576492 }, { "content": " if (!Neighbors(i).empty()) mask.SetTrue(i);\n\n }\n\n for (int i = 0;i<(int)minseps.size(); i++) {\n\n if (i > 0 && i%1000000 == 0) Log::Write(5, \"F enum minseps \", i, \" \", minseps.size());\n\n const FBitset<chunks> tsep = minseps[i];\n\n for (int j : tsep) {\n\n FBitset<chunks> block = minseps[i];\n\n block |= adj_mat2_[j];\n\n vis.SetNegAnd(block, mask);\n\n for (int ch = 0; ch < chunks; ch++) {\n\n while (vis.data_[ch] > 0) {\n\n int k = __builtin_ctzll(vis.data_[ch]) + ch*BITS;\n\n sep = block;\n\n ne = adj_mat2_[k];\n\n Dfs2Bit(vis, ne);\n\n sep.SetAnd(ne, block);\n\n if (sep.Popcount() <= sz && ff.Insert(sep)) {\n\n minseps.push_back(sep);\n\n }\n\n }\n", "file_path": "src/graph.hpp", "rank": 60, "score": 5.645275647612904 }, { "content": " if (ok) {\n\n return {tsep};\n\n }\n\n }\n\n for (int j : tsep) {\n\n if (!adj_mat2_[i].Get(j)) continue;\n\n FBitset<chunks> block = tsep;\n\n block |= adj_mat2_[j];\n\n FBitset<chunks> vis;\n\n vis.SetNegAnd(block, mask);\n\n for (size_t ch = 0; ch < chunks; ch++) {\n\n while (vis.data_[ch] > 0) {\n\n int k = __builtin_ctzll(vis.data_[ch]) + ch*BITS;\n\n FBitset<chunks> sep = block;\n\n FBitset<chunks> ne = adj_mat2_[k];\n\n Dfs2Bit(vis, ne);\n\n sep.SetAnd(ne, block);\n\n if (ff.Insert(sep)) {\n\n minseps.push_back(sep);\n\n }\n", "file_path": "src/graph.hpp", "rank": 61, "score": 5.640577907320587 }, { "content": " }\n\n }\n\n }\n\n }\n\n }\n\n return {};\n\n}\n\n\n\ntemplate<size_t chunks>\n\nvoid FGraph<chunks>::Print(std::ostream& out) const {\n\n out<<\"v e: \"<<n_<<\" \"<<m_<<std::endl;\n\n for (int i = 0; i < n_; i++) {\n\n for (int a : adj_list_[i]) {\n\n out<<i<<\" \"<<a<<std::endl;\n\n }\n\n }\n\n}\n\n\n\ntemplate<size_t chunks>\n\nvoid SepRec(const FGraph<chunks>& graph, int a, int b, FBitset<chunks> neA, FBitset<chunks> neB, FBitset<chunks> F, std::vector<FBitset<chunks>>& minseps, int sz, int n) {\n", "file_path": "src/graph.hpp", "rank": 62, "score": 5.609083870620562 }, { "content": 
" assert(F.Popcount() <= sz);\n\n FBitset<chunks> inter = neA;\n\n inter &= neB;\n\n assert(inter.Subsumes(F));\n\n if (inter == F) {\n\n minseps.push_back(F);\n\n return;\n\n }\n\n if (F.Popcount() == sz) return;\n\n\n\n int szthr = std::min(neB.Popcount() - inter.Popcount(), (n-F.Popcount())/2);\n\n int a_size = neA.Popcount() - inter.Popcount();\n\n if (a_size > szthr) return;\n\n if (neA.Popcount() - sz > szthr) return;\n\n inter.TurnOff(F);\n\n for (int v : inter) {\n\n if (graph.HasEdge(b, v)) {\n\n F.SetTrue(v);\n\n SepRec(graph, a, b, neA, neB, F, minseps, sz, n);\n\n return;\n", "file_path": "src/graph.hpp", "rank": 63, "score": 5.548666426357695 }, { "content": " WriteImpl(message.first);\n\n std::cerr<<\", \";\n\n WriteImpl(message.second);\n\n std::cerr<<\"}\";\n\n}\n\ntemplate<typename T>\n\ninline void Log::WriteImpl(std::vector<T> message) {\n\n std::cerr<<\"{\";\n\n for (int i=0;i<(int)message.size();i++){\n\n WriteImpl(message[i]);\n\n if (i+1<(int)message.size()) {\n\n std::cerr<<\", \";\n\n }\n\n }\n\n std::cerr<<\"}\";\n\n}\n\ntemplate<size_t chunks>\n\ninline void Log::WriteImpl(FBitset<chunks> b) {\n\n std::cerr<<\"{\";\n\n for (size_t i=0;i<chunks*BITS;i++){\n", "file_path": "src/utils.hpp", "rank": 64, "score": 5.420564381748697 }, { "content": " }\n\n assert(0);\n\n}\n\n\n\ntemplate<size_t chunks>\n\nvector<int> MSSolve<chunks>::Get(int goal) {\n\n for (int i=0;i<graph_.n();i++) {\n\n assert(resu_[i] >= 0 && resu_[i] < goal);\n\n }\n\n return resu_;\n\n}\n\n\n\n} // namespace sms", "file_path": "src/ms_solve.hpp", "rank": 65, "score": 5.414137764038935 }, { "content": " }\n\n }\n\n return sz;\n\n}\n\n\n\nvoid DelTree(int v, SparseGraph& graph, const vector<int>& parent, vector<int>& isols) {\n\n auto nbs = graph.Neighbors(v);\n\n for (int nv : nbs) {\n\n if (parent[nv] == v) {\n\n DelTree(nv, graph, parent, isols);\n\n graph.RemoveEdge(v, nv);\n\n assert(graph.Degree(nv) == 0);\n\n isols.push_back(nv);\n\n }\n\n }\n\n}\n\n} // namespace\n\n\n\nvector<int> Preprocessor::SolveTree(int v, const SparseGraph& graph, const vector<int>& parent) {\n\n vector<int> rank = {1};\n", "file_path": "src/preprocessor.cpp", "rank": 66, "score": 5.399778650899754 }, { "content": "}\n\n\n\ntemplate <size_t chunks>\n\nstd::vector<Edge> FGraph<chunks>::FillEdges(FBitset<chunks> bs) const {\n\n std::vector<Edge> ret;\n\n for (int i=0;i<chunks;i++){\n\n while (bs.data_[i]) {\n\n int v = i*BITS + __builtin_ctzll(bs.data_[i]);\n\n bs.data_[i] &= ~-bs.data_[i];\n\n for (int j=i;j<chunks;j++){\n\n uint64_t td = bs.data_[j] & (~adj_mat2_[v].data_[j]);\n\n while (td) {\n\n int u = j*BITS + __builtin_ctzll(td);\n\n td &= ~-td;\n\n ret.push_back({v, u});\n\n }\n\n }\n\n }\n\n }\n\n return ret;\n", "file_path": "src/graph.hpp", "rank": 68, "score": 5.1030649971434325 }, { "content": " for (int v : separator) {\n\n blocked[v] = true;\n\n }\n\n std::vector<std::vector<int> > components;\n\n for (int i = 0; i < n_; i++) {\n\n if (!blocked[i]) {\n\n components.push_back(FindComponentAndMark(i, blocked));\n\n }\n\n }\n\n return components;\n\n}\n\n\n\ntemplate <size_t chunks>\n\nuint64_t FGraph<chunks>::Hash() const {\n\n PolyHash p;\n\n for (int i = 0; i < n_; i++) {\n\n for (int a : adj_mat2_[i]) {\n\n if (a > i) {\n\n p.Add(i);\n\n p.Add(a);\n", "file_path": "src/graph.hpp", "rank": 69, "score": 5.0341476791588144 }, { "content": " }\n\n return true;\n\n }\n\n FGraph<chunks> t_graph(graph_.n());\n\n std::vector<Edge> tg_edges;\n\n if (parent_edges.empty()) {\n\n for (auto e : graph_.Edges()) {\n\n 
if (vert.Get(e.F) && vert.Get(e.S)) {\n\n t_graph.AddEdge(e);\n\n tg_edges.push_back(e);\n\n }\n\n }\n\n } else {\n\n for (auto e : parent_edges) {\n\n if (vert.Get(e.F) && vert.Get(e.S)) {\n\n t_graph.AddEdge(e);\n\n tg_edges.push_back(e);\n\n }\n\n }\n\n }\n", "file_path": "src/ms_solve.hpp", "rank": 70, "score": 5.024026859132254 }, { "content": "StaticSet<int> SparseGraph::VertexMap() const {\n\n return vertex_map_;\n\n}\n\n\n\nbool SparseGraph::IsConnected() const {\n\n auto cs = Components({});\n\n return (cs.size() == 1) && ((int)cs[0].size() == n_);\n\n}\n\n\n\nvoid SparseGraph::Dfs(int v, std::vector<char>& block, std::vector<int>& component) const {\n\n block[v] = true;\n\n component.push_back(v);\n\n for (int nv : adj_list_[v]) {\n\n if (!block[nv]) {\n\n Dfs(nv, block, component);\n\n }\n\n }\n\n}\n\n\n\nstd::vector<int> SparseGraph::FindComponentAndMark(int v, std::vector<char>& block) const {\n", "file_path": "src/graph.cpp", "rank": 71, "score": 5.005815170281963 }, { "content": " ret[perm[i]] = i;\n\n }\n\n return ret;\n\n}\n\n\n\ninline int GetU(int x, std::vector<int>& un) {\n\n if (un[x] == x) return x;\n\n else {\n\n un[x] = GetU(un[x], un);\n\n return un[x];\n\n }\n\n}\n\n} // namespace utils\n\n\n\ntemplate<typename T>\n\nvoid Log::WriteImpl(T message) {\n\n std::cerr<<message;\n\n}\n\ntemplate<>\n\ninline void Log::WriteImpl(std::vector<char> message) {\n", "file_path": "src/utils.hpp", "rank": 72, "score": 4.972338372738728 }, { "content": "\n\ntemplate<size_t chunks>\n\nstd::vector<int> Mcs(const FGraph<chunks>& graph) {\n\n std::vector<int> order(graph.n());\n\n static std::vector<int> label;\n\n static std::vector<char> rm;\n\n static std::vector<std::vector<int> > labels;\n\n utils::InitZero(label, graph.n());\n\n utils::InitZero(rm, graph.n());\n\n if (labels.size() < graph.n()) labels.resize(graph.n());\n\n for (int i = 0; i < graph.n(); i++) labels[i].clear();\n\n for (int i = 0; i < graph.n(); i++) labels[0].push_back(i);\n\n int max_label = 0;\n\n for (int it = graph.n() - 1; it >= 0; it--) {\n\n if (labels[max_label].size() == 0) {\n\n max_label--;\n\n it++;\n\n continue;\n\n }\n\n int x = labels[max_label].back();\n", "file_path": "src/mcs.hpp", "rank": 73, "score": 4.904895452266835 }, { "content": " for (size_t i=0;i<chunks;i++) {\n\n cnt += __builtin_popcountll(data_[i]);\n\n }\n\n return cnt;\n\n }\n\n bool Intersects(const FBitset<chunks>& other) const {\n\n for (size_t i=0;i<chunks;i++){\n\n if (data_[i] & other.data_[i]) return true;\n\n }\n\n return false;\n\n }\n\n int IntersectionPopcount(const FBitset<chunks>& other) const {\n\n int cnt = 0;\n\n for (size_t i=0;i<chunks;i++) {\n\n cnt += __builtin_popcountll(data_[i] & other.data_[i]);\n\n }\n\n return cnt;\n\n }\n\n int First() const {\n\n for (size_t i=0;i<chunks;i++) {\n\n if (data_[i]) {\n\n return i*BITS + __builtin_ctzll(data_[i]);\n\n }\n\n }\n\n return chunks * BITS;\n\n }\n\n\n", "file_path": "src/bitset.hpp", "rank": 74, "score": 4.897044364854739 }, { "content": " for (int v : vert) {\n\n assert(k >= 1);\n\n resu_[v] = k--;\n\n }\n\n return true;\n\n }\n\n FGraph<chunks> t_graph(graph_.n());\n\n std::vector<Edge> tg_edges;\n\n if (parent_edges.empty()) {\n\n for (auto e : graph_.Edges()) {\n\n if (vert.Get(e.F) && vert.Get(e.S)) {\n\n t_graph.AddEdge(e);\n\n tg_edges.push_back(e);\n\n }\n\n }\n\n } else {\n\n for (auto e : parent_edges) {\n\n if (vert.Get(e.F) && vert.Get(e.S)) {\n\n t_graph.AddEdge(e);\n\n tg_edges.push_back(e);\n", "file_path": "src/chordalsolve.hpp", "rank": 75, 
"score": 4.8813749383457905 }, { "content": " }\n\n bool Subsumes(const FBitset<chunks>& other) const {\n\n for (size_t i=0;i<chunks;i++){\n\n if ((data_[i] | other.data_[i]) != data_[i]) return false;\n\n }\n\n return true;\n\n }\n\n std::vector<int> Elements() const {\n\n std::vector<int> ret;\n\n for (size_t i=0;i<chunks;i++){\n\n uint64_t td = data_[i];\n\n while (td) {\n\n ret.push_back(i*BITS + __builtin_ctzll(td));\n\n td &= ~-td;\n\n }\n\n }\n\n return ret;\n\n }\n\n int Popcount() const {\n\n int cnt = 0;\n", "file_path": "src/bitset.hpp", "rank": 76, "score": 4.85523326532919 }, { "content": " int fec = 0;\n\n for (const FBitset<chunks>& cn : graph.CompNeighsBit(graph.adj_mat2_[v])) {\n\n fec += graph.FillSize(cn);\n\n }\n\n return fec;\n\n}\n\n\n\ntemplate<size_t chunks>\n\nvoid LbTriang(FGraph<chunks>& graph, std::mt19937& gen, int vari, int upd_cnt) {\n\n Timer lbt;\n\n lbt.start();\n\n std::vector<int> hs(graph.n());\n\n std::vector<int> upd(graph.n());\n\n std::vector<int> score(graph.n());\n\n for (int i=0;i<graph.n();i++) {\n\n hs[i] = HeurLbt(graph, i)*3 + utils::GetRand(0, vari, gen)*3 + utils::GetRand(0, 2, gen);\n\n }\n\n Log::Write(10, \"lbt init \", lbt.get());\n\n lbt.stop();\n\n for (int it=0;it<graph.n();it++){\n", "file_path": "src/mcs.hpp", "rank": 77, "score": 4.834415451580274 }, { "content": "bool SparseGraph::HasEdge(Edge e) const {\n\n return HasEdge(e.first, e.second);\n\n}\n\n\n\nvoid SparseGraph::AddEdge(int v, int u) {\n\n if (HasEdge(v, u)) return;\n\n assert(v != u);\n\n m_++;\n\n adj_list_[v].push_back(u);\n\n adj_list_[u].push_back(v);\n\n}\n\n\n\nvoid SparseGraph::AddEdge(Edge e) {\n\n AddEdge(e.first, e.second);\n\n}\n\n\n\nvoid SparseGraph::AddEdges(const std::vector<Edge>& edges) {\n\n for (auto& edge : edges) AddEdge(edge);\n\n}\n\n\n", "file_path": "src/graph.cpp", "rank": 78, "score": 4.8342969048264965 }, { "content": " assert(n >= 4 && t_graph.m() >= 3);\n\n\n\n // Special case of a vertex cover separator of size k-1\n\n for (int v : vert) {\n\n if (t_graph.Degree(v) < k) {\n\n FBitset<chunks> cover = t_graph.adj_mat2_[v];\n\n cover.SetFalse(v);\n\n bool isvc = true;\n\n for (auto e : tg_edges) {\n\n if (!cover.Get(e.F) && !cover.Get(e.S)) {\n\n isvc = false;\n\n break;\n\n }\n\n }\n\n if (isvc) {\n\n for (int u : vert) {\n\n resu_[u] = 1;\n\n }\n\n for (int u : cover) {\n\n assert(k >= 2);\n", "file_path": "src/ms_solve.hpp", "rank": 79, "score": 4.82882209359616 }, { "content": " ml.F = -1;\n\n }\n\n }\n\n }\n\n }\n\n return ml;\n\n}\n\n\n\nint Preprocessor::ConstructTreePar(int v, int p, int de, const SparseGraph& graph, const vector<int>& parent) {\n\n assert(tree_par_[v] == -1);\n\n assert(tree_lab_[v] >= 0);\n\n assert(tree_lab_[v] <= de);\n\n pair<int, int> r = CTPDFS(v, -1, de, graph, parent);\n\n assert(r.F >= -1 && r.S >= 0 && r.S <= de);\n\n v = r.F;\n\n de = r.S;\n\n assert(tree_par_[v] == -1);\n\n assert(tree_lab_[v] == de);\n\n if (p != -1) {\n\n tree_par_[v] = p;\n", "file_path": "src/preprocessor.cpp", "rank": 80, "score": 4.813296145209385 }, { "content": " }\n\n }\n\n }\n\n for (int i=0;i<(int)minseps.size();i++) {\n\n assert(minseps[i].Popcount() <= sz);\n\n }\n\n return minseps;\n\n}\n\n\n\ntemplate <size_t chunks>\n\nvoid FGraph<chunks>::Dfs(int v, std::vector<char>& block, std::vector<int>& component) const {\n\n block[v] = true;\n\n component.push_back(v);\n\n for (int nv : adj_list_[v]) {\n\n if (!block[nv]) {\n\n Dfs(nv, block, component);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/graph.hpp", "rank": 81, "score": 
4.812538436485951 }, { "content": " assert(mfv < graph.n() && mfi < graph.n() * graph.n());\n\n std::vector<FBitset<chunks>> minseps;\n\n for (int a : graph.Neighbors(mfv)) {\n\n for (int b : graph.Neighbors(mfv)) {\n\n if (a == b || graph.HasEdge(a, b)) continue;\n\n FBitset<chunks> F = graph.adj_mat2_[a];\n\n F &= graph.adj_mat2_[b];\n\n if (F.Popcount() <= sz) {\n\n FBitset<chunks> vis = vert;\n\n vis.TurnOff(graph.adj_mat2_[a]);\n\n FBitset<chunks> neB = graph.adj_mat2_[b];\n\n graph.Dfs2Bit(vis, neB);\n\n SepRec(graph, a, b, graph.adj_mat2_[a], neB, F, minseps, sz, vert.Popcount());\n\n }\n\n }\n\n }\n\n vert.TurnOff(graph.adj_mat2_[mfv]);\n\n for (auto comp : graph.BitComps(vert)) {\n\n FBitset<chunks> nbs = graph.Neighbors(comp);\n\n if (nbs.Popcount() <= sz) {\n", "file_path": "src/graph.hpp", "rank": 82, "score": 4.8114202905818395 }, { "content": "\n\nuint64_t iso_tp = 0;\n\nuint64_t iso_fp = 0;\n\n\n\nTimer startim;\n\n\n\ndouble lastprint = 0;\n\n\n\n\n\ntemplate<size_t chunks>\n\nbool MSSolve<chunks>::Go(FBitset<chunks> vert, int k, const vector<Edge>& parent_edges, const vector<FBitset<chunks>>& parent_minseps, int parent_n, bool can_induce_seps) {\n\n recs++;\n\n tott.start();\n\n int pc_id = PieceId(vert, true, false);\n\n int n = vert.Popcount();\n\n if (n == 1) {\n\n pcs_[pc_id].lb = 1;\n\n } else {\n\n pcs_[pc_id].lb = max(pcs_[pc_id].lb, 2);\n\n }\n", "file_path": "src/ms_solve.hpp", "rank": 83, "score": 4.717904835452268 }, { "content": " Log::Write(5, \"nibbleseps ret \", minseps.size(), \" \", tmr.get());\n\n }\n\n return minseps;\n\n}\n\n\n\ntemplate<size_t chunks>\n\nint FGraph<chunks>::MaxCompSize(const FBitset<chunks>& minsep, const FBitset<chunks>& vert) const {\n\n FBitset<chunks> vis = vert;\n\n vis.TurnOff(minsep);\n\n int ret = 0;\n\n for (size_t ch = 0; ch < chunks; ch++) {\n\n while (vis.data_[ch] > 0) {\n\n int k = __builtin_ctzll(vis.data_[ch]) + ch*BITS;\n\n FBitset<chunks> ne = adj_mat2_[k];\n\n Dfs2Bit(vis, ne);\n\n for (size_t ch2 = 0; ch2 < chunks; ch2++) {\n\n ne.data_[ch2] &= ~vis.data_[ch2];\n\n }\n\n ret = std::max(ret, ne.Popcount());\n\n if (ret > vis.Popcount()) {\n\n return ret;\n\n }\n\n }\n\n }\n\n return ret;\n\n}\n\n} // namespace sms\n", "file_path": "src/graph.hpp", "rank": 84, "score": 4.571038850830165 }, { "content": " return ppg;\n\n}\n\n\n\nvector<int> Preprocessor::Reconstruct(vector<int> colors) const {\n\n if (vertex_map2_.size() > 0) {\n\n assert(colors.size() == vertex_map2_.size());\n\n int nn = org_graph2.n();\n\n vector<int> col0(nn);\n\n for (int i=0;i<nn;i++){\n\n col0[i] = -1;\n\n }\n\n for (int i = 0; i < (int)colors.size(); i++) {\n\n assert(vertex_map2_[i] >= 0 && vertex_map2_[i] < nn);\n\n col0[vertex_map2_[i]] = colors[i];\n\n }\n\n for (int i = (int)tamaki_elim_.size() - 1; i >= 0; i--) {\n\n int x = tamaki_elim_[i].F;\n\n auto nbs = tamaki_elim_[i].S;\n\n assert(x >= 0 && x < nn && col0[x] == -1);\n\n int mic = nn;\n", "file_path": "src/preprocessor.cpp", "rank": 85, "score": 4.5299537958001626 }, { "content": " resu_[u] = k--;\n\n }\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n // Special case of a star separator of size k-2\n\n std::vector<FBitset<chunks>> star_minseps = t_graph.StarMinsep(k-2);\n\n if (!star_minseps.empty()) {\n\n assert(star_minseps.size() == 1);\n\n assert(star_minseps[0].Popcount() <= k-2);\n\n FBitset<chunks> vis = vert;\n\n vis.TurnOff(star_minseps[0]);\n\n for (const auto& comp : t_graph.BitComps(vis)) {\n\n assert(t_graph.IsStar(comp));\n\n if (comp.Popcount() == 2) {\n\n int t = 2;\n\n 
for (int v : comp) {\n\n resu_[v] = t--;\n", "file_path": "src/ms_solve.hpp", "rank": 86, "score": 4.336406485130617 }, { "content": " if (pcs_[pc_id].lb > k) {\n\n return false;\n\n }\n\n pcs_[pc_id].ub = min(pcs_[pc_id].ub, n);\n\n if (pcs_[pc_id].ub <= k) {\n\n return true;\n\n }\n\n\n\n lbt.start();\n\n FGraph<chunks> t_graph(graph_.n());\n\n std::vector<Edge> tg_edges;\n\n for (auto e : parent_edges) {\n\n if (vert.Get(e.F) && vert.Get(e.S)) {\n\n tg_edges.push_back(e);\n\n t_graph.AddEdge(e);\n\n }\n\n }\n\n\n\n if ((int64_t)t_graph.m() == (int64_t)n*(n-1)/2) {\n\n pcs_[pc_id].ub = n;\n", "file_path": "src/ms_solve.hpp", "rank": 87, "score": 4.302267085097609 }, { "content": " vertex_map_.Init(identity);\n\n}\n\n\n\nSparseGraph::SparseGraph(std::vector<Edge> edges) : vertex_map_(edges) {\n\n n_ = vertex_map_.Size();\n\n m_ = 0;\n\n adj_list_.resize(n_);\n\n for (auto edge : edges) {\n\n AddEdge(vertex_map_.Rank(edge.first), vertex_map_.Rank(edge.second));\n\n }\n\n}\n\n\n\nint SparseGraph::n() const {\n\n return n_;\n\n}\n\n\n\nint SparseGraph::m() const {\n\n return m_;\n\n}\n\n\n", "file_path": "src/graph.cpp", "rank": 88, "score": 4.262704389589876 }, { "content": " int iters = 0;\n\n while (1) {\n\n for (int v : vert) {\n\n reach[v].Clear();\n\n reach[v].SetTrue(v);\n\n }\n\n for (int r = 0; r < tn; r++) {\n\n std::vector<uint64_t> vh_new(n_);\n\n bool fo = false;\n\n for (int v : vert) {\n\n if (reach[v] != vert) {\n\n fo = true;\n\n reach[v] |= Neighbors(reach[v]);\n\n reach[v] &= vert;\n\n uint64_t nhv = 1;\n\n for (int u : adj_list_[v]) {\n\n if (vert.Get(u)) {\n\n nhv *= (vh[u] + 1ull);\n\n nhv %= 1000000007ull;\n\n }\n", "file_path": "src/graph.hpp", "rank": 89, "score": 4.1732038629314605 }, { "content": " int mfv = graph.n();\n\n FBitset<chunks> vert;\n\n for (int i = 0; i < graph.n(); i++) {\n\n if (graph.Degree(i) > 0) {\n\n vert.SetTrue(i);\n\n int fi = graph.FillSize(graph.adj_mat2_[i]);\n\n if (fi < mfi) {\n\n mfi = fi;\n\n mfv = i;\n\n }\n\n }\n\n }\n\n if (vert.Popcount() <= 2) return {};\n\n Timer tmr;\n\n bool wl = false;\n\n if (vert.Popcount() == graph.n()) {\n\n Log::Write(5, \"nibbleseps \", sz);\n\n tmr.start();\n\n wl = true;\n\n }\n", "file_path": "src/graph.hpp", "rank": 90, "score": 4.12771921603783 }, { "content": "}\n\n\n\nbool SparseGraph::IsClique(const std::vector<int>& vs) const {\n\n for (int i = 0; i < (int)vs.size(); i++) {\n\n for (int ii = i+1; ii < (int)vs.size(); ii++) {\n\n if (!HasEdge(vs[i], vs[ii])) {\n\n return false;\n\n }\n\n }\n\n }\n\n return true;\n\n}\n\n\n\nvoid SparseGraph::ShuffleAdjList(std::mt19937& gen) {\n\n for (int i = 0; i < n_; i++) {\n\n std::shuffle(adj_list_[i].begin(), adj_list_[i].end(), gen);\n\n }\n\n}\n\n\n\nnamespace {\n", "file_path": "src/graph.cpp", "rank": 91, "score": 4.124520786102502 }, { "content": " }\n\n }\n\n }\n\n }\n\n vector<pair<int, int>> dgo;\n\n for (int i=0;i<graph.n();i++){\n\n dgo.push_back({graph.Degree(i), i});\n\n }\n\n sort(dgo.begin(), dgo.end());\n\n fo = true;\n\n while (fo) {\n\n fo = false;\n\n for (int i=0;i<graph.n();i++){\n\n int x = dgo[i].second;\n\n if (graph.Degree(x) == 0) continue;\n\n if (!graph.IsClique(graph.Neighbors(x))) continue;\n\n bool ok = true;\n\n for (int nx : graph.Neighbors(x)) {\n\n if (graph.Degree(nx) <= k) {\n\n ok = false;\n", "file_path": "src/preprocessor.cpp", "rank": 92, "score": 4.0897046871918405 }, { "content": " int v = 0;\n\n int be = 1e9;\n\n for (int i=0;i<graph.n();i++){\n\n if (hs[i] >= 0 && hs[i] < be) {\n\n v = i;\n\n be = hs[i];\n\n 
}\n\n }\n\n assert(hs[v] >= 0 && hs[v] == be);\n\n hs[v] = -1;\n\n for (const FBitset<chunks>& cn : graph.CompNeighsBit(graph.adj_mat2_[v])) {\n\n for (auto fe : graph.FillEdges(cn)) {\n\n graph.AddEdge(fe);\n\n upd[fe.first] = 1;\n\n upd[fe.second] = 1;\n\n }\n\n }\n\n if (upd_cnt == 0) continue;\n\n for (int i=0;i<graph.n();i++){\n\n if (upd[i]) {\n", "file_path": "src/mcs.hpp", "rank": 93, "score": 4.087538684399506 }, { "content": " int ss_lb = lb_sieve_.Get(vert, k+1);\n\n if (ss_lb > 0) {\n\n assert(ss_lb > k);\n\n pcs_[pc_id].lb = ss_lb;\n\n csubt.stop();\n\n return false;\n\n }\n\n csubt.stop();\n\n FGraph<chunks> t_graph(graph_.n());\n\n std::vector<Edge> tg_edges;\n\n for (auto e : parent_edges) {\n\n if (vert.Get(e.F) && vert.Get(e.S)) {\n\n tg_edges.push_back(e);\n\n t_graph.AddEdge(e);\n\n }\n\n }\n\n if ((int64_t)t_graph.m() == (int64_t)n*(n-1)/2) {\n\n pcs_[pc_id].ub = n;\n\n pcs_[pc_id].lb = n;\n\n csubt.start();\n", "file_path": "src/chordalsolve.hpp", "rank": 94, "score": 4.04777572561242 }, { "content": " cover = t_graph.adj_mat2_[v];\n\n cover.SetFalse(v);\n\n bool isvc = true;\n\n for (auto e : tg_edges) {\n\n if (!cover.Get(e.F) && !cover.Get(e.S)) {\n\n isvc = false;\n\n break;\n\n }\n\n }\n\n if (isvc) {\n\n pcs_[pc_id].ub = t_graph.Degree(v)+1;\n\n assert(pcs_[pc_id].ub <= k);\n\n lbt.stop();\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n // Special case of a star separator of size k-2\n\n startim.start();\n", "file_path": "src/ms_solve.hpp", "rank": 95, "score": 4.0422236195706684 }, { "content": " int lb = PathCycleLb(SparseGraph(t_graph), (CycleLb(n)-k)*CycleLb(n), k+1);\n\n pcs_[pc_id].lb = max(pcs_[pc_id].lb, lb);\n\n if (pcs_[pc_id].lb > k) {\n\n FBitset<chunks> sg;\n\n for (int v : path_cycle_found) {\n\n sg.SetTrue(v);\n\n }\n\n subt.start();\n\n lb_sieve_.Insert(sg, pcs_[pc_id].lb);\n\n subt.stop();\n\n lbt.stop();\n\n return false;\n\n }\n\n }\n\n assert(n >= 4 && t_graph.m() >= 3);\n\n\n\n // Special case of a vertex cover separator of size k-1\n\n FBitset<chunks> cover;\n\n for (int v : vert) {\n\n if (t_graph.Degree(v) < k) {\n", "file_path": "src/ms_solve.hpp", "rank": 96, "score": 4.002845853145789 }, { "content": " std::vector<int> component;\n\n Dfs(v, block, component);\n\n return component;\n\n}\n\n\n\nstd::vector<std::vector<int> > SparseGraph::Components(const std::vector<int>& separator) const {\n\n std::vector<char> blocked(n_);\n\n for (int v : separator) {\n\n blocked[v] = true;\n\n }\n\n std::vector<std::vector<int> > components;\n\n for (int i = 0; i < n_; i++) {\n\n if (!blocked[i]) {\n\n components.push_back(FindComponentAndMark(i, blocked));\n\n }\n\n }\n\n return components;\n\n}\n\n\n\nconst std::vector<int>& SparseGraph::Neighbors(int v) const {\n", "file_path": "src/graph.cpp", "rank": 97, "score": 3.925268854874397 }, { "content": " while (1) {\n\n if (!fo) {\n\n for (int j = 0; j < chunks; j++) {\n\n if (vis.data_[j]) {\n\n int x = __builtin_ctzll(vis.data_[j]) + j*BITS;\n\n ne.SetTrue(x);\n\n ret.push_back(FBitset<chunks>());\n\n fo = true;\n\n break;\n\n }\n\n }\n\n if (!fo) return ret;\n\n }\n\n fo = false;\n\n for (int j = 0; j < chunks; j++) {\n\n uint64_t gv = vis.data_[j] & ne.data_[j];\n\n while (gv) {\n\n fo = true;\n\n vis.data_[j] &= (~(gv&-gv));\n\n int x = __builtin_ctzll(gv) + j*BITS;\n", "file_path": "src/graph.hpp", "rank": 98, "score": 3.92042074522086 }, { "content": " minseps.push_back(nbs);\n\n }\n\n FGraph<chunks> ngraph(graph.n());\n\n comp |= nbs;\n\n for (auto e : graph.Edges()) {\n\n if (comp.Get(e.first) && 
comp.Get(e.second)) {\n\n ngraph.AddEdge(e);\n\n }\n\n }\n\n for (auto e : graph.FillEdges(nbs)) {\n\n ngraph.AddEdge(e);\n\n }\n\n auto rms = NibbleSmallMinseps(ngraph, sz);\n\n for (const auto& sep : rms) {\n\n minseps.push_back(sep);\n\n }\n\n }\n\n\n\n utils::SortAndDedup(minseps);\n\n if (wl) {\n", "file_path": "src/graph.hpp", "rank": 99, "score": 3.8882628306464158 } ]
C++
video/src/window/message_box_common.cpp
vinders/pandora_toolbox
f32e301ebaa2b281a1ffc3d6d0c556091420520a
#if !defined(_WINDOWS) # include <mutex> # include <thread/spin_lock.h> # include "video/_private/_message_box_common.h" using namespace pandora::video; static pandora::thread::SpinLock __lastErrorLock; static pandora::memory::LightString __lastError; void pandora::video::__MessageBox::setLastError(const char* error) noexcept { std::lock_guard<pandora::thread::SpinLock> guard(__lastErrorLock); __lastError = error; } pandora::memory::LightString pandora::video::__MessageBox::getLastError() noexcept { std::lock_guard<pandora::thread::SpinLock> guard(__lastErrorLock); return __lastError; } # ifdef __APPLE__ # define __index(i,maxI) (maxI - i) # else # define __index(i,...) i # endif static const char* __getLabelOK() noexcept { return "OK"; } static const char* __getLabelCancel() noexcept { return "Cancel"; } static const char* __getLabelYes() noexcept { return "Yes"; } static const char* __getLabelNo() noexcept { return "No"; } static const char* __getLabelAbort() noexcept { return "Abort"; } static const char* __getLabelRetry() noexcept { return "Retry"; } static const char* __getLabelIgnore() noexcept { return "Ignore"; } static inline const char* __setActionLabel(const char* action, const char* placeholder) noexcept { return (action != nullptr) ? action : placeholder; } uint32_t pandora::video::__MessageBox::toActionLabels(MessageBox::ActionType actions, const char** outLabels) noexcept { switch (actions) { case MessageBox::ActionType::ok: outLabels[__index(0,0)] = __getLabelOK(); return 1; case MessageBox::ActionType::okCancel: outLabels[__index(0,1)] = __getLabelOK(); outLabels[__index(1,1)] = __getLabelCancel(); return 2; case MessageBox::ActionType::retryCancel: outLabels[__index(0,1)] = __getLabelRetry(); outLabels[__index(1,1)] = __getLabelCancel(); return 2; case MessageBox::ActionType::yesNo: outLabels[__index(0,1)] = __getLabelYes(); outLabels[__index(1,1)] = __getLabelNo(); return 2; case MessageBox::ActionType::yesNoCancel: outLabels[__index(0,2)] = __getLabelYes(); outLabels[__index(1,2)] = __getLabelNo(); outLabels[__index(2,2)] = __getLabelCancel(); return 3; case MessageBox::ActionType::abortRetryIgnore: outLabels[__index(0,2)] = __getLabelAbort(); outLabels[__index(1,2)] = __getLabelRetry(); outLabels[__index(2,2)] = __getLabelIgnore(); return 3; default: outLabels[__index(0,0)] = __getLabelOK(); return 1; } } uint32_t pandora::video::__MessageBox::toActionLabels(const char* action1, const char* action2, const char* action3, const char** outLabels) noexcept { if (action3) { outLabels[__index(0,2)] = __setActionLabel(action1, __getLabelOK()); outLabels[__index(1,2)] = __setActionLabel(action2, __getLabelNo()); outLabels[__index(2,2)] = action3; return 3; } else if (action2) { outLabels[__index(0,1)] = __setActionLabel(action1, __getLabelOK()); outLabels[__index(1,1)] = action2; return 2; } else { outLabels[__index(0,0)] = __setActionLabel(action1, __getLabelOK()); return 1; } } MessageBox::Result pandora::video::__MessageBox::toDialogResult(uint32_t index, uint32_t maxActions) noexcept { # ifdef __APPLE__ if (index == 0 || index > maxActions) return MessageBox::Result::failure; index = maxActions + 1 - index; # else if (index > maxActions) return MessageBox::Result::failure; # endif switch (index) { case 1: return MessageBox::Result::action1; case 2: return MessageBox::Result::action2; case 3: return MessageBox::Result::action3; default: return MessageBox::Result::failure; } } #endif
#if !defined(_WINDOWS) # include <mutex> # include <thread/spin_lock.h> # include "video/_private/_message_box_common.h" using namespace pandora::video; static pandora::thread::SpinLock __lastErrorLock; static pandora::memory::LightString __lastError; void pandora::video::__MessageBox::setLastError(const char* error) noexcept { std::lock_guard<pandora::thread::SpinLock> guard(__lastErrorLock); __lastError = error; } pandora::memory::LightString pandora::video::__MessageBox::getLastError() noexcept { std::lock_guard<pandora::thread::SpinLock> guard(__lastErrorLock); return __lastError; } # ifdef __APPLE__ # define __index(i,maxI) (maxI - i) # else # define __index(i,...) i # endif static const char* __getLabelOK() noexcept { return "OK"; } static const char* __getLabelCancel() noexcept { return "Cancel"; } static const char* __getLabelYes() noexcept { return "Yes"; } static const char* __getLabelNo() noexcept { return "No"; } static const char* __getLabelAbort() noexcept { return "Abort"; } static const char* __getLabelRetry() noexcept { return "Retry"; } static const char* __getLabelIgnore() noexcept { return "Ignore"; } static inline const char* __setActionLabel(const char* action, const char* placeholder) noexcept { return (action != nullptr) ? action : placeholder; } uint32_t pandora::video::__MessageBox::toActionLabels(MessageBox::ActionType actions, const char** outLabels) noexcept { switch (actions) { case MessageBox::ActionType::ok: outLabels[__index(0,0)] = __getLabelOK(); return 1; case MessageBox::ActionType::okCancel: outLabels[__index(0,1)] = __getLabelOK(); outLabels[__index(1,1)] = __getLabelCancel(); return 2; case MessageBox::ActionType::retryCancel: outLabels[__index(0,1)] = __getLabelRetry(); outLabels[__index(1,1)] = __getLabelCancel(); return 2; case MessageBox::ActionType::yesNo: outLabels[__index(0,1)] = __getLabelYes(); outLabels[__index(1,1)] = __getLabelNo();
uint32_t pandora::video::__MessageBox::toActionLabels(const char* action1, const char* action2, const char* action3, const char** outLabels) noexcept { if (action3) { outLabels[__index(0,2)] = __setActionLabel(action1, __getLabelOK()); outLabels[__index(1,2)] = __setActionLabel(action2, __getLabelNo()); outLabels[__index(2,2)] = action3; return 3; } else if (action2) { outLabels[__index(0,1)] = __setActionLabel(action1, __getLabelOK()); outLabels[__index(1,1)] = action2; return 2; } else { outLabels[__index(0,0)] = __setActionLabel(action1, __getLabelOK()); return 1; } } MessageBox::Result pandora::video::__MessageBox::toDialogResult(uint32_t index, uint32_t maxActions) noexcept { # ifdef __APPLE__ if (index == 0 || index > maxActions) return MessageBox::Result::failure; index = maxActions + 1 - index; # else if (index > maxActions) return MessageBox::Result::failure; # endif switch (index) { case 1: return MessageBox::Result::action1; case 2: return MessageBox::Result::action2; case 3: return MessageBox::Result::action3; default: return MessageBox::Result::failure; } } #endif
return 2; case MessageBox::ActionType::yesNoCancel: outLabels[__index(0,2)] = __getLabelYes(); outLabels[__index(1,2)] = __getLabelNo(); outLabels[__index(2,2)] = __getLabelCancel(); return 3; case MessageBox::ActionType::abortRetryIgnore: outLabels[__index(0,2)] = __getLabelAbort(); outLabels[__index(1,2)] = __getLabelRetry(); outLabels[__index(2,2)] = __getLabelIgnore(); return 3; default: outLabels[__index(0,0)] = __getLabelOK(); return 1; } }
function_block-function_prefix_line
[ { "content": " __x11_XSync Sync_ = nullptr;\n", "file_path": "hardware/include/hardware/_private/_libraries_x11.h", "rank": 0, "score": 120923.27754309989 }, { "content": " constexpr inline bool isHyperThreadingCapable() const noexcept { return (logicalCores > physicalCores); }\n", "file_path": "hardware/include/hardware/_private/_cpu_cores_specs.h", "rank": 1, "score": 118511.18231743979 }, { "content": " /// @brief Message box category (user actions available)\n\n enum class ActionType : uint32_t {\n\n ok = 0,\n\n okCancel,\n\n retryCancel,\n\n yesNo,\n\n yesNoCancel,\n\n abortRetryIgnore\n\n };\n", "file_path": "video/include/video/message_box.h", "rank": 2, "score": 114029.27278044225 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nTypes: Iterator, Iterable\n\nMacros: \n\n * Call one of these inside collection class (that implements previous/next) :\n\n _P_FORWARD_ITERATOR_MEMBERS, _P_BIDIRECT_ITERATOR_MEMBERS, _P_RANDOMACCESS_ITERATOR_MEMBERS,\n\n * Or call any of these inside collection class for custom behavior :\n\n _P_ITERATOR_MEMBERS, _P_REVERSE_ITERATOR_MEMBERS\n\n*******************************************************************************/\n\n#pragma once\n\n\n\n#include <cstddef>\n\n#include <cstdint>\n\n#include <cassert>\n\n#include <stdexcept>\n\n#include <iterator>\n\n#include \"./class_traits.h\"\n\n#if !defined(_CPP_REVISION) || _CPP_REVISION != 14\n\n# define __if_constexpr if constexpr\n\n#else\n\n# define __if_constexpr if\n\n#endif\n\n\n\nnamespace pandora {\n\n namespace pattern {\n\n /// @class Iterable\n\n /// @brief Iterable template - object that can be iterated through\n\n template <typename _ItemType, bool _IsReversible = true>\n", "file_path": "pattern/include/pattern/iterator.h", "rank": 3, "score": 101162.81192628943 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\n- description : window events & handlers (system/hardware/input events)\n\n- types : WindowEventHandler, KeyboardEventHandler, MouseEventHandler, TouchEventHandler\n\n- enums : KeyboardEvent, KeyTransition, MouseEvent, MouseButton, TouchEvent\n\n- functions : virtualKeyToChar, mouseWheelDelta, isMouseButtonPressed\n\n- keycodes: see <video/window_keycodes.h>\n\n*******************************************************************************/\n\n#pragma once\n\n\n\n#include <cstdint>\n\n#ifdef _WINDOWS\n\n# include <wchar.h>\n\n#endif\n\n\n\nnamespace pandora {\n\n namespace video {\n", "file_path": "video/include/video/window_events.h", "rank": 4, "score": 98170.88040948717 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nSystem-specific clocks: \n\n - windows: QueryPerformanceClock, MultimediaTimerClock, TickCountClock\n\n - mac os: MachSystemClock\n\n - unix/linux: MonotonicClock\n\nAliases for all systems: HighResolutionClock, HighResolutionAuxClock, SteadyClock\n\n\n\nNote: before using one of these clock, <ClockType>::isSupported() should be verified.\n\n\n\nWarning: High resolution clocks can have unexpected behaviors when they switch to different CPU cores.\n\n It is recommended to set a thread/CPU affinity to 1 core, to always read the clock from the same core.\n\n See 
<hardware/process_affinity.h>\n\n*******************************************************************************/\n\n#pragma once\n\n\n\n#include <cstddef>\n\n#include <cstdint>\n\n#include <chrono>\n\n#if !defined(_WINDOWS) && (defined(__APPLE__) || defined(__ANDROID__) || defined(__linux__) || defined(__linux) || defined(__unix__) || defined(__unix) || defined(_POSIX_VERSION))\n\n# include <unistd.h>\n\n#endif\n\n\n\nnamespace pandora {\n\n namespace time {\n\n \n\n# if defined(_WINDOWS) || defined(WIN32) || defined(_WIN32) || defined(_WIN64) || defined(_WINNT)\n", "file_path": "time/include/time/system_clocks.h", "rank": 5, "score": 98170.88040948717 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nLocation identification for CPUID property\n\n*******************************************************************************/\n\n#pragma once\n\n\n\n#include <cstddef>\n\n#include <cstdint>\n\n#include <cassert>\n\n#include <string>\n\n#include <system/cpu_arch.h>\n\n#include \"./cpu_vendor.h\"\n\n\n\nnamespace pandora {\n\n namespace hardware {\n", "file_path": "hardware/include/hardware/cpuid_property_location.h", "rank": 6, "score": 95386.61680072578 }, { "content": " /// @class StaticBuffer\n\n /// @brief Direct3D data buffer for shader stage(s): constant/uniform buffer, vertex array buffer, vertex index buffer...\n\n /// * Static buffer: GPU read-write (very fast) / indirect CPU write-only (slow).\n\n /// -> recommended for writable buffers rarely updated by the CPU, or with small data;\n\n /// -> used for static meshes with skinning/skeletal animation in shaders.\n\n /// * Immutable buffer (isImmutable==true): initialized at creation, then GPU read-only (fastest).\n\n /// -> best option for buffers that never change.\n\n /// * For buffers rewritten at least once per frame or with huge data transfers, prefer DynamicBuffer.\n\n /// * For more details about buffer types, see 'DataBufferType' in <video/shader_types.h>.\n\n /// @remarks - To use it, bind it to the associated Renderer object (must be the same as the one used in constructor).\n\n /// - Constant/uniform buffer data type size must be a multiple of 16 byte: add padding in structure/array-item if necessary.\n\n /// - Static buffers are meant to be persistent: should be updated at most once per frame (and less than that if the buffer size is big).\n\n /// - Common practice: * geometry centered around (0;0;0) -> vertex buffers;\n\n /// * world matrix to offset the entire model in the environment -> combined with camera view into constant/uniform buffer;\n\n /// * vertices repositioned in vertex shader by world/view matrix and projection matrix.\n\n class StaticBuffer final {\n\n public:\n\n /// @brief Create data buffer (to store data for shader stages)\n\n /// @param renderer The renderer for which the buffer is created: use the same renderer when binding it or when calling write.\n\n /// @param type Type of buffer to create: constant/uniform buffer, vertex array buffer, vertex index buffer...\n\n /// @param bufferByteSize The total number of bytes of the buffer (sizeof structure/array) -- must be a multiple of 16 bytes for constant/uniform buffers.\n\n /// @warning Initialization/writing is a LOT more efficient when the source data type has a 16-byte alignment (see <system/align.h>).\n\n /// @throws - invalid_argument: if 'bufferByteSize' is 0;\n\n /// - runtime_error: on creation failure.\n\n 
StaticBuffer(Renderer& renderer, BaseBufferType type, size_t bufferByteSize);\n\n /// @brief Create data buffer (to store data for shader stages) with initial value\n\n /// @param renderer The renderer for which the buffer is created: use the same renderer when binding it or when calling write.\n\n /// @param type Type of buffer to create: constant/uniform buffer, vertex array buffer, vertex index buffer...\n\n /// @param bufferByteSize The total number of bytes of the buffer (sizeof structure/array) -- must be a multiple of 16 bytes for constant/uniform buffers.\n\n /// @param initData Buffer initial value -- structure or array of input values (must not be NULL if immutable)\n\n /// @param isImmutable Buffer data is immutable (true) or can be modified (false)\n\n /// @warning Initialization/writing is a LOT more efficient when the source data type has a 16-byte alignment (see <system/align.h>).\n\n /// @throws - invalid_argument if 'bufferByteSize' is 0 / if immutable and 'initData' is NULL;\n\n /// - runtime_error on creation failure.\n\n StaticBuffer(Renderer& renderer, BaseBufferType type, size_t bufferByteSize, \n", "file_path": "video/include/video/d3d11/static_buffer.h", "rank": 7, "score": 94192.42413437525 }, { "content": " const void* initData, bool isImmutable = false);\n\n \n\n /// @brief Create from native buffer handle - must use D3D11_USAGE_DEFAULT for 'write' to work!\n\n StaticBuffer(BufferHandle handle, size_t bufferSize, BaseBufferType type) noexcept \n\n : _buffer(handle), _bufferSize(bufferSize), _type(type) {}\n\n ~StaticBuffer() noexcept { release(); }\n\n void release() noexcept { ///< Destroy/release static buffer instance\n\n if (this->_buffer) {\n\n try { this->_buffer->Release(); } catch (...) {}\n\n this->_buffer = nullptr;\n\n }\n\n }\n\n \n\n StaticBuffer() = default; ///< Empty buffer -- not usable (only useful to store variable not immediately initialized)\n\n StaticBuffer(const StaticBuffer&) = delete;\n\n StaticBuffer(StaticBuffer&& rhs) noexcept : _buffer(rhs._buffer), _bufferSize(rhs._bufferSize), _type(rhs._type) { rhs._buffer = nullptr; }\n\n StaticBuffer& operator=(const StaticBuffer&) = delete;\n\n StaticBuffer& operator=(StaticBuffer&& rhs) noexcept {\n\n release();\n\n this->_buffer = rhs._buffer; this->_bufferSize = rhs._bufferSize; this->_type = rhs._type;\n", "file_path": "video/include/video/d3d11/static_buffer.h", "rank": 8, "score": 90010.96527682978 }, { "content": " rhs._buffer = nullptr;\n\n return *this;\n\n }\n\n \n\n // -- accessors --\n\n \n\n /// @brief Get native Direct3D 11 compatible buffer handle\n\n inline BufferHandle handle() const noexcept { return this->_buffer; }\n\n /// @brief Get pointer to native Direct3D 11 compatible buffer handle (usable as array of 1 item)\n\n inline const BufferHandle* handleArray() const noexcept { return &(this->_buffer); }\n\n \n\n inline bool isEmpty() const noexcept { return (this->_buffer == nullptr); } ///< Verify if initialized (false) or empty/moved/released (true)\n\n inline size_t size() const noexcept { return this->_bufferSize; } ///< Get buffer byte size\n\n BaseBufferType type() const noexcept { return this->_type; } ///< Get buffer type: uniform/vertex/index\n\n \n\n // -- operations --\n\n\n\n /// @brief Write buffer data (has no effect if buffer is immutable)\n\n /// @param renderer Renderer used in constructor.\n\n /// @param sourceData Structure/array of the same byte size as 'bufferByteSize' in constructor.\n", "file_path": "video/include/video/d3d11/static_buffer.h", "rank": 9, 
"score": 90005.30920367061 }, { "content": "#if defined(_WINDOWS) && defined(_VIDEO_D3D11_SUPPORT)\n\n# include <cstdint>\n\n# include \"./renderer.h\" // includes D3D11\n\n\n\n namespace pandora {\n\n namespace video {\n\n namespace d3d11 {\n\n /// @class StaticBuffer\n\n /// @brief Direct3D data buffer for shader stage(s): constant/uniform buffer, vertex array buffer, vertex index buffer...\n\n /// * Static buffer: GPU read-write (very fast) / indirect CPU write-only (slow).\n\n /// -> recommended for writable buffers rarely updated by the CPU, or with small data;\n\n /// -> used for static meshes with skinning/skeletal animation in shaders.\n\n /// * Immutable buffer (isImmutable==true): initialized at creation, then GPU read-only (fastest).\n\n /// -> best option for buffers that never change.\n\n /// * For buffers rewritten at least once per frame or with huge data transfers, prefer DynamicBuffer.\n\n /// * For more details about buffer types, see 'DataBufferType' in <video/shader_types.h>.\n\n /// @remarks - To use it, bind it to the associated Renderer object (must be the same as the one used in constructor).\n\n /// - Constant/uniform buffer data type size must be a multiple of 16 byte: add padding in structure/array-item if necessary.\n\n /// - Static buffers are meant to be persistent: should be updated at most once per frame (and less than that if the buffer size is big).\n\n /// - Common practice: * geometry centered around (0;0;0) -> vertex buffers;\n\n /// * world matrix to offset the entire model in the environment -> combined with camera view into constant/uniform buffer;\n\n /// * vertices repositioned in vertex shader by world/view matrix and projection matrix.\n", "file_path": "video/include/video/d3d11/static_buffer.h", "rank": 10, "score": 90004.2483429459 }, { "content": " inline void write(Renderer& renderer, const void* sourceData) {\n\n renderer.context()->UpdateSubresource(this->_buffer, 0, nullptr, sourceData, 0, 0);\n\n }\n\n\n\n private:\n\n BufferHandle _buffer = nullptr;\n\n size_t _bufferSize = 0;\n\n BaseBufferType _type = BaseBufferType::uniform;\n\n };\n\n }\n\n }\n\n }\n\n\n\n#endif\n", "file_path": "video/include/video/d3d11/static_buffer.h", "rank": 11, "score": 90002.1888329022 }, { "content": "/*******************************************************************************\n\nMIT License\n\nCopyright (c) 2021 Romain Vinders\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, FITNESS\n\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS\n\nOR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n\nWHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\nIN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n*******************************************************************************/\n\n#pragma once\n\n\n", "file_path": "video/include/video/d3d11/static_buffer.h", "rank": 12, "score": 89998.89698080903 }, { "content": " /// @brief Current state of a stopwatch\n\n enum class Status: uint32_t {\n\n idle = 0u,\n\n paused = 1u,\n\n running = 2u\n\n };\n\n \n\n /// @brief Create idle stopwatch\n\n Stopwatch() = default;\n\n \n\n Stopwatch(const Stopwatch&) = default;\n\n Stopwatch(Stopwatch&&) noexcept = default;\n\n inline Stopwatch& operator=(const Stopwatch&) = default;\n\n inline Stopwatch& operator=(Stopwatch&&) noexcept = default;\n\n ~Stopwatch() = default;\n\n\n\n // -- stopwatch operations --\n\n\n\n /// @brief Stop measurements and reset elapsed time to zero\n\n inline void reset() noexcept {\n\n this->_status = Status::idle;\n", "file_path": "time/include/time/stopwatch.h", "rank": 13, "score": 76145.33353489889 }, { "content": " ///< Unicode character encoding mode\n\n enum class Encoding : uint32_t {\n\n any = 0u, ///< Undefined -> auto-detection in converters\n\n utf8 = 1u, ///< UTF-8 (byte/multi-byte)\n\n utf16BE = 2u, ///< UTF-16 big-endian (word/multi-word)\n\n utf16LE = 3u ///< UTF-16 little-endian (word/multi-word)\n\n };\n\n \n\n /// ---\n\n \n", "file_path": "io/include/io/encoder.h", "rank": 14, "score": 76145.33353489889 }, { "content": " /// @brief User action chosen to close dialog (or failure if an error occurred)\n\n enum class Result : uint32_t {\n\n failure = 0, ///< error during message box creation\n\n action1 = 1, ///< user action #1 (example: \"yes\" for ActionType \"yesNoCancel\" / \"button1\" for custom actions)\n\n action2 = 2, ///< user action #2 (example: \"no\" for ActionType \"yesNoCancel\" / \"button2\" for custom actions)\n\n action3 = 3, ///< user action #3 (example: \"cancel\" for ActionType \"yesNoCancel\" / \"button3\" for custom actions)\n\n };\n\n \n\n // ---\n\n\n\n /// @brief Show modal message box (wait for user action)\n\n /// @param caption Title of the message box\n\n /// @param message Text content of the message box\n\n /// @param actions Available user actions (buttons)\n\n /// @param icon Optional symbol to display in message box (not used on linux systems)\n\n /// @param isTopMost Make modal dialog appear on top of all windows\n\n /// @param parent Parent window blocked by dialog (optional)\n\n /// @returns Action chosen by user (or Result::failure if the dialog could not be created)\n\n static Result show(const char* caption, const char* message, ActionType actions = ActionType::ok, \n\n IconType icon = IconType::none, bool isTopMost = false, WindowHandle parent = (WindowHandle)0) noexcept;\n\n# if defined(_WINDOWS)\n", "file_path": "video/include/video/message_box.h", "rank": 15, "score": 74574.50921608796 }, { "content": " /// @brief Window visibility state (bit-mask flags)\n\n /// @warning internal dev: values are aligned with impl-specific defines -> keep them aligned\n\n enum class VisibleState : uint32_t {\n\n none = 0u, ///< Hide window\n\n visible = 0x1u, ///< Visible window (on screen)\n\n active = 0x2u, ///< Active window (focus)\n\n minimized = 0x4u, ///< Minimized\n\n maximized = 0x8u ///< Maximization button ON (might be minimized at the same time!)\n\n 
};\n\n\n\n ~Window() noexcept;\n\n Window(const Window&) = delete;\n\n Window(Window&&) = delete; // no move: registered pointers -> address must not change\n\n Window& operator=(const Window&) = delete;\n\n Window& operator=(Window&&) = delete; // no move\n\n \n\n \n\n // -- accessors --\n\n \n\n WindowHandle handle() const noexcept; ///< Get native window handle\n\n WindowType displayMode() const noexcept; ///< Get window type (fullscreen/borderless/window/...)\n\n WindowBehavior behavior() const noexcept; ///< Get window type (fullscreen/borderless/window/...)\n", "file_path": "video/include/video/window.h", "rank": 16, "score": 74568.47224784468 }, { "content": " /// @brief Timer lateness management mode\n\n enum class DelayHandling: uint32_t {\n\n none = 0, ///< always fixed periods\n\n compensate = 1 ///< when lateness occurs, adapt following period(s) to compensate\n\n };\n\n\n\n /// @brief Measured timer stats\n\n struct TimerStats {\n\n std::chrono::nanoseconds elapsedTime; ///< Time elapsed since time reference (last measures (if provided) or last reset)\n\n int64_t lastTicks = 0LL; ///< Absolute tick count at the end of the last call to \"waitPeriod\"\n\n int64_t lastAuxTicks = 0LL; ///< Absolute tick count of auxiliary clock at the end of the last call to \"waitPeriod\"\n\n uint64_t totalPeriods = 0LL;///< Total number of periods (since last reset)\n\n uint64_t periodCount = 0LL; ///< Number of periods since time reference (last measures (if provided) or last reset)\n\n double frequency = 0.0; ///< Estimated timer frequency since time reference (last measures (if provided) or last reset)\n\n };\n\n \n\n // ---\n\n \n\n /// @class Timer\n\n /// @brief Double clocked synchronization timer\n\n /// @warning - The isSupported() method should be verified for each clock type, before trying to use them with the timer.\n\n /// - For a maximum stability of the clocks, the thread using the timer can set its CPU affinity (see <hardware/process_affinity.h>)\n\n template <typename _ClockType = HighResolutionClock, // main clock used by timer\n\n typename _AuxClockType = HighResolutionAuxClock, // secondary clock used by timer when main clock has unexpected results\n\n DelayHandling _DelayAction = DelayHandling::compensate, // timer behaviour when lateness occurs\n\n bool _UseActivePolling = true> // true: real-time polling (100% CPU usage) / false: thread sleep between checks (less CPU intensive, but not real-time/predictable)\n", "file_path": "time/include/time/timer.h", "rank": 17, "score": 74563.11569788551 }, { "content": " /// @brief Log level type\n\n enum class LogLevel: uint32_t {\n\n none = 0u,\n\n verbose = 1u, // only intended for debug builds\n\n debug = 2u, // only intended for debug builds\n\n informative = 3u,\n\n standard = 4u,\n\n critical = 5u\n\n };\n\n\n", "file_path": "system/include/system/logger.h", "rank": 18, "score": 74563.11569788551 }, { "content": " /// @brief Type of window (display mode + decoration)\n\n enum class WindowType : uint32_t {\n\n fullscreen = 0u, ///< Fullscreen display (warning: no x/y position, no resize mode)\n\n borderless = 1u, ///< Borderless window (splash screen or \"fullscreen window\", no resize mode)\n\n bordered = 2u, ///< Popup dialog with no caption\n\n dialog = 3u, ///< Popup dialog with caption + close button\n\n window = 4u ///< Normal window with caption + buttons\n\n };\n", "file_path": "video/include/video/window.h", "rank": 19, "score": 74563.11569788551 }, { "content": " /// @brief Window behavior settings (bit-mask flags)\n\n 
enum class WindowBehavior : uint32_t {\n\n none = 0u, ///< no widgets\n\n topMost = 0x1u, ///< window displayed above all non-topmost windows\n\n aboveTaskbar = 0x2u, ///< window displayed above taskbar / status bar (useful for borderless windows)\n\n globalContext = 0x4u ///< Use global context for display/paint/rendering (faster, but shared between all windows with this option).\n\n /// Only recommended for single window apps, for main display window only, or for duplicate/mirrored windows\n\n };\n", "file_path": "video/include/video/window.h", "rank": 20, "score": 74563.11569788551 }, { "content": " /// @brief Delegate usage mode\n\n enum class DelegateMode: uint32_t {\n\n functionPointer = 0u, ///< C-style function pointers: faster (less overhead), less convenient and less possibilities\n\n lambda = 1u ///< std::function and lambdas: easier, more possibilities, but slower\n\n };\n\n \n\n /// @class Delegate\n\n /// @brief Function delegate (function pointer / std::function / lambda)\n\n /// @warning The delegate function signature (_Params) should never contain 'moved' argument types (Type&&) : \n\n /// each moved argument would be 'moved' as many times as the number of registrations -> crash or undefined behaviour.\n\n template <typename _LockType = std::mutex, ///< Internal mutex type.\n\n DelegateMode _Mode = DelegateMode::lambda,///< Type of registrations: function pointers or std::function/lambda.\n\n bool _IsDeferrable = true, ///< Allow registrations/revocations to be deferred (true=deferrable; false=faster)\n\n /// When allowed, registrations/revocations can be triggered in delegate calls.\n\n typename _ReturnType = void, typename ... _Params> ///< Signature of registered functions.\n", "file_path": "pattern/include/pattern/delegate.h", "rank": 21, "score": 74563.11569788551 }, { "content": " /// @brief Window visibility state command param\n\n enum class VisibilityCommand : uint32_t {\n\n hide = 0u, ///< Hide window\n\n show = 1u, ///< Show window (active)\n\n showInactive = 2u, ///< Show window (inactive)\n\n restore = 3u, ///< Show window with original size/position (active, not minimized or maximized anymore)\n\n minimize = 4u, ///< Minimize window (inactive)\n\n maximize = 5u ///< Maximize window (active)\n\n };\n", "file_path": "video/include/video/window.h", "rank": 22, "score": 74563.11569788551 }, { "content": " /// @brief Log content category\n\n enum class LogCategory: uint32_t {\n\n none = 0u,\n\n TRACE = 1u,\n\n INFO = 2u,\n\n EVENT = 3u,\n\n WARNING = 4u,\n\n ERROR = 5u\n\n };\n\n _P_SERIALIZABLE_ENUM_BUFFER(LogCategory, TRACE, INFO, EVENT, WARNING, ERROR);\n\n\n\n\n\n // -- log utility --\n\n\n\n /// @class Logger\n\n /// @brief Simple log writing utility. 
Not protected against concurrent access (can be used in Locked pattern for such situations)\n\n template <typename _OutputFormatter ///< Output formatter: any movable class that implements:\n\n = BasicLogFormatter /// void write(LogLevel, LogCategory, const char* orig, uint32_t line, const char* format, va_list&),\n\n <std::ofstream,size_t{ 255u }, /// void write(LogLevel, LogCategory, const wchar_t* orig, uint32_t line, const wchar_t* format, va_list&),\n\n DateFormat::year_mm_dd, /// void flush().\n\n TimeFormat::h24_mm_ss_milli> > \n", "file_path": "system/include/system/logger.h", "rank": 23, "score": 74563.11569788551 }, { "content": " /// @brief Show/hide/raw cursor mode\n\n enum class CursorMode : uint32_t {\n\n visible = 0u, ///< Visible cursor + no constraints + normal move events (position)\n\n clipped = 1u, ///< Visible cursor + limit to client area + normal move events (position) -- should only be used for one window\n\n hidden = 2u, ///< Hidden cursor + no constraints + normal move events (position)\n\n hiddenRaw = 3u ///< Hidden cursor + limit to client area + receive raw motion events (delta) -- should only be used for one window\n\n };\n", "file_path": "video/include/video/window.h", "rank": 24, "score": 74563.11569788551 }, { "content": " /// @brief Window resize mode (bit-mask flags)\n\n enum class ResizeMode : uint32_t {\n\n fixed = 0u, ///< fixed width, fixed height (does not allow maximization)\n\n resizableX = 0x1u, ///< resizable width \n\n resizableY = 0x2u, ///< resizable height\n\n resizable = 0x3u, ///< resizable width/height (== resizableX|resizableY)\n\n homothety = 0x4u ///< homothety (ratio kept when resized)\n\n };\n\n /// @brief Pixel position (screen coordinates or window coordinates)\n\n struct PixelPosition {\n\n int32_t x;\n\n int32_t y;\n\n };\n\n /// @brief Pixel size (screen size or window size)\n\n struct PixelSize {\n\n uint32_t width;\n\n uint32_t height;\n\n };\n\n\n\n // ---\n\n \n", "file_path": "video/include/video/window.h", "rank": 25, "score": 74563.11569788551 }, { "content": " /// @brief Resource type\n\n enum class Category : uint32_t {\n\n icon = 0u, ///< Image displayed in caption / taskbar\n\n cursor = 1u, ///< Custom mouse pointer\n\n colorBrush = 2u, ///< Painting color brush (background / text)\n\n menu = 3u ///< Window menu handle\n\n };\n\n \n\n /// @brief Initialize window resource container\n\n WindowResource(WindowResourceHandle handle, Category type) : _handle(handle), _type(type) {}\n\n /// @brief Destroy window resource\n\n ~WindowResource() noexcept;\n\n \n\n WindowResource(const WindowResource&) = delete;\n\n WindowResource(WindowResource&& rhs) noexcept : _handle(rhs._handle), _type(rhs._type) { rhs._handle = (WindowResourceHandle)0; }\n\n WindowResource& operator=(const WindowResource&) = delete;\n\n WindowResource& operator=(WindowResource&& rhs) noexcept { \n\n _handle = rhs._handle; _type = rhs._type; rhs._handle = (WindowResourceHandle)0; \n\n return *this; \n\n }\n\n \n", "file_path": "video/include/video/window_resource.h", "rank": 26, "score": 74563.11569788551 }, { "content": " /// @brief Mouse button identifiers + shift/control modifiers for mouse events\n\n /// @warning internal dev: keep values aligned with button indexes returned by Window impl\n\n enum class MouseButton : uint32_t {\n\n left = 0u,\n\n middle = 1u,\n\n right = 2u,\n\n button4 = 3u,\n\n button5 = 4u\n\n };\n\n bool isMouseButtonPressed(uint8_t activeKeys, MouseButton button) noexcept; ///< Verify button status from event bit-map\n\n \n\n /// 
@brief Mouse or touch-screen event handling function pointer (click/move/wheel/...):\n\n /// * sender: event origin\n\n /// * event: mouse event type\n\n /// * x: X-coord of the mouse pointer (relative to client area top-left corner) / delta-X (rawMotion)\n\n /// * y: Y-coord of the mouse pointer (relative to client area top-left corner) / delta-Y (rawMotion)\n\n /// * index: value specific to event type (see MouseEvent enum)\n\n /// * activeKeys: bit-map with mouse keys that are currently pressed -> call 'isMouseButtonPressed(activeKeys, <MouseButton>)'\n\n /// * return: true to prevent default system processing, false to allow normal processing\n\n /// @warning Handler should NOT throw exceptions -> use try/catch in it\n\n using MouseEventHandler = bool (*)(Window* sender, MouseEvent event, int32_t x, int32_t y, int32_t index, uint8_t activeKeys);\n\n \n\n \n\n // -- general window/hardware events --\n\n \n", "file_path": "video/include/video/window_events.h", "rank": 27, "score": 73099.8893582226 }, { "content": " /// @brief Symbol displayed in message box\n\n enum class IconType : uint32_t {\n\n none = 0,\n\n info,\n\n question,\n\n warning,\n\n error\n\n };\n", "file_path": "video/include/video/message_box.h", "rank": 28, "score": 73094.66682508987 }, { "content": " /// @brief Window position/size event types\n\n enum class PositionEvent : uint32_t {\n\n none = 0u, ///< no event\n\n sizePositionTrack = 1u, ///< window size/position is being changed (resizing/moving):\n\n /// - posX/posY = tracked client-area position X/Y (absolute);\n\n /// - sizeX/sizeY = tracked client-area width/height.\n\n sizePositionChanged = 2u ///< window size/position changed (resized/maximized/restored/moved):\n\n /// - posX/posY = new client-area position X/Y (absolute);\n\n /// - sizeX/sizeY = new client-area width/height.\n\n };\n\n\n\n /// @brief Window size/position event handling function pointer (move/resize/maximize/...):\n\n /// * sender: event origin\n\n /// * event: window size/position event type\n\n /// * posX/posY: horizontal/vertical positions (see PositionEvent enum)\n\n /// * sizeX/sizeY: horizontal/vertical sizes or limits (see PositionEvent enum)\n\n /// * bool return: true to prevent default size processing, false to allow normal processing\n\n /// @warning Handler should NOT throw exceptions -> use try/catch in it\n\n using PositionEventHandler = bool (*)(Window* sender, PositionEvent event, int32_t posX, int32_t posY, uint32_t sizeX, uint32_t sizeY);\n\n }\n\n}\n", "file_path": "video/include/video/window_events.h", "rank": 29, "score": 73094.66682508987 }, { "content": " /// @brief Type of specifications to process ('all' for complete CPU specs, others for lazy modes)\n\n enum class SpecMode: uint32_t {\n\n none = 0,\n\n vendor = 0x1,\n\n brandName = 0x2,\n\n cores = 0x4,\n\n registers = 0x8,\n\n all = 0xF\n\n };\n\n \n\n /// @brief Verify all the CPU specifications requested in 'mode' (SpecMode bit-map) and store results\n\n explicit CpuSpecs(SpecMode mode = SpecMode::all);\n\n CpuSpecs(const CpuSpecs&) = default;\n\n CpuSpecs(CpuSpecs&&) noexcept = default;\n\n CpuSpecs& operator=(const CpuSpecs&) = default;\n\n CpuSpecs& operator=(CpuSpecs&&) noexcept = default;\n\n\n\n // -- getters --\n\n \n\n constexpr inline CpuVendor vendorId() const noexcept { return this->_vendorId; }\n\n constexpr inline pandora::system::CpuArchitecture archType() const noexcept { return pandora::system::getCpuArchitecture(); }\n", "file_path": "hardware/include/hardware/cpu_specs.h", "rank": 30, "score": 
73094.66682508987 }, { "content": " /// @brief Keyboard event types\n\n enum class KeyboardEvent : uint32_t {\n\n none = 0u, ///< no event\n\n keyDown = 1u, ///< any key is pressed (returning true will prevent associated charInput event)\n\n keyUp = 2u, ///< any key is released\n\n altKeyDown = 3u, ///< any key is pressed with ALT key active (returning true will prevent associated charInput event)\n\n altKeyUp = 4u, ///< any key is released with ALT key active\n\n activateMenu = 5u, ///< system key pressed to activate menu bar (F10 on Windows)\n\n charInput = 6u ///< after keyDown/altKeyDown (if handler did not return true), the associated textual character is generated,\n\n /// using shift/alt-gr/accent (only if the key is pressed and corresponds to an actual character).\n\n };\n", "file_path": "video/include/video/window_events.h", "rank": 31, "score": 73094.66682508987 }, { "content": " /// @brief Standard system mouse pointers\n\n enum class SystemCursor : uint32_t {\n\n pointer = 0u, ///< Standard pointer\n\n hand = 1u, ///< Hand pointer (for link/button/action)\n\n wait = 2u, ///< Wait cursor (hourglass / clock)\n\n waitPointer = 3u, ///< Pointer with wait cursor (hourglass / clock)\n\n textIBeam = 4u, ///< I-beam pointer (text selection \"I\")\n\n help = 5u, ///< Help pointer (question mark / book)\n\n forbidden = 6u, ///< Invalid/forbidden (slashed circle / cross)\n\n crosshair = 7u, ///< Crosshair\n\n arrowV = 8u, ///< Vertical arrow\n\n doubleArrowV = 9u, ///< Double-pointed arrow (vertical)\n\n doubleArrowH = 10u,///< Double-pointed arrow (horizontal)\n\n doubleArrow_NE_SW = 11u,///< Double-pointed arrow (oblique North-East -> South-West)\n\n doubleArrow_NW_SE = 12u,///< Double-pointed arrow (oblique North-West -> South-East)\n\n fourPointArrow = 13u ///< Four-pointed arrow\n\n };\n\n \n\n // ---\n\n \n", "file_path": "video/include/video/window_resource.h", "rank": 32, "score": 73094.66682508987 }, { "content": " /// @brief Window/hardware event types\n\n enum class WindowEvent : uint32_t {\n\n none = 0u, ///< no event\n\n windowClosed = 1u, ///< request for closing window (return true to prevent it, or false to allow it).\n\n stateChanged = 2u, ///< window state has changed (visibility/activity):\n\n /// - status = cast to 'WindowActivity'.\n\n suspendResume = 3u, ///< system suspend/resume operation:\n\n /// - status = boolean: 1 (suspend) or 0 (resume).\n\n deviceInterfaceChange = 4u, ///< device interface added/removed (gamepad/joystick/...):\n\n /// - status = boolean: 1 (added) or 0 (removed).\n\n menuCommand = 5u, ///< menu item selected: \n\n /// - status = boolean: 1 (accelerator key) or 0 (menu command item);\n\n /// - posX = if menu command item: menu identifier;\n\n /// - posY = if accelerator key: character code.\n\n monitorChanged = 6u, ///< window DPI has changed or window has moved to a different monitor: \n\n /// - status = content X scaling factor multiplied by 100 (percentage);\n\n /// - posX/posY = suggested adjusted client area width/height;\n\n /// - data = native monitor handle (if monitor has changed) or NULL (if same monitor).\n\n inputLangChanged = 7u, ///< keyboard input language has changed for current window:\n\n /// - status = locale code page;\n\n /// - posX/poxY = locale primary/sublang ID;\n\n /// - data = native language handle/ID.\n\n dropFiles = 8u ///< end of a drag & drop operation:\n\n /// - status = length of 'data' (number of file paths);\n\n /// - posX/posY = drop position X/Y, relative to client area;\n\n /// - data = array of file 
paths (cast to 'wchar_t**' on Windows, cast to 'char**' on other systems) - do NOT free buffer!\n\n };\n", "file_path": "video/include/video/window_events.h", "rank": 33, "score": 73094.66682508987 }, { "content": " /// @brief Menu item category\n\n enum class ItemType : uint32_t {\n\n text = 0u,\n\n checkboxOn = 1u,\n\n checkboxOff= 2u,\n\n radioOn = 3u,\n\n radioOff = 4u\n\n };\n\n\n\n /// @brief Create main menubar (isSubMenu==false) or popup sub-menu (isSubMenu==true)\n\n WindowMenu(bool isSubMenu);\n\n WindowMenu(const WindowMenu&) = delete;\n\n WindowMenu(WindowMenu&& rhs) noexcept : _handle(rhs._handle), _isSubMenu(rhs._isSubMenu) { rhs._handle = (MenuHandle)0; }\n\n WindowMenu& operator=(const WindowMenu&) = delete;\n\n WindowMenu& operator=(WindowMenu&& rhs) noexcept { \n\n _handle = rhs._handle; _isSubMenu = rhs._isSubMenu; rhs._handle = (MenuHandle)0; \n\n return *this; \n\n }\n\n ~WindowMenu() noexcept;\n\n\n\n inline bool isSubMenu() const noexcept { return this->_isSubMenu; } ///< Verify if instance is sub-menu (true) or main menu\n", "file_path": "video/include/video/window_resource.h", "rank": 34, "score": 73094.66682508987 }, { "content": " /// @brief Serializable value types\n\n enum class Type : uint32_t {\n\n integer = 0u, ///< integer number value\n\n number = 1u, ///< floating-point number value\n\n boolean = 2u, ///< boolean value\n\n text = 3u, ///< string value (text, character, int64, ...)\n\n arrays = 4u, ///< array of values\n\n object = 5u ///< object value (map containing sub-values)\n\n };\n\n using Array = std::vector<SerializableValue>; ///< Array value type\n\n using Object = std::unordered_map<std::string, SerializableValue>; ///< Object value type\n\n \n\n SerializableValue() noexcept { _value.integer = 0; }\n\n SerializableValue(const SerializableValue& rhs) { _copy(rhs); }\n\n SerializableValue(SerializableValue&& rhs) noexcept;\n\n SerializableValue& operator=(const SerializableValue& rhs) { _destroy(); _copy(rhs); return *this; }\n\n SerializableValue& operator=(SerializableValue&&) noexcept;\n\n ~SerializableValue() noexcept { _destroy(); }\n\n\n\n bool operator==(const SerializableValue& rhs) const noexcept;\n\n bool operator!=(const SerializableValue& rhs) const noexcept { return !operator==(rhs); }\n", "file_path": "io/include/io/key_value_serializer.h", "rank": 35, "score": 73094.66682508987 }, { "content": " /// @brief Window visibility/activity\n\n enum class WindowActivity : uint32_t {\n\n hidden = 0u, ///< hidden or minimized/inactive\n\n inactive = 1u, ///< visible/inactive\n\n active = 2u, ///< visible/active\n\n };\n\n\n\n /// @brief Window/hardware event handling function pointer (close/activate/suspend/drop/...):\n\n /// * sender: event origin\n\n /// * event: window/hardware event type\n\n /// * status: event status -or- info about other params (see WindowEvent enum)\n\n /// * posX/posY: horizontal/vertical position -or- primary/secondary ID, specific to event type (see WindowEvent enum)\n\n /// * data: handle/pointer, specific to event type (see WindowEvent enum)\n\n /// * bool return: true to prevent default system processing, false to allow normal processing\n\n /// @warning Handler should NOT throw exceptions -> use try/catch in it\n\n using WindowEventHandler = bool (*)(Window* sender, WindowEvent event, uint32_t status, int32_t posX, int32_t posY, void* data);\n\n \n\n \n\n // -- window position/size events --\n\n \n", "file_path": "video/include/video/window_events.h", "rank": 36, "score": 73094.66682508987 }, { "content": " /// 
@brief Key transition types\n\n enum class KeyTransition : uint32_t {\n\n same = 0u, ///< the key was already pressed\n\n down = 1u, ///< the key has been pressed\n\n up = 2u ///< the key has been released\n\n };\n\n \n\n /// @brief Convert virtual key to associated character on current system (will not use shift/alt/accent modifiers!)\n\n /// @returns Basic unicode character value (or 0 if not a character key)\n\n# ifdef _WINDOWS\n\n wchar_t virtualKeyToChar(uint32_t keyCode) noexcept;\n\n# else\n\n uint32_t virtualKeyToChar(uint32_t keyCode) noexcept;\n\n# endif\n\n\n\n /// @brief Keyboard event handling function pointer (press/release):\n\n /// * sender: event origin\n\n /// * event: keyboard event type\n\n /// * keyCode: - virtual key code (keyChange/altKeyChange/activateMenu);\n\n /// NB: use <video/window_keycodes.h> definitions to identify key codes.\n\n /// - character value (charInput).\n\n /// * change: - type of transition (keyChange/altKeyChange/activateMenu) -> cast to 'KeyTransition'.\n\n /// - number of repeats (charInput).\n\n /// * return: true to prevent default system processing, false to allow normal processing\n\n /// @warning Handler should NOT throw exceptions -> use try/catch in it\n\n using KeyboardEventHandler = bool (*)(Window* sender, KeyboardEvent event, uint32_t keyCode, uint32_t change);\n\n \n\n \n\n // -- mouse events --\n\n \n", "file_path": "video/include/video/window_events.h", "rank": 37, "score": 73094.66682508987 }, { "content": " /// @brief Mouse event types\n\n enum class MouseEvent : uint32_t {\n\n none = 0u, ///< no event\n\n mouseMove = 1u, ///< mouse move detected in client area (no index) -- not received in CursorMode::hiddenRaw\n\n mouseLeave = 2u, ///< mouse leaves client area (no index/activeKeys)\n\n buttonDown = 3u, ///< button pressed: index = button index (can be cast to 'MouseButton')\n\n buttonUp = 4u, ///< button released: index = button index (can be cast to 'MouseButton')\n\n mouseWheelV = 5u, ///< mouse wheel vertical move: index = distance in multiples of 'mouseWheelDelta()' (forward > 0, backwards < 0)\n\n mouseWheelH = 6u, ///< mouse wheel horizontal move: index = distance in multiples of 'mouseWheelDelta()' (right > 0, left < 0)\n\n rawMotion = 7u ///< raw mouse X/Y delta (normalized) -- only received in CursorMode::hiddenRaw\n\n };\n\n int32_t mouseWheelDelta() noexcept; ///< Distance unit for mouse wheel movements\n\n \n", "file_path": "video/include/video/window_events.h", "rank": 38, "score": 73094.66682508987 }, { "content": " /// @brief Type of cursor position to get/set\n\n enum class CursorPositionType : uint32_t {\n\n relative = 0u, ///< Relative to window client coordinates\n\n absolute = 1u ///< Absolute position on screen\n\n };\n", "file_path": "video/include/video/window.h", "rank": 39, "score": 73094.66682508987 }, { "content": " /// @brief Standard system icons\n\n enum class SystemIcon : uint32_t {\n\n app = 0u, ///< Default application icon\n\n info = 1u, ///< Information icon\n\n question = 2u, ///< Question mark icon\n\n warning = 3u, ///< Warning icon\n\n error = 4u, ///< Error icon\n\n security = 5u, ///< Security icon\n\n system = 6u ///< System logo\n\n };\n\n\n", "file_path": "video/include/video/window_resource.h", "rank": 40, "score": 73094.66682508987 }, { "content": " /// @brief Swap-chain presentation mode (when swapping buffers)\n\n enum class PresentMode : uint32_t {\n\n immediate = 0u, ///< Immediate display (no wait for vertical blanking): most reactive, may cause screen tearing\n\n /// 
(supported by most GPUs; will use fifo mode instead if not).\n\n fifo = 1u, ///< Vsync / FIFO mode (wait for vertical blanking): no tearing, may cause latency (supported by all GPUs).\n\n /// Note: - allows triple buffering when flip-swap/mailbox is enabled and 3 buffers are created ('framebufferCount').\n\n /// - flip-swap/mailbox is enabled by default if supported (D3D 11.2+ / Vulkan / OpenGL if enabled in driver)\n\n /// and if neither TargetOutputFlag::partialOutput nor TargetOutputFlag::disableFlipSwap are set.\n\n };\n\n \n\n // ---\n\n\n", "file_path": "video/include/video/common_types.h", "rank": 41, "score": 73094.66682508987 }, { "content": " __x11_XDefineCursor DefineCursor_ = nullptr;\n", "file_path": "hardware/include/hardware/_private/_libraries_x11.h", "rank": 42, "score": 72491.11532956171 }, { "content": " int errorBase = 0;\n", "file_path": "hardware/include/hardware/_private/_libraries_x11.h", "rank": 43, "score": 72484.24305724961 }, { "content": " bool useRefraction = false;\n", "file_path": "video/tools/3D_scene/include/options.h", "rank": 44, "score": 72416.7211759675 }, { "content": " bool useVsync = false;\n", "file_path": "video/tools/3D_scene/include/options.h", "rank": 45, "score": 72416.7211759675 }, { "content": " bool useParticles = false;\n", "file_path": "video/tools/3D_scene/include/options.h", "rank": 46, "score": 72416.7211759675 }, { "content": " /// @brief Advanced swap-chain target output params (bit-mask flags)\n\n /// @warning Some of these features are not supported by some APIs (OpenGL) or by old API versions (Direct3D 11.0, OpenGL 4.1...).\n\n /// They'll be ignored if they're not supported.\n\n enum class TargetOutputFlag : uint32_t {\n\n none = 0x0u, ///< No option\n\n variableRefresh = 0x1u, ///< Variable refresh rate display in fullscreen/borderless (tearing).\n\n stereo = 0x2u, ///< Stereo rendering mode, for 3D devices/glasses (only usable in fullscreen mode).\n\n shaderInput = 0x10u,///< Allow using swap-chain output as shader input data.\n\n disableFlipSwap = 0x20u,///< Prevent swap-chain from using flip-swap/mailbox mode if supported:\n\n /// - forces vsync to always block calling thread during buffer swap, even if framebufferCount > 2.\n\n /// - prevents window from being associated with DWM on Windows.\n\n /// - allows partial output swap-chains to be created with Direct3D -> multiple swap-chains allowed for the same window.\n\n /// - warning: a less efficient swapping method will be used instead.\n\n swapNoDiscard = 0x40u ///< Keep existing data after swapping buffers if supported:\n\n /// - GPU resources not released as efficiently -> should only be used if frames aren't completely redrawn.\n\n /// - Required when previous data must be kept (otherwise, the buffer would contain garbage after being swapped).\n\n /// - Can also be useful if 'clearView' is called after every swap call (clearing the buffer also releases GPU resources).\n\n };\n\n\n", "file_path": "video/include/video/common_types.h", "rank": 47, "score": 71733.07046538492 }, { "content": " /// @brief Task processing function usage for thread pool\n\n enum class ThreadRunnerMode : uint32_t {\n\n single = 0, ///< One common function, passed on construction and called for every job (more efficient)\n\n perJob = 1 ///< A different function can be provided for each job (instead of default function, if provided)\n\n };\n", "file_path": "thread/include/thread/thread_pool.h", "rank": 48, "score": 71728.14197821723 }, { "content": " /// @brief Type of task processing function in 
thread pool\n\n enum class TaskRunnerType : uint32_t {\n\n functionPointer = 0, ///< C pointer to task processing function\n\n lambda = 1 ///< std::function or lambda\n\n };\n\n\n\n /// ---\n\n\n\n /// @class ThreadPool\n\n /// @brief Fixed-size pool of threads, ready to process some asynchronous jobs on demand.\n\n /// @description Simple pool of threads with a pre-defined size.\n\n /// The threads are ready to process some jobs asynchronously, without needing to be created (they already exist).\n\n /// The tasks will be started by any thread in the pool in the order of their arrival.\n\n template <typename _JobParamType, // Type of data container to process (must be movable)\n\n ThreadRunnerMode _Mode = ThreadRunnerMode::single, // one function passed on construction / one per job\n\n TaskRunnerType _FunctionType = TaskRunnerType::functionPointer> // function pointer / lambda\n", "file_path": "thread/include/thread/thread_pool.h", "rank": 49, "score": 71728.14197821723 }, { "content": " /// @brief Memory allocation type for memory pool\n\n enum class MemoryPoolAllocation: uint32_t {\n\n automatic = 0,\n\n onStack = 1,\n\n onHeap = 2,\n\n };\n\n\n\n# define __P_IS_HEAP_POOL() (_Alloc==MemoryPoolAllocation::onHeap || (_Alloc==MemoryPoolAllocation::automatic && (_BytesCapacity + _GuardBandSize*2u) > 8000u))\n\n\n\n /// ---\n\n \n\n /// @class MemoryPool\n\n /// @brief Fixed-size pre-allocated memory pool, for efficient real-time memory management.\n\n template <size_t _BytesCapacity, ///< Usable allocated pool size\n\n size_t _GuardBandSize = size_t{ 0 }, ///< Size of each security guard band (before/after allocated pool)\n\n MemoryPoolAllocation _Alloc = MemoryPoolAllocation::automatic, ///< Allocation type: automatic/onHeap recommended\n\n bool _DoSizeCheck = true> ///< true: verify that commands aren't out of range (recommended) / false: skip verifications (do them externally)\n", "file_path": "memory/include/memory/memory_pool.h", "rank": 50, "score": 71728.14197821723 }, { "content": " /// @brief CPU register memory part\n\n enum class CpuRegisterPart : uint32_t {\n\n eax = 0,\n\n ebx = 1,\n\n ecx = 2,\n\n edx = 3\n\n };\n\n constexpr inline size_t CpuRegisterPart_size() noexcept { return static_cast<size_t>(CpuRegisterPart::edx) + static_cast<size_t>(1u); }\n\n \n\n // ---\n\n \n\n# if _SYSTEM_CPU_ARCH == _SYSTEM_CPU_ARCH_ARM\n\n# define __P_CPUID_MASK uint64_t\n\n# else\n\n# define __P_CPUID_MASK uint32_t\n\n# endif\n\n \n", "file_path": "hardware/include/hardware/cpuid_property_location.h", "rank": 51, "score": 70453.28536619974 }, { "content": " /// @brief Standard file system directories\n\n enum class FileSystemLocation: uint32_t {\n\n // user data directories\n\n home = 0u, ///< User home directory (may be application-specific on embedded/mobile systems)\n\n desktop, ///< User desktop directory (or home directory, if system doesn't have a desktop)\n\n documents, ///< User document files directory\n\n pictures, ///< User image files directory (or user documents, if system doesn't have a picture directory)\n\n music, ///< User audio/music files directory (or user documents, if system doesn't have a music directory)\n\n movies, ///< User video/movie files directory (or user documents, if system doesn't have a movie directory)\n\n downloads, ///< User downloaded files directory (may be application-specific on embedded/mobile systems)\n\n \n\n // application directories\n\n applications, ///< User installed application links directory (right access is platform-specific)\n\n 
sharedApps, ///< System-wide application links directory (may be the same as 'applications' on some systems)\n\n appMenu, ///< Application menu directories (start menu, ...)\n\n fonts, ///< System directories containing available fonts (read-only)\n\n\n\n // app data directories\n\n appData, ///< Directory where persistent application-specific data can be written (profiles, settings, ...)\n\n localAppData, ///< Directory where local application data can be written\n\n appConfig, ///< Directory where application-specific configuration files for current user can be stored\n", "file_path": "io/include/io/file_system_locations.h", "rank": 52, "score": 70453.28536619974 }, { "content": "inline void printReturn() {\n\n printf(\"> Press ENTER to return to menu...\\n\"); \n\n getchar();\n", "file_path": "logic/tools/search_sort_benchmark/include/display.h", "rank": 53, "score": 70212.5650236348 }, { "content": " Atom XdndActionCopy;\n", "file_path": "hardware/include/hardware/_private/_libraries_x11.h", "rank": 54, "score": 70208.0599749211 }, { "content": " __x11_XGetErrorText GetErrorText_ = nullptr;\n", "file_path": "hardware/include/hardware/_private/_libraries_x11.h", "rank": 55, "score": 70205.90876149318 }, { "content": " __x11_XSetErrorHandler SetErrorHandler_ = nullptr;\n", "file_path": "hardware/include/hardware/_private/_libraries_x11.h", "rank": 56, "score": 70205.90876149318 }, { "content": " bool useMiniMap = false;\n", "file_path": "video/tools/3D_scene/include/options.h", "rank": 57, "score": 70140.50923689644 }, { "content": " bool useMotionBlur = false;\n", "file_path": "video/tools/3D_scene/include/options.h", "rank": 58, "score": 70140.50923689644 }, { "content": " /// @brief Type of path\n\n enum class FileSystemPathType: uint32_t {\n\n absolute = 0,\n\n relative = 1\n\n };\n", "file_path": "io/include/io/file_system_io.h", "rank": 59, "score": 69261.17248879294 }, { "content": " /// @brief Type of entry targeted by a path\n\n enum class FileSystemEntryType: uint32_t {\n\n unknown = 0,\n\n directory = 1,\n\n file = 2,\n\n symlink = 3,\n\n socket = 4\n\n };\n", "file_path": "io/include/io/file_system_io.h", "rank": 60, "score": 69261.17248879294 }, { "content": " enum class ApiChangeType : uint32_t {\n\n rendererChange = 0, // Direct3D/OpenGL/OpenGLES\n\n vsyncChange = 1, // set vsync on/off\n\n viewportChange = 2, // single/multiple swap-chains\n\n monitorChange = 3 // content scale change or HDR on/off\n\n };\n\n\n", "file_path": "video/tools/3D_scene/include/menu_manager.h", "rank": 61, "score": 69261.17248879294 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", "file_path": "system/test/main.cpp", "rank": 62, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", 
"file_path": "time/test/main.cpp", "rank": 63, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", "file_path": "io/test/main.cpp", "rank": 64, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", "file_path": "pattern/test/main.cpp", "rank": 65, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", "file_path": "memory/test/main.cpp", "rank": 66, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", "file_path": "logic/test/main.cpp", "rank": 67, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", "file_path": "video/test/main.cpp", "rank": 68, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", "file_path": "hardware/test/main.cpp", "rank": 69, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nMain execution of test cases\n\n*******************************************************************************/\n\n#include \"gtest/gtest.h\"\n\n\n\nint main(int 
argc, char** argv) {\n\n ::testing::InitGoogleTest(&argc, argv);\n\n return RUN_ALL_TESTS();\n\n}\n", "file_path": "thread/test/main.cpp", "rank": 70, "score": 60318.52692721448 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nImplementation included in renderer.cpp\n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#if defined(_WINDOWS) && defined(_VIDEO_D3D11_SUPPORT)\n\n// includes + namespaces: in renderer.cpp\n\n\n\n\n\n// -- create/compile shaders -- ------------------------------------------------\n\n\n\n# ifdef _P_VIDEO_SHADER_COMPILERS\n\n // Get D3D11 shader model ID\n\n static const char* __getShaderModel(ShaderType type) noexcept {\n\n switch (type) {\n\n case ShaderType::vertex: return \"vs_5_0\";\n\n case ShaderType::tessCtrl: return \"hs_5_0\";\n\n case ShaderType::tessEval: return \"ds_5_0\";\n\n case ShaderType::geometry: return \"gs_5_0\";\n\n case ShaderType::fragment: return \"ps_5_0\";\n", "file_path": "video/src/d3d11/shader.hpp", "rank": 71, "score": 58795.044283207884 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nImplementation included in renderer.cpp\n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#if defined(_VIDEO_VULKAN_SUPPORT)\n\n// includes + namespaces: in renderer.cpp\n\n\n\n\n\n// -- create/compile shaders -- ------------------------------------------------\n\n\n\n# ifdef _P_VIDEO_SHADER_COMPILERS\n", "file_path": "video/src/vulkan/shader.hpp", "rank": 72, "score": 58795.044283207884 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nImplementation included in renderer.cpp\n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#if defined(_WINDOWS) && defined(_VIDEO_D3D11_SUPPORT)\n\n// includes + namespaces: in renderer.cpp\n\n\n\n\n\n// -----------------------------------------------------------------------------\n\n// static_buffer.h\n\n// -----------------------------------------------------------------------------\n\n\n\n // Create data buffer (to store data for shader stages)\n\n StaticBuffer::StaticBuffer(Renderer& renderer, BaseBufferType type, size_t bufferByteSize) \n\n : _bufferSize(bufferByteSize), _type(type) {\n\n if (bufferByteSize == 0)\n\n throw std::invalid_argument(\"Buffer: size is 0\");\n\n \n\n D3D11_BUFFER_DESC bufferDescriptor = {};\n", "file_path": "video/src/d3d11/buffers.hpp", "rank": 73, "score": 58795.044283207884 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nIncludes hpp implementations at the end of the file \n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#if defined(_VIDEO_VULKAN_SUPPORT)\n\n# ifdef _WINDOWS\n\n# 
ifndef __MINGW32__\n\n# pragma warning(push)\n\n# pragma warning(disable: 4100) // disable warnings about unused params\n\n# pragma warning(disable: 6387) // disable warnings about legacy file readers\n\n# pragma warning(disable: 26812) // disable warnings about vulkan unscoped enums\n\n# pragma warning(disable: 4701) // disable init warnings about glm memory\n\n# pragma warning(disable: 6001) // disable init warnings about glm memory\n\n# pragma warning(disable: 26451) // disable init warnings about vulkan and glm types\n\n# pragma warning(disable: 26495) // disable init warnings about vulkan and glm types\n\n# endif\n\n# ifndef NOMINMAX\n\n# define NOMINMAX // no min/max macros\n\n# define WIN32_LEAN_AND_MEAN // exclude rare MFC libraries\n", "file_path": "video/src/vulkan/renderer.cpp", "rank": 74, "score": 58795.044283207884 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nIncludes hpp implementations at the end of the file \n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#if defined(_WINDOWS) && defined(_VIDEO_D3D11_SUPPORT)\n\n# ifndef __MINGW32__\n\n# pragma warning(push)\n\n# pragma warning(disable: 4100) // disable warnings about unused params\n\n# endif\n\n# include <cstddef>\n\n# include <cstring>\n\n# include <cmath>\n\n# include <stdexcept>\n\n# include <memory/light_string.h>\n\n\n\n# define NOMINMAX\n\n# define NODRAWTEXT\n\n# define NOGDI\n\n# define NOBITMAP\n", "file_path": "video/src/d3d11/renderer.cpp", "rank": 75, "score": 58795.044283207884 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nImplementation included in renderer.cpp\n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#if defined(_WINDOWS) && defined(_VIDEO_D3D11_SUPPORT)\n\n# ifdef _MSC_VER\n\n# pragma warning(push)\n\n# pragma warning(disable : 6387)\n\n# endif\n\n// includes + namespaces: in renderer.cpp\n\n\n\nstatic constexpr inline const char* __error_resCreationFailed() noexcept { return \"Texture: resource creation failure\"; }\n\nstatic constexpr inline const char* __error_viewCreationFailed() noexcept { return \"Texture: view creation failure\"; }\n\n\n\n\n\n// -- texture params -- --------------------------------------------------------\n\n\n\nuint32_t Texture1DParams::maxMipLevels(uint32_t width) noexcept {\n\n return 1u + (uint32_t)log2f((float)width + 0.01f);\n", "file_path": "video/src/d3d11/texture.hpp", "rank": 76, "score": 58795.044283207884 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nIncludes hpp implementations at the end of the file \n\n(grouped object improves compiler optimizations + reduces executable size)\n\n*******************************************************************************/\n\n#include <cassert>\n\n#include <cstring>\n\n#include <stdexcept>\n\n#include \"io/encoder.h\"\n\n#include \"io/_private/_key_value_serializer_common.h\"\n\n#include \"io/key_value_serializer.h\"\n\n\n\nusing namespace pandora::io;\n\n\n\n\n\n// -- 
SerializableValue -- -----------------------------------------------------\n\n\n\nSerializableValue::SerializableValue(SerializableValue&& rhs) noexcept {\n\n _value.integer = 0;\n\n memcpy((void*)this, (void*)&rhs, sizeof(SerializableValue));\n", "file_path": "io/src/key_value_serializer.cpp", "rank": 77, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Display monitor - Cocoa implementation (Mac OS)\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && defined(__APPLE__)\n\n# include <TargetConditionals.h>\n\n#endif\n\n#if !defined(_WINDOWS) && defined(__APPLE__) && (!defined(TARGET_OS_IPHONE) || !TARGET_OS_IPHONE)\n\n# include <cstdint>\n\n# include <cstdlib>\n\n# include <string>\n\n# include <stdexcept>\n\n# include <vector>\n\n# include \"hardware/_private/_libraries_cocoa.h\"\n\n# include \"hardware/_private/_display_monitor_impl_cocoa.h\"\n\n# include \"hardware/display_monitor.h\"\n\n\n\n using namespace pandora::hardware;\n\n \n\n \n", "file_path": "hardware/src/display_monitor_cocoa.cpp", "rank": 78, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Display monitor - Android implementation\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && defined(__ANDROID__)\n\n# include <cstdint>\n\n# include <string>\n\n# include <stdexcept>\n\n# include <vector>\n\n# include <system/api/android_app.h>\n\n# include \"hardware/_private/_libraries_andr.h\"\n\n# include \"hardware/display_monitor.h\"\n\n\n\n using namespace pandora::hardware;\n\n using pandora::system::AndroidApp;\n\n\n\n\n\n// -- monitor attributes - id/area/description/primary -- ----------------------\n\n\n\n namespace attributes {\n", "file_path": "hardware/src/display_monitor_andr.cpp", "rank": 79, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nIncludes hpp implementations at the end of the file \n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#ifdef _MSC_VER\n\n# define _CRT_SECURE_NO_WARNINGS\n\n#endif\n\n#include <cstddef>\n\n#include <cstdint>\n\n#include <cstdio>\n\n#include <ctime>\n\n#include <cassert>\n\n#include <string>\n\n#include <vector>\n\n#include <errno.h>\n\n#include \"io/_private/_filesystem_api.h\"\n\n#include \"io/_private/_file_time.h\"\n\n#include \"io/file_system_io.h\"\n\n\n", "file_path": "io/src/file_system_io.cpp", "rank": 80, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nImplementation included in renderer.cpp\n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#if defined(_WINDOWS) && defined(_VIDEO_D3D11_SUPPORT)\n\n// includes + namespaces: in renderer.cpp\n\n\n\n\n\n// -- 
camera projection -- -----------------------------------------------------\n\n\n\n // Compute shader projection matrix\n\n void CameraProjection::_computeProjection() noexcept {\n\n _constrainFieldOfView();\n\n float fovRad = DirectX::XMConvertToRadians(this->_fieldOfView);\n\n this->_projection = DirectX::XMMatrixPerspectiveFovLH(fovRad, this->_displayRatio, this->_nearPlane, this->_farPlane);\n\n }\n\n\n\n\n\n// -- camera world/view helpers -- ---------------------------------------------\n", "file_path": "video/src/d3d11/camera_utils.hpp", "rank": 81, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nImplementation included in file_system_io.cpp\n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n// includes + namespaces: in file_system_io.cpp\n\n\n\n\n\n// -- absolute path of current location --\n\n\n\npandora::io::SystemPath FileSystemLocationFinder::currentLocation() {\n\n# ifndef _USE_NATIVE_FILESYSTEM // C++17\n\n std::error_code errorCode;\n\n# ifdef _WINDOWS\n\n pandora::io::SystemPath outPath = std::filesystem::current_path(errorCode).wstring();\n\n# else\n\n pandora::io::SystemPath outPath = std::filesystem::current_path(errorCode).u8string();\n\n# endif\n\n if (!errorCode)\n\n return outPath;\n", "file_path": "io/src/file_system_locations.hpp", "rank": 82, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Display monitor - Wayland implementation (Linux)\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && !defined(__APPLE__) && !defined(__ANDROID__) && defined(_P_ENABLE_LINUX_WAYLAND) && (defined(__linux__) || defined(__linux) || defined(__unix__) || defined(__unix))\n\n# include <cstdint>\n\n# include <cstdlib>\n\n# include <cstring>\n\n# include <string>\n\n# include <stdexcept>\n\n# include <vector>\n\n# include <unistd.h>\n\n# include \"hardware/_private/_libraries_wln.h\"\n\n# include \"hardware/display_monitor.h\"\n\n\n\n using namespace pandora::hardware;\n\n\n\n\n\n// -- monitor attributes - id/area/description/primary -- ----------------------\n\n\n", "file_path": "hardware/src/display_monitor_wln.cpp", "rank": 83, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Display monitor - Win32 implementation (Windows)\n\n*******************************************************************************/\n\n#ifdef _WINDOWS\n\n# ifndef __MINGW32__\n\n# pragma warning(push)\n\n# pragma warning(disable : 26812)\n\n# endif\n\n# include <cstdint>\n\n# include <stdexcept>\n\n# include <vector>\n\n# include \"hardware/_private/_libraries_win32.h\"\n\n# include \"hardware/display_monitor.h\"\n\n\n\n using namespace pandora::hardware;\n\n\n\n\n\n// -- monitor attributes - id/area/description/primary -- ----------------------\n\n\n", "file_path": "hardware/src/display_monitor_win32.cpp", "rank": 84, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 
SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Display monitor - iOS implementation\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && defined(__APPLE__)\n\n# include <TargetConditionals.h>\n\n#endif\n\n#if !defined(_WINDOWS) && defined(__APPLE__) && defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE\n\n# include <cstdint>\n\n# include <string>\n\n# include <stdexcept>\n\n# include <vector>\n\n# include \"hardware/_private/_display_monitor_impl_uikit.h\"\n\n# include \"hardware/display_monitor.h\"\n\n\n\n using namespace pandora::hardware;\n\n \n\n \n\n // -- bindings --\n\n \n", "file_path": "hardware/src/display_monitor_uikit.cpp", "rank": 85, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Display monitor - X11 implementation (Linux/BSD)\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && !defined(__APPLE__) && !defined(__ANDROID__) && !defined(_P_ENABLE_LINUX_WAYLAND) && (defined(__linux__) || defined(__linux) || defined(__unix__) || defined(__unix))\n\n# include <cstdint>\n\n# include <cstdlib>\n\n# include <string>\n\n# include <stdexcept>\n\n# include <set>\n\n# include <vector>\n\n# include <climits>\n\n# include <cmath>\n\n# include <unistd.h>\n\n# include <X11/Xlib.h>\n\n# include \"hardware/_private/_libraries_x11.h\"\n\n# include \"hardware/display_monitor.h\"\n\n\n\n using namespace pandora::hardware;\n\n\n", "file_path": "hardware/src/display_monitor_x11.cpp", "rank": 86, "score": 57377.30552131911 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Window manager + builder - UIKit implementation (iOS)\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && defined(__APPLE__)\n\n# include <TargetConditionals.h>\n\n#endif\n\n#if !defined(_WINDOWS) && defined(__APPLE__) && defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE\n\n# include <stdexcept>\n\n# include \"video/window.h\"\n\n\n\n using namespace pandora::video;\n\n using pandora::hardware::DisplayArea;\n\n //using pandora::hardware::DisplayMonitor;\n\n\n\n\n\n// -- Window Builder - new window -- -------------------------------------------\n\n\n\n // configure window class context + create new window\n\n std::unique_ptr<Window> Window::Builder::create(const window_char* contextName, const window_char* caption, \n", "file_path": "video/src/window/uikit/window_uikit.cpp", "rank": 87, "score": 56054.670489220196 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nLocal hardware specifications analyzer (CPU, monitors, display adapters)\n\n*******************************************************************************/\n\n#include <cstdio>\n\n#include <hardware/cpu_specs.h>\n\n#include <hardware/display_monitor.h>\n\n\n\n#if defined(__ANDROID__)\n\n# include <stdexcept>\n\n# include <android/log.h>\n\n# include <system/api/android_app.h>\n\n# define printf(...) __android_log_print(ANDROID_LOG_INFO, \"-\", __VA_ARGS__)\n\n# ifndef LOGE\n\n# define LOGE(...) 
__android_log_print(ANDROID_LOG_ERROR , \">\", __VA_ARGS__)\n\n# endif\n\n#endif\n\n\n\nusing namespace pandora::hardware;\n\n\n", "file_path": "hardware/tools/hardware_analyzer/src/main.cpp", "rank": 89, "score": 56054.670489220196 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nList common system-specific directories\n\n*******************************************************************************/\n\n#include <cstdio>\n\n#include <io/file_system_locations.h>\n\n\n\n#if defined(__ANDROID__)\n\n# include <stdexcept>\n\n# include <android/log.h>\n\n# include <system/api/android_app.h>\n\n# define printf(...) __android_log_print(ANDROID_LOG_INFO, \"-\", __VA_ARGS__)\n\n# ifndef LOGE\n\n# define LOGE(...) __android_log_print(ANDROID_LOG_ERROR , \">\", __VA_ARGS__)\n\n# endif\n\n#endif\n\n\n\nusing namespace pandora::io;\n\n\n\n// Display list of directories\n", "file_path": "io/tools/system_directories/src/main.cpp", "rank": 90, "score": 56054.670489220196 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Window manager + builder - Cocoa implementation (Mac OS)\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && defined(__APPLE__)\n\n# include <TargetConditionals.h>\n\n#endif\n\n#if !defined(_WINDOWS) && defined(__APPLE__) && (!defined(TARGET_OS_IPHONE) || !TARGET_OS_IPHONE)\n\n# include <stdexcept>\n\n# include \"video/window_keycodes.h\"\n\n# include \"video/window.h\"\n\n\n\n using namespace pandora::video;\n\n using pandora::hardware::DisplayArea;\n\n //using pandora::hardware::DisplayMonitor;\n\n\n\n\n\n// -- Window Builder - new window -- -------------------------------------------\n\n\n\n // configure window class context + create new window\n", "file_path": "video/src/window/cocoa/window_cocoa.cpp", "rank": 91, "score": 56054.670489220196 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Window manager + builder - Wayland implementation (Linux)\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && !defined(__APPLE__) && !defined(__ANDROID__) && defined(_P_ENABLE_LINUX_WAYLAND) && (defined(__linux__) || defined(__linux) || defined(__unix__) || defined(__unix))\n\n# include <stdexcept>\n\n# include \"video/window_keycodes.h\"\n\n# include \"video/window.h\"\n\n\n\n using namespace pandora::video;\n\n using pandora::hardware::DisplayArea;\n\n //using pandora::hardware::DisplayMonitor;\n\n\n\n\n\n// -- Window Builder - new window -- -------------------------------------------\n\n\n\n // configure window class context + create new window\n\n std::unique_ptr<Window> Window::Builder::create(const window_char* contextName, const window_char* caption, \n\n WindowHandle parentWindow) { // throws\n\n if (contextName == nullptr || *contextName == 0)\n", "file_path": "video/src/window/wln/window_wln.cpp", "rank": 92, "score": 56054.670489220196 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "video/src/window/win32/window_win32.cpp", "rank": 93, "score": 56054.670489220196 }, { "content": "IN 
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nImplementation included in key_value_serializer.cpp\n\n(grouped object improves compiler optimizations + reduces executable size)\n\n*******************************************************************************/\n\n// includes: in key_value_serializer.cpp\n\n\n\n\n\n// -- text serialization helpers -- --------------------------------------------\n\n\n\n// copy text value between quotes + escape quote characters (single-line) -- all text values / JSON keys\n\nvoid pandora::io::_copyEscapedTextInQuotes(const char* text, std::string& outBuffer) {\n\n outBuffer += '\"';\n\n if (text != nullptr) {\n\n const char* partBegin = text;\n\n for (const char* it = text; *it; ++it) {\n\n if ( *it == '\"' || *it == '\\\\' || (unsigned char)*it < (unsigned char)0x20u) {\n\n if (it > partBegin) // don't add empty parts\n\n outBuffer += std::string(partBegin, it - partBegin);\n\n \n", "file_path": "io/src/_key_value_serializer_common.hpp", "rank": 94, "score": 56054.670489220196 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nImplementation included in renderer.cpp\n\n(grouped object improves compiler optimizations + greatly reduces executable size)\n\n*******************************************************************************/\n\n#if defined(_WINDOWS) && defined(_VIDEO_D3D11_SUPPORT)\n\n// includes + namespaces: in renderer.cpp\n\n\n\n\n\n// -- texture reader -- --------------------------------------------------------\n\n\n\n void TextureReader::_readMapped1D(const D3D11_MAPPED_SUBRESOURCE& mapped, uint32_t rowBytes, char* outputData) noexcept {\n\n memcpy(outputData, mapped.pData, (size_t)rowBytes);\n\n }\n\n \n\n void TextureReader::_readMapped2D(const D3D11_MAPPED_SUBRESOURCE& mapped, uint32_t rowBytes, uint32_t sliceBytes, char* outputData) noexcept {\n\n if (mapped.RowPitch > rowBytes) { // resource extra row padding\n\n char* outEnd = outputData + (intptr_t)sliceBytes;\n\n for (const char* src = (const char*)mapped.pData; outputData < outEnd; outputData += (intptr_t)rowBytes, src += (intptr_t)mapped.RowPitch)\n\n memcpy(outputData, src, (size_t)rowBytes);\n", "file_path": "video/src/d3d11/texture_reader_writer.hpp", "rank": 95, "score": 56054.670489220196 }, { "content": "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n--------------------------------------------------------------------------------\n\nDescription : Window manager + builder - JNI implementation (Android)\n\n*******************************************************************************/\n\n#if !defined(_WINDOWS) && defined(__ANDROID__)\n\n# include <stdexcept>\n\n# include \"video/window_keycodes.h\"\n\n# include \"video/window.h\"\n\n\n\n using namespace pandora::video;\n\n using pandora::hardware::DisplayArea;\n\n //using pandora::hardware::DisplayMonitor;\n\n\n\n\n\n// -- Window Builder - new window -- -------------------------------------------\n\n\n\n // configure window class context + create new window\n\n std::unique_ptr<Window> Window::Builder::create(const window_char* contextName, const window_char* caption, \n\n WindowHandle parentWindow) { // throws\n\n if (contextName == nullptr || *contextName == 0)\n", "file_path": "video/src/window/andr/window_andr.cpp", "rank": 96, "score": 
56054.670489220196 }, { "content": " static inline enum UikitBoxIconId __toNativeIcon(MessageBox::IconType icon) noexcept {\n\n switch (icon) {\n\n case MessageBox::IconType::info: return UIKIT_BOX_ICON_INFO;\n\n case MessageBox::IconType::question: return UIKIT_BOX_ICON_QUESTION;\n\n case MessageBox::IconType::warning: return UIKIT_BOX_ICON_WARNING;\n\n case MessageBox::IconType::error: return UIKIT_BOX_ICON_ERROR;\n\n default: return UIKIT_BOX_ICON_NONE;\n\n }\n\n }\n\n\n\n // ---\n\n\n\n // show modal message box\n\n MessageBox::Result MessageBox::show(const char* caption, const char* message, MessageBox::ActionType actions, \n\n MessageBox::IconType icon, bool, WindowHandle parent) noexcept {\n\n const char* buttons[3] = { nullptr };\n\n uint32_t length = __MessageBox::toActionLabels(actions, &buttons[0]);\n\n \n\n char* error = nullptr;\n\n uint32_t result = __showMessageBox_uikit(caption, message, __toNativeIcon(icon), &buttons[0], length, \n", "file_path": "video/src/window/uikit/message_box_uikit.cpp", "rank": 98, "score": 38.36776942878792 }, { "content": " static inline enum CocoaBoxIconId __toNativeIcon(MessageBox::IconType icon) noexcept {\n\n switch (icon) {\n\n case MessageBox::IconType::info: return COCOA_BOX_ICON_INFO;\n\n case MessageBox::IconType::question: return COCOA_BOX_ICON_QUESTION;\n\n case MessageBox::IconType::warning: return COCOA_BOX_ICON_WARNING;\n\n case MessageBox::IconType::error: return COCOA_BOX_ICON_ERROR;\n\n default: return COCOA_BOX_ICON_NONE;\n\n }\n\n }\n\n\n\n // ---\n\n\n\n // show modal message box\n\n MessageBox::Result MessageBox::show(const char* caption, const char* message, MessageBox::ActionType actions, \n\n MessageBox::IconType icon, bool isTopMost, WindowHandle) noexcept {\n\n const char* buttons[3] = { nullptr };\n\n uint32_t length = __MessageBox::toActionLabels(actions, &buttons[0]);\n\n \n\n char* error = nullptr;\n\n uint32_t result = __showMessageBox_cocoa(caption, message, __toNativeIcon(icon), &buttons[0], length, \n", "file_path": "video/src/window/cocoa/message_box_cocoa.cpp", "rank": 99, "score": 38.36776942878794 } ]
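Illustrative sketch (not part of any dataset row above): the MemoryPool fragment in the context items decides between stack and heap backing at compile time from a byte-capacity check (the __P_IS_HEAP_POOL() macro). The self-contained C++ below shows that selection pattern with std::conditional_t; TinyPool, its members, and the reuse of the 8000-byte threshold are assumptions made for the example, not the library's actual implementation.

#include <array>
#include <cstddef>
#include <memory>
#include <type_traits>

// Hypothetical illustration: choose stack or heap backing storage at compile time
// from a byte-capacity threshold, mirroring the idea behind __P_IS_HEAP_POOL().
template <size_t BytesCapacity, size_t GuardBandSize = 0>
class TinyPool {
  // Same shape of condition as the snippet's macro: large pools go to the heap.
  static constexpr bool kOnHeap = (BytesCapacity + GuardBandSize * 2u) > 8000u;

  struct StackStorage {
    std::array<unsigned char, BytesCapacity + GuardBandSize * 2u> bytes{};
    unsigned char* data() { return bytes.data(); }
  };
  struct HeapStorage {
    std::unique_ptr<unsigned char[]> bytes =
        std::make_unique<unsigned char[]>(BytesCapacity + GuardBandSize * 2u);
    unsigned char* data() { return bytes.get(); }
  };

  std::conditional_t<kOnHeap, HeapStorage, StackStorage> storage_;

 public:
  unsigned char* base() { return storage_.data() + GuardBandSize; } // skip the front guard band
  static constexpr size_t capacity() { return BytesCapacity; }
  static constexpr bool onHeap() { return kOnHeap; }
};

int main() {
  TinyPool<256> small;   // under the threshold -> backed by a stack array
  TinyPool<16384> large; // over the threshold  -> backed by a heap allocation
  return (!small.onHeap() && large.onHeap()) ? 0 : 1;
}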
C++
attitude_control/src/attitude_controller.cpp
archipela-go/attitude_control
7124366f405efd5eabeaf93f82dcd5ccc6ccf2e8
#include <ros/ros.h>
#include <Eigen/Geometry>
#include <mavros_msgs/AttitudeTarget.h>
#include <mavros_msgs/ActuatorControl.h>
#include <kingfisher_msgs/Drive.h>
#include <sensor_msgs/Imu.h>
#include <tf/transform_datatypes.h>
#include <math.h>

using namespace std;

namespace attitude_controller {

class Node {
 public:
  explicit Node(const ros::NodeHandle& pnh);
  void setpoint_cb(const mavros_msgs::AttitudeTarget::ConstPtr& msg);
  void imu_cb(const sensor_msgs::Imu::ConstPtr& msg);

 private:
  ros::NodeHandle pnh_;
  ros::Subscriber setpoint_sub_;
  ros::Time last_setpoint_time_;
  mavros_msgs::AttitudeTarget last_setpoint_;
  double throttle_;
  Eigen::Quaterniond setpoint_q_;
  bool setpoint_set_;
  ros::Publisher drive_pub_;
  ros::Publisher actuator_pub_;
  ros::Subscriber imu_sub_;
  Eigen::Quaterniond imu_q_;
  double kp_;
  double kd_;
  double ki_;
  double last_error_;
  ros::Time last_error_time_;
};

Node::Node(const ros::NodeHandle& pnh) : pnh_(pnh) {
  setpoint_sub_ = pnh_.subscribe("/att_control/attitude_target", 10, &Node::setpoint_cb, this);
  imu_sub_ = pnh_.subscribe("/mavros/imu/data", 10, &Node::imu_cb, this);
  drive_pub_ = pnh_.advertise<kingfisher_msgs::Drive>("/cmd_drive", 10);
  actuator_pub_ = pnh_.advertise<mavros_msgs::ActuatorControl>("/mavros/actuator_control", 10);
  setpoint_set_ = false;
  kp_ = pnh_.param("kp", kp_, 1.0);
  kd_ = pnh_.param("kd", kd_, 1.0);
  ki_ = pnh_.param("ki", ki_, 1.0);
  last_error_ = 0.0;
  last_error_time_ = ros::Time::now();
  ROS_INFO("init attitude_controller");
}

void Node::imu_cb(const sensor_msgs::Imu::ConstPtr &msg) {
  ROS_INFO("imu received");
  imu_q_ = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z);
  if (setpoint_set_) {
    tf::Quaternion imu_tf = tf::Quaternion(imu_q_.x(), imu_q_.y(), imu_q_.z(), imu_q_.w());
    tf::Quaternion setpoint_tf = tf::Quaternion(setpoint_q_.x(), setpoint_q_.y(), setpoint_q_.z(), setpoint_q_.w());
    double imu_roll, imu_pitch, imu_yaw;
    double setpoint_roll, setpoint_pitch, setpoint_yaw;
    tf::Matrix3x3(imu_tf).getRPY(imu_roll, imu_pitch, imu_yaw);
    tf::Matrix3x3(setpoint_tf).getRPY(setpoint_roll, setpoint_pitch, setpoint_yaw);
    ROS_INFO_STREAM("imu yaw: " << imu_yaw);
    ROS_INFO_STREAM("setpoint yaw: " << setpoint_yaw);
    double error = asin(sin(setpoint_yaw - imu_yaw));
    double d_error = (error - last_error_) / (ros::Time::now() - last_error_time_).toSec();
    double yaw_effort = 1.0 * error + 0.7 * d_error;
    last_error_ = error;
    last_error_time_ = ros::Time::now();
    yaw_effort = std::min(1.0, yaw_effort);
    yaw_effort = std::max(-1.0, yaw_effort);
    double throttle = throttle_;
    throttle = std::min(1.0, throttle);
    throttle = std::max(-1.0, throttle);
    ROS_INFO_STREAM("yaw effort: " << yaw_effort);
    ROS_INFO_STREAM("throttle: " << throttle);
    double left = throttle - yaw_effort;
    double right = throttle + yaw_effort;
    left = std::min(1.0, left);
    left = std::max(-1.0, left);
    right = std::min(1.0, right);
    right = std::max(-1.0, right);
    auto drive_msg = boost::make_shared<kingfisher_msgs::Drive>();
    drive_msg->left = left;
    drive_msg->right = right;
    drive_pub_.publish(drive_msg);
    float controls[] = {0, 1, 2, 3, 4, 5, 6, 7};
    auto control_msg = boost::make_shared<mavros_msgs::ActuatorControl>();
    control_msg->group_mix = 0;
    control_msg->header.stamp = ros::Time::now();
    control_msg->controls[1] = -throttle;
    control_msg->controls[2] = -yaw_effort;
    if ((ros::Time::now() - last_setpoint_time_).toSec() < 0.5)
      actuator_pub_.publish(control_msg);
  }
}

void Node::setpoint_cb(const mavros_msgs::AttitudeTarget::ConstPtr &msg) {
  setpoint_set_ = true;
  ROS_INFO("setpoint received");
  last_setpoint_ = *msg;
  setpoint_q_ = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z);
  throttle_ = msg->thrust;
  last_setpoint_time_ = ros::Time::now();
}

}

int main(int argc, char** argv) {
  ros::init(argc, argv, "attitude_controller");
  ros::NodeHandle pnh("~");
  attitude_controller::Node node(pnh);
  ros::spin();
  return 0;
}
#include <ros/ros.h> #include <Eigen/Geometry> #include <mavros_msgs/AttitudeTarget.h> #include <mavros_msgs/ActuatorControl.h> #include <kingfisher_msgs/Drive.h> #include <sensor_msgs/Imu.h> #include <tf/transform_datatypes.h> #include <math.h> using namespace std; namespace attitude_co
ix3x3(setpoint_tf).getRPY(setpoint_roll, setpoint_pitch, setpoint_yaw); ROS_INFO_STREAM("imu yaw: " << imu_yaw); ROS_INFO_STREAM("setpoint yaw: " << setpoint_yaw); double error = asin(sin(setpoint_yaw - imu_yaw)); double d_error = (error - last_error_)/(ros::Time::now() - last_error_time_).toSec(); double yaw_effort = 1.0 * error + 0.7 * d_error; last_error_ = error; last_error_time_ = ros::Time::now(); yaw_effort = std::min(1.0, yaw_effort); yaw_effort = std::max(-1.0, yaw_effort); double throttle = throttle_; throttle = std::min(1.0, throttle); throttle = std::max(-1.0, throttle); ROS_INFO_STREAM("yaw effort: " << yaw_effort); ROS_INFO_STREAM("throttle: " << throttle); double left = throttle - yaw_effort; double right = throttle + yaw_effort; left = std::min(1.0, left); left = std::max(-1.0, left); right = std::min(1.0, right); right = std::max(-1.0, right); auto drive_msg = boost::make_shared<kingfisher_msgs::Drive>(); drive_msg->left = left; drive_msg->right = right; drive_pub_.publish(drive_msg); float controls[] = {0, 1, 2, 3, 4, 5, 6, 7}; auto control_msg = boost::make_shared<mavros_msgs::ActuatorControl>(); control_msg->group_mix = 0; control_msg->header.stamp = ros::Time::now(); control_msg->controls[1] = -throttle; control_msg->controls[2] = -yaw_effort; if ((ros::Time::now() - last_setpoint_time_).toSec() < 0.5) actuator_pub_.publish(control_msg); } } void Node::setpoint_cb(const mavros_msgs::AttitudeTarget::ConstPtr &msg) { setpoint_set_ = true; ROS_INFO("setpoint recieved"); last_setpoint_ = *msg; setpoint_q_ = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z); throttle_ = msg->thrust; last_setpoint_time_ = ros::Time::now(); } } int main(int argc, char** argv) { ros::init(argc, argv, "attitude_controller"); ros::NodeHandle pnh("~"); attitude_controller::Node node(pnh); ros::spin(); return 0; }
ntroller { class Node { public: explicit Node(const ros::NodeHandle& pnh); void setpoint_cb(const mavros_msgs::AttitudeTarget::ConstPtr& msg); void imu_cb(const sensor_msgs::Imu::ConstPtr& msg); private: ros::NodeHandle pnh_; ros::Subscriber setpoint_sub_; ros::Time last_setpoint_time_; mavros_msgs::AttitudeTarget last_setpoint_; double throttle_; Eigen::Quaterniond setpoint_q_; bool setpoint_set_; ros::Publisher drive_pub_; ros::Publisher actuator_pub_; ros::Subscriber imu_sub_; Eigen::Quaterniond imu_q_; double kp_; double kd_; double ki_; double last_error_; ros::Time last_error_time_; }; Node::Node(const ros::NodeHandle& pnh) : pnh_(pnh) { setpoint_sub_ = pnh_.subscribe("/att_control/attitude_target", 10, &Node::setpoint_cb, this); imu_sub_ = pnh_.subscribe("/mavros/imu/data", 10, &Node::imu_cb, this); drive_pub_ = pnh_.advertise<kingfisher_msgs::Drive>("/cmd_drive", 10); actuator_pub_ = pnh_.advertise<mavros_msgs::ActuatorControl>("/mavros/actuator_control", 10); setpoint_set_ = false; kp_ = pnh_.param("kp", kp_, 1.0); kd_ = pnh_.param("kd", kd_, 1.0); ki_ = pnh_.param("ki", ki_, 1.0); last_error_ = 0.0; last_error_time_ = ros::Time::now(); ROS_INFO("init attitude_controller"); } void Node::imu_cb(const sensor_msgs::Imu::ConstPtr &msg) { ROS_INFO("imu recieved"); imu_q_ = Eigen::Quaterniond(msg->orientation.w, msg->orientation.x, msg->orientation.y, msg->orientation.z); if (setpoint_set_){ tf::Quaternion imu_tf = tf::Quaternion(imu_q_.x(), imu_q_.y(), imu_q_.z(), imu_q_.w()); tf::Quaternion setpoint_tf = tf::Quaternion(setpoint_q_.x(), setpoint_q_.y(), setpoint_q_.z(), setpoint_q_.w()); double imu_roll, imu_pitch, imu_yaw; double setpoint_roll, setpoint_pitch, setpoint_yaw; tf::Matrix3x3(imu_tf).getRPY(imu_roll, imu_pitch, imu_yaw); tf::Matr
random
[ { "content": "# pixhawk_emulator\n\nCollection of packages that emulate various Pixhawk modules.\n\n\n\n## Installation Instructions\n\nRun the following command before trying to build the code.\n\n``` bash\n\nsudo ln -s /usr/include/eigen3/Eigen /usr/local/include/Eigen\n\n```\n", "file_path": "README.md", "rank": 2, "score": 1.2792672574792734 } ]
C++
source/D2Common/src/Drlg/DrlgDrlgGrid.cpp
eezstreet/D2MOO
28a30aecc69bf43c80e6757a94d533fb37634b68
#include <Drlg/D2DrlgDrlg.h> #include <Drlg/D2DrlgDrlgGrid.h> #include <Drlg/D2DrlgDrlgRoom.h> void(__fastcall* gpfFlagOperations[])(int*, int) = { DRGLGRID_OrFlag, DRGLGRID_AndFlag, DRGLGRID_XorFlag, DRGLGRID_OverwriteFlag, DRGLGRID_OverwriteFlagIfZero, DRGLGRID_AndNegatedFlag }; void __fastcall DRGLGRID_OverwriteFlag(int* pFlag, int nFlag) { *pFlag = nFlag; } void __fastcall DRGLGRID_OrFlag(int* pFlag, int nFlag) { *pFlag |= nFlag; } void __fastcall DRGLGRID_AndFlag(int* pFlag, int nFlag) { *pFlag &= nFlag; } void __fastcall DRGLGRID_XorFlag(int* pFlag, int nFlag) { *pFlag ^= nFlag; } void __fastcall DRGLGRID_OverwriteFlagIfZero(int* pFlag, int nFlag) { if (*pFlag == 0) { *pFlag = nFlag; } } void __fastcall DRGLGRID_AndNegatedFlag(int* pFlag, int nFlag) { *pFlag &= ~nFlag; } BOOL __fastcall DRLGGRID_IsGridValid(D2DrlgGridStrc* pDrlgGrid) { return pDrlgGrid && pDrlgGrid->pCellsFlags; } BOOL __fastcall DRLGGRID_IsPointInsideGridArea(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return nX >= 0 && nX < pDrlgGrid->nWidth && nY >= 0 && nY < pDrlgGrid->nHeight; } void __fastcall DRLGGRID_AlterGridFlag(D2DrlgGridStrc* pDrlgGrid, int nX, int nY, int nFlag, FlagOperation eOperation) { gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]], nFlag); } int* __fastcall DRLGGRID_GetGridFlagsPointer(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return &pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]]; } int __fastcall DRLGGRID_GetGridFlags(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]]; } void __fastcall DRLGGRID_AlterAllGridFlags(D2DrlgGridStrc* pDrlgGrid, int nFlag, FlagOperation eOperation) { for (int nY = 0; nY < pDrlgGrid->nHeight; ++nY) { for (int nX = 0; nX < pDrlgGrid->nWidth; ++nX) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); } } } void __fastcall DRLGGRID_AlterEdgeGridFlags(D2DrlgGridStrc* pDrlgGrid, int nFlag, FlagOperation eOperation) { int* pFlagsFirstRow = &pDrlgGrid->pCellsFlags[pDrlgGrid->pCellsRowOffsets[0]]; int* pFlagsLastRow = &pDrlgGrid->pCellsFlags[pDrlgGrid->pCellsRowOffsets[pDrlgGrid->nHeight - 1]]; for (int i = 0; i < pDrlgGrid->nWidth; ++i) { gpfFlagOperations[eOperation](&pFlagsFirstRow[i], nFlag); gpfFlagOperations[eOperation](&pFlagsLastRow[i], nFlag); } for (int i = 1; i < pDrlgGrid->nHeight; ++i) { const int nCurRowOffset = pDrlgGrid->pCellsRowOffsets[i]; gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nCurRowOffset + 0 ], nFlag); gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nCurRowOffset + pDrlgGrid->nWidth - 1], nFlag); } } void __fastcall sub_6FD75DE0(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, int nFlag, FlagOperation eOperation, BOOL bAlterNextVertex) { D2DrlgVertexStrc* pNext = pDrlgVertex->pNext; if (pDrlgVertex->nPosX == pNext->nPosX && pDrlgVertex->nPosY == pNext->nPosY) { DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->nPosX, pDrlgVertex->nPosY, nFlag, eOperation); return; } int nEndX = 0; int nEndY = 0; int nX = 0; int nY = 0; if (pDrlgVertex->nPosX == pNext->nPosX) { nX = pDrlgVertex->nPosX; if (pDrlgVertex->nPosY >= pNext->nPosY) { nY = pNext->nPosY + 1; nEndY = pDrlgVertex->nPosY; } else { nY = pDrlgVertex->nPosY + 1; nEndY = pNext->nPosY; } while (nY != nEndY) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); ++nY; } } else { nY = pDrlgVertex->nPosY; if (pDrlgVertex->nPosX >= pNext->nPosX) { nEndX = pDrlgVertex->nPosX; nX = pNext->nPosX + 1; } else { nEndX = pNext->nPosX; nX = 
pDrlgVertex->nPosX + 1; } while (nX != nEndX) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); ++nX; } } DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->nPosX, pDrlgVertex->nPosY, nFlag, eOperation); if (bAlterNextVertex) { DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->pNext->nPosX, pDrlgVertex->pNext->nPosY, nFlag, eOperation); } } void __fastcall DRLGGRID_SetVertexGridFlags(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, int nFlag) { int nX = 0; int nY = 0; D2DrlgVertexStrc* pVertex = pDrlgVertex; while (pVertex) { nX = pVertex->nPosX; nY = pVertex->nPosY; pVertex = pVertex->pNext; if (nX >= 0 && nX < pDrlgGrid->nWidth && nY >= 0 && nY < pDrlgGrid->nHeight) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, FLAG_OPERATION_OR); } } } void __fastcall sub_6FD75F60(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, D2DrlgCoordStrc* pDrlgCoord, int nFlag, FlagOperation eOperation, int nSize) { int nX = pDrlgVertex->nPosX; int nY = pDrlgVertex->nPosY; int nXDiff = pDrlgVertex->pNext->nPosX - nX; int nYDiff = pDrlgVertex->pNext->nPosY - nY; int nXInc = 0; if (nXDiff >= 0) { nXInc = 1; } else { nXDiff = -nXDiff; nXInc = -1; } int nYInc = 0; if (nYDiff >= 0) { nYInc = 1; } else { nYDiff = -nYDiff; nYInc = -1; } int nIndexX = nX - pDrlgCoord->nPosX; int nIndexY = nY - pDrlgCoord->nPosY; int nCheck = 0; if (nXDiff >= nYDiff) { for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX, nY + i)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX, nIndexY + i, nFlag, eOperation); } } for (int j = 0; j < nXDiff; ++j) { nX += nXInc; nCheck += nYDiff; if (nCheck > nXDiff) { nY += nYInc; nCheck -= nXDiff; } nIndexX = nX - pDrlgCoord->nPosX; nIndexY = nY - pDrlgCoord->nPosY; for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX, nY + i)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX, nIndexY + i, nFlag, eOperation); } } } } else { for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX + i, nY)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX + i, nIndexY, nFlag, eOperation); } } for (int j = 0; j < nYDiff; ++j) { nY += nYInc; nCheck += nXDiff; if (nCheck > nYDiff) { nX += nXInc; nCheck -= nYDiff; } nIndexX = nX - pDrlgCoord->nPosX; nIndexY = nY - pDrlgCoord->nPosY; for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX + i, nY)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX + i, nIndexY, nFlag, eOperation); } } } } } void __fastcall DRLGGRID_InitializeGridCells(void* pMemPool, D2DrlgGridStrc* pDrlgGrid, int nWidth, int nHeight) { pDrlgGrid->nWidth = nWidth; pDrlgGrid->nHeight = nHeight; pDrlgGrid->pCellsRowOffsets = (int*)D2_CALLOC_SERVER(pMemPool, sizeof(int) * nHeight * (nWidth + 1)); pDrlgGrid->pCellsFlags = &pDrlgGrid->pCellsRowOffsets[nHeight]; int nRowOffset = 0; for (int i = 0; i < nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nRowOffset; nRowOffset += nWidth; } pDrlgGrid->unk0x10 = 0; } void __fastcall DRLGGRID_FillGrid(D2DrlgGridStrc* pDrlgGrid, int nWidth, int nHeight, int* pCellPos, int* pCellRowOffsets) { pDrlgGrid->nWidth = nWidth; pDrlgGrid->nHeight = nHeight; pDrlgGrid->pCellsFlags = pCellPos; memset(pCellPos, 0x00, sizeof(int) * nHeight * nWidth); pDrlgGrid->pCellsRowOffsets = pCellRowOffsets; int nRowOffset = 0; for (int i = 0; i < nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nRowOffset; nRowOffset += nWidth; } pDrlgGrid->unk0x10 = 0; } void __fastcall DRLGGRID_FillNewCellFlags(void* pMemPool, D2DrlgGridStrc* pDrlgGrid, int* pCellPos, 
D2DrlgCoordStrc* pDrlgCoord, int nWidth) { pDrlgGrid->nWidth = pDrlgCoord->nWidth; pDrlgGrid->nHeight = pDrlgCoord->nHeight; pDrlgGrid->pCellsFlags = &pCellPos[pDrlgCoord->nPosX + nWidth * pDrlgCoord->nPosY]; pDrlgGrid->pCellsRowOffsets = (int32_t*)D2_ALLOC_SERVER(pMemPool, sizeof(int) * pDrlgCoord->nHeight); int nOffset = 0; for (int i = 0; i < pDrlgCoord->nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nOffset; nOffset += nWidth; } pDrlgGrid->unk0x10 = 1; } void __fastcall DRLGGRID_AssignCellsOffsetsAndFlags(D2DrlgGridStrc* pDrlgGrid, int* pCellPos, D2DrlgCoordStrc* pDrlgCoord, int nWidth, int* pCellFlags) { pDrlgGrid->nWidth = pDrlgCoord->nWidth; pDrlgGrid->nHeight = pDrlgCoord->nHeight; pDrlgGrid->pCellsFlags = &pCellPos[pDrlgCoord->nPosX + nWidth * pDrlgCoord->nPosY]; pDrlgGrid->pCellsRowOffsets = pCellFlags; for (int i = 0; i < pDrlgCoord->nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = i * nWidth; } pDrlgGrid->unk0x10 = 1; } void __fastcall DRLGGRID_FreeGrid(void* pMemPool, D2DrlgGridStrc* pDrlgGrid) { if (pDrlgGrid->pCellsRowOffsets) { D2_FREE_SERVER(pMemPool, pDrlgGrid->pCellsRowOffsets); } pDrlgGrid->pCellsFlags = NULL; pDrlgGrid->pCellsRowOffsets = NULL; } void __fastcall DRLGGRID_ResetGrid(D2DrlgGridStrc* pDrlgGrid) { pDrlgGrid->pCellsFlags = 0; pDrlgGrid->pCellsRowOffsets = 0; }
#include <Drlg/D2DrlgDrlg.h> #include <Drlg/D2DrlgDrlgGrid.h> #include <Drlg/D2DrlgDrlgRoom.h> void(__fastcall* gpfFlagOperations[])(int*, int) = { DRGLGRID_OrFlag, DRGLGRID_AndFlag, DRGLGRID_XorFlag, DRGLGRID_OverwriteFlag, DRGLGRID_OverwriteFlagIfZero, DRGLGRID_AndNegatedFlag }; void __fastcall DRGLGRID_OverwriteFlag(int* pFlag, int nFlag) { *pFlag = nFlag; } void __fastcall DRGLGRID_OrFlag(int* pFlag, int nFlag) { *pFlag |= nFlag; } void __fastcall DRGLGRID_AndFlag(int* pFlag, int nFlag) { *pFlag &= nFlag; } void __fastcall DRGLGRID_XorFlag(int* pFlag, int nFlag) { *pFlag ^= nFlag; } void __fastcall DRGLGRID_OverwriteFlagIfZero(int* pFlag, int nFlag) { if (*pFlag == 0) { *pFlag = nFlag; } } void __fastcall DRGLGRID_AndNegatedFlag(int* pFlag, int nFlag) { *pFlag &= ~nFlag; } BOOL __fastcall DRLGGRID_IsGridValid(D2DrlgGridStrc* pDrlgGrid) { return pDrlgGrid && pDrlgGrid->pCellsFlags; } BOOL __fastcall DRLGGRID_IsPointInsideGridArea(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return nX >= 0 && nX < pDrlgGrid->nWidth && nY >= 0 && nY < pDrlgGrid->nHeight; } void __fastcall DRLGGRID_AlterGridFlag(D2DrlgGridStrc* pDrlgGrid, int nX, int nY, int nFlag, FlagOperation eOperation) { gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]], nFlag); } int* __fastcall DRLGGRID_GetGridFlagsPointer(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return &pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]]; } int __fastcall DRLGGRID_GetGridFlags(D2DrlgGridStrc* pDrlgGrid, int nX, int nY) { return pDrlgGrid->pCellsFlags[nX + pDrlgGrid->pCellsRowOffsets[nY]]; } void __fastcall DRLGGRID_AlterAllGridFlags(D2DrlgGridStrc* pDrlgGrid, int nFlag, FlagOperation eOperation) { for (int nY = 0; nY < pDrlgGrid->nHeight; ++nY) { for (int nX = 0; nX < pDrlgGrid->nWidth; ++nX) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); } } } void __fastcall DRLGGRID_AlterEdgeGridFlags(D2DrlgGridStrc* pDrlgGrid, int nFlag, FlagOperation eOperation) { int* pFlagsFirstRow = &pDrlgGrid->pCellsFlags[pDrlgGrid->pCellsRowOffsets[0]]; int* pFlagsLastRow = &pDrlgGrid->pCellsFlags[pDrlgGrid->pCellsRowOffsets[pDrlgGrid->nHeight - 1]]; for (int i = 0; i < pDrlgGrid->nWidth; ++i) { gpfFlagOperations[eOperation](&pFlagsFirstRow[i], nFlag); gpfFlagOperations[eOperation](&pFlagsLastRow[i], nFlag); } for (int i = 1; i < pDrlgGrid->nHeight; ++i) { const int nCurRowOffset = pDrlgGrid->pCellsRowOffsets[i]; gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nCurRowOffset + 0 ], nFlag); gpfFlagOperations[eOperation](&pDrlgGrid->pCellsFlags[nCurRowOffset + pDrlgGrid->nWidth - 1], nFlag); } } void __fastcall sub_6FD75DE0(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, int nFlag, FlagOperation eOperation, BOOL bAlterNextVertex) { D2DrlgVertexStrc* pNext = pDrlgVertex->pNext; if (pDrlgVertex->nPosX == pNext->nPosX && pDrlgVertex->nPosY == pNext->nPosY) { DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->nPosX, pDrlgVertex->nPosY, nFlag, eOperation); return; } int nEndX = 0; int nEndY = 0; int nX = 0; int nY = 0; if (pDrlgVertex->nPosX == pNext->nPosX) { nX = pDrlgVertex->nPosX; if (pDrlgVertex->nPosY >= pNext->nPosY) { nY = pNext->nPosY + 1; nEndY = pDrlgVertex->nPosY; } else { nY = pDrlgVertex->nPosY + 1; nEndY = pNext->nPosY; } while (nY != nEndY) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); ++nY; } } else { nY = pDrlgVertex->nPosY; if (pDrlgVertex->nPosX >= pNext->nPosX) { nEndX = pDrlgVertex->nPosX; nX = pNext->nPosX + 1; } else { nEndX = pNext->nPosX; nX = 
pDrlgVertex->nPosX + 1; } while (nX != nEndX) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, eOperation); ++nX; } } DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->nPosX, pDrlgVertex->nPosY, nFlag, eOperation); if (bAlterNextVertex) { DRLGGRID_AlterGridFlag(pDrlgGrid, pDrlgVertex->pNext->nPosX, pDrlgVertex->pNext->nPosY, nFlag, eOperation); } } void __fastcall DRLGGRID_SetVertexGridFlags(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, int nFlag) { int nX = 0; int nY = 0; D2DrlgVertexStrc* pVertex = pDrlgVertex; while (pVertex) { nX = pVertex->nPosX; nY = pVertex->nPosY; pVertex = pVertex->pNext; if (nX >= 0 && nX < pDrlgGrid->nWidth && nY >= 0 && nY < pDrlgGrid->nHeight) { DRLGGRID_AlterGridFlag(pDrlgGrid, nX, nY, nFlag, FLAG_OPERATION_OR); } } } void __fastcall sub_6FD75F60(D2DrlgGridStrc* pDrlgGrid, D2DrlgVertexStrc* pDrlgVertex, D2DrlgCoordStrc* pDrlgCoord, int nFlag, FlagOperation eOperation, int nSize) { int nX = pDrlgVertex->nPosX; int nY = pDrlgVertex->nPosY; int nXDiff = pDrlgVertex->pNext->nPosX - nX; int nYDiff = pDrlgVertex->pNext->nPosY - nY; int nXInc = 0; if (nXDiff >= 0) { nXInc = 1; } else { nXDiff = -nXDiff; nXInc = -1; } int nYInc = 0; if (nYDiff >= 0) { nYInc = 1; } else { nYDiff = -nYDiff; nYInc = -1; } int nIndexX = nX - pDrlgCoord->nPosX; int nIndexY = nY - pDrlgCoord->nPosY; int nCheck = 0; if (nXDiff >= nYDiff) { for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX, nY + i)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX, nIndexY + i, nFlag, eOperation); } } for (int j = 0; j < nXDiff; ++j) { nX += nXInc; nCheck += nYDiff; if (nCheck > nXDiff) { nY += nYInc; nCheck -= nXDiff; } nIndexX = nX - pDrlgCoord->nPosX; nIndexY = nY - pDrlgCoord->nPosY; for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX, nY + i)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX, nIndexY + i, nFlag, eOperation); } } } } else { for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX + i, nY)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX + i, nIndexY, nFlag, eOperation); } } for (int j = 0; j < nYDiff; ++j) { nY += nYInc; nCheck += nXDiff; if (nCheck > nYDiff) { nX += nXInc; nCheck -= nYDiff; } nIndexX = nX - pDrlgCoord->nPosX; nIndexY = nY - pDrlgCoord->nPosY; for (int i = 0; i < nSize; ++i) { if (DRLGROOM_AreXYInsideCoordinates(pDrlgCoord, nX + i, nY)) { DRLGGRID_AlterGridFlag(pDrlgGrid, nIndexX + i, nIndexY, nFlag, eOperation); } } } } } void __fastcall DRLGGRID_InitializeGridCells(void* pMemPool, D2DrlgGridStrc* pDrlgGrid, int nWidth, int nHeight) { pDrlgGrid->nWidth = nWidth; pDrlgGrid->nHeight = nHeight; pDrlgGrid->pCellsRowOffsets = (int*)D2_CALLOC_SERVER(pMemPool, sizeof(int) * nHeight * (nWidth + 1)); pDrlgGrid->pCellsFlags = &pDrlgGrid->pCellsRowOffsets[nHeight]; int nRowOffset = 0; for (int i = 0; i < nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nRowOffset; nRowOffset += nWidth; } pDrlgGrid->unk0x10 = 0; }
void __fastcall DRLGGRID_FillNewCellFlags(void* pMemPool, D2DrlgGridStrc* pDrlgGrid, int* pCellPos, D2DrlgCoordStrc* pDrlgCoord, int nWidth) { pDrlgGrid->nWidth = pDrlgCoord->nWidth; pDrlgGrid->nHeight = pDrlgCoord->nHeight; pDrlgGrid->pCellsFlags = &pCellPos[pDrlgCoord->nPosX + nWidth * pDrlgCoord->nPosY]; pDrlgGrid->pCellsRowOffsets = (int32_t*)D2_ALLOC_SERVER(pMemPool, sizeof(int) * pDrlgCoord->nHeight); int nOffset = 0; for (int i = 0; i < pDrlgCoord->nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = nOffset; nOffset += nWidth; } pDrlgGrid->unk0x10 = 1; } void __fastcall DRLGGRID_AssignCellsOffsetsAndFlags(D2DrlgGridStrc* pDrlgGrid, int* pCellPos, D2DrlgCoordStrc* pDrlgCoord, int nWidth, int* pCellFlags) { pDrlgGrid->nWidth = pDrlgCoord->nWidth; pDrlgGrid->nHeight = pDrlgCoord->nHeight; pDrlgGrid->pCellsFlags = &pCellPos[pDrlgCoord->nPosX + nWidth * pDrlgCoord->nPosY]; pDrlgGrid->pCellsRowOffsets = pCellFlags; for (int i = 0; i < pDrlgCoord->nHeight; ++i) { pDrlgGrid->pCellsRowOffsets[i] = i * nWidth; } pDrlgGrid->unk0x10 = 1; } void __fastcall DRLGGRID_FreeGrid(void* pMemPool, D2DrlgGridStrc* pDrlgGrid) { if (pDrlgGrid->pCellsRowOffsets) { D2_FREE_SERVER(pMemPool, pDrlgGrid->pCellsRowOffsets); } pDrlgGrid->pCellsFlags = NULL; pDrlgGrid->pCellsRowOffsets = NULL; } void __fastcall DRLGGRID_ResetGrid(D2DrlgGridStrc* pDrlgGrid) { pDrlgGrid->pCellsFlags = 0; pDrlgGrid->pCellsRowOffsets = 0; }
void __fastcall DRLGGRID_FillGrid(D2DrlgGridStrc* pDrlgGrid, int nWidth, int nHeight, int* pCellPos, int* pCellRowOffsets)
{
    pDrlgGrid->nWidth = nWidth;
    pDrlgGrid->nHeight = nHeight;

    pDrlgGrid->pCellsFlags = pCellPos;
    memset(pCellPos, 0x00, sizeof(int) * nHeight * nWidth);

    pDrlgGrid->pCellsRowOffsets = pCellRowOffsets;

    int nRowOffset = 0;
    for (int i = 0; i < nHeight; ++i)
    {
        pDrlgGrid->pCellsRowOffsets[i] = nRowOffset;
        nRowOffset += nWidth;
    }

    pDrlgGrid->unk0x10 = 0;
}
function_block-full_function
[ { "content": "\tuint8_t nInt;\t\t\t\t\t\t\t//0x32\n", "file_path": "source/D2Common/include/D2DataTbls.h", "rank": 0, "score": 99807.58277220109 }, { "content": "\tBOOL bReturn;\t\t\t\t\t\t\t//0x18\n", "file_path": "source/D2CommonDefinitions/include/D2Structs.h", "rank": 1, "score": 99796.58858338752 }, { "content": "void __stdcall SEED_Return();\n", "file_path": "source/D2Common/include/D2Seed.h", "rank": 2, "score": 99796.58858338752 }, { "content": "void __stdcall INVENTORY_Return(char* szFile, int nLine, D2InventoryStrc* pInventory, int nX, int nY, int nInventoryRecordId, BOOL bClient, uint8_t nPage);\n", "file_path": "source/D2Common/include/D2Inventory.h", "rank": 3, "score": 99796.58858338752 }, { "content": "\tuint8_t nInt;\t\t\t\t\t\t//0x23\n", "file_path": "source/D2Common/include/DataTbls/ArenaTbls.h", "rank": 4, "score": 97857.91075264837 }, { "content": "\tuint8_t nPercentInt;\t\t\t\t\t//0x37\n", "file_path": "source/D2Common/include/D2DataTbls.h", "rank": 5, "score": 97857.91075264837 }, { "content": "void __stdcall D2COMMON_11080_Return(int a1);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 6, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11062_Return(int a1, int a2);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 7, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10308_Return(int a1, int a2);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 8, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11296_Return(int a1);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 9, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10267_Return(int a1, int a2);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 10, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11077_Return(int a1, int a2);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 11, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10139_Return();\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 12, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11236_Return(int a1, int a2, int a3);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 13, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10251_Return(int a1);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 14, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10140_Return();\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 15, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11076_Return(int a1, int a2);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 16, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11240_Return(int a1, int a2, int a3);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 17, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10238_Return();\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 18, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11229_Return(int a1);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 19, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11241_Return(int a1);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 20, "score": 97847.13132770057 }, { "content": "void 
__stdcall D2COMMON_10441_Return(int a1, int a2);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 21, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10856_Return(int a1, int a2, int a3);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 22, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10303_Return(int a1, int a2);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 23, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_10019_Return(int a1);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 24, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11237_Return(int a1, int a2, int a3);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 25, "score": 97847.13132770057 }, { "content": "void __stdcall D2COMMON_11277_Return(int a1);\n", "file_path": "source/D2Common/include/UselessOrdinals.h", "rank": 26, "score": 97847.13132770057 }, { "content": "\tuint16_t wReqInt;\t\t\t\t\t\t//0x17A\n", "file_path": "source/D2Common/include/DataTbls/SkillsTbls.h", "rank": 27, "score": 95982.95028052496 }, { "content": "\tuint8_t nElType[3];\t\t\t\t\t\t//0x101\n", "file_path": "source/D2Common/include/DataTbls/MonsterTbls.h", "rank": 28, "score": 95980.2901360862 }, { "content": "\tuint8_t nElMode[3];\t\t\t\t\t\t//0xFE\n", "file_path": "source/D2Common/include/DataTbls/MonsterTbls.h", "rank": 29, "score": 95980.2901360862 }, { "content": "\tuint8_t nElPct[3][3];\t\t\t\t\t//0x104\n", "file_path": "source/D2Common/include/DataTbls/MonsterTbls.h", "rank": 30, "score": 95980.2901360862 }, { "content": "\tint32_t nElDur;\t\t\t\t\t\t\t\t//0x34\n", "file_path": "source/D2Common/include/DataTbls/MonsterTbls.h", "rank": 31, "score": 95980.2901360862 }, { "content": "\tint32_t nElMaxD;\t\t\t\t\t\t\t//0x30\n", "file_path": "source/D2Common/include/DataTbls/MonsterTbls.h", "rank": 32, "score": 94175.87752970976 }, { "content": "\tint32_t nElMinD;\t\t\t\t\t\t\t//0x2C\n", "file_path": "source/D2Common/include/DataTbls/MonsterTbls.h", "rank": 33, "score": 94175.87752970976 }, { "content": "\tD2UnkExcelStrc* pNext;\t\t\t//0x00\n", "file_path": "source/Fog/include/Fog.h", "rank": 34, "score": 89764.55861674144 }, { "content": "\tuint16_t nFlag;\t\t\t\t\t\t\t\t//0x02\n", "file_path": "source/D2Common/include/D2Waypoints.h", "rank": 35, "score": 87756.06395926807 }, { "content": "\tint32_t nY;\t\t\t\t\t\t\t\t//0x08\n", "file_path": "source/D2Common/include/D2Roster.h", "rank": 36, "score": 87744.14860778961 }, { "content": "\tint32_t nY;\t\t\t\t\t\t\t\t\t//0x08\n", "file_path": "source/D2Common/include/Units/Object.h", "rank": 37, "score": 87744.14860778961 }, { "content": "\tint32_t nY;\t\t\t\t\t\t\t\t\t//0x04\n", "file_path": "source/D2Common/include/D2Waypoints.h", "rank": 38, "score": 87744.14860778961 }, { "content": "\tuint16_t nY;\t\t\t\t\t\t\t\t//0x06\n", "file_path": "source/D2Common/include/D2Items.h", "rank": 39, "score": 87744.14860778961 }, { "content": "\tint32_t nY;\t\t\t\t\t\t\t\t\t//0x18\n", "file_path": "source/D2Common/include/Units/Missile.h", "rank": 40, "score": 87744.14860778961 }, { "content": "\tuint16_t nX;\t\t\t\t\t\t\t\t//0x04\n", "file_path": "source/D2Common/include/D2Items.h", "rank": 41, "score": 87743.4960430722 }, { "content": "\tint32_t nX;\t\t\t\t\t\t\t\t//0x04\n", "file_path": "source/D2Common/include/D2Roster.h", "rank": 42, "score": 87743.4960430722 }, { "content": "\tint32_t nX;\t\t\t\t\t\t\t\t\t//0x04\n", "file_path": 
"source/D2Common/include/Units/Object.h", "rank": 43, "score": 87743.4960430722 }, { "content": "\tint32_t nX;\t\t\t\t\t\t\t\t\t//0x00\n", "file_path": "source/D2Common/include/D2Waypoints.h", "rank": 44, "score": 87743.4960430722 }, { "content": "\tint32_t nX;\t\t\t\t\t\t\t\t\t//0x14\n", "file_path": "source/D2Common/include/Units/Missile.h", "rank": 45, "score": 87743.4960430722 }, { "content": "\tD2ObjectRoomCoordStrc* pNext;\t\t\t//0x0C\n", "file_path": "source/D2Common/include/Units/Object.h", "rank": 46, "score": 87729.12532804564 }, { "content": "\tD2TextNodeStrc* pNext;\t\t//0x08\n", "file_path": "source/D2Common/include/D2Text.h", "rank": 47, "score": 87729.12532804564 }, { "content": "\tD2RosterUnitStrc* pNext;\t\t\t\t//0x80\n", "file_path": "source/D2Common/include/D2Roster.h", "rank": 48, "score": 87729.12532804564 }, { "content": "\tint32_t nSize;\t\t\t\t\t\t\t\t//0x04\n", "file_path": "source/D2CommonDefinitions/include/D2Structs.h", "rank": 49, "score": 86797.08188900801 }, { "content": "\tint32_t nSize;\t\t\t\t\t\t\t\t//0x28\n", "file_path": "source/D2Common/include/Units/UnitFinds.h", "rank": 50, "score": 86797.08188900801 }, { "content": "\tvoid* pMemPool;\t\t\t\t\t\t\t//0x00\n", "file_path": "source/D2Common/include/D2Skills.h", "rank": 51, "score": 86794.44599664316 }, { "content": "\tvoid* pMemPool;\t\t\t\t//0x00\n", "file_path": "source/D2Common/include/D2Text.h", "rank": 52, "score": 86794.44599664316 }, { "content": "\tvoid* pMemPool;\t\t\t\t\t\t\t//0x04\n", "file_path": "source/D2Common/include/D2Inventory.h", "rank": 53, "score": 86794.44599664316 }, { "content": "\tint32_t nWidth;\t\t\t\t\t\t\t\t//0x0C\n", "file_path": "source/D2CommonDefinitions/include/D2Structs.h", "rank": 54, "score": 86792.48360053066 }, { "content": "\tint32_t nHeight;\t\t\t\t\t\t\t//0x08\n", "file_path": "source/D2CommonDefinitions/include/D2Structs.h", "rank": 55, "score": 86791.17732729475 }, { "content": "\tint32_t nY;\t\t\t\t\t\t\t\t\t//0x24\n", "file_path": "source/D2Common/include/Units/UnitFinds.h", "rank": 56, "score": 86787.2613940945 }, { "content": "\tint32_t nY;\t\t\t\t\t\t\t\t\t//0x18\n", "file_path": "source/D2CommonDefinitions/include/D2Structs.h", "rank": 57, "score": 86787.2613940945 }, { "content": "\tint32_t nX;\t\t\t\t\t\t\t\t\t//0x20\n", "file_path": "source/D2Common/include/Units/UnitFinds.h", "rank": 58, "score": 86786.60882937709 }, { "content": "\tint32_t nX;\t\t\t\t\t\t\t\t\t//0x14\n", "file_path": "source/D2CommonDefinitions/include/D2Structs.h", "rank": 59, "score": 86786.60882937709 }, { "content": "\tD2GameStrc* pNext;\t\t\t\t\t\t//0x10\n", "file_path": "source/D2CommonDefinitions/include/D2Structs.h", "rank": 60, "score": 86772.23811435053 }, { "content": "#include \"UselessOrdinals.h\"\n\n\n\n#include \"D2Skills.h\"\n\n#include \"D2StatList.h\"\n\n\n\n\n\nint __stdcall D2COMMON_10016_Return0()\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10016_Return0: Useless\");\n\n\treturn 0;\n\n}\n\n\n\nvoid __stdcall D2COMMON_10019_Return(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10019_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_10139_Return()\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10139_Return: Useless\");\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 61, "score": 18.39499824805184 }, { "content": "\n\nvoid __stdcall D2COMMON_11236_Return(int a1, int a2, int a3)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11236_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_11237_Return(int a1, int a2, int 
a3)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11237_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_11240_Return(int a1, int a2, int a3)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11240_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_11241_Return(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11241_Return: Useless\");\n\n}\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 62, "score": 17.872610333948742 }, { "content": "\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11048_Return1: Useless\");\n\n\treturn 1;\n\n}\n\n\n\nvoid __stdcall D2COMMON_11062_Return(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11062_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_11076_Return(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11076_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_11077_Return(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11077_Return: Useless\");\n\n}\n\n\n\nint __stdcall D2COMMON_11078_Return0(int a1, int a2)\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 63, "score": 16.74726787487611 }, { "content": "void __stdcall D2COMMON_10303_Return(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10303_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_10308_Return(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10308_Return: Useless\");\n\n}\n\n\n\nint __stdcall D2COMMON_10309_Return0(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10309_Return0: Useless\");\n\n\treturn 0;\n\n}\n\n\n\nvoid __stdcall D2COMMON_10441_Return(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10441_Return: Useless\");\n\n}\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 64, "score": 16.692155152355355 }, { "content": "}\n\n\n\nvoid __stdcall D2COMMON_10140_Return()\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10140_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_10238_Return()\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10238_Return: Useless\");\n\n}\n\n\n\nint __stdcall D2COMMON_10239_Return0(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10239_Return0: Useless\");\n\n\treturn 0;\n\n}\n\n\n\nvoid __stdcall D2COMMON_10251_Return(int a1)\n\n{\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 65, "score": 16.507316256340218 }, { "content": "\n\nvoid __stdcall D2COMMON_11277_Return(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11277_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_11296_Return(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11296_Return: Useless\");\n\n}\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 66, "score": 16.39723584171184 }, { "content": "#include \"D2DataTbls.h\"\n\n#include <D2Items.h>\n\n\n\n// Inlined in both Parsers\n\nstatic BOOL DATATBLS_AreStringsEqual(const char* szString1, const char* szString2)\n\n{\n\n\tfor (size_t i = strlen(szString2) + 1; i; --i)\n\n\t{\n\n\t\tif (*szString1++ != *szString2++)\n\n\t\t{\n\n\t\t\treturn FALSE;\n\n\t\t}\n\n\t}\n\n\n\n\treturn TRUE;\n\n}\n\n\n\n\n\n//D2Common.0x6FD523E0\n\nvoid __fastcall DATATBLS_CubeMainInputLinker(char* pSrc, void* pRecord, int nOffset, int nPosition, int nTxtRow, int nTxtColumn)\n", "file_path": "source/D2Common/src/DataTbls/HoradricCube.cpp", "rank": 67, "score": 15.841456429405664 }, { "content": "#include \"D2Log.h\"\n\n\n\n#include \"CommonDefinitions.h\"\n\n\n\n\n\nvoid __cdecl LOG_11100(int a1, int a2, 
int a3, int a4, char* szFile, int nLine, const char* szFormat, ...)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"LOG_11100: Useless\");\n\n}\n\n\n\nvoid __cdecl LOG_11101(int nGame, int nFrame, int nClient, int a4, int nSize, const char* szFormat, ...)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"LOG_11101: Useless\");\n\n}\n\n\n\nvoid __fastcall LOG_11102(void* a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"LOG_11102: Useless\");\n\n}\n\n\n", "file_path": "source/D2Common/src/D2Log.cpp", "rank": 68, "score": 15.51857168165908 }, { "content": "#include <ctime>\n\n#include <D2Seed.h>\n\n#include <CommonDefinitions.h>\n\n\n\n//D2Common.0x6FDA5260 (#10916)\n\nvoid __stdcall SEED_Return()\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"SEED_Return: Useless\");\n\n}\n\n\n\n//D2Common.0x6FDAEA80 (#10920)\n\nint __fastcall SEED_GetRandomValue(int nValue)\n\n{\n\n\treturn (0x2F490A95 * (time(NULL) + nValue + GetTickCount()) - 0x2E330917) & 0x7FFFFFFF;\n\n}\n\n\n\n//D2Common.0x6FDAEAB0 (#10912)\n\nvoid __fastcall SEED_InitSeed(D2SeedStrc* pSeed)\n\n{\n\n\tpSeed->nLowSeed = 1;\n", "file_path": "source/D2Common/src/D2Seed.cpp", "rank": 69, "score": 15.345224545791025 }, { "content": "{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11078_Return0: Useless\");\n\n\treturn 0;\n\n}\n\n\n\nvoid __stdcall D2COMMON_11080_Return(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11080_Return: Useless\");\n\n}\n\n\n\nvoid __stdcall D2COMMON_11229_Return(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11229_Return: Useless\");\n\n}\n\n\n\nint __stdcall D2COMMON_11235_Return0(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11235_Return0: Useless\");\n\n\treturn 0;\n\n}\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 70, "score": 14.689291075847612 }, { "content": "#include \"Drlg/D2DrlgActivate.h\"\n\n\n\n#include \"Drlg/D2DrlgDrlg.h\"\n\n#include \"Drlg/D2DrlgDrlgRoom.h\"\n\n#include \"Drlg/D2DrlgPreset.h\"\n\n#include \"Drlg/D2DrlgRoomTile.h\"\n\n#include <DataTbls/LevelsIds.h>\n\n\n\n//TODO: Find names\n\n\n\nint dword_6FDEA6EC;\n\nint dword_6FDEA6F0;\n\nint dword_6FDEA6F4;\n\nint dword_6FDEA6F8;\n\n\n\nvoid (__fastcall* off_6FDE07B0[4])(D2RoomExStrc*) =\n\n{\n\n\tsub_6FD733D0,\n\n\tsub_6FD73450,\n\n\tsub_6FD73550,\n", "file_path": "source/D2Common/src/Drlg/DrlgActivate.cpp", "rank": 72, "score": 13.855664672300254 }, { "content": "#include \"D2StatList.h\"\n\n\n\n#include <D2BitManip.h>\n\n#include \"D2DataTbls.h\"\n\n#include \"D2Environment.h\"\n\n#include \"D2ItemMods.h\"\n\n#include \"D2Items.h\"\n\n#include \"D2States.h\"\n\n#include \"Units/Units.h\"\n\n//TODO: Find names\n\n\n\n\n\n//D2Common.0x6FDB57C0 (#10563)\n\nBOOL __stdcall STATLIST_AreUnitsAligned(D2UnitStrc* pUnit1, D2UnitStrc* pUnit2)\n\n{\n\n\tint nAlignment1 = 0;\n\n\tint nAlignment2 = 0;\n\n\n\n\tif (pUnit1 && pUnit2)\n\n\t{\n", "file_path": "source/D2Common/src/D2StatList.cpp", "rank": 73, "score": 13.404691695352032 }, { "content": "\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10251_Return: Useless\");\n\n}\n\n\n\nint __stdcall D2COMMON_10254_Return0(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10254_Return0: Useless\");\n\n\treturn 0;\n\n}\n\n\n\nvoid __stdcall D2COMMON_10267_Return(int a1, int a2)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10267_Return: Useless\");\n\n}\n\n\n\nint __stdcall D2COMMON_10301_Return0(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10301_Return0: Useless\");\n\n\treturn 0;\n\n}\n\n\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 74, 
"score": 13.392998565586385 }, { "content": "\treturn 0;\n\n}\n\n\n\nvoid __stdcall D2COMMON_10856_Return(int a1, int a2, int a3)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10856_Return: Useless\");\n\n}\n\n\n\nint __stdcall D2COMMON_10943_Return1(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_10943_Return1: Useless\");\n\n\treturn 1;\n\n}\n\n\n\nint __stdcall D2COMMON_11009_Return0(int a1)\n\n{\n\n\tREMOVE_LATER_WriteToLogFile(\"D2COMMON_11009_Return0: Useless\");\n\n\treturn 0;\n\n}\n\n\n", "file_path": "source/D2Common/src/UselessOrdinals.cpp", "rank": 75, "score": 13.30035608587086 }, { "content": "//int __fastcall sub_6FDAC5E0(void* a1, int* a2, unsigned int a3, unsigned int a4, unsigned int a5, unsigned int a6)\n\n//{\n\n//\tunsigned int v6; // edi@1\n\n//\tsigned int v7; // ebp@1\n\n//\tunsigned int v8; // eax@2\n\n//\tunsigned int v9; // esi@4\n\n//\tunsigned int v10; // edx@5\n\n//\tsigned int v11; // ecx@8\n\n//\tunsigned int v12; // ebx@9\n\n//\tunsigned int v13; // ebx@9\n\n//\tint v14; // eax@11\n\n//\tint v15; // edx@13\n\n//\tbool v16; // zf@13\n\n//\tint v17; // eax@13\n\n//\tint v18; // esi@13\n\n//\tvoid* v19; // ecx@13\n\n//\tint result; // eax@19\n\n//\tvoid* v21; // [sp+Ch] [bp-Ch]@1\n\n//\tsigned int v22; // [sp+10h] [bp-8h]@2\n\n//\tint* v23; // [sp+14h] [bp-4h]@1\n", "file_path": "source/D2Common/src/Path/PathMisc.cpp", "rank": 76, "score": 13.248601788359714 }, { "content": "#include \"Units/UnitRoom.h\"\n\n\n\n#include \"D2Dungeon.h\"\n\n#include \"D2StatList.h\"\n\n#include \"Units/Units.h\"\n\n#include \"Path/Path.h\"\n\n\n\n\n\n//TODO: Find names (used by Path Functions)\n\nint dword_6FDD2580;\n\nint dword_6FDD2584;\n\n\n\nbool DungeonTestRoomGame(D2RoomStrc* pRoom, int nX, int nY)\n\n{\n\n\treturn nX >= pRoom->nSubtileX && nX < pRoom->nSubtileX + pRoom->nSubtileWidth\n\n\t\t&& nY >= pRoom->nSubtileY && nY < pRoom->nSubtileY + pRoom->nSubtileHeight;\n\n\n\n}\n\n\n\n//D2Common.0x6FDBCF10 (#11279)\n", "file_path": "source/D2Common/src/Units/UnitRoom.cpp", "rank": 77, "score": 12.80522854054946 }, { "content": "#include \"Path/Path.h\"\n\n#include \"Path/Path_IDAStar.h\"\n\n\n\n#include \"D2Collision.h\"\n\n\n\n\n\nconst int dword_6FDD17E0[] =\n\n{\n\n\t57, 92, 72, 94, 119, 80, 14, 84, 93, 17, 20, 37, 87, 70, 100, 108,\n\n\t30, 91, 85, 60, 40, 3, 118, 78, 81, 109, 43, 26, 67, 27, 49, 75, \n\n\t69, 2, 58, 36, 55, 5, 122, 125, 11, 35, 77, 59, 29, 18, 28, 82, \n\n\t63, 86, 83, 105, 90, 48, 101, 38, 115, 9, 41, 89, 95, 19, 1, 61,\n\n\t51, 7, 110, 34, 126, 47, 15, 6, 99, 22, 104, 4, 71, 97, 121, 114,\n\n\t0, 112, 62, 54, 23, 111, 74, 123, 45, 120, 44, 50, 31, 39, 42, 66, \n\n\t33, 113, 53, 64, 16, 65, 106, 96, 88, 24, 46, 56, 103, 68, 10, 117,\n\n\t25, 32, 124, 127, 76, 8, 79, 21, 102, 52, 107, 73, 13, 116, 98, 12\n\n};\n\n\n\nconst int dword_6FDD19E0[] =\n\n{\n", "file_path": "source/D2Common/src/Path/IDAStar.cpp", "rank": 78, "score": 12.804439075105423 }, { "content": "#include \"Drlg/D2DrlgOutJung.h\"\n\n\n\n#include \"D2DataTbls.h\"\n\n#include \"Drlg/D2DrlgOutdoors.h\"\n\n#include \"D2Seed.h\"\n\n#include <DataTbls/LevelsIds.h>\n\n\n\n//TODO: Find names, rename variables + globals\n\n\n\n//D2Common.0x6FD7FC20\n\n//TODO: v19\n\nvoid __fastcall DRLGOUTJUNG_BuildJungle(D2DrlgLevelStrc* pLevel)\n\n{\n\n\tstatic const int dword_6FDCFB18[] =\n\n\t{\n\n\t\t0, 10, 20, 0\n\n\t};\n\n\n\n\tstatic const int dword_6FDCFB28[] =\n\n\t{\n", "file_path": "source/D2Common/src/Drlg/DrlgOutJung.cpp", "rank": 79, "score": 12.710222995402821 }, { "content": "#include 
\"D2Collision.h\"\n\n\n\n#include \"D2DataTbls.h\"\n\n#include \"D2Dungeon.h\"\n\n#include <D2Lang.h>\n\n#include <D2CMP.h>\n\n\n\n//D2Common.0x6FD41000\n\nvoid __fastcall D2Common_COLLISION_FirstFn_6FD41000(D2RoomStrc* pRoom, D2DrlgTileDataStrc* pTileData, D2TileLibraryEntryStrc* pTileLibraryEntry)\n\n{\n\n\tD2RoomCollisionGridStrc* pCollisionGrid = NULL;\n\n\tuint16_t* pCollisionMask = NULL;\n\n\tuint8_t* v17 = NULL;\n\n\tuint8_t* pTmp = NULL;\n\n\tint nCappedX = 0;\n\n\tint nCappedY = 0;\n\n\tint nIndex = 0;\n\n\tint nX = 0;\n\n\tint nY = 0;\n\n\n", "file_path": "source/D2Common/src/D2Collision.cpp", "rank": 80, "score": 12.606347003606881 }, { "content": "#include \"Drlg/D2DrlgOutWild.h\"\n\n\n\n#include \"Drlg/D2DrlgDrlg.h\"\n\n#include \"Drlg/D2DrlgDrlgGrid.h\"\n\n#include \"Drlg/D2DrlgOutdoors.h\"\n\n#include \"Drlg/D2DrlgOutPlace.h\"\n\n#include \"Drlg/D2DrlgMaze.h\"\n\n#include \"D2Seed.h\"\n\n#include <DataTbls/LevelsIds.h>\n\n\n\n//D2Common.0x6FD84CA0\n\nvoid __fastcall DRLGOUTWILD_GetBridgeCoords(D2DrlgLevelStrc* pLevel, int* pX, int* pY)\n\n{\n\n\tint nX = pLevel->pOutdoors->nGridWidth / 2 - 1;\n\n\n\n\tfor (int nY = 1; nY < pLevel->pOutdoors->nGridWidth - 1; ++nY)\n\n\t{\n\n\t\tif (DRLGGRID_GetGridFlags(pLevel->pOutdoors->pGrid, nX, nY) == 28 && (((unsigned int)DRLGGRID_GetGridFlags(&pLevel->pOutdoors->pGrid[2], nX, nY) >> 16) & 15) == 1)\n\n\t\t{\n\n\t\t\t*pX = nX;\n", "file_path": "source/D2Common/src/Drlg/DrlgOutWild.cpp", "rank": 81, "score": 12.5455466313748 }, { "content": "\tint nSetItemMask = 0;\n\n\tint nMagicPrefix = 0;\n\n\tint nMagicSuffix = 0;\n\n\tint nRarePrefix = 0;\n\n\tint nRareSuffix = 0;\n\n\tint nRunewordId = 0;\n\n\tint nScrollType = 0;\n\n\tint nAutoAffix = 0;\n\n\tint nStatLists = 0;\n\n\tint nFileIndex = 0;\n\n\tint nEarLevel = 0;\n\n\tint nCounter = 0;\n\n\tint nStatId = 0;\n\n\tint nStats = 0;\n\n\tint nValue = 0;\n\n\tint nGold = 0;\n\n\tuint8_t nStorePage = 0;\n\n\tBOOL bIsRuneword = FALSE;\n\n\tBOOL bContinue = FALSE;\n\n\tBOOL bInvalid = FALSE;\n", "file_path": "source/D2Common/src/Items/Items.cpp", "rank": 82, "score": 12.486218082881798 }, { "content": "void __fastcall DRLGROOMTILE_LoadInitRoomTiles(D2RoomExStrc* pRoomEx, D2DrlgGridStrc* pDrlgCoordIndex, D2DrlgGridStrc* pDrlgOutdoorRoom, BOOL bFillBlanks, BOOL bKillEdgeX, BOOL bKillEdgeY)\n\n{\n\n\tD2TileLibraryEntryStrc* v25 = NULL;\n\n\tD2TileLibraryEntryStrc* v27 = NULL;\n\n\tD2DrlgTileDataStrc* v21 = NULL;\n\n\tD2DrlgTileDataStrc* v22 = NULL;\n\n\tD2DrlgTileDataStrc* v26 = NULL;\n\n\tD2RoomExStrc* a6a = NULL;\n\n\tvoid* pMemPool = NULL;\n\n\tunsigned int v18 = 0;\n\n\tunsigned int v15 = 0;\n\n\tunsigned int v31 = 0;\n\n\tint v11 = 0;\n\n\tint v16 = 0;\n\n\tint v35 = 0;\n\n\tint nX = 0;\n\n\tint nY = 0;\n\n\tuint8_t v17 = 0;\n\n\tBOOL bContinue = FALSE;\n\n\n", "file_path": "source/D2Common/src/Drlg/DrlgRoomTile.cpp", "rank": 83, "score": 12.409916027824288 }, { "content": "#include <cstdio>\n\n#include \"D2DataTbls.h\"\n\n#include <D2Lang.h>\n\n#include <D2BitManip.h>\n\n#include <Units/Units.h>\n\n#include <D2States.h>\n\n\n\nD2ArenaTxt* gpArenaTxtTable;\n\nD2CharTemplateTxt* gpCharTemplateTxtTable;\n\nint gnCharTemplateTxtTableRecordCount;\n\nuint32_t gnCharTemplateStartIds[64];\n\nD2BeltsTxt* gpBeltsTxtTable;\n\nD2DataTablesStrc gpDataTables;\n\nD2DataTablesStrc* sgptDataTables = &gpDataTables;\n\nBOOL DATATBLS_LoadFromBin = TRUE;\n\n\n\n//D2Common.0x6FDC412C\n\nvoid __fastcall DATATBLS_CloseFileInMPQ(void* pMemPool, void* pFileHandle)\n\n{\n\n\tD2_ASSERT(pFileHandle);\n", "file_path": 
"source/D2Common/src/DataTbls/DataTbls.cpp", "rank": 85, "score": 12.26257488629207 }, { "content": "#include \"D2States.h\"\n\n\n\n#include \"D2DataTbls.h\"\n\n#include \"D2StatList.h\"\n\n#include \"Units/UnitRoom.h\"\n\n#include \"Units/Units.h\"\n\n#include <D2BitManip.h>\n\n\n\n//Used in some of the following functions\n\n__forceinline BOOL __fastcall STATES_CheckStateMaskByStateId(int nState, int nStateMask)\n\n{\n\n\tif (nState >= 0 && nState < sgptDataTables->nStatesTxtRecordCount)\n\n\t{\n\n\t\treturn sgptDataTables->fStateMasks[nStateMask][nState >> 5] & gdwBitMasks[nState & 31];\n\n\t}\n\n\n\n\treturn FALSE;\n\n}\n\n\n\n\n", "file_path": "source/D2Common/src/D2States.cpp", "rank": 86, "score": 12.235056812803183 }, { "content": "#include \"D2DataTbls.h\"\n\n\n\n#include \"D2Collision.h\"\n\n\n\n\n\n//D2Common.0x6FDD8480\n\nstatic const int gnFieldXOffsets[] = { 0, 1, 1, 1, 0, -1, -1, -1, 0 };\n\n//D2Common.0xFDD84A4\n\nstatic const int gnFieldYOffsets[] = { -1, -1, 0, 1, 1, 1, 0, -1, 0 };\n\n\n\n\n\n//D2Common.0x6FD51FC0\n\nBOOL __fastcall DATATBLS_LoadExpFieldD2(void* pMemPool)\n\n{\n\n\tchar szPath[80] = {};\n\n\tchar* pExpField = NULL;\n\n\tint nSize = 0;\n\n\n\n\twsprintfA(szPath, \"%s\\\\expfield.d2\", \"DATA\\\\GLOBAL\");\n\n\tpExpField = (char*)DATATBLS_GetBinaryData(pMemPool, szPath, &nSize, __FILE__, __LINE__);\n", "file_path": "source/D2Common/src/DataTbls/FieldTbls.cpp", "rank": 87, "score": 12.068827294358721 }, { "content": "#include <DataTbls/TransformTbls.h>\n\n#include <Fog.h>\n\n\n\n//D2Common.0x6FD733B0 (#10667)\n\nint __stdcall DATATBLS_UNUSED_Return0(int a1, int a2)\n\n{\n\n\tD2_UNREACHABLE;\n\n\treturn 0;\n\n}\n", "file_path": "source/D2Common/src/DataTbls/TransformTbls.cpp", "rank": 88, "score": 12.053637186220389 }, { "content": "#include \"D2DataTbls.h\"\n\n\n\n#include \"D2Composit.h\"\n\n#include \"D2Skills.h\"\n\n#include \"Units/Units.h\"\n\n\n\n\n\n\n\n//D2Common.0x6FD729C0\n\nvoid __fastcall DATATBLS_LoadPlrType_ModeTxt(void* pMemPool)\n\n{\n\n\tD2PlrModeTypeTxt* pPlrModeTypeTxt = NULL;\n\n\tD2PlrModeTypeTxt* pPlrType = NULL;\n\n\tD2PlrModeTypeTxt* pPlrMode = NULL;\n\n\tint nTypeRecords = 0;\n\n\tint nModeRecords = 0;\n\n\n\n\tD2BinFieldStrc pTbl[] =\n\n\t{\n\n\t\t{ \"name\", TXTFIELD_ASCII, 31, 0, NULL },\n", "file_path": "source/D2Common/src/DataTbls/TokenTbls.cpp", "rank": 89, "score": 12.012734441532595 }, { "content": "#include \"D2QuestRecord.h\"\n\n\n\n#include \"D2BitManip.h\"\n\n\n\n\n\n//D2Common.0x6FDAE800 (#11107)\n\nBOOL __stdcall QUESTRECORD_GetQuestState(D2BitBufferStrc* pQuestRecord, int nQuest, int nState)\n\n{\n\n\tD2_ASSERT(pQuestRecord);\n\n\n\n\treturn BITMANIP_GetBitState(pQuestRecord->pBuffer, nState + 8 * sizeof(uint16_t) * nQuest) != 0;\n\n}\n\n\n\n//D2Common.0x6FDAE850 (#11108)\n\nvoid __stdcall QUESTRECORD_SetQuestState(D2BitBufferStrc* pQuestRecord, int nQuest, int nState)\n\n{\n\n\tD2_ASSERT(pQuestRecord);\n\n\n\n\tBITMANIP_SetBitState(pQuestRecord->pBuffer, nState + 8 * sizeof(uint16_t) * nQuest);\n\n}\n", "file_path": "source/D2Common/src/D2QuestRecord.cpp", "rank": 91, "score": 11.967219744730937 }, { "content": "\tif (pInventory)\n\n\t{\n\n\t\treturn INVENTORY_PlaceItemInGrid(pInventory, pItem, nXPos, nYPos, nPage + 2, nInventoryRecordId, bUnused);\n\n\t}\n\n\n\n\treturn FALSE;\n\n}\n\n\n\n//D2Common.0x6FD8F970 (#10250)\n\nvoid __stdcall INVENTORY_Return(char* szFile, int nLine, D2InventoryStrc* pInventory, int nX, int nY, int nInventoryRecordId, BOOL bClient, uint8_t 
nPage)\n\n{\n\n\treturn;\n\n}\n\n\n\n//D2Common.0x6FD8F980 (#10252)\n\nD2UnitStrc* __stdcall INVENTORY_GetItemFromInventoryPage(D2InventoryStrc* pInventory, int nGridX, int nGridY, int* pX, int* pY, int nInventoryRecordId, uint8_t nPage)\n\n{\n\n\tif (!pInventory || pInventory->dwSignature != 0x1020304)\n\n\t{\n\n\t\treturn nullptr;\n", "file_path": "source/D2Common/src/D2Inventory.cpp", "rank": 92, "score": 11.868081691257274 }, { "content": "\tint nMaxSockets = 0;\n\n\tint nStatLists = 0;\n\n\tint nAnimMode = 0;\n\n\tint nClassId = 0;\n\n\tint nSkillId = 0;\n\n\tint nStatId = 0;\n\n\tint nState = 0;\n\n\tint nValue = 0;\n\n\tint nBits = 0;\n\n\tint nFlag = 0;\n\n\tint nMax = 0;\n\n\n\n\tint v237, v238, v240, v241, v242, v243, v244, v245, v246, v247, v248, v249, v250, v251, v252; //TODO: Change names\n\n\n\n\tuint32_t dwCode = 0;\n\n\tchar szChar = 0;\n\n\tBOOL bRuneword = FALSE;\n\n\tBOOL bError = FALSE;\n\n\tBOOL b109 = FALSE;\n\n\n", "file_path": "source/D2Common/src/Items/Items.cpp", "rank": 93, "score": 11.855870322405224 }, { "content": "#include \"Drlg/D2DrlgDrlgAnim.h\"\n\n\n\n#include \"D2DataTbls.h\"\n\n#include \"Drlg/D2DrlgDrlg.h\"\n\n#include \"Drlg/D2DrlgDrlgGrid.h\"\n\n#include \"Drlg/D2DrlgRoomTile.h\"\n\n#include <D2CMP.h>\n\n\n\n\n\n\n\n//TODO: Variable names\n\n\n\n\n\n//D2Common.0x6FD75480\n\nvoid __fastcall DRLGANIM_InitCache(D2DrlgStrc* pDrlg, D2DrlgTileDataStrc* pTileData)\n\n{\n\n\tD2TileLibraryEntryStrc* ppTileLibraryEntry[40] = {};\n\n\tint nSubIndex = 0;\n\n\tint nIndex = 0;\n\n\n", "file_path": "source/D2Common/src/Drlg/DrlgDrlgAnim.cpp", "rank": 94, "score": 11.726781943432691 }, { "content": "#include <cstring>\n\n#include <D2Chat.h>\n\n#include <D2Lang.h>\n\n\n\n\n\n//D2Common.0x6FDC3BF0 (#10892)\n\nD2HoverTextStrc* __stdcall CHAT_AllocHoverMsg(void* pMemPool, const char* szText, int nTimeout)\n\n{\n\n\tD2HoverTextStrc* pHoverMsg = NULL;\n\n\tsize_t nTextLength = 0;\n\n\tint nLength = 0;\n\n\n\n\tnTextLength = strlen(szText);\n\n\tif (nTextLength == 0)\n\n\t{\n\n\t\treturn NULL;\n\n\t}\n\n\n\n\tif (nTextLength < 255)\n\n\t{\n", "file_path": "source/D2Common/src/D2Chat.cpp", "rank": 95, "score": 11.705312346680444 }, { "content": "\n\n\n\n//D2Common.0x6FD92640 (#10844)\n\nvoid __stdcall D2Common_10844_ITEMMODS_First(int nDataBits, int* pLayer, int* pValue)\n\n{\n\n\t*pLayer = nDataBits & 511;\n\n\t*pValue = (nDataBits >> 9) & 31;\n\n}\n\n\n\n//D2Common.0x6FD92670 (#10846)\n\nvoid __stdcall D2Common_10846(int nDataBits, int* a2, int* a3, int* a4, int* a5)\n\n{\n\n\t*a2 = nDataBits & 511;\n\n\t*a3 = (nDataBits >> 9) & 31;\n\n\t*a4 = (nDataBits >> 14) & 255;\n\n\t*a5 = (nDataBits >> 22) & 255;\n\n}\n\n\n\n//D2Common.0x6FD926C0 (#11293)\n\nBOOL __stdcall ITEMMODS_GetItemCharges(D2UnitStrc* pItem, int nSkillId, int nSkillLevel, int* pValue, D2StatListStrc** ppStatList)\n", "file_path": "source/D2Common/src/Items/ItemMods.cpp", "rank": 96, "score": 11.703604976390743 }, { "content": "#include \"D2Waypoints.h\"\n\n\n\n#include \"D2DataTbls.h\"\n\n\n\nstatic const int gnNumberOfWaypoints = 7 * 8 * sizeof(short);\n\n\n\n//D2Common.0x6FDC3D20 (#11153)\n\nBOOL __stdcall WAYPOINTS_GetLevelIdFromWaypointNo(short nWaypointNo, int* pLevelId)\n\n{\n\n\t*pLevelId = 0;\n\n\n\n\tif (nWaypointNo < 255)\n\n\t{\n\n\t\tfor (int i = 1; i < sgptDataTables->nLevelsTxtRecordCount; ++i)\n\n\t\t{\n\n\t\t\tif (DATATBLS_GetLevelsTxtRecord(i)->nWaypoint == nWaypointNo)\n\n\t\t\t{\n\n\t\t\t\t*pLevelId = i;\n\n\t\t\t\treturn TRUE;\n\n\t\t\t}\n", "file_path": 
"source/D2Common/src/D2Waypoints.cpp", "rank": 97, "score": 11.69656963265086 }, { "content": "#include \"D2Dungeon.h\"\n\n\n\n#include \"D2Collision.h\"\n\n#include \"D2DataTbls.h\"\n\n#include <DataTbls/LevelsIds.h>\n\n#include \"Drlg/D2DrlgActivate.h\"\n\n#include \"Drlg/D2DrlgDrlg.h\"\n\n#include \"Drlg/D2DrlgDrlgAnim.h\"\n\n#include \"Drlg/D2DrlgDrlgLogic.h\"\n\n#include \"Drlg/D2DrlgDrlgRoom.h\"\n\n#include \"Drlg/D2DrlgDrlgWarp.h\"\n\n#include \"Drlg/D2DrlgPreset.h\"\n\n#include \"D2Environment.h\"\n\n#include \"Units/UnitRoom.h\"\n\n#include \"Units/Units.h\"\n\n#include \"D2Seed.h\"\n\n\n\n\n\n//D2Common.0x6FD8B8A0 (#10038)\n\nD2DrlgActStrc* __stdcall DUNGEON_AllocAct(uint8_t nAct, uint32_t nInitSeed, BOOL bClient, D2GameStrc* pGame, uint8_t nDifficulty, void* pMemPool, int nTownLevelId, AUTOMAPFN pfAutoMap, TOWNAUTOMAPFN pfTownAutoMap)\n", "file_path": "source/D2Common/src/D2Dungeon.cpp", "rank": 98, "score": 11.565185467605374 }, { "content": "\t\t\telse\n\n\t\t\t{\n\n\t\t\t\t*(int*)((char*)pRecord + nOffset) = -1;\n\n\t\t\t}\n\n\t\t}\n\n\t\telse\n\n\t\t{\n\n\t\t\t*(int*)((char*)pRecord + nOffset) = -1;\n\n\t\t}\n\n\t}\n\n}\n\n\n\n//D2Common.0x6FD49E40\n\nvoid __fastcall DATATBLS_SkillDescCalcLinker(char* pSrc, void* pRecord, int nOffset, int nPosition, int nTxtRow, int nTxtColumn)\n\n{\n\n\tunsigned int nNewSize = 0;\n\n\tunsigned int nSizeEx = 0;\n\n\tunsigned int nSize = 0;\n\n\tint nBufferSize = 0;\n\n\tchar* pCode = NULL;\n", "file_path": "source/D2Common/src/DataTbls/SkillsTbls.cpp", "rank": 99, "score": 11.361435745031603 } ]
Rust
rust/tests/integration_test.rs
dandyvica/rbf
449a99a30854ad8ca35032dbecfeb3af465a77aa
use rbf::reader::{Reader, ReaderLazyness};
use rbf::record::{AsciiMode, UTF8Mode};
use rbf::vector_of;

pub mod setup {
    use rbf::layout::Layout;
    use rbf::record::{AsciiMode, UTF8Mode};

    pub fn layout_load_layout_ascii() -> Layout<AsciiMode> {
        Layout::<AsciiMode>::new("./tests/test.xml").unwrap()
    }

    pub fn layout_load_layout_utf8() -> Layout<UTF8Mode> {
        Layout::<UTF8Mode>::new("./tests/test.xml").unwrap()
    }
}

#[test]
fn record_filter() {
    let layout = setup::layout_load_layout_ascii();
    let r_ll = layout.get("LL").unwrap();
    assert_eq!(r_ll.calculated_length, 353);

    let types = ["A".to_string(), "N".to_string()];
    for f in r_ll {
        assert!(f.len() <= 26);
        assert!(types.contains(&f.ftype.id));
        assert!(f.name.len() <= 4);
    }

    let fields = r_ll.filter(|f| f.length >= 25);
    assert_eq!(fields.unwrap().len(), 2);

    let r_dup = layout.get("DP").unwrap();
    let f_dup = r_dup.filter(|f| f.name == "F5").unwrap();
    assert_eq!(f_dup.len(), 4);
    for (i, f) in f_dup.iter().enumerate() {
        assert_eq!(f.multiplicity, i);
    }
}

#[test]
fn record_remove() {
    let mut layout = setup::layout_load_layout_ascii();
    {
        let r_ll = layout.get_mut("LL").unwrap();
        r_ll.remove(|f| f.index == 0);
        assert_eq!(r_ll[0].name, "W1");
        assert_eq!(r_ll.count(), 26);
        r_ll.remove(|f| f.name.starts_with("W1"));
        assert_eq!(r_ll.count(), 15);
        r_ll.remove(|f| f.name != "W2");
        assert_eq!(r_ll.count(), 1);
    }
    {
        let r_nb = layout.get_mut("NB").unwrap();
        r_nb.remove(|f| !["N1", "N2"].contains(&&*f.name));
        assert_eq!(r_nb.count(), 2);
    }
}

#[test]
fn record_iterator() {
    let mut layout = setup::layout_load_layout_ascii();
    {
        let r_ll = layout.get("LL").unwrap();
        for f in r_ll {
            assert!(f.length < 27);
        }
    }
    {
        {
            let r_ll = layout.get_mut("LL").unwrap();
            for f in r_ll {
                f.length = 10;
            }
        }
        let r_ll = layout.get("LL").unwrap();
        let count = r_ll.count();
        let sum: usize = vector_of!(r_ll, length).iter().sum();
        assert_eq!(sum, 10 * count);
    }
}

#[test]
fn field_multiplicity() {
    let layout = setup::layout_load_layout_ascii();
    let r_dp = layout
        .get("DP")
        .unwrap()
        .filter(|f| f.name == "F5")
        .unwrap();
    assert_eq!(r_dp.len(), 4);
    for (i, f) in r_dp.iter().enumerate() {
        assert_eq!(f.multiplicity, i);
    }
}

#[should_panic]
#[allow(unused_variables)]
#[test]
fn reader_stringent() {
    let layout = setup::layout_load_layout_ascii();
    let mut reader = Reader::<AsciiMode>::new("./tests/test_ascii.data", layout);
    reader.set_lazyness(ReaderLazyness::Strict);
    while let Some(rec) = reader.next() {}
}

#[test]
fn reader_lazy() {
    let layout = setup::layout_load_layout_utf8();
    let mut reader = Reader::<UTF8Mode>::new("./tests/test_utf8.data", layout);

    let letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
    let digits = "123456789";
    let greek = "αβγδεζηθικλμνξοπρστυφχψω";

    while let Some((_stats, rec)) = reader.next() {
        match rec.name.as_ref() {
            "LL" => {
                assert_eq!(rec.get_value("ID"), "LL");
                for (i, l) in letters.chars().enumerate() {
                    let fname = format!("W{}", i + 1);
                    assert_eq!(rec.get_value(&fname), l.to_string().repeat(i + 1));
                }
            }
            "NB" => {
                assert_eq!(rec.get_value("ID"), "NB");
                for (i, n) in digits.chars().enumerate() {
                    let fname = format!("N{}", i + 1);
                    assert_eq!(rec.get_value(&fname), n.to_string().repeat(i + 1));
                }
            }
            "GL" => {
                assert_eq!(rec.get_value("ID"), "GL");
                for (i, l) in greek.chars().enumerate() {
                    let fname = format!("G{}", i + 1);
                    assert_eq!(rec.get_value(&fname), l.to_string().repeat(i + 1));
                }
            }
            "DP" => {
                assert_eq!(rec.get_value("ID"), "DP");
                assert_eq!(rec.get("F5").unwrap()[0].value(), "AAAAA");
                assert_eq!(rec.get("F5").unwrap()[1].value(), "BBBBB");
                assert_eq!(rec.get("F5").unwrap()[2].value(), "CCCCC");
                assert_eq!(rec.get("F5").unwrap()[3].value(), "DDDDD");
            }
            _ => panic!(
                "record name <{}> not found in file <{}>",
                rec.name, "./tests/test.data"
            ),
        }
    }
}
use rbf::reader::{Reader, ReaderLazyness};
use rbf::record::{AsciiMode, UTF8Mode};
use rbf::vector_of;

pub mod setup {
    use rbf::layout::Layout;
    use rbf::record::{AsciiMode, UTF8Mode};

    pub fn layout_load_layout_ascii() -> Layout<AsciiMode> {
        Layout::<AsciiMode>::new("./tests/test.xml").unwrap()
    }

    pub fn layout_load_layout_utf8() -> Layout<UTF8Mode> {
        Layout::<UTF8Mode>::new("./tests/test.xml").unwrap()
    }
}

#[test]
fn record_filter() {
    let layout = setup::layout_load_layout_ascii();
    let r_ll = layout.get("LL").unwrap();
    assert_eq!(r_ll.calculated_length, 353);

    let types = ["A".to_string(), "N".to_string()];
    for f in r_ll {
        assert!(f.len() <= 26);
        assert!(types.contains(&f.ftype.id));
        assert!(f.name.len() <= 4);
    }

    let fields = r_ll.filter(|f| f.length >= 25);
    assert_eq!(fields.unwrap().len(), 2);

    let r_dup = layout.get("DP").unwrap();
    let f_dup = r_dup.filter(|f| f.name == "F5").unwrap();
    assert_eq!(f_dup.len(), 4);
    for (i, f) in f_dup.iter().enumerate() {
        assert_eq!(f.multiplicity, i);
    }
}

#[test]
fn record_remove() {
    let mut layout = setup::layout_load_layout_ascii();
    {
        let r_ll = layout.get_mut("LL").unwrap();
        r_ll.remove(|f| f.index == 0);
        assert_eq!(r_ll[0].name, "W1");
        assert_eq!(r_ll.count(), 26);
        r_ll.remove(|f| f.name.starts_with("W1"));
        assert_eq!(r_ll.count(), 15);
        r_ll.remove(|f| f.name != "W2");
        assert_eq!(r_ll.count(), 1);
    }
    {
        let r_nb = layout.get_mut("NB").unwrap();
        r_nb.remove(|f| !["N1", "N2"].contains(&&*f.name));
        assert_eq!(r_nb.count(), 2);
    }
}

#[test]
fn record_iterator() {
    let mut layout = setup::layout_load_layout_ascii();
    {
        let r_ll = layout.get("LL").unwrap();
        for f in r_ll {
            assert!(f.length < 27);
        }
    }
    {
        {
            let r_ll = layout.get_mut("LL").unwrap();
            for f in r_ll {
                f.length = 10;
            }
        }
        let r_ll = layout.get("LL").unwrap();
        let count = r_ll.count();
        let sum: usize = vector_of!(r_ll, length).iter().sum();
        assert_eq!(sum, 10 * count);
    }
}

#[test]
fn field_multiplicity() {
    let layout = setup::layout_load_layout_ascii();
    let r_dp = layout
        .get("DP")
        .unwrap()
        .filter(|f| f.name == "F5")
        .unwrap();
    assert_eq!(r_dp.len(), 4);
    for (i, f) in r_dp.iter().enumerate() {
        assert_eq!(f.multiplicity, i);
    }
}

#[should_panic]
#[allow(unused_variables)]
#[test]
fn reader_stringent() {
    let layout = setup::layout_load_layout_ascii();
    let mut reader = Reader::<AsciiMode>::new("./tests/test_ascii.data", layout);
    reader.set_lazyness(ReaderLazyness::Strict);
    while let Some(rec) = reader.next() {}
}

#[test]
fn reader_lazy() {
    let layout = setup::layout_load_layout_utf8();
    let mut reader = Reader::<UTF8Mode>::new("./tests/test_utf8.data", layout);

    let letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
    let digits = "123456789";
    let greek = "αβγδεζηθικλμνξοπρστυφχψω";

    while let Some((_stats, rec)) = reader.next() {
        match rec.name.as_ref() {
            "LL" => {
                assert_eq!(rec.get_value("ID"), "LL");
                for (i, l) in letters.chars().enumerate() {
                    let fname = format!("W{}", i + 1);
                    assert_eq!(rec.get_value(&fname), l.to_string().repeat(i + 1));
                }
            }
            "NB" => {
                assert_eq!(rec.get_value("ID"), "NB");
                for (i, n) in digits.chars().enumerate() {
                    let fname = format!("N{}", i + 1);
                    assert_eq!(rec.get_value(&fname), n.to_string().repeat(i + 1));
                }
            }
            "GL" => {
                assert_eq!(rec.get_value("ID"), "GL");
                for (i, l) in greek.chars().enumerate() {
                    let fname = format!("G{}", i + 1);
                    assert_eq!(rec.get_value(&fname), l.to_string().repeat(i + 1));
                }
            }
            "DP" => {
                assert_eq!(rec.get_value("ID"), "DP");
                assert_eq!(rec.get("F5").unwrap()[0].value(), "AAAAA");
                assert_eq!(rec.get("F5").unwrap()[1].value(), "BBBBB");
                assert_eq!(rec.get("F5").unwrap()[2].value(), "CCCCC");
                assert_eq!(rec.get("F5").unwrap()[3].value(), "DDDDD");
            }
            _ =>
panic!( "record name <{}> not found in file <{}>", rec.name, "./tests/test.data" ), } } }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let mut nb_lines: usize = 0;\n\n let mut nb_records: HashMap<String, usize> = HashMap::new();\n\n\n\n // get arguments\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() == 1 {\n\n println!(\"Usage: {} layout_file data_file\", args[0]);\n\n std::process::exit(1);\n\n }\n\n\n\n // load layout (suppose only ascii data)\n\n let layout = error_check!(Layout::<AsciiMode>::new(&args[1]));\n\n\n\n // create reader\n\n let mut reader = Reader::new(&args[2], layout);\n\n\n\n // loop through records\n\n while let Some((_stats, rec)) = reader.next() {\n", "file_path": "rust/examples/count_recs.rs", "rank": 3, "score": 135227.94178684085 }, { "content": "// how long is it to read a Layout\n\nfn load_layout(bench: &mut Bencher) {\n\n bench.iter(|| rbf::layout::setup::layout_load_layout_ascii(\"./tests/test.xml\"))\n\n}\n\n\n", "file_path": "rust/benches/read_bench.rs", "rank": 4, "score": 127436.53513690011 }, { "content": " def setUp(self):\n\n self.layout = Layout(\"world_data.xml\")\n", "file_path": "python/test/test_reader.py", "rank": 5, "score": 108236.44476239223 }, { "content": " def setUp(self):\n\n self.ft1 = FieldType(\"AN\", \"string\")\n\n self.f1 = Field(\"FIELD1\", \"Alpha field\", self.ft1, 10)\n\n\n\n self.ft2 = FieldType(\"I\", \"integer\")\n\n self.f2 = Field(\"FIELD2\", \"Integer field\", self.ft2, 10)\n\n\n\n self.ft3 = FieldType(\"N\", \"decimal\")\n\n self.f3 = Field(\"FIELD3\", \"Decimal field\", self.ft3, 10)\n\n\n\n self.ft4 = FieldType(\"D\", \"date\")\n\n self.f4 = Field(\"FIELD4\", \"Date field\", self.ft4, 8)\n\n\n\n self.ft5 = FieldType(\"T\", \"time\")\n", "file_path": "python/test/test_field.py", "rank": 6, "score": 108151.08590830238 }, { "content": " def setUp(self):\n\n self.assertRaises(ValueError, Layout, \"foo.xml\")\n\n self.layout = Layout(\"world_data.xml\")\n\n\n\n # test meta data\n\n self.assertEqual(self.layout.version, \"1.0\")\n\n self.assertEqual(self.layout.description, \"Continents, countries, cities\")\n\n self.assertEqual(self.layout.schema, \"world_data\")\n\n self.assertEqual(self.layout.ignoreLine, \"^#\")\n\n self.assertEqual(self.layout.skipField, \"ID\")\n\n self.assertEqual(self.layout.mapper, \"type:1 map:0..4\")\n\n\n\n self.assertEqual(len(self.layout.records()), 2)\n\n\n\n for rec in self.layout:\n", "file_path": "python/test/test_layout.py", "rank": 7, "score": 108143.89627385438 }, { "content": "class TestReader(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.assertRaises(ValueError, Layout, \"foo.xml\")\n\n self.layout = Layout(\"world_data.xml\")\n\n\n\n # test meta data\n\n self.assertEqual(self.layout.version, \"1.0\")\n\n self.assertEqual(self.layout.description, \"Continents, countries, cities\")\n\n self.assertEqual(self.layout.schema, \"world_data\")\n\n self.assertEqual(self.layout.ignoreLine, \"^#\")\n\n self.assertEqual(self.layout.skipField, \"ID\")\n\n self.assertEqual(self.layout.mapper, \"type:1 map:0..4\")\n\n\n\n self.assertEqual(len(self.layout.records()), 2)\n\n\n\n for rec in self.layout:\n\n self.assertEqual(rec.name in [\"COUN\",\"CONT\"], True)\n\n\n\n def test1(self):\n\n self.layout.delete(['COUN'])\n\n self.assertEqual(len(self.layout.records()), 1)\n\n\n\n def test2(self):\n\n self.layout = Layout(\"world_data.xml\")\n\n self.layout.simplify([\"CONT:NAME,AREA\", \"COUN:POPULATION\"])\n", "file_path": "python/test/test_layout.py", "rank": 11, "score": 106426.39888355172 }, { "content": "type ConvertFunc<T> = fn(&str) -> T;\n\n\n\nimpl<T> 
Compare<T>\n\nwhere\n\n T: PartialOrd + FromStr + Sized,\n\n{\n\n /// Converts a string to type T.\n\n fn to_t(value: &str) -> T {\n\n let converted = match value.parse::<T>() {\n\n Ok(v) => v,\n\n Err(_) => panic!(\"unable to convert string value {}\", value),\n\n };\n\n converted\n\n }\n\n\n\n /// Compares two strings according to the comparison function.\n\n /// Convert strings first to the associated type Output.\n\n fn compare(lhs: &str, rhs: &str, cnv: ConvertFunc<T>, cmp: CompareFunc<T>) -> bool {\n\n let l = cnv(lhs);\n\n let r = cnv(rhs);\n", "file_path": "rust/src/types/compare.rs", "rank": 12, "score": 104024.09373628316 }, { "content": "class TestFieldType(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.assertRaises(ValueError, FieldType, \"A/N\", \"STR\")\n\n self.ft = FieldType(\"A/N\", \"string\")\n\n\n\n def test_0(self):\n\n self.assertEqual(self.ft.base, str)\n\n\n\n def test_xml_cons(self):\n\n self.doc = minidom.parseString('<fieldtype name=\"AN\" type=\"string\" pattern=\"[\\w/\\*\\.,\\-]+\" format=\"%-*.*s\"/>')\n\n self.xml_node = self.doc.childNodes[0]\n\n self.ft = FieldType.from_xml_node(self.xml_node)\n\n self.assertEqual(self.ft.pattern, \"[\\w/\\*\\.,\\-]+\")\n\n self.assertEqual(self.ft.format, \"%-*.*s\")\n\n\n\n def test_conversion(self):\n\n self.doc = minidom.parseString('<fieldtype name=\"D\" type=\"date\" pattern=\"[0-9]+\" date_format=\"%Y%m%d\"/>')\n\n self.xml_node = self.doc.childNodes[0]\n\n self.ft = FieldType.from_xml_node(self.xml_node)\n\n self.assertEqual(self.ft.date_format, \"%Y%m%d\")\n\n\n\n self.assertEqual(self.ft.convert(\"20160226\"), datetime(2016, 2, 26))\n\n\n\n self.doc = minidom.parseString('<fieldtype name=\"T\" type=\"time\" pattern=\"[0-9]+\" time_format=\"%H%M\"/>')\n\n self.xml_node = self.doc.childNodes[0]\n\n self.ft = FieldType.from_xml_node(self.xml_node)\n\n self.assertEqual(self.ft.time_format, \"%H%M\")\n\n\n", "file_path": "python/test/test_fieldtype.py", "rank": 13, "score": 102970.35237810874 }, { "content": "pub trait BaseType {\n\n //fn get_name(&self) -> String { self.get_type_name() }\n\n fn get_name(&self) -> &'static str;\n\n fn set_format(&mut self, fmt: &str);\n\n fn get_format(&self) -> &str;\n\n fn eq(&self, lhs: &str, rhs: &str) -> bool;\n\n fn lt(&self, lhs: &str, rhs: &str) -> bool;\n\n fn gt(&self, lhs: &str, rhs: &str) -> bool;\n\n}\n\n\n\n/// Convenient conversion from a string ref.\n\nimpl<'a> From<&'a str> for Box<BaseType> {\n\n fn from(original: &'a str) -> Box<BaseType> {\n\n match original {\n\n \"string\" => Box::new(StringType::new(\"\")),\n\n \"decimal\" => Box::new(DecimalType::new(\"\")),\n\n \"int\" => Box::new(SignedIntegerType::new(\"\")),\n\n \"uint\" => Box::new(UnsignedIntegerType::new(\"\")),\n\n \"date\" => Box::new(DateType::new(\"\")),\n\n \"time\" => Box::new(TimeType::new(\"\")),\n", "file_path": "rust/src/types/base.rs", "rank": 14, "score": 101612.25166348915 }, { "content": "type CompareFunc<T> = fn(&T, &T) -> bool;\n", "file_path": "rust/src/types/compare.rs", "rank": 15, "score": 98611.0757581588 }, { "content": "// try to bench record set_value\n\nfn set_value(bench: &mut Bencher) {\n\n let mut rec = rbf::record::setup::set_up_by_offset::<AsciiMode>();\n\n bench.iter(|| rec.set_value(\"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCCCCCCCCCCCDDDDDDDDDD\"))\n\n}\n\n\n", "file_path": "rust/benches/read_bench.rs", "rank": 16, "score": 98227.89699784144 }, { "content": "// try to bench record set_value\n\nfn set_value_huge_100(bench: &mut Bencher) {\n\n let mut rec = 
rbf::record::setup::set_up_by_length_huge::<AsciiMode>(100);\n\n let s = \"A\".to_string().repeat(1000);\n\n\n\n bench.iter(|| rec.set_value(&s))\n\n}\n\n\n", "file_path": "rust/benches/read_bench.rs", "rank": 17, "score": 95848.68349670523 }, { "content": "// try to bench record set_value\n\nfn set_value_huge_1000(bench: &mut Bencher) {\n\n let mut rec = rbf::record::setup::set_up_by_length_huge::<AsciiMode>(1000);\n\n let s = \"A\".to_string().repeat(10000);\n\n\n\n bench.iter(|| rec.set_value(&s))\n\n}\n\n\n", "file_path": "rust/benches/read_bench.rs", "rank": 18, "score": 95848.68349670523 }, { "content": "import sys\n\nimport unittest\n\n\n\nfrom rbf.layout import Layout\n\nfrom rbf.reader import Reader\n\n\n\nclass TestReader(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.layout = Layout(\"world_data.xml\")\n\n self.reader = Reader(\"world_data.txt\", self.layout, lambda x: x[0:4])\n\n \n\n def test_bad_conv(self):\n\n self.assertRaises(ValueError, Reader, \"foo.txt\", None, lambda x: x[0:4])\n\n\n\n def test_loop1(self):\n\n import itertools\n\n top5 = list(itertools.islice(self.reader, 5))\n\n rec = top5[4]\n\n self.assertEqual(\";\".join(rec.array_of('value')), \"COUN;China Tibet;2620000;Lhasa\")\n\n\n\n def test_loop2(self):\n\n self.l = list(self.reader)\n\n #self.assertEqual(\";\".join(self.l[4].array_of('value')), \"COUN;China Tibet;2620000;Lhasa\")\n\n\n\n\n\nif __name__ == '__main__':\n\n unittest.main()\n\n\n", "file_path": "python/test/test_reader.py", "rank": 19, "score": 93716.67457964577 }, { "content": "// try to bench record set_value\n\nfn set_value_huge_utf8_1000(bench: &mut Bencher) {\n\n let mut rec = rbf::record::setup::set_up_by_length_huge::<UTF8Mode>(1000);\n\n let s = \"α\".to_string().repeat(10000);\n\n\n\n bench.iter(|| rec.set_value(&s))\n\n}\n\n\n\n// fn next_record_id(bench: &mut Bencher) {\n\n// // load our layout\n\n// let layout = Layout::<AsciiMode>::new(\"./tests/test.xml\");\n\n\n\n// // create reader\n\n// fn mapper(x: &str) -> &str {\n\n// &x[0..2]\n\n// };\n\n// let mut reader = Reader::<AsciiMode>::new(\"./tests/test_ascii.data\", layout, mapper);\n\n\n\n// bench.iter(|| reader.next_record_id())\n\n// }\n\n\n", "file_path": "rust/benches/read_bench.rs", "rank": 20, "score": 93654.67842809192 }, { "content": "import unittest\n\nfrom datetime import datetime, date, time\n\nfrom xml.dom import minidom\n\nfrom rbf.fieldtype import FieldType\n\nfrom rbf.field import Field\n\n\n\nclass TestField(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.ft1 = FieldType(\"AN\", \"string\")\n\n self.f1 = Field(\"FIELD1\", \"Alpha field\", self.ft1, 10)\n\n\n\n self.ft2 = FieldType(\"I\", \"integer\")\n\n self.f2 = Field(\"FIELD2\", \"Integer field\", self.ft2, 10)\n\n\n\n self.ft3 = FieldType(\"N\", \"decimal\")\n\n self.f3 = Field(\"FIELD3\", \"Decimal field\", self.ft3, 10)\n\n\n\n self.ft4 = FieldType(\"D\", \"date\")\n\n self.f4 = Field(\"FIELD4\", \"Date field\", self.ft4, 8)\n\n\n\n self.ft5 = FieldType(\"T\", \"time\")\n\n self.f5 = Field(\"FIELD5\", \"Time field\", self.ft5, 4)\n\n\n\n def test_bad_cons(self):\n\n self.assertRaises(ValueError, Field, \"\", \"Alpha field 1\", self.ft1, 10)\n\n self.assertRaises(ValueError, Field, \"FIELD1\", \"Alpha field\", self.ft1, -1)\n\n\n\n \"\"\"\n\n def test_other_cons(self):\n\n self.doc = minidom.parseString('<field name=\"FIELD\" description=\"Field desc\" length=\"10\" type=\"AN\"/>')\n\n self.xml_node = self.doc.childNodes[0]\n\n self.other_f = Field.from_xml_node(self.xml_node)\n\n 
self.assertEqual(self.other_f.name, \"FIELD\")\n\n self.assertEqual(self.other_f.description, \"Field desc\")\n\n self.assertEqual(self.other_f.ftype.name, \"AN\")\n\n self.assertEqual(self.other_f.length, 10)\n\n \"\"\"\n\n\n\n def test_equality(self):\n\n self.assertEqual(self.f1, Field(\"FIELD1\", \"Alpha field\", self.ft1, 10))\n\n\n\n def test_properties(self):\n\n self.assertEqual(self.f1.name, \"FIELD1\")\n\n self.assertEqual(self.f1.description, \"Alpha field\")\n\n self.assertEqual(self.f1.ftype.name, \"AN\")\n\n self.assertEqual(self.f1.length, 10)\n\n\n\n def test_set_value(self):\n\n self.f1.value = \" XXX\"\n\n self.assertEqual(self.f1.value, \"XXX\")\n\n self.assertEqual(self.f1.raw_value, \" XXX\")\n\n\n\n def test_init(self):\n\n self.f1.initialize()\n\n self.assertEqual(self.f1.raw_value, \" \"*self.f1.length)\n\n\n\n self.f2.initialize()\n\n self.assertEqual(self.f2.raw_value, \"0\"*self.f2.length)\n\n\n\n self.f3.initialize()\n\n self.assertEqual(self.f3.raw_value, \"0\"*self.f3.length)\n\n\n\n def test_reset(self):\n\n setattr(self.ft1, \"format\", \"%*.*s\")\n\n self.f1.reset(\"AAA\")\n\n self.assertEqual(self.f1.raw_value, \" \"*7+\"AAA\")\n\n\n\n setattr(self.ft1, \"format\", \"%-*.*s\")\n\n self.f1.reset(\"AAA\")\n\n self.assertEqual(self.f1.raw_value, \"AAA\"+\" \"*7)\n\n\n\n setattr(self.ft2, \"format\", \"%0*d\")\n\n self.f2.reset(314)\n\n self.assertEqual(self.f2.raw_value, \"0000000314\")\n\n\n\n setattr(self.ft2, \"format\", \"%*d\")\n\n self.f2.reset(314)\n\n self.assertEqual(self.f2.raw_value, \" 314\")\n\n\n\n setattr(self.ft3, \"format\", \"%0*.2f\")\n\n self.f3.reset(3.14)\n\n self.assertEqual(self.f3.raw_value, \"0000003.14\")\n\n\n\n setattr(self.ft3, \"format\", \"%*.2f\")\n\n self.f3.reset(3.14)\n\n self.assertEqual(self.f3.raw_value, \" 3.14\")\n\n\n\n def test_convert(self):\n\n self.f4.value = \"20000101\"\n\n\n\n \n\n\n\nif __name__ == '__main__':\n\n unittest.main()\n\n\n", "file_path": "python/test/test_field.py", "rank": 21, "score": 93627.38073799075 }, { "content": "import sys\n\nimport unittest\n\n\n\nfrom rbf.layout import Layout\n\n\n\nclass TestReader(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.assertRaises(ValueError, Layout, \"foo.xml\")\n\n self.layout = Layout(\"world_data.xml\")\n\n\n\n # test meta data\n\n self.assertEqual(self.layout.version, \"1.0\")\n\n self.assertEqual(self.layout.description, \"Continents, countries, cities\")\n\n self.assertEqual(self.layout.schema, \"world_data\")\n\n self.assertEqual(self.layout.ignoreLine, \"^#\")\n\n self.assertEqual(self.layout.skipField, \"ID\")\n\n self.assertEqual(self.layout.mapper, \"type:1 map:0..4\")\n\n\n\n self.assertEqual(len(self.layout.records()), 2)\n\n\n\n for rec in self.layout:\n\n self.assertEqual(rec.name in [\"COUN\",\"CONT\"], True)\n\n\n\n def test1(self):\n\n self.layout.delete(['COUN'])\n\n self.assertEqual(len(self.layout.records()), 1)\n\n\n\n def test2(self):\n\n self.layout = Layout(\"world_data.xml\")\n\n self.layout.simplify([\"CONT:NAME,AREA\", \"COUN:POPULATION\"])\n\n self.assertEqual(self.layout[\"CONT\"].array_of('name'), [\"NAME\",\"AREA\"])\n\n\n\n\n\n\n\nif __name__ == '__main__':\n\n unittest.main()\n\n\n", "file_path": "python/test/test_layout.py", "rank": 22, "score": 93619.8596660785 }, { "content": "class TestReader(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.layout = Layout(\"world_data.xml\")\n\n self.reader = Reader(\"world_data.txt\", self.layout, lambda x: x[0:4])\n\n \n\n def test_bad_conv(self):\n\n 
self.assertRaises(ValueError, Reader, \"foo.txt\", None, lambda x: x[0:4])\n\n\n\n def test_loop1(self):\n\n import itertools\n\n top5 = list(itertools.islice(self.reader, 5))\n\n rec = top5[4]\n\n self.assertEqual(\";\".join(rec.array_of('value')), \"COUN;China Tibet;2620000;Lhasa\")\n\n\n\n def test_loop2(self):\n\n self.l = list(self.reader)\n", "file_path": "python/test/test_reader.py", "rank": 23, "score": 89418.42042948687 }, { "content": "class TestField(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.ft1 = FieldType(\"AN\", \"string\")\n\n self.f1 = Field(\"FIELD1\", \"Alpha field\", self.ft1, 10)\n\n\n\n self.ft2 = FieldType(\"I\", \"integer\")\n\n self.f2 = Field(\"FIELD2\", \"Integer field\", self.ft2, 10)\n\n\n\n self.ft3 = FieldType(\"N\", \"decimal\")\n\n self.f3 = Field(\"FIELD3\", \"Decimal field\", self.ft3, 10)\n\n\n\n self.ft4 = FieldType(\"D\", \"date\")\n\n self.f4 = Field(\"FIELD4\", \"Date field\", self.ft4, 8)\n\n\n\n self.ft5 = FieldType(\"T\", \"time\")\n\n self.f5 = Field(\"FIELD5\", \"Time field\", self.ft5, 4)\n\n\n\n def test_bad_cons(self):\n\n self.assertRaises(ValueError, Field, \"\", \"Alpha field 1\", self.ft1, 10)\n\n self.assertRaises(ValueError, Field, \"FIELD1\", \"Alpha field\", self.ft1, -1)\n\n\n\n \"\"\"\n\n def test_other_cons(self):\n\n self.doc = minidom.parseString('<field name=\"FIELD\" description=\"Field desc\" length=\"10\" type=\"AN\"/>')\n\n self.xml_node = self.doc.childNodes[0]\n\n self.other_f = Field.from_xml_node(self.xml_node)\n\n self.assertEqual(self.other_f.name, \"FIELD\")\n\n self.assertEqual(self.other_f.description, \"Field desc\")\n\n self.assertEqual(self.other_f.ftype.name, \"AN\")\n\n self.assertEqual(self.other_f.length, 10)\n\n \"\"\"\n\n\n\n def test_equality(self):\n\n self.assertEqual(self.f1, Field(\"FIELD1\", \"Alpha field\", self.ft1, 10))\n\n\n\n def test_properties(self):\n\n self.assertEqual(self.f1.name, \"FIELD1\")\n\n self.assertEqual(self.f1.description, \"Alpha field\")\n\n self.assertEqual(self.f1.ftype.name, \"AN\")\n\n self.assertEqual(self.f1.length, 10)\n\n\n\n def test_set_value(self):\n\n self.f1.value = \" XXX\"\n\n self.assertEqual(self.f1.value, \"XXX\")\n\n self.assertEqual(self.f1.raw_value, \" XXX\")\n\n\n\n def test_init(self):\n\n self.f1.initialize()\n\n self.assertEqual(self.f1.raw_value, \" \"*self.f1.length)\n\n\n\n self.f2.initialize()\n\n self.assertEqual(self.f2.raw_value, \"0\"*self.f2.length)\n\n\n\n self.f3.initialize()\n\n self.assertEqual(self.f3.raw_value, \"0\"*self.f3.length)\n\n\n\n def test_reset(self):\n\n setattr(self.ft1, \"format\", \"%*.*s\")\n\n self.f1.reset(\"AAA\")\n\n self.assertEqual(self.f1.raw_value, \" \"*7+\"AAA\")\n\n\n\n setattr(self.ft1, \"format\", \"%-*.*s\")\n\n self.f1.reset(\"AAA\")\n\n self.assertEqual(self.f1.raw_value, \"AAA\"+\" \"*7)\n\n\n\n setattr(self.ft2, \"format\", \"%0*d\")\n\n self.f2.reset(314)\n\n self.assertEqual(self.f2.raw_value, \"0000000314\")\n\n\n\n setattr(self.ft2, \"format\", \"%*d\")\n\n self.f2.reset(314)\n\n self.assertEqual(self.f2.raw_value, \" 314\")\n\n\n\n setattr(self.ft3, \"format\", \"%0*.2f\")\n\n self.f3.reset(3.14)\n\n self.assertEqual(self.f3.raw_value, \"0000003.14\")\n\n\n\n setattr(self.ft3, \"format\", \"%*.2f\")\n\n self.f3.reset(3.14)\n\n self.assertEqual(self.f3.raw_value, \" 3.14\")\n\n\n\n def test_convert(self):\n", "file_path": "python/test/test_field.py", "rank": 24, "score": 89302.62584729506 }, { "content": " def test_reader\n\n assert_raise(ArgumentError) { Reader.new(\"foo.data\") }\n\n\n\n 
l = Layout.new(\"world_data.xml\")\n\n r = Reader.new(\"world_data.txt\", l, lambda {|x| x[0..3]})\n\n\n\n r.each {|rec| print rec.name }\n\n end\n\n\n\nend\n", "file_path": "ruby/test/test.rb", "rank": 25, "score": 88061.32573071786 }, { "content": " def test_field\n\n string_type = FieldType.new(\"A/N\", \"string\")\n\n f = Field.new(\"FIELD1\", \"Description of field 1\", string_type, 10)\n\n\n\n f.value = \" AAAAAAAAAAAAAAAA \"\n\n assert_equal(f.type.type, :string)\n\n assert_equal(f.value, \"AAAAAAAAAAAAAAAA\")\n\n\n\n ov_type = FieldType.new(\"N\", \"overpunch\")\n\n f = Field.new(\"FIELD1\", \"Description of field 1\", ov_type, 15)\n\n f.value = \" 123A \"\n\n assert_equal(f.value, \"1231\")\n\n end\n\n\n", "file_path": "ruby/test/test.rb", "rank": 26, "score": 87962.96297075412 }, { "content": " def test_layout\n\n assert_raise(ArgumentError) { Layout.new(\"foo.xml\") }\n\n l = Layout.new(\"world_data.xml\")\n\n r = l[\"COUN\"]\n\n\n\n assert_equal(r.name, \"COUN\")\n\n\n\n l.each {|k,v| print v.name}\n\n end\n\n\n", "file_path": "ruby/test/test.rb", "rank": 27, "score": 87954.67803892161 }, { "content": " def test(self):\n\n self.e1 = Element(\"ELEMENT1\", \"Alpha element 1\", 10)\n\n self.e2 = Element(\"ELEMENT1\", \"Alpha element 1\", 10)\n\n\n\n self.assertRaises(ValueError, Element, \"\", \"Alpha element 1\", 10)\n\n self.assertRaises(ValueError, Element, \"ELEMENT1\", \"Alpha element 1\", -1)\n\n\n\n self.assertEqual(self.e1.name, \"ELEMENT1\")\n\n self.assertEqual(self.e1.description, \"Alpha element 1\")\n\n self.assertEqual(self.e1.length, 10)\n\n\n\n self.assertEqual(self.e1, self.e2)\n\n\n\n self.e1.name = \"ELEMENT2\"\n\n self.e1.description = \"Alpha element 2\"\n\n self.e1.length = 20\n\n\n\n self.assertEqual(self.e1.name, \"ELEMENT2\")\n\n self.assertEqual(self.e1.description, \"Alpha element 2\")\n\n self.assertEqual(self.e1.length, 20)\n\n\n", "file_path": "python/test/test_element.py", "rank": 28, "score": 84058.03088491215 }, { "content": " def test(self):\n\n self.writer = writer(\"\", WriterStyle.text)\n\n\n\n self.ft = FieldType(\"A/N\", \"string\")\n\n self.rec = Record(\"RECORD1\", \"Description of record 1\")\n\n self.rec.append(Field(\"LONG_FIELD1\", \"Description of field 1\", self.ft, 10))\n\n self.rec.append(Field(\"LONG_FIELD2\", \"Description of field 2\", self.ft, 5))\n\n self.rec.append(Field(\"LONG_FIELD2\", \"Description of field 2\", self.ft, 5))\n\n self.rec.append(Field(\"LONG_FIELD3\", \"Description of field 3\", self.ft, 10))\n\n\n\n self.line = \"A\"*10 + \"B\"*5 + \"C\"*5 + \"D\"*10\n\n self.rec.value = self.line\n\n\n\n self.writer.to_tag(self.rec)\n\n\n", "file_path": "python/test/test_writer.py", "rank": 29, "score": 84058.03088491215 }, { "content": " # :call-seq:\n\n # new(rb_file, layout, mapper) -> new_Reader\n\n #\n\n # ==== Arguments\n\n #\n\n # * +rb_file+ - recrd-based file\n\n # * +layout+ - Layout object\n\n # * +mapper+ - a func or lambda returning the record ID from the line read\n\n def initialize(rb_file, layout, mapper)\n\n # check arguments\n\n raise ArgumentError, \"input file #{rb_file} is not found!\" unless File.exist?(rb_file)\n\n\n\n # save args\n\n @rb_file = rb_file\n\n @layout = layout\n\n @mapper = mapper\n\n\n\n end\n\n\n", "file_path": "ruby/lib/reader.rb", "rank": 30, "score": 80929.68331528478 }, { "content": " class Reader\n\n {\n\n private:\n\n ReaderData _rdata;\n\n\n\n public:\n\n\n\n Reader(const string& rb_file, Layout& layout, function <string (string)> mapper): \n\n _rdata{rb_file, layout, mapper} 
{}\n\n\n\n Reader() = delete;\n\n Reader(const Reader& other) = delete;\n\n Reader& operator=(const Reader& other) = delete;\n\n\n\n\n\n // to loop through records within a rb-file\n\n ReaderIterator begin();\n\n ReaderIterator end();\n\n };\n\n\n\n}\n\n\n\n#endif\n", "file_path": "cpp/include/reader.h", "rank": 31, "score": 80040.9576816639 }, { "content": " class Layout\n\n {\n\n private:\n\n string _xml_file; // xml file name for underlying layout\n\n map<string, RecordPtr> _record_map; // hold records as a map with key = record name\n\n\n\n public:\n\n /*!\n\n * @brief Layout deleted constructor\n\n */\n\n Layout() = delete;\n\n Layout(const Layout& other) = delete;\n\n Layout& operator=(const Layout& other) = delete;\n\n\n\n /*!\n\n * @brief Layout constructor\n\n * @param[in] xml_file xml layout file name\n\n * @param[in] initial_record_size pre_allocate every record in the layout with\n\n * this parameter\n\n */\n", "file_path": "cpp/include/layout.h", "rank": 32, "score": 79869.29723868199 }, { "content": "class Reader(object):\n\n \"\"\"\n\n create a representation of a record-based file\n\n\n\n :param str rb_file: file name to read (should match the format described by the **Layout** object)\n\n :param Layout layout: Layout object\n\n :param lambda mapper: function which identify the record key in the record list from the current line\n\n\n\n ::\n\n\n\n >>> from rbf.layout import Layout\n\n >>> from rbf.reader import Reader\n\n >>> layout = Layout(\"world_data.xml\")\n\n >>> reader = Reader(\"world_data.txt\", layout, lambda x: x[0:4])\n\n\n\n \"\"\"\n\n\n\n def __init__(self, rb_file: str, layout: Layout, mapper):\n\n # verify arguments\n\n if not os.path.isfile(rb_file):\n\n raise ValueError(\"Input file {0} not found!!\".format(rb_file))\n\n\n\n # save members\n\n self._layout = layout\n\n self._mapper = mapper\n\n self._rb_file = rb_file\n\n\n\n def _map_record_from_line(self, line: str) -> str:\n\n \"\"\"\n\n\n\n return a record object from the line read from file\n\n\n\n :param str line: string read from file\n\n\n\n \"\"\"\n\n # try to discover record name from line read\n\n record_name = self._mapper(line)\n\n\n\n # but we don't find it! stop or just skip the line?\n\n if record_name not in self._layout:\n\n return None\n\n\n\n # record is found in read line\n\n return self._layout[record_name]\n\n\n\n def __iter__(self):\n\n \"\"\"\n\n iterator to read each line of the record-based file and return the matching Record object\n\n\n\n ::\n\n\n\n # then loop\n\n for rec in reader:\n\n print(rec)\n\n\n\n\n\n \"\"\"\n\n # read file line by line\n\n with open(self._rb_file, 'r') as fh:\n\n for line in fh:\n\n # strip out \\n\n\n line = line.rstrip(\"\\n\")\n\n\n\n # get record if any\n\n rec = self._map_record_from_line(line)\n\n\n\n # rec not found: just loop\n\n if not rec:\n\n continue\n\n\n\n # set line value\n\n rec.value = line\n\n\n\n # return record to our \"for\" loop\n\n yield rec\n\n\n\n def by_record_name(self, recname_list: list):\n\n \"\"\" \n\n iterator to only fetch records which name is matching the **recname_list**\n\n \n\n \"\"\"\n\n for rec in self:\n\n if rec.name in recname_list: \n", "file_path": "python/rbf/reader.py", "rank": 33, "score": 79308.49605208483 }, { "content": "class Field(Element):\n\n \"\"\"Define a data field with its name, description, type and length\n\n\n\n This class should be used with its companion class **Record**. 
If a record can\n\n be mapped to a line of text within a file, then a field is a substring from\n\n that line, with a fixed length.\n\n\n\n Each field is holding the substring in the **value** and **raw_value** properties.\n\n\n\n :param str name: name of the field\n\n :param str description: description of the field\n\n :param FieldType fieldtype: format of the field (type of data found in the field)\n\n :param int length: number of bytes of the field\n\n\n\n ::\n\n\n\n >>> from rbf.fieldtype import FieldType\n\n >>> from rbf.field import Field\n\n >>> f = Field(\"FIELD1\", \"This is field #1\", FieldType(\"A/N\",\"string\"), 5)\n\n >>> f\n\n name=<FIELD1> description=<This is field #1> length=<5> data_type_representation=<A/N> data_type_description=<string> type=<BaseType.string> value=<> raw_value=<> offset=<0> index=<0> bounds=<0:0>\n\n >>> f.name\n\n 'FIELD1'\n\n >>> f.description\n\n 'This is field #1'\n\n >>> f.length\n\n 5\n\n >>> f.type\n\n data_type_representation=<A/N> data_type_description=<string> type=<BaseType.string>\n\n >>> f.type.type\n\n <BaseType.string: 'string'>\n\n >>> f.offset\n\n 0\n\n >>> f.index\n\n 0\n\n >>> f.lower_bound\n\n 0\n\n >>> f.upper_bound\n\n 0\n\n >>> f.value=\" 45 \"\n\n >>> f.value\n\n '45'\n\n >>> f.raw_value\n\n ' 45 '\n\n \"\"\"\n\n def __init__(self, name: str, description: str, field_type: FieldType, length: int):\n\n # call parent class ctor\n\n super(self.__class__, self).__init__(name, description, length)\n\n\n\n # boilerplate code\n\n self.ftype = field_type\n\n\n\n # raw value is not stripped\n\n self.raw_value = self.str_value = \"\"\n\n\n\n # those attributes will be set when adding field into a record\n\n self.offset = self.index = self.lower_bound = self.upper_bound = 0\n\n\n\n \"\"\"\n\n @classmethod\n\n def from_xml_node(cls, xml_node):\n\n second constructor to build a field from an XML-string like <field name=\"FIELD\" description=\"Field desc\" length=\"15\" type=\"A\"/>\n\n\n\n :param str xml_node: XML-node object describing a field type\n\n\n\n fname = xml_node.attributes['name'].value\n\n fdesc = xml_node.attributes['description'].value\n\n ftype = ftypes[xml_node.attributes['type'].value]\n\n flength = int(xml_node.attributes['length'].value)\n\n\n\n # create class instance\n\n f = cls(fname, fdesc, ftype, flength)\n\n\n\n return f\n\n \"\"\"\n\n\n\n @property\n\n def value(self):\n\n \"\"\"\n\n * the stripped value as the field\n\n * property (r/w) -> str\n\n \"\"\"\n\n return self.str_value\n\n\n\n @value.setter\n\n def value(self, s):\n\n \"\"\" set value for this field \"\"\"\n\n # copy value as is\n\n self.raw_value = s\n\n\n\n # strip out blanks from string\n\n self.str_value = s.strip()\n\n\n\n def convert(self):\n\n \"\"\"\n\n convert value attribute stored as string to a converted scalar value according to its type\n\n \"\"\"\n\n return self.ftype.convert(self.value)\n\n\n\n def initialize(self):\n\n \"\"\"\n\n initialize field value to its initial value as set by field type\n\n\n\n \"\"\"\n\n self.reset(self.ftype.init)\n\n\n\n def reset(self, new_value):\n\n \"\"\"\n\n format the **new_ value** argument as a string according to the field type format\n\n\n\n :param str new_value: value to format\n\n\n\n \"\"\"\n\n fmt = self.ftype.format.replace('*', str(self.length))\n\n self.value = fmt % new_value\n\n\n\n def __eq__(self, other) -> bool:\n\n \"\"\" Field equality \"\"\"\n\n return super(Field, self).__eq__(other) and self.ftype == other.ftype\n\n\n\n def __repr__(self) -> str:\n\n #return \"{0} {1} value=<{2}> 
raw_value=<{3}> offset=<{4}> index=<{5}> bounds=<{6}:{7}>\".format(super(self.__class__, self).__repr__(), \\\n\n # self.ftype, self.value, self.raw_value, self.offset, self.index, self.lower_bound, self.upper_bound)\n\n return \"{0} {1.ftype} value=<{1.value}> raw_value=<{1.raw_value}> offset=<{1.offset}> index=<{1.index}> bounds=<{1.lower_bound}:{1.upper_bound}>\" \\\n", "file_path": "python/rbf/field.py", "rank": 34, "score": 79187.8054297074 }, { "content": "class Layout(Element):\n\n \"\"\"\n\n Define a structure of a record-based file by reading\n\n the XML file description given as argument and load its description into a dictionary.\n\n This class is merely a dictionary of Record objects\n\n\n\n :param str xml_file: name of the file describing record file structure\n\n :raises ValueError:\n\n * if **xml_file** is not accessible\n\n\n\n ::\n\n\n\n >>> from rbf.layout import Layout\n\n >>> layout = Layout(\"world_data.xml\")\n\n >>> layout.name\n\n 'world_data.xml'\n\n >>> layout.description\n\n 'Continents, countries, cities'\n\n >>> len(layout[\"CONT\"])\n\n 88\n\n >>> len(layout[\"COUN\"])\n\n 74\n\n >>> \"FOO\" in layout\n\n False\n\n >>> \"CONT\" in layout\n\n True\n\n\n\n \"\"\"\n\n def __init__(self, xml_file: str):\n\n # check file if accessible\n\n if not os.path.isfile(xml_file):\n\n raise ValueError(\"XML description file {0} not found!!\".format(xml_file))\n\n\n\n # call parent ctor\n\n super(self.__class__, self).__init__(xml_file, \"\", 0)\n\n\n\n # init record dict\n\n self._record = {}\n\n\n\n # init field type dict\n\n ftypes = {}\n\n\n\n # parse document\n\n doc = minidom.parse(xml_file)\n\n\n\n # get <meta> attributes\n\n meta = doc.getElementsByTagName(\"meta\")[0]\n\n\n\n # automatically add meta tags\n\n self._add_meta_tags(meta)\n\n\n\n # build field types dict\n\n for node in doc.getElementsByTagName(\"fieldtype\"):\n\n # create Field object and save it into dict\n\n ft = FieldType.from_xml_node(node)\n\n ftypes[ft.name] = ft\n\n\n\n # loop on all records\n\n for rec in doc.getElementsByTagName(\"record\"):\n\n # create first rec object\n\n recname = rec.attributes['name'].value\n\n recdesc = rec.attributes['description'].value\n\n\n\n self._record[recname] = Record(recname, recdesc)\n\n\n\n # now loop on fields and append field to record\n\n for node in rec.childNodes:\n\n if node.nodeType == node.ELEMENT_NODE and node.nodeName == \"field\":\n\n fname = node.attributes['name'].value\n\n fdesc = node.attributes['description'].value\n\n ftype = ftypes[node.attributes['type'].value]\n\n flength = int(node.attributes['length'].value)\n\n\n\n # add field to record\n\n self._record[recname].append(Field(fname, fdesc, ftype, flength))\n\n\n\n def _add_meta_tags(self, meta):\n\n \"\"\" useful helper to add meta tags automatically\n\n\n\n :param meta: meta tag object when reading the <meta> XML tag from layout file\n\n \"\"\"\n\n for tag_name in meta.attributes.keys():\n\n setattr(self, meta.attributes[tag_name].name, meta.attributes[tag_name].value)\n\n\n\n def __getitem__(self, key: str) -> Record:\n\n \"\"\" return the corresponding record\n\n\n\n :param key: name of the record to fetch\n\n :return: the matching record object\n\n :raises ValueError:\n\n * if **key** is not found\n\n \"\"\"\n\n if key not in self._record:\n\n raise ValueError(\"key {0} not found in record {1}!!\".format(key, self.name))\n\n return self._record[key]\n\n\n\n def __iter__(self):\n\n \"\"\" loop iterator \"\"\"\n\n for k in sorted(self._record):\n\n yield self._record[k]\n\n\n\n def 
__contains__(self, key: str) -> bool:\n\n \"\"\"\n\n :param key: name of the record\n\n :return: true is record name is found in record dict\n\n \"\"\"\n\n return key in self._record\n\n\n\n def __str__(self) -> str:\n\n # as layout derives from Element, first call the super printer\n\n return str(self.__dict__)\n\n\n\n def records(self) -> dict:\n\n \"\"\" return record dictionary \"\"\"\n\n return self._record\n\n\n\n def keep(self, recnames: list):\n\n \"\"\" keep only records in the given list\n\n\n\n :param list recnames: list of record names to keep in layout\n\n\n\n \"\"\"\n\n self._record = { k:v for k,v in self._record.items() if k in recnames }\n\n\n\n def delete(self, recnames: list):\n\n \"\"\" delete only records in the given list\n\n\n\n :param list recnames: list of record names to delete from layout\n\n\n\n \"\"\"\n\n self._record = { k:v for k,v in self._record.items() if k not in recnames }\n\n\n\n def prune(self, fnames: list):\n\n \"\"\" delete from all records the field names in flist\n\n \n\n :param list flist: list of comma separated field names to delete from all records\n\n \n\n \"\"\"\n\n for rec in self: \n\n rec.delete(fnames)\n\n\n\n def simplify(self, data):\n\n \"\"\" delete record and fields not matching the given list\n\n\n\n :param str data: list of records and fields, like this:\n\n [\"RECORD1:F2,F3,F4\", \"RECORD2:D5,D6\"]\n\n\n\n \"\"\"\n\n rec_list = []\n\n\n\n for s in data:\n\n # extract record name and list of fields\n\n (recname, fields) = s.split(':')\n\n field_list = [e.strip() for e in fields.split(',')]\n\n\n\n # build the list of record to keep to only keep them\n\n rec_list.append(recname.strip())\n\n\n\n # for each field, only keep those we want\n\n rec = self[rec_list[-1]]\n\n rec.keep(field_list)\n\n\n\n # now delete unwanted record\n", "file_path": "python/rbf/layout.py", "rank": 35, "score": 79144.9422530348 }, { "content": " # build a new field\n\n # :call-seq:\n\n # new(name, description, type, length) -> new_Field\n\n def initialize(name, description, type, length)\n\n super(name, description, length)\n\n\n\n @type = type\n\n end\n\n\n\n\n", "file_path": "ruby/lib/field.rb", "rank": 36, "score": 78956.48252871653 }, { "content": "# Use this class to read a record-based file and loop through each record.\n\n#\n\n# ==== Example\n\n#\n\n# # import our lib (should be in your RUBYLIB env variable)\n\n# require 'rbf'\n\n#\n\n# # open rbf file and convert it to a CSV (;) file\n\n# layout = Layout.new(\"world_data.xml\")\n\n# reader = Reader.new(\"world_data.txt\", layout, lambda {|x| x[0..3]})\n\n#\n\n# # convert record to CSV (;-separated)\n\n# reader.each {|rec| puts rec.array_of(:@value).join(\";\") }\n\n# \n\nclass Reader\n\n\n", "file_path": "ruby/lib/reader.rb", "rank": 37, "score": 78280.54400586811 }, { "content": " def setUp(self):\n\n self.ft = FieldType(\"A/N\", \"string\")\n\n self.rec = Record(\"RECORD1\", \"Description of record 1\")\n\n self.rec.append(Field(\"FIELD1\", \"Description of field 1\", self.ft, 10))\n\n self.rec.append(Field(\"FIELD2\", \"Description of field 2\", self.ft, 5))\n\n self.rec.append(Field(\"FIELD2\", \"Description of field 2\", self.ft, 5))\n\n self.rec.append(Field(\"FIELD3\", \"Description of field 3\", self.ft, 10))\n\n\n", "file_path": "python/test/test_record.py", "rank": 38, "score": 77653.36844703014 }, { "content": " def setUp(self):\n\n self.assertRaises(ValueError, FieldType, \"A/N\", \"STR\")\n", "file_path": "python/test/test_fieldtype.py", "rank": 39, "score": 77653.36844703014 }, { 
"content": " def setUp(self):\n", "file_path": "python/test/test_basetype.py", "rank": 40, "score": 77653.36844703014 }, { "content": " def test_loop1(self):\n\n import itertools\n\n top5 = list(itertools.islice(self.reader, 5))\n\n rec = top5[4]\n", "file_path": "python/test/test_reader.py", "rank": 41, "score": 77222.81039574459 }, { "content": " def test_loop2(self):\n\n self.l = list(self.reader)\n", "file_path": "python/test/test_reader.py", "rank": 42, "score": 77222.81039574459 }, { "content": " def test_init(self):\n\n self.f1.initialize()\n\n self.assertEqual(self.f1.raw_value, \" \"*self.f1.length)\n\n\n\n self.f2.initialize()\n\n self.assertEqual(self.f2.raw_value, \"0\"*self.f2.length)\n\n\n\n self.f3.initialize()\n", "file_path": "python/test/test_field.py", "rank": 43, "score": 77141.0543545639 }, { "content": " def test_reset(self):\n\n setattr(self.ft1, \"format\", \"%*.*s\")\n\n self.f1.reset(\"AAA\")\n\n self.assertEqual(self.f1.raw_value, \" \"*7+\"AAA\")\n\n\n\n setattr(self.ft1, \"format\", \"%-*.*s\")\n\n self.f1.reset(\"AAA\")\n\n self.assertEqual(self.f1.raw_value, \"AAA\"+\" \"*7)\n\n\n\n setattr(self.ft2, \"format\", \"%0*d\")\n\n self.f2.reset(314)\n\n self.assertEqual(self.f2.raw_value, \"0000000314\")\n\n\n\n setattr(self.ft2, \"format\", \"%*d\")\n\n self.f2.reset(314)\n\n self.assertEqual(self.f2.raw_value, \" 314\")\n\n\n\n setattr(self.ft3, \"format\", \"%0*.2f\")\n\n self.f3.reset(3.14)\n\n self.assertEqual(self.f3.raw_value, \"0000003.14\")\n\n\n\n setattr(self.ft3, \"format\", \"%*.2f\")\n\n self.f3.reset(3.14)\n", "file_path": "python/test/test_field.py", "rank": 44, "score": 77141.0543545639 }, { "content": " def test_convert(self):\n", "file_path": "python/test/test_field.py", "rank": 45, "score": 77141.0543545639 }, { "content": " def test_properties(self):\n\n self.assertEqual(self.f1.name, \"FIELD1\")\n\n self.assertEqual(self.f1.description, \"Alpha field\")\n\n self.assertEqual(self.f1.ftype.name, \"AN\")\n", "file_path": "python/test/test_field.py", "rank": 46, "score": 77141.0543545639 }, { "content": " def test_equality(self):\n", "file_path": "python/test/test_field.py", "rank": 47, "score": 77141.0543545639 }, { "content": " def test_bad_conv(self):\n", "file_path": "python/test/test_reader.py", "rank": 48, "score": 74952.57977653151 }, { "content": "class TestBaseType(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.assertRaises(ValueError, BaseType, \"foo\")\n\n\n\n def test_0(self):\n\n self.ft = BaseType(\"string\")\n\n self.assertEqual(self.ft.basic_type[\"base\"], str)\n\n\n\n def test_1(self):\n\n self.ft = BaseType(\"string\")\n\n self.assertEqual(self.ft.convert(\"3.14\"), \"3.14\")\n\n\n\n self.ft = BaseType(\"decimal\")\n\n self.assertEqual(self.ft.convert(\"3.14\"), 3.14)\n\n\n\n self.ft = BaseType(\"integer\")\n\n self.assertEqual(self.ft.convert(\"314\"), 314)\n\n\n\n self.ft = BaseType(\"date\")\n\n self.ft.convert(\"XXXX0226\", \"%Y%m%d\")\n\n self.assertEqual(self.ft.convert(\"20160226\", \"%Y%m%d\"), datetime(2016, 2, 26))\n\n\n\n self.ft = BaseType(\"time\")\n\n self.assertEqual(self.ft.convert(\"121013\", \"%H%M%S\"), datetime(1900,1,1,12,10,13))\n\n \n\n def test_2(self):\n\n self.ft = BaseType(\"string\")\n", "file_path": "python/test/test_basetype.py", "rank": 49, "score": 74942.85774147556 }, { "content": " def test_bad_cons(self):\n\n self.assertRaises(ValueError, Field, \"\", \"Alpha field 1\", self.ft1, 10)\n", "file_path": "python/test/test_field.py", "rank": 50, "score": 74874.13473131687 }, { "content": " 
def test_set_value(self):\n\n self.f1.value = \" XXX\"\n\n self.assertEqual(self.f1.value, \"XXX\")\n", "file_path": "python/test/test_field.py", "rank": 51, "score": 74874.13473131687 }, { "content": "class TestRbf < Test::Unit::TestCase\n\n\n", "file_path": "ruby/test/test.rb", "rank": 52, "score": 74783.83213806276 }, { "content": "// Simple exemple of rbf usage. Just counts the number of records and record IDs in the file.\n\n// Usage:count_recs layout_file data_file\n\nuse std::collections::HashMap;\n\nuse std::env;\n\n\n\n#[macro_use]\n\nextern crate rbf;\n\nuse rbf::layout::Layout;\n\nuse rbf::reader::Reader;\n\nuse rbf::record::AsciiMode;\n\n\n", "file_path": "rust/examples/count_recs.rs", "rank": 53, "score": 74555.95592076365 }, { "content": " nb_lines += 1;\n\n\n\n // if key doesn't exists, set to 1\n\n if nb_records.contains_key(&rec.name) {\n\n *nb_records.get_mut(&rec.name).unwrap() += 1;\n\n } else {\n\n nb_records.insert(rec.name.clone(), 1);\n\n }\n\n }\n\n\n\n // print out results\n\n println!(\"Input file has {} lines\", nb_lines);\n\n\n\n for (recname, i) in nb_records {\n\n println!(\"Number of {} records = {} \", recname, i);\n\n }\n\n}\n", "file_path": "rust/examples/count_recs.rs", "rank": 54, "score": 74551.18303315228 }, { "content": "mod root;\n\n#[macro_use]\n\npub mod base;\n\nmod compare;\n\npub mod datetime;\n\npub mod decimal;\n\npub mod fieldtype;\n\npub mod int;\n\npub mod string;\n\npub mod uint;\n", "file_path": "rust/src/types/mod.rs", "rank": 55, "score": 74406.59356996571 }, { "content": " def test2(self):\n\n self.layout = Layout(\"world_data.xml\")\n\n self.layout.simplify([\"CONT:NAME,AREA\", \"COUN:POPULATION\"])\n", "file_path": "python/test/test_layout.py", "rank": 56, "score": 73041.10988581812 }, { "content": " def test1(self):\n\n self.layout.delete(['COUN'])\n", "file_path": "python/test/test_layout.py", "rank": 57, "score": 73041.10988581812 }, { "content": "# A field is the tiniest piece of information found in a record-based file\n\n#\n\n# ==== Example\n\n# \n\n# f = Field.new(\"FIELD1\", \"Description of field 1\", FieldType.new(\"A\", \"string\"), 10)\n\nclass Field < Element\n\n # offset (in bytes) from the beginning of the record in which field is found (starts from 0)\n\n attr_accessor :offset\n\n\n\n # index of the field within its record\n\n attr_accessor :index\n\n\n\n # lower offset of the field within a record (starting byte position of the field, starting with for the first field)\n\n attr_accessor :lower_bound\n\n\n\n # upper offset of the field (ending position of the field)\n\n attr_accessor :upper_bound\n\n\n\n # when a field name is duplicated, it comes in handy to refer to that field\n\n # with a unique name. 
This alternate name is built by just adding it's index\n\n # to the field name\n\n attr_accessor :alternate_name\n\n\n\n # field type as defined in the layout definition file\n\n attr_reader :type\n\n\n\n # when set, the blank stripped value of the field\n\n attr_reader :value\n\n\n\n # when set, the blank non-stripped value of the field (original value)\n\n attr_reader :raw_value\n\n\n", "file_path": "ruby/lib/field.rb", "rank": 58, "score": 72384.80135472972 }, { "content": "# This class is a representation of all records and classes composing a record-based file.\n\n# As a convention, the layout is defined is an XML file, with the following convention:\n\n#\n\n# * root tag is <rbfile>\n\n# * field type tag (<fieldtype> XML tag)\n\n# * record tag with the following attributes: name & description (<record> XML tag)\n\n# * field tag inside a record tag (<field> XML tag)\n\n#\n\n# Suppose you get the following layout file (world_data.xml) describing a record-based file\n\n# listing continent data:\n\n#\n\n# <?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n# <!-- inspired from https://en.wikipedia.org/wiki/List_of_continents_by_GDP_%28nominal%29 -->\n\n# <!-- and http://www.nationsonline.org/oneworld/asia.htm -->\n\n# <rbfile\n\n# xmlns=\"http://www.w3schools.com\"\n\n# xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n\n# xsi:schemaLocation=\"http://www.w3schools.com rbf.xsd\"\n\n# >\n\n#\n\n# <meta version=\"1.0\" description=\"Continents, countries, cities\" ignoreLine=\"^#\" skipField=\"ID\" mapper=\"type:1 map:0..4\"/>\n\n#\n\n# <fieldtype name=\"CHAR\" type=\"string\" pattern=\"\\w+\" format=\"\"/>\n\n# <fieldtype name=\"NUM\" type=\"decimal\"/>\n\n# <fieldtype name=\"INT\" type=\"integer\"/>\n\n#\n\n# <record name=\"CONT\" description=\"Continent data\">\n\n# <field name=\"ID\" description=\"Record ID\" length=\"4\" type=\"CHAR\"/>\n\n# <field name=\"NAME\" description=\"Name of the continent\" length=\"15\" type=\"CHAR\"/>\n\n# <field name=\"AREA\" description=\"Area of the continent\" length=\"20\" type=\"NUM\"/>\n\n# <field name=\"POPULATION\" description=\"Population of the continent\" length=\"20\" type=\"NUM\"/>\n\n# <field name=\"DENSITY\" description=\"Density per km2\" length=\"9\" type=\"NUM\"/>\n\n# <field name=\"CITY\" description=\"Most populus city\" length=\"20\" type=\"CHAR\"/>\n\n# </record>\n\n#\n\n# <record name=\"COUN\" description=\"Country data\">\n\n# <field name=\"ID\" description=\"Record ID\" length=\"4\" type=\"CHAR\"/>\n\n# <field name=\"NAME\" description=\"Name of the country\" length=\"30\" type=\"CHAR\"/>\n\n# <field name=\"POPULATION\" description=\"Number of inhabitants\" length=\"20\" type=\"INT\"/>\n\n# <field name=\"CAPITAL\" description=\"Capital of the country\" length=\"20\" type=\"CHAR\"/>\n\n# </record>\n\n#\n\n# </rbfile>\n\n#\n\n# ==== Example\n\n#\n\n# # create a layout object from XML definition file layout.xml\n\n# layout = Layout.new(\"world_data.xml\")\n\n#\n\n# # get continent data record\n\n# cont = layout[\"CONT\"]\n\n# cont.size #=> 6\n\n# cont[0].type.type #=> :string\n\n#\n\n# \n\nclass Layout < Element\n\n include Enumerable\n\n\n\n # hash of records, key is record name, value is Record object\n\n attr_reader :rmap\n\n\n", "file_path": "ruby/lib/layout.rb", "rank": 59, "score": 72360.94330482474 }, { "content": "type Decimal = f64;\n\npub type DecimalType = RootType<Decimal>;\n\n\n\nimpl BaseType for DecimalType {\n\n // Returns name of the type\n\n get_name!(self, \"decimal\");\n\n\n\n /// Sets the format using the RootType 
method\n\n set_format!(self, fmt);\n\n\n\n /// Gets for format string.\n\n get_format!(self);\n\n\n\n fn eq(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<Decimal>::eq(lhs, rhs)\n\n }\n\n\n\n fn lt(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<Decimal>::lt(lhs, rhs)\n\n }\n\n\n\n fn gt(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<Decimal>::gt(lhs, rhs)\n\n }\n\n}\n", "file_path": "rust/src/types/decimal.rs", "rank": 60, "score": 70895.74183727152 }, { "content": "fn to_date(value: &str, fmt: &str) -> NaiveDate {\n\n let converted = match NaiveDate::parse_from_str(value, fmt) {\n\n Ok(v) => v,\n\n Err(e) => panic!(\n\n \"unable to convert string value {}, error={}\",\n\n value,\n\n e.description()\n\n ),\n\n };\n\n converted\n\n}\n\n\n", "file_path": "rust/src/types/datetime.rs", "rank": 61, "score": 70200.6515475722 }, { "content": "fn to_time(value: &str, fmt: &str) -> NaiveTime {\n\n let converted = match NaiveTime::parse_from_str(value, fmt) {\n\n Ok(v) => v,\n\n Err(e) => panic!(\n\n \"unable to convert string value {}, error={}\",\n\n value,\n\n e.description()\n\n ),\n\n };\n\n converted\n\n}\n\n\n\npub type DateType = RootType<NaiveDate>;\n\npub type TimeType = RootType<NaiveTime>;\n\n\n\nimpl BaseType for DateType {\n\n // Returns name of the type\n\n get_name!(self, \"date\");\n\n\n\n /// Sets the format using the RootType method\n", "file_path": "rust/src/types/datetime.rs", "rank": 62, "score": 70200.6515475722 }, { "content": "type SignedInteger = i64;\n\npub type SignedIntegerType = RootType<SignedInteger>;\n\n\n\nimpl BaseType for SignedIntegerType {\n\n // Returns name of the type\n\n get_name!(self, \"int\");\n\n\n\n /// Sets the format using the RootType method\n\n set_format!(self, fmt);\n\n\n\n /// Gets for format string.\n\n get_format!(self);\n\n\n\n fn eq(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<SignedInteger>::eq(lhs, rhs)\n\n }\n\n\n\n fn lt(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<SignedInteger>::lt(lhs, rhs)\n\n }\n\n\n\n fn gt(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<SignedInteger>::gt(lhs, rhs)\n\n }\n\n}\n", "file_path": "rust/src/types/int.rs", "rank": 63, "score": 69447.38799946565 }, { "content": "type UnsignedInteger = u64;\n\npub type UnsignedIntegerType = RootType<UnsignedInteger>;\n\n\n\nimpl BaseType for UnsignedIntegerType {\n\n // Returns name of the type\n\n get_name!(self, \"uint\");\n\n\n\n /// Sets the format using the RootType method\n\n set_format!(self, fmt);\n\n\n\n /// Gets for format string.\n\n get_format!(self);\n\n\n\n fn eq(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<UnsignedInteger>::eq(lhs, rhs)\n\n }\n\n\n\n fn lt(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<UnsignedInteger>::lt(lhs, rhs)\n\n }\n\n\n\n fn gt(&self, lhs: &str, rhs: &str) -> bool {\n\n Compare::<UnsignedInteger>::gt(lhs, rhs)\n\n }\n\n}\n", "file_path": "rust/src/types/uint.rs", "rank": 64, "score": 69447.38799946565 }, { "content": " class Field : public DataElement\n\n {\n\n private:\n\n FieldType _field_type; // associated FieldType object\n\n\n\n string _str_value; // when set, store the value of the field (blank stripped)\n\n string _raw_value; // when set, store the raw value of the field\n\n\n\n unsigned int _index {0}; // index of the field within a record\n\n unsigned int _offset {0}; // offset of the field among its brothers\n\n unsigned int _lower_bound {0}; // lower & upper bounds when field is added to a record\n\n unsigned int _upper_bound {0};\n\n\n\n public:\n\n /*!\n\n * @brief 
Field default constructor\n\n * @details Create a new **Field** object with empty name, description, null length and VOID **type**\n\n */\n\n //Field(): DataElement(), _field_type(FieldType()) { cout << \"Field default ctor called!\" << endl; cout.flush(); }\n\n Field() = default;\n", "file_path": "cpp/include/field.h", "rank": 65, "score": 67732.33478251935 }, { "content": " def match(self, value):\n\n \"\"\"\n\n Test if a value matches the base type pattern\n\n\n\n :param str value: value to test\n\n\n\n \"\"\"\n", "file_path": "python/rbf/basetype.py", "rank": 66, "score": 67355.12853832814 }, { "content": " def count(self) -> int:\n\n \"\"\" return the number of fields in the record \"\"\"\n", "file_path": "python/rbf/record.py", "rank": 67, "score": 67306.40372324342 }, { "content": "# This class defines a field type. The list of possible field types are:\n\n# :string, :decimal, :integer, :date, :time, :overpunch\n\n#\n\n# ==== Example\n\n# \n\n# ft = FieldType.new(\"A/N\", \"string\")\n\n# ft.name #=> \"A/N\"\n\n# ft.description #=> \"string\"\n\n# ft.length #=> 0\n\n# ft.type #=> :string\n\nclass FieldType < Element\n\n # the field type as a symbol\n\n attr_reader :type\n\n\n\n # list of possible values for a field type\n\n @@admissible_types = [:string, :decimal, :integer, :date, :time, :overpunch]\n\n\n", "file_path": "ruby/lib/fieldtype.rb", "rank": 68, "score": 66987.7827695445 }, { "content": "fn as_hash(attributes: &Vec<OwnedAttribute>) -> HashMap<&str, &str> {\n\n // loop through attributes to create a hash. Not present in xml_rs (?!)\n\n let mut h: HashMap<&str, &str> = HashMap::new();\n\n\n\n for own_attr in attributes {\n\n h.insert(&own_attr.name.local_name, &own_attr.value);\n\n }\n\n h\n\n}\n\n\n\nimpl<T> Layout<T> {\n\n /// Reads the XML layout file to create record and field structs.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `xml_file` - full file name and path of the XML layout file\n\n ///\n\n ///\n\n /// # Panics\n\n /// If `xml_file` could not be read \n", "file_path": "rust/src/layout.rs", "rank": 69, "score": 66781.77077357359 }, { "content": " # Returns the number of fields in the record\n\n # :call-seq:\n\n # count -> fixnum\n\n def count\n\n @flist.count\n\n end\n\n\n\n # Field access with either a string (field name) or an integer (field index). 
If a field name is passed,\n\n # as fields could have the same name, an array of Field objects is returned.\n\n #\n\n # :call-seq:\n\n # [](key) -> Field or ary of Field objects\n\n #\n\n # ==== Arguments\n\n #\n\n # * +key+ - return the field object matching field name (if key is a string), or its index in the record\n\n # if key is an integer\n\n #\n\n # === Raise\n\n #\n\n # +ArgumentError+ if:\n\n # * key (string) is not found\n\n # * key (integer) is out of bounds\n", "file_path": "ruby/lib/record.rb", "rank": 70, "score": 66777.83667119166 }, { "content": "static lua_State *L;\n", "file_path": "rust/src/obsolete/lua/luacall.c", "rank": 71, "score": 65925.3328511209 }, { "content": "#[allow(unused_variables)]\n\nfn main() {\n\n // get arguments\n\n let args: Vec<String> = env::args().collect();\n\n\n\n // get arguments\n\n if args.len() == 1 {\n\n println!(\"Usage: {} layout_file data_file\", args[0]);\n\n std::process::exit(1);\n\n }\n\n\n\n // tick time\n\n let now = Instant::now();\n\n\n\n // load layout (suppose only ascii data)\n\n let layout = error_check!(Layout::<AsciiMode>::new(&args[1]));\n\n\n\n // create reader\n\n let mut reader = Reader::new(&args[2], layout);\n\n\n\n // loop through records\n", "file_path": "rust/examples/just_read.rs", "rank": 72, "score": 64478.4582699903 }, { "content": "fn main() {\n\n // get arguments\n\n let args: Vec<String> = env::args().collect();\n\n\n\n // get arguments\n\n if args.len() == 1 {\n\n println!(\"Usage: {} layout_file data_file\", args[0]);\n\n std::process::exit(1);\n\n }\n\n\n\n // load layout (suppose only ascii data)\n\n let layout = error_check!(Layout::<AsciiMode>::new(&args[1]));\n\n\n\n // create reader\n\n let mut reader = Reader::new(&args[2], layout);\n\n\n\n // loop through records\n\n while let Some((_stats, rec)) = reader.next() {\n\n println!(\"{}\", rec);\n\n }\n\n}\n", "file_path": "rust/examples/just_display.rs", "rank": 73, "score": 64478.4582699903 }, { "content": "fn main() {\n\n let lua_dir = format!(\"{}/src/lua\", env::current_dir().unwrap().display());\n\n\n\n // compile luacall library: OS dependant\n\n if cfg!(target_os = \"linux\") {\n\n // makefile is using a special env variable\n\n Command::new(\"make\")\n\n .env(\"LUA_DIR\", lua_dir)\n\n .args(&[\"-f\", \"src/lua/luacall_linux.mak\"])\n\n .status()\n\n .expect(\"failed to make!\");\n\n }\n\n \n\n\n\n println!(r\"cargo:rustc-link-lib=static=lua\");\n\n println!(r\"cargo:rustc-link-lib=static=luacall\"); \n\n println!(r\"cargo:rustc-link-search=/home/m330421/projects/rbfrust/src/lua\");\n\n println!(r\"cargo:rustc-link-search=/usr/local/lib\"); \n\n}", "file_path": "rust/src/obsolete/build.rs", "rank": 74, "score": 64478.4582699903 }, { "content": " class FieldType : public DataElement\n\n {\n\n DataType _data_type; ///< field type converted to enum type\n\n\n\n public:\n\n /*!\n\n * @brief FieldType default constructor\n\n * @details Create a new **FieldType** object with empty description, representation and VOID **DataType**\n\n */\n\n FieldType(): DataElement(), _data_type(DataType::VOID) {}\n\n\n\n /*!\n\n * @brief FieldType class constructor\n\n * @param[in] name type nickname\n\n * @param[in] description type representation\n\n *\n\n * @code \n\n * auto ft1 = FieldType(\"NUM\", \"decimal\");\n\n * @endcode\n\n */\n", "file_path": "cpp/include/fieldtype.h", "rank": 75, "score": 63885.51200418569 }, { "content": "import sys\n\nimport unittest\n\n\n\nfrom rbf.fieldtype import FieldType\n\nfrom rbf.field import Field\n\nfrom rbf.record import 
Record\n\n\n\nclass TestRecord(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.ft = FieldType(\"A/N\", \"string\")\n\n self.rec = Record(\"RECORD1\", \"Description of record 1\")\n\n self.rec.append(Field(\"FIELD1\", \"Description of field 1\", self.ft, 10))\n\n self.rec.append(Field(\"FIELD2\", \"Description of field 2\", self.ft, 5))\n\n self.rec.append(Field(\"FIELD2\", \"Description of field 2\", self.ft, 5))\n\n self.rec.append(Field(\"FIELD3\", \"Description of field 3\", self.ft, 10))\n\n\n\n self.line = \"A\"*10 + \"B\"*5 + \"C\"*5 + \"D\"*10\n\n\n\n def test_0(self):\n\n self.assertRaises(ValueError, Record, \"\", \"Description of record 1\")\n\n\n\n with self.assertRaises(ValueError):\n\n f = self.rec[\"FOO\"]\n\n\n\n def test_1(self):\n\n self.assertEqual(self.rec.name, \"RECORD1\")\n\n self.assertEqual(self.rec.description, \"Description of record 1\")\n\n self.assertEqual(self.rec.length, 30)\n\n self.assertEqual(len(self.rec), 30)\n\n self.assertEqual(self.rec.count(), 4)\n\n\n\n def test_2(self):\n\n self.f1 = self.rec[0]\n\n self.f2 = self.rec[1]\n\n self.f3 = self.rec[2]\n\n self.f4 = self.rec[3]\n\n\n\n self.assertEqual(self.f1.name, \"FIELD1\")\n\n self.assertEqual(self.f2.name, \"FIELD2\")\n\n self.assertEqual(self.f3.name, \"FIELD2\")\n\n self.assertEqual(self.f4.name, \"FIELD3\")\n\n\n\n self.assertEqual(self.f1.index, 0)\n\n self.assertEqual(self.f2.index, 1)\n\n self.assertEqual(self.f3.index, 2)\n\n self.assertEqual(self.f4.index, 3)\n\n\n\n self.assertEqual(self.f1.offset, 0)\n\n self.assertEqual(self.f2.offset, 10)\n\n self.assertEqual(self.f3.offset, 15)\n\n self.assertEqual(self.f4.offset, 20)\n\n\n\n def test_3(self):\n\n f = self.rec[\"FIELD1\"]\n\n self.assertEqual(len(f), 1)\n\n\n\n f = self.rec[2:4]\n\n self.assertEqual(f[1].name, \"FIELD3\")\n\n\n\n def test_4(self):\n\n self.rec.value = self.line\n\n self.assertEqual(self.rec.value, self.line)\n\n\n\n self.assertEqual(self.rec.FIELD1, \"A\"*10)\n\n self.assertEqual(self.rec.FIELD3, \"D\"*10)\n\n\n\n def test_5(self):\n\n self.assertEqual(self.rec.array_of('name'), [\"FIELD1\", \"FIELD2\", \"FIELD2\", \"FIELD3\"])\n\n\n\n def test_6(self):\n\n self.f1 = self.rec[0]\n\n self.f2 = self.rec[1]\n\n self.f3 = self.rec[2]\n\n self.f4 = self.rec[3]\n\n\n\n it = iter(self.rec)\n\n\n\n self.assertEqual(next(it), self.f1)\n\n self.assertEqual(next(it), self.f2)\n\n self.assertEqual(next(it), self.f3)\n\n self.assertEqual(next(it), self.f4)\n\n\n\n def test_7(self):\n\n self.rec.value = self.line\n\n self.assertEqual(self.rec.array_of('name'), ['FIELD1', 'FIELD2', 'FIELD2', 'FIELD3'])\n\n\n\n def test_8(self):\n\n self.rec.value = self.line\n\n self.assertEqual(self.rec.as_dict(), {'FIELD1': 'AAAAAAAAAA', 'FIELD3': 'DDDDDDDDDD', 'FIELD2': ['BBBBB', 'CCCCC']})\n\n\n\n def test_9(self):\n\n self.rec.delete(['FIELD2', 'FIELD1'])\n\n self.assertEqual('FIELD1' in self.rec, False)\n\n self.assertEqual('FIELD2' in self.rec, False)\n\n self.assertEqual('FIELD3' in self.rec, True)\n\n\n\n def test_10(self):\n\n self.rec.keep(['FIELD2'])\n\n self.assertEqual('FIELD1' in self.rec, False)\n\n self.assertEqual('FIELD2' in self.rec, True)\n\n self.assertEqual('FIELD3' in self.rec, False)\n\n\n\n def test_11(self):\n\n self.rec = Record(\"RECORD1\", \"Description of record 1\")\n\n\n\n # read data from external file\n\n for line in open(\"./record.data\"):\n\n a = [s.strip() for s in line.split(';')]\n\n self.rec.append(Field(a[0], a[1], self.ft, int(a[2])))\n\n\n\n self.assertEqual(len(self.rec), 400)\n\n 
self.assertEqual(self.rec.count(), 94)\n\n\n\n self.assertEqual(len(self.rec[\"COUN\"]), 4)\n\n self.assertEqual(self.rec[28].name, \"COUN\")\n\n \n\n self.assertEqual(\"COUN\" in self.rec, True)\n\n self.assertEqual(\"XXXX\" not in self.rec, True)\n\n\n\n self.assertEqual(\"COUN\" in self.rec.array_of('name'), True)\n\n\n\n l = self.rec.array_of('length')\n\n self.assertEqual(sum(l), 400)\n\n\n\n self.rec.delete([\"COUN\",\"RCID\"])\n\n self.assertEqual(\"COUN\" not in self.rec, True)\n\n with self.assertRaises(ValueError):\n\n f = self.rec[\"COUN\"]\n\n\n\n self.rec.keep([\"CABI\",\"STPO\",\"ORAC\"])\n\n self.assertEqual(\"CABI\" in self.rec, True)\n\n self.assertEqual(len(self.rec.array_of('name')), 4*3)\n\n\n\nif __name__ == '__main__':\n\n unittest.main()\n\n\n", "file_path": "python/test/test_record.py", "rank": 76, "score": 61723.73836997368 }, { "content": "import unittest\n\nfrom rbf.element import Element\n\n\n\nclass TestElement(unittest.TestCase):\n\n\n\n def test(self):\n\n self.e1 = Element(\"ELEMENT1\", \"Alpha element 1\", 10)\n\n self.e2 = Element(\"ELEMENT1\", \"Alpha element 1\", 10)\n\n\n\n self.assertRaises(ValueError, Element, \"\", \"Alpha element 1\", 10)\n\n self.assertRaises(ValueError, Element, \"ELEMENT1\", \"Alpha element 1\", -1)\n\n\n\n self.assertEqual(self.e1.name, \"ELEMENT1\")\n\n self.assertEqual(self.e1.description, \"Alpha element 1\")\n\n self.assertEqual(self.e1.length, 10)\n\n\n\n self.assertEqual(self.e1, self.e2)\n\n\n\n self.e1.name = \"ELEMENT2\"\n\n self.e1.description = \"Alpha element 2\"\n\n self.e1.length = 20\n\n\n\n self.assertEqual(self.e1.name, \"ELEMENT2\")\n\n self.assertEqual(self.e1.description, \"Alpha element 2\")\n\n self.assertEqual(self.e1.length, 20)\n\n\n\n self.assertEqual(self.e1.__repr__(), \"name=<ELEMENT2> description=<Alpha element 2> length=<20>\")\n\n\n\nif __name__ == '__main__':\n\n unittest.main()\n\n\n", "file_path": "python/test/test_element.py", "rank": 77, "score": 61723.73836997368 }, { "content": "import unittest\n\n\n\nfrom rbf.fieldtype import FieldType\n\nfrom rbf.field import Field\n\nfrom rbf.record import Record\n\nfrom rbf.writer.writer import writer, WriterStyle\n\n\n\nclass TestElement(unittest.TestCase):\n\n\n\n def test(self):\n\n self.writer = writer(\"\", WriterStyle.text)\n\n\n\n self.ft = FieldType(\"A/N\", \"string\")\n\n self.rec = Record(\"RECORD1\", \"Description of record 1\")\n\n self.rec.append(Field(\"LONG_FIELD1\", \"Description of field 1\", self.ft, 10))\n\n self.rec.append(Field(\"LONG_FIELD2\", \"Description of field 2\", self.ft, 5))\n\n self.rec.append(Field(\"LONG_FIELD2\", \"Description of field 2\", self.ft, 5))\n\n self.rec.append(Field(\"LONG_FIELD3\", \"Description of field 3\", self.ft, 10))\n\n\n\n self.line = \"A\"*10 + \"B\"*5 + \"C\"*5 + \"D\"*10\n\n self.rec.value = self.line\n\n\n\n self.writer.to_tag(self.rec)\n\n\n\n self.assertEqual('RECORD1:LONG_FIELD1=\"AAAAAAAAAA\" LONG_FIELD2=\"BBBBB\" LONG_FIELD2=\"CCCCC\" LONG_FIELD3=\"DDDDDDDDDD\"\\n', str(self.writer)) \n\n\n\n\n\nif __name__ == '__main__':\n\n unittest.main()\n\n\n", "file_path": "python/test/test_writer.py", "rank": 78, "score": 61723.73836997368 }, { "content": "import unittest\n\nfrom xml.dom import minidom\n\nfrom datetime import datetime, date, time\n\nfrom rbf.fieldtype import BaseType\n\nfrom rbf.fieldtype import FieldType\n\n\n\nclass TestFieldType(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.assertRaises(ValueError, FieldType, \"A/N\", \"STR\")\n\n self.ft = FieldType(\"A/N\", 
\"string\")\n\n\n\n def test_0(self):\n\n self.assertEqual(self.ft.base, str)\n\n\n\n def test_xml_cons(self):\n\n self.doc = minidom.parseString('<fieldtype name=\"AN\" type=\"string\" pattern=\"[\\w/\\*\\.,\\-]+\" format=\"%-*.*s\"/>')\n\n self.xml_node = self.doc.childNodes[0]\n\n self.ft = FieldType.from_xml_node(self.xml_node)\n\n self.assertEqual(self.ft.pattern, \"[\\w/\\*\\.,\\-]+\")\n\n self.assertEqual(self.ft.format, \"%-*.*s\")\n\n\n\n def test_conversion(self):\n\n self.doc = minidom.parseString('<fieldtype name=\"D\" type=\"date\" pattern=\"[0-9]+\" date_format=\"%Y%m%d\"/>')\n\n self.xml_node = self.doc.childNodes[0]\n\n self.ft = FieldType.from_xml_node(self.xml_node)\n\n self.assertEqual(self.ft.date_format, \"%Y%m%d\")\n\n\n\n self.assertEqual(self.ft.convert(\"20160226\"), datetime(2016, 2, 26))\n\n\n\n self.doc = minidom.parseString('<fieldtype name=\"T\" type=\"time\" pattern=\"[0-9]+\" time_format=\"%H%M\"/>')\n\n self.xml_node = self.doc.childNodes[0]\n\n self.ft = FieldType.from_xml_node(self.xml_node)\n\n self.assertEqual(self.ft.time_format, \"%H%M\")\n\n\n\n self.assertEqual(self.ft.convert(\"1210\"), datetime(1900,1,1,12,10,0)) \n\n\n\nif __name__ == '__main__':\n\n unittest.main()\n\n\n", "file_path": "python/test/test_fieldtype.py", "rank": 79, "score": 61723.73836997368 }, { "content": "import unittest\n\nfrom datetime import datetime, date, time\n\nfrom rbf.basetype import BaseType\n\n\n\nclass TestBaseType(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.assertRaises(ValueError, BaseType, \"foo\")\n\n\n\n def test_0(self):\n\n self.ft = BaseType(\"string\")\n\n self.assertEqual(self.ft.basic_type[\"base\"], str)\n\n\n\n def test_1(self):\n\n self.ft = BaseType(\"string\")\n\n self.assertEqual(self.ft.convert(\"3.14\"), \"3.14\")\n\n\n\n self.ft = BaseType(\"decimal\")\n\n self.assertEqual(self.ft.convert(\"3.14\"), 3.14)\n\n\n\n self.ft = BaseType(\"integer\")\n\n self.assertEqual(self.ft.convert(\"314\"), 314)\n\n\n\n self.ft = BaseType(\"date\")\n\n self.ft.convert(\"XXXX0226\", \"%Y%m%d\")\n\n self.assertEqual(self.ft.convert(\"20160226\", \"%Y%m%d\"), datetime(2016, 2, 26))\n\n\n\n self.ft = BaseType(\"time\")\n\n self.assertEqual(self.ft.convert(\"121013\", \"%H%M%S\"), datetime(1900,1,1,12,10,13))\n\n \n\n def test_2(self):\n\n self.ft = BaseType(\"string\")\n\n self.assertEqual(bool(self.ft.match(\"AAA\")), True)\n\n\n\n\n\nif __name__ == '__main__':\n\n unittest.main()\n\n\n", "file_path": "python/test/test_basetype.py", "rank": 80, "score": 61723.73836997368 }, { "content": "class FieldType(BaseType):\n\n \"\"\"\n\n Even if a field within a record-based file is nothing but an ASCII file, some fields within\n\n a record can represent numerical, alphumerical, etc type of data. 
This class holds the type\n\n of field.\n\n\n\n :param str data_type_representation: nickname of the field type\n\n :param str data_type_description: base type which is restricted to a list of admissible types\n\n\n\n ::\n\n\n\n >>> from rbf.fieldtype import FieldType\n\n >>> ft = FieldType(\"A/N\", \"string\")\n\n >>> ft\n\n data_type_representation=<A/N> data_type_description=<string> type=<BaseType.string>\n\n >>> ft.name\n\n 'A/N'\n\n >>> ft.description\n\n 'string'\n\n >>> ft.type\n\n <BaseType.string: 'string'>\n\n\n\n \"\"\"\n\n def __init__(self, name: str, type_of_string: str):\n\n # call parent ctor\n\n super(FieldType,self).__init__(type_of_string)\n\n self.name = name\n\n\n\n def convert(self, value):\n\n \"\"\"\n\n convert a value according to its base type\n\n \"\"\"\n\n if self.type_as_string == \"date\":\n\n return super().convert(value, self.date_format)\n\n elif self.type_as_string == \"time\":\n\n return super().convert(value, self.time_format)\n\n elif self.type_as_string in ['decimal','integer']:\n\n # sometimes, we need to get rid of leading 0 as conversion will fail if\n\n # a negative sign is found (e.g.: 00000-6) \n\n s = value.lstrip(\"0\")\n\n if s == \"\": \n\n return super().convert(\"0\")\n\n else:\n\n return super().convert(value.lstrip(\"0\"))\n\n else:\n\n return super().convert(value) \n\n\n\n @classmethod\n\n def from_xml_node(cls, xml_node):\n\n \"\"\"\n\n second constructor to build a field type from an XML-string like <fieldtype name=\"A\" type=\"string\"/>\n\n\n\n :param str xml_node: XML-node object describing a field type\n\n\n\n \"\"\"\n\n name = xml_node.attributes['name'].value\n\n type_of_string = xml_node.attributes['type'].value\n\n\n\n # create class instance\n\n ft = cls(name, type_of_string) \n\n \n\n # add any attribute we might find in the <fieldtype> tag\n\n # other than name or type tag XML attribute\n\n for tag_name in [t for t in xml_node.attributes.keys() if t not in ['name', 'type']]:\n\n setattr(ft, tag_name, xml_node.attributes[tag_name].value)\n\n\n\n return ft\n\n\n\n def __repr__(self) -> str:\n", "file_path": "python/rbf/fieldtype.py", "rank": 81, "score": 61085.15628942777 }, { "content": " # return true if a field name is found in the record\n\n # :call-seq:\n\n # include?(fname) -> bool\n\n #\n\n # ==== Arguments\n\n #\n\n # +fname+ - field name\n\n def include?(fname)\n\n @fmap.include?(fname)\n\n end\n\n\n", "file_path": "ruby/lib/record.rb", "rank": 82, "score": 60816.658277345574 }, { "content": "pub trait Exportable {\n\n fn to_html(&self) -> String;\n\n}\n\n\n\nimpl<T> Exportable for Record<T> {\n\n /// Converts a record to an HTML table with all fields data.\n\n fn to_html(&self) -> String {\n\n let mut s = String::with_capacity(200 * self.count());\n\n\n\n // record description\n\n s += format!(\n\n \"<h2><span class=\\\"label label-primary\\\">{}-{}-{}</span></h2>\",\n\n self.name, self.description, self.calculated_length\n\n )\n\n .as_str();\n\n\n\n // fields description\n\n s += format!(\"<table class=\\\"table table-striped\\\">\").as_str();\n\n s += format!(\"<thead><tr><th>#</th><th>Field name</th><th>Description</th>\").as_str();\n\n s +=\n", "file_path": "rust/src/exportable.rs", "rank": 83, "score": 60763.68406723355 }, { "content": "/// This trait will be implemented by readers\n\npub trait ReadMode {\n\n fn set_value(&mut self, value: &str);\n\n}\n\n\n\n/// Implement Ascii read mode\n\nimpl ReadMode for Record<AsciiMode> {\n\n /// Sets the record value (which is equivalent to setting all fields).\n\n 
///\n\n /// # Examples\n\n /// ```\n\n /// use rbf::vector_of;\n\n /// use rbf::record::ReadMode;\n\n /// use rbf::record::{AsciiMode, setup::set_up_by_length};\n\n /// \n\n /// let mut rec = set_up_by_length::<AsciiMode>(); \n\n ///\n\n /// let s = \"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCCCCCCCCCCCDDDDDDDDDD\";\n\n /// rec.set_value(&s);\n\n ///\n\n /// assert_eq!(rec[0].value(), \"AAAAAAAAAA\");\n", "file_path": "rust/src/record.rs", "rank": 84, "score": 59058.915654005774 }, { "content": " def test_element\n\n e = Element.new(\"FIELD1\", \"Description of field 1\", 10)\n\n assert_equal(e.name, \"FIELD1\")\n\n assert_equal(e.description, \"Description of field 1\")\n\n assert_equal(e.length, 10)\n\n end\n\n\n", "file_path": "ruby/test/test.rb", "rank": 85, "score": 52819.10245305112 }, { "content": " def test_record\n\n assert_raise(ArgumentError) { Record.new(\"\", \"Description of field 1\", \"A/N\", 10) }\n\n\n\n string_type = FieldType.new(\"A/N\", \"string\")\n\n f1 = Field.new(\"FIELD1\", \"Description of field 1\", string_type, 10)\n\n f2 = Field.new(\"FIELD2\", \"Description of field 2\", string_type, 20)\n\n f3 = Field.new(\"FIELD3\", \"Description of field 3\", string_type, 10)\n\n f4 = Field.new(\"FIELD4\", \"Description of field 4\", string_type, 5)\n\n f5 = Field.new(\"FIELD5\", \"Description of field 5\", string_type, 5)\n\n r = Record.new(\"RECORD1\", \"Description of record 1\")\n\n\n\n assert_equal(r.name, \"RECORD1\")\n\n\n\n r << f1\n\n r << f2\n\n r << f3\n\n r << f4\n\n r << f5\n\n\n\n assert_equal(r.include?(\"FIELD1\"), true)\n", "file_path": "ruby/test/test.rb", "rank": 86, "score": 52819.10245305112 }, { "content": " def test_fieldtype\n\n ft = FieldType.new(\"A/N\", \"string\")\n\n assert_equal(ft.type, :string)\n\n assert_raise(ArgumentError) { FieldType.new(\"C\", \"complex\") }\n\n end\n\n\n", "file_path": "ruby/test/test.rb", "rank": 87, "score": 52819.10245305112 }, { "content": "\n\n assert_equal(r.count, 5)\n\n assert_equal(r.length, 50)\n\n\n\n assert_raise(ArgumentError) { p r[14.3] }\n\n assert_raise(ArgumentError) { p r[\"XXX\"] }\n\n assert_raise(ArgumentError) { p r[-1] }\n\n assert_raise(ArgumentError) { p r[5] }\n\n assert_equal(r[2], f3)\n\n assert_equal(r[\"FIELD3\"][0], f3)\n\n\n\n r.value = \"AAAAAAAAAABBBBBBBBBBBBBBBBBBBBCCCCCCCCCCDDDDDEEEEE\"\n\n assert_equal(f1.value, \"AAAAAAAAAA\")\n\n assert_equal(f2.value, \"BBBBBBBBBBBBBBBBBBBB\")\n\n assert_equal(f3.value, \"CCCCCCCCCC\")\n\n assert_equal(f4.value, \"DDDDD\")\n\n assert_equal(f5.value, \"EEEEE\")\n\n\n\n assert_equal(r.array_of(:@name), [\"FIELD1\",\"FIELD2\",\"FIELD3\",\"FIELD4\",\"FIELD5\"]) \n\n assert_equal(r.value, \"AAAAAAAAAABBBBBBBBBBBBBBBBBBBBCCCCCCCCCCDDDDDEEEEE\")\n\n\n\n r.each {|f| p f }\n\n #r.each_with_index {|f,i| p i }\n\n end\n\n\n", "file_path": "ruby/test/test.rb", "rank": 88, "score": 50306.50523688826 }, { "content": "require \"rbf\"\n\n\n\nrequire \"test/unit\"\n\n\n\n\n", "file_path": "ruby/test/test.rb", "rank": 89, "score": 50304.44133841287 }, { "content": " def test_1(self):\n\n self.assertEqual(self.rec.name, \"RECORD1\")\n\n self.assertEqual(self.rec.description, \"Description of record 1\")\n\n self.assertEqual(self.rec.length, 30)\n\n self.assertEqual(len(self.rec), 30)\n", "file_path": "python/test/test_record.py", "rank": 90, "score": 49065.86787679342 }, { "content": " def test_0(self):\n\n self.assertRaises(ValueError, Record, \"\", \"Description of record 1\")\n\n\n\n with self.assertRaises(ValueError):\n", "file_path": "python/test/test_record.py", 
"rank": 91, "score": 49065.86787679342 }, { "content": " def test_0(self):\n", "file_path": "python/test/test_fieldtype.py", "rank": 92, "score": 49065.86787679342 }, { "content": " def test_5(self):\n", "file_path": "python/test/test_record.py", "rank": 93, "score": 49065.86787679342 }, { "content": " def test_1(self):\n\n self.ft = BaseType(\"string\")\n\n self.assertEqual(self.ft.convert(\"3.14\"), \"3.14\")\n\n\n\n self.ft = BaseType(\"decimal\")\n\n self.assertEqual(self.ft.convert(\"3.14\"), 3.14)\n\n\n\n self.ft = BaseType(\"integer\")\n\n self.assertEqual(self.ft.convert(\"314\"), 314)\n\n\n\n self.ft = BaseType(\"date\")\n\n self.ft.convert(\"XXXX0226\", \"%Y%m%d\")\n\n self.assertEqual(self.ft.convert(\"20160226\", \"%Y%m%d\"), datetime(2016, 2, 26))\n\n\n\n self.ft = BaseType(\"time\")\n", "file_path": "python/test/test_basetype.py", "rank": 94, "score": 49065.86787679342 }, { "content": " def test_3(self):\n\n f = self.rec[\"FIELD1\"]\n\n self.assertEqual(len(f), 1)\n\n\n\n f = self.rec[2:4]\n", "file_path": "python/test/test_record.py", "rank": 95, "score": 49065.86787679342 }, { "content": " def test_4(self):\n\n self.rec.value = self.line\n\n self.assertEqual(self.rec.value, self.line)\n\n\n\n self.assertEqual(self.rec.FIELD1, \"A\"*10)\n", "file_path": "python/test/test_record.py", "rank": 96, "score": 49065.86787679342 }, { "content": " def test_2(self):\n\n self.ft = BaseType(\"string\")\n", "file_path": "python/test/test_basetype.py", "rank": 97, "score": 49065.86787679342 }, { "content": " def test_2(self):\n\n self.f1 = self.rec[0]\n\n self.f2 = self.rec[1]\n\n self.f3 = self.rec[2]\n\n self.f4 = self.rec[3]\n\n\n\n self.assertEqual(self.f1.name, \"FIELD1\")\n\n self.assertEqual(self.f2.name, \"FIELD2\")\n\n self.assertEqual(self.f3.name, \"FIELD2\")\n\n self.assertEqual(self.f4.name, \"FIELD3\")\n\n\n\n self.assertEqual(self.f1.index, 0)\n\n self.assertEqual(self.f2.index, 1)\n\n self.assertEqual(self.f3.index, 2)\n\n self.assertEqual(self.f4.index, 3)\n\n\n\n self.assertEqual(self.f1.offset, 0)\n\n self.assertEqual(self.f2.offset, 10)\n\n self.assertEqual(self.f3.offset, 15)\n", "file_path": "python/test/test_record.py", "rank": 98, "score": 49065.86787679342 }, { "content": " def test_0(self):\n\n self.ft = BaseType(\"string\")\n", "file_path": "python/test/test_basetype.py", "rank": 99, "score": 49065.86787679342 } ]
Rust
query/src/pipelines/new/processors/transforms/hash_join/chaining_hash_table.rs
lichuang/databend
e6a76e22e4d30683a152bdd29b5d5b60c96a9c8f
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::RwLock;

use common_datablocks::DataBlock;
use common_datavalues::Column;
use common_datavalues::ColumnRef;
use common_datavalues::ConstColumn;
use common_datavalues::DataSchemaRef;
use common_exception::Result;
use common_planners::Expression;

use crate::common::ExpressionEvaluator;
use crate::pipelines::new::processors::transforms::hash_join::hash::HashUtil;
use crate::pipelines::new::processors::transforms::hash_join::hash::HashVector;
use crate::pipelines::new::processors::transforms::hash_join::row::compare_and_combine;
use crate::pipelines::new::processors::transforms::hash_join::row::RowPtr;
use crate::pipelines::new::processors::transforms::hash_join::row::RowSpace;
use crate::pipelines::new::processors::HashJoinState;
use crate::sessions::QueryContext;

pub struct ChainingHashTable {
    ref_count: Mutex<usize>,
    is_finished: Mutex<bool>,
    build_expressions: Vec<Expression>,
    probe_expressions: Vec<Expression>,
    ctx: Arc<QueryContext>,
    hash_table: RwLock<Vec<Vec<RowPtr>>>,
    row_space: RowSpace,
}

impl ChainingHashTable {
    pub fn try_create(
        build_expressions: Vec<Expression>,
        probe_expressions: Vec<Expression>,
        build_data_schema: DataSchemaRef,
        _probe_data_schema: DataSchemaRef,
        ctx: Arc<QueryContext>,
    ) -> Result<Self> {
        Ok(Self {
            row_space: RowSpace::new(build_data_schema),
            ref_count: Mutex::new(0),
            is_finished: Mutex::new(false),
            build_expressions,
            probe_expressions,
            ctx,
            hash_table: RwLock::new(vec![]),
        })
    }

    fn hash(&self, columns: &[ColumnRef], row_count: usize) -> Result<HashVector> {
        let hash_values = columns
            .iter()
            .map(HashUtil::compute_hash)
            .collect::<Result<Vec<HashVector>>>()?;
        Ok(HashUtil::combine_hashes(&hash_values, row_count))
    }

    fn apply_bitmask(hash_vector: &HashVector, mask: u64) -> HashVector {
        let mut result = HashVector::with_capacity(hash_vector.len());
        for hash in hash_vector {
            result.push(*hash & mask);
        }
        result
    }

    fn compute_hash_table_size(rows_count: usize) -> usize {
        let mut power = 1;
        while power < rows_count {
            power <<= 1;
        }
        power
    }
}

impl HashJoinState for ChainingHashTable {
    fn build(&self, input: DataBlock) -> Result<()> {
        let build_keys = self
            .build_expressions
            .iter()
            .map(|expr| {
                ExpressionEvaluator::eval(self.ctx.try_get_function_context()?, expr, &input)
            })
            .collect::<Result<Vec<ColumnRef>>>()?;
        let hash_values = self.hash(&build_keys, input.num_rows())?;
        self.row_space.push(input, hash_values)?;
        Ok(())
    }

    fn probe(&self, input: &DataBlock) -> Result<Vec<DataBlock>> {
        let probe_keys = self
            .probe_expressions
            .iter()
            .map(|expr| {
                ExpressionEvaluator::eval(self.ctx.try_get_function_context()?, expr, input)
            })
            .collect::<Result<Vec<ColumnRef>>>()?;
        let hash_table = self.hash_table.read().unwrap();
        let hash_values = self.hash(&probe_keys, input.num_rows())?;
        let hash_values =
            ChainingHashTable::apply_bitmask(&hash_values, (hash_table.len() - 1) as u64);
        let mut results: Vec<DataBlock> = vec![];
        for (i, hash_value) in hash_values.iter().enumerate().take(input.num_rows()) {
            let probe_result_ptrs = hash_table[*hash_value as usize].as_slice();
            if probe_result_ptrs.is_empty() {
                continue;
            }
            let result_block = self.row_space.gather(probe_result_ptrs)?;
            let probe_block = DataBlock::block_take_by_indices(input, &[i as u32])?;
            let mut replicated_probe_block = DataBlock::empty();
            for (i, col) in probe_block.columns().iter().enumerate() {
                let replicated_col = ConstColumn::new(col.clone(), result_block.num_rows()).arc();
                replicated_probe_block = replicated_probe_block
                    .add_column(replicated_col, probe_block.schema().field(i).clone())?;
            }
            let build_keys = self
                .build_expressions
                .iter()
                .map(|expr| {
                    ExpressionEvaluator::eval(
                        self.ctx.try_get_function_context()?,
                        expr,
                        &result_block,
                    )
                })
                .collect::<Result<Vec<ColumnRef>>>()?;
            let current_probe_keys: Vec<ColumnRef> = probe_keys
                .iter()
                .map(|col| {
                    let column = col.slice(i, 1);
                    ConstColumn::new(column, result_block.num_rows()).arc()
                })
                .collect();
            let output = compare_and_combine(
                replicated_probe_block,
                result_block,
                &build_keys,
                &current_probe_keys,
                self.ctx.clone(),
            )?;
            results.push(output);
        }
        Ok(results)
    }

    fn attach(&self) -> Result<()> {
        let mut count = self.ref_count.lock().unwrap();
        *count += 1;
        Ok(())
    }

    fn detach(&self) -> Result<()> {
        let mut count = self.ref_count.lock().unwrap();
        *count -= 1;
        if *count == 0 {
            self.finish()?;
            let mut is_finished = self.is_finished.lock().unwrap();
            *is_finished = true;
            Ok(())
        } else {
            Ok(())
        }
    }

    fn is_finished(&self) -> Result<bool> {
        Ok(*self.is_finished.lock().unwrap())
    }

    fn finish(&self) -> Result<()> {
        let mut hash_table = self.hash_table.write().unwrap();
        hash_table.resize(
            ChainingHashTable::compute_hash_table_size(self.row_space.num_rows()),
            Default::default(),
        );
        {
            let chunks = self.row_space.chunks.write().unwrap();
            for chunk_index in 0..chunks.len() {
                let chunk = &chunks[chunk_index];
                let hash_values = ChainingHashTable::apply_bitmask(
                    &chunk.hash_values,
                    (hash_table.len() - 1) as u64,
                );
                for (row_index, hash_value) in
                    hash_values.iter().enumerate().take(chunk.num_rows())
                {
                    let ptr = RowPtr {
                        chunk_index: chunk_index as u32,
                        row_index: row_index as u32,
                    };
                    hash_table[*hash_value as usize].push(ptr);
                }
            }
        }
        Ok(())
    }
}

use std::sync::Arc; use std::sync::Mutex; use std::sync::RwLock; use common_datablocks::DataBlock; use common_datavalues::Column; use common_datavalues::ColumnRef; use common_datavalues::ConstColumn; use common_datavalues::DataSchemaRef; use common_exception::Result; use common_planners::Expression; use crate::common::ExpressionEvaluator; use crate::pipelines::new::processors::transforms::hash_join::hash::HashUtil; use crate::pipelines::new::processors::transforms::hash_join::hash::HashVector; use crate::pipelines::new::processors::transforms::hash_join::row::compare_and_combine; use crate::pipelines::new::processors::transforms::hash_join::row::RowPtr; use crate::pipelines::new::processors::transforms::hash_join::row::RowSpace; use crate::pipelines::new::processors::HashJoinState; use crate::sessions::QueryContext; pub struct ChainingHashTable { ref_count: Mutex<usize>, is_finished: Mutex<bool>, build_expressions: Vec<Expression>, probe_expressions: Vec<Expression>, ctx: Arc<QueryContext>, hash_table: RwLock<Vec<Vec<RowPtr>>>, row_space: RowSpace, } impl ChainingHashTable { pub fn try_create( build_expressions: Vec<Expression>, probe_expressions: Vec<Expression>, build_data_schema: DataSchemaRef, _probe_data_schema: DataSchemaRef, ctx: Arc<QueryContext>, ) -> Result<Self> { Ok(Self { row_space: RowSpace::new(build_data_schema), ref_count: Mutex::new(0), is_finished: Mutex::new(false), build_expressions, probe_expressions, ctx, hash_table: RwLock::new(vec![]), }) } fn hash(&self, columns: &[ColumnRef
k(HashUtil::combine_hashes(&hash_values, row_count)) } fn apply_bitmask(hash_vector: &HashVector, mask: u64) -> HashVector { let mut result = HashVector::with_capacity(hash_vector.len()); for hash in hash_vector { result.push(*hash & mask); } result } fn compute_hash_table_size(rows_count: usize) -> usize { let mut power = 1; while power < rows_count { power <<= 1; } power } } impl HashJoinState for ChainingHashTable { fn build(&self, input: DataBlock) -> Result<()> { let build_keys = self .build_expressions .iter() .map(|expr| { ExpressionEvaluator::eval(self.ctx.try_get_function_context()?, expr, &input) }) .collect::<Result<Vec<ColumnRef>>>()?; let hash_values = self.hash(&build_keys, input.num_rows())?; self.row_space.push(input, hash_values)?; Ok(()) } fn probe(&self, input: &DataBlock) -> Result<Vec<DataBlock>> { let probe_keys = self .probe_expressions .iter() .map(|expr| { ExpressionEvaluator::eval(self.ctx.try_get_function_context()?, expr, input) }) .collect::<Result<Vec<ColumnRef>>>()?; let hash_table = self.hash_table.read().unwrap(); let hash_values = self.hash(&probe_keys, input.num_rows())?; let hash_values = ChainingHashTable::apply_bitmask(&hash_values, (hash_table.len() - 1) as u64); let mut results: Vec<DataBlock> = vec![]; for (i, hash_value) in hash_values.iter().enumerate().take(input.num_rows()) { let probe_result_ptrs = hash_table[*hash_value as usize].as_slice(); if probe_result_ptrs.is_empty() { continue; } let result_block = self.row_space.gather(probe_result_ptrs)?; let probe_block = DataBlock::block_take_by_indices(input, &[i as u32])?; let mut replicated_probe_block = DataBlock::empty(); for (i, col) in probe_block.columns().iter().enumerate() { let replicated_col = ConstColumn::new(col.clone(), result_block.num_rows()).arc(); replicated_probe_block = replicated_probe_block .add_column(replicated_col, probe_block.schema().field(i).clone())?; } let build_keys = self .build_expressions .iter() .map(|expr| { ExpressionEvaluator::eval( self.ctx.try_get_function_context()?, expr, &result_block, ) }) .collect::<Result<Vec<ColumnRef>>>()?; let current_probe_keys: Vec<ColumnRef> = probe_keys .iter() .map(|col| { let column = col.slice(i, 1); ConstColumn::new(column, result_block.num_rows()).arc() }) .collect(); let output = compare_and_combine( replicated_probe_block, result_block, &build_keys, &current_probe_keys, self.ctx.clone(), )?; results.push(output); } Ok(results) } fn attach(&self) -> Result<()> { let mut count = self.ref_count.lock().unwrap(); *count += 1; Ok(()) } fn detach(&self) -> Result<()> { let mut count = self.ref_count.lock().unwrap(); *count -= 1; if *count == 0 { self.finish()?; let mut is_finished = self.is_finished.lock().unwrap(); *is_finished = true; Ok(()) } else { Ok(()) } } fn is_finished(&self) -> Result<bool> { Ok(*self.is_finished.lock().unwrap()) } fn finish(&self) -> Result<()> { let mut hash_table = self.hash_table.write().unwrap(); hash_table.resize( ChainingHashTable::compute_hash_table_size(self.row_space.num_rows()), Default::default(), ); { let chunks = self.row_space.chunks.write().unwrap(); for chunk_index in 0..chunks.len() { let chunk = &chunks[chunk_index]; let hash_values = ChainingHashTable::apply_bitmask( &chunk.hash_values, (hash_table.len() - 1) as u64, ); for (row_index, hash_value) in hash_values.iter().enumerate().take(chunk.num_rows()) { let ptr = RowPtr { chunk_index: chunk_index as u32, row_index: row_index as u32, }; hash_table[*hash_value as usize].push(ptr); } } } Ok(()) } }
], row_count: usize) -> Result<HashVector> { let hash_values = columns .iter() .map(HashUtil::compute_hash) .collect::<Result<Vec<HashVector>>>()?; O
function_block-random_span
[ { "content": "// No logical type is specified\n\n// Use Default options\n\npub fn default_column_cast(column: &ColumnRef, data_type: &DataTypeImpl) -> Result<ColumnRef> {\n\n let func_ctx = FunctionContext::default();\n\n cast_with_type(\n\n column,\n\n &column.data_type(),\n\n data_type,\n\n &DEFAULT_CAST_OPTIONS,\n\n &func_ctx,\n\n )\n\n}\n\n\n", "file_path": "common/functions/src/scalars/expressions/cast_with_type.rs", "rank": 0, "score": 327483.0130870672 }, { "content": "pub fn missing(reason: impl ToString) -> impl FnOnce() -> Incompatible {\n\n let s = reason.to_string();\n\n move || Incompatible { reason: s }\n\n}\n", "file_path": "common/proto-conv/src/util.rs", "rank": 1, "score": 281738.5966363752 }, { "content": "fn cast(column: &ColumnRef, data_type: &DataTypeImpl) -> Result<ColumnRef> {\n\n let arrow_array = column.as_arrow_array();\n\n let arrow_options = ArrowOption {\n\n wrapped: true,\n\n partial: false,\n\n };\n\n let result = cast::cast(arrow_array.as_ref(), &data_type.arrow_type(), arrow_options)?;\n\n let result: ArrayRef = Arc::from(result);\n\n Ok(result.into_column())\n\n}\n\n\n\ncriterion_group!(benches, add_benchmark);\n\ncriterion_main!(benches);\n\n\n\nuse rand::distributions::Distribution;\n\nuse rand::distributions::Standard;\n\nuse rand::rngs::StdRng;\n\nuse rand::Rng;\n\nuse rand::SeedableRng;\n", "file_path": "common/datavalues/benches/eq.rs", "rank": 2, "score": 271331.1851770259 }, { "content": "pub fn clickhouse_router() -> impl Endpoint {\n\n Route::new()\n\n .at(\n\n \"/\",\n\n post(clickhouse_handler_post).get(clickhouse_handler_get),\n\n )\n\n .with(poem::middleware::Compression)\n\n}\n", "file_path": "query/src/servers/http/clickhouse_handler.rs", "rank": 3, "score": 268910.65994060994 }, { "content": "pub fn build_path_keys(column: &ColumnRef) -> Result<Vec<Vec<DataValue>>> {\n\n if column.is_const() {\n\n let const_column: &ConstColumn = Series::check_get(column)?;\n\n return build_path_keys(const_column.inner());\n\n }\n\n\n\n let mut path_keys: Vec<Vec<DataValue>> = vec![];\n\n for i in 0..column.len() {\n\n path_keys.push(vec![column.get(i)]);\n\n }\n\n Ok(path_keys)\n\n}\n\n\n", "file_path": "common/functions/src/scalars/semi_structureds/get.rs", "rank": 4, "score": 267883.32579536753 }, { "content": "pub fn parse_path_keys(column: &ColumnRef) -> Result<Vec<Vec<DataValue>>> {\n\n let column: &StringColumn = if column.is_const() {\n\n let const_column: &ConstColumn = Series::check_get(column)?;\n\n Series::check_get(const_column.inner())?\n\n } else {\n\n Series::check_get(column)?\n\n };\n\n\n\n let dialect = &GenericDialect {};\n\n let mut path_keys: Vec<Vec<DataValue>> = vec![];\n\n for v in column.iter() {\n\n if v.is_empty() {\n\n return Err(ErrorCode::SyntaxException(\n\n \"Bad compound object's field path name: '' in GET_PATH\",\n\n ));\n\n }\n\n let definition = std::str::from_utf8(v).unwrap();\n\n let mut tokenizer = Tokenizer::new(dialect, definition);\n\n match tokenizer.tokenize() {\n\n Ok((tokens, position_map)) => {\n", "file_path": "common/functions/src/scalars/semi_structureds/get.rs", "rank": 5, "score": 267883.32579536753 }, { "content": "pub fn equal(lhs: &dyn Column, rhs: &dyn Column) -> bool {\n\n if lhs.data_type_id() != rhs.data_type_id() || lhs.len() != lhs.len() {\n\n return false;\n\n }\n\n\n\n if lhs.is_const() || rhs.is_const() {\n\n return equal(\n\n lhs.convert_full_column().as_ref(),\n\n rhs.convert_full_column().as_ref(),\n\n );\n\n }\n\n\n\n use crate::PhysicalTypeID::*;\n\n\n\n match 
lhs.data_type_id().to_physical_type() {\n\n Null => true,\n\n Nullable => {\n\n let lhs: &NullableColumn = lhs.as_any().downcast_ref().unwrap();\n\n let rhs: &NullableColumn = rhs.as_any().downcast_ref().unwrap();\n\n\n", "file_path": "common/datavalues/src/columns/eq.rs", "rank": 6, "score": 266042.47040293826 }, { "content": "pub fn deserialize_struct<PB, T, ErrFn, CtxFn, D>(\n\n buf: &[u8],\n\n err_code_fn: ErrFn,\n\n context_fn: CtxFn,\n\n) -> Result<T>\n\nwhere\n\n PB: common_protos::prost::Message + Default,\n\n T: FromToProto<PB>,\n\n ErrFn: FnOnce(String) -> ErrorCode + std::marker::Copy,\n\n D: Display,\n\n CtxFn: FnOnce() -> D + std::marker::Copy,\n\n{\n\n let p: PB =\n\n common_protos::prost::Message::decode(buf).map_err_to_code(err_code_fn, context_fn)?;\n\n let v: T = FromToProto::from_pb(p).map_err_to_code(err_code_fn, context_fn)?;\n\n\n\n Ok(v)\n\n}\n", "file_path": "common/management/src/serde/pb_serde.rs", "rank": 7, "score": 265273.3837657278 }, { "content": "#[allow(clippy::borrowed_box)]\n\npub fn test_eval(op: &str, columns: &[ColumnRef]) -> Result<ColumnRef> {\n\n let mut rows_size = 0;\n\n let mut arguments = Vec::with_capacity(columns.len());\n\n let mut arguments_type = Vec::with_capacity(columns.len());\n\n\n\n for (index, arg_column) in columns.iter().enumerate() {\n\n let f = ColumnWithField::new(\n\n arg_column.clone(),\n\n DataField::new(&format!(\"dummy_{}\", index), arg_column.data_type()),\n\n );\n\n\n\n arguments_type.push(arg_column.data_type());\n\n\n\n rows_size = arg_column.len();\n\n arguments.push(f);\n\n }\n\n\n\n let mut types = Vec::with_capacity(columns.len());\n\n for t in arguments_type.iter() {\n\n types.push(t);\n\n }\n\n\n\n test_eval_with_type(op, rows_size, &arguments, &types)\n\n}\n\n\n", "file_path": "common/functions/tests/it/scalars/scalar_function_test.rs", "rank": 8, "score": 259954.4043662932 }, { "content": "pub fn serialize_struct<PB: common_protos::prost::Message, ErrFn, CtxFn, D>(\n\n value: &impl FromToProto<PB>,\n\n err_code_fn: ErrFn,\n\n context_fn: CtxFn,\n\n) -> Result<Vec<u8>>\n\nwhere\n\n ErrFn: FnOnce(String) -> ErrorCode + std::marker::Copy,\n\n D: Display,\n\n CtxFn: FnOnce() -> D + std::marker::Copy,\n\n{\n\n let p = value.to_pb().map_err_to_code(err_code_fn, context_fn)?;\n\n let mut buf = vec![];\n\n common_protos::prost::Message::encode(&p, &mut buf).map_err_to_code(err_code_fn, context_fn)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "common/management/src/serde/pb_serde.rs", "rank": 9, "score": 248079.67094891059 }, { "content": "pub fn make_column_def(\n\n name: impl Into<String>,\n\n quote_style: Option<char>,\n\n data_type: DataType,\n\n) -> ColumnDef {\n\n ColumnDef {\n\n name: Ident {\n\n value: name.into(),\n\n quote_style,\n\n },\n\n data_type,\n\n collation: None,\n\n options: vec![],\n\n }\n\n}\n\n\n", "file_path": "query/tests/it/sql/sql_parser.rs", "rank": 10, "score": 247234.30849219233 }, { "content": "pub fn sum_primitive<T, SumT>(column: &ColumnRef, validity: Option<&Bitmap>) -> Result<SumT>\n\nwhere\n\n T: PrimitiveType + AsPrimitive<SumT>,\n\n SumT: PrimitiveType + std::ops::AddAssign,\n\n{\n\n let inner: &PrimitiveColumn<T> = Series::check_get(column)?;\n\n\n\n if let Some(validity) = validity {\n\n let mut sum = SumT::default();\n\n // TODO use simd version\n\n inner.iter().zip(validity.iter()).for_each(|(t, b)| {\n\n if b {\n\n sum += t.as_();\n\n }\n\n });\n\n\n\n Ok(sum)\n\n } else {\n\n let mut sum = SumT::default();\n\n inner.iter().for_each(|t| {\n\n sum += t.as_();\n\n });\n\n\n\n 
Ok(sum)\n\n }\n\n}\n", "file_path": "common/functions/src/aggregates/aggregate_sum.rs", "rank": 11, "score": 245383.75923435047 }, { "content": "/// Determines if the set of `Expression`'s are a valid projection on the input\n\n/// `Expression::Column`'s.\n\npub fn find_columns_not_satisfy_exprs(\n\n columns: &[Expression],\n\n exprs: &[Expression],\n\n) -> Result<Option<Expression>> {\n\n columns.iter().try_for_each(|c| match c {\n\n Expression::Column(_) => Ok(()),\n\n\n\n _ => Err(ErrorCode::SyntaxException(\n\n \"Expression::Column are required\".to_string(),\n\n )),\n\n })?;\n\n\n\n let exprs = find_column_exprs(exprs);\n\n for expr in &exprs {\n\n if !columns.contains(expr) {\n\n return Ok(Some(expr.clone()));\n\n }\n\n }\n\n Ok(None)\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 12, "score": 239469.8465076925 }, { "content": "pub fn cast_column_field(\n\n column_with_field: &ColumnWithField,\n\n from_type: &DataTypeImpl,\n\n target_type: &DataTypeImpl,\n\n func_ctx: &FunctionContext,\n\n) -> Result<ColumnRef> {\n\n cast_with_type(\n\n column_with_field.column(),\n\n from_type,\n\n target_type,\n\n &DEFAULT_CAST_OPTIONS,\n\n func_ctx,\n\n )\n\n}\n\n\n", "file_path": "common/functions/src/scalars/expressions/cast_with_type.rs", "rank": 13, "score": 239463.317929649 }, { "content": "pub fn subexpr(min_precedence: u32) -> impl FnMut(Input) -> IResult<Expr> {\n\n move |i| {\n\n let higher_prec_expr_element =\n\n |i| {\n\n expr_element(i).and_then(|(rest, elem)| {\n\n match PrattParser::<std::iter::Once<_>>::query(&mut ExprParser, &elem).unwrap()\n\n {\n\n Affix::Infix(prec, _) | Affix::Prefix(prec) | Affix::Postfix(prec)\n\n if prec <= Precedence(min_precedence) =>\n\n {\n\n Err(nom::Err::Error(Error::from_error_kind(\n\n i,\n\n ErrorKind::Other(\"expected more tokens for expression\"),\n\n )))\n\n }\n\n _ => Ok((rest, elem)),\n\n }\n\n })\n\n };\n\n\n", "file_path": "common/ast/src/parser/expr.rs", "rank": 14, "score": 238090.24991719902 }, { "content": "#[inline]\n\nfn try_extract_inner(column: &ColumnRef) -> Result<(&ColumnRef, Bitmap)> {\n\n let (all_is_null, validity) = column.validity();\n\n let first_flag = if all_is_null {\n\n false\n\n } else {\n\n validity.map(|c| c.get_bit(0)).unwrap_or(true)\n\n };\n\n\n\n let (column, validity) = if column.is_const() {\n\n let mut bitmap = MutableBitmap::with_capacity(1);\n\n bitmap.push(first_flag);\n\n let c: &ConstColumn = unsafe { Series::static_cast(column) };\n\n (c.inner(), bitmap.into())\n\n } else if column.is_nullable() {\n\n let c: &NullableColumn = unsafe { Series::static_cast(column) };\n\n (c.inner(), c.ensure_validity().clone())\n\n } else {\n\n let mut bitmap = MutableBitmap::with_capacity(1);\n\n bitmap.push(first_flag);\n\n (column, bitmap.into())\n", "file_path": "common/datavalues/src/scalars/viewer.rs", "rank": 15, "score": 235641.25934155926 }, { "content": "pub fn match_token(kind: TokenKind) -> impl FnMut(Input) -> IResult<&Token> {\n\n move |i| match i.0.get(0).filter(|token| token.kind == kind) {\n\n Some(token) => Ok((i.slice(1..), token)),\n\n _ => Err(nom::Err::Error(Error::from_error_kind(\n\n i,\n\n ErrorKind::ExpectToken(kind),\n\n ))),\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
rule {\n\n ($($tt:tt)*) => { nom_rule::rule!(\n\n $crate::parser::util::match_text,\n\n $crate::parser::util::match_token,\n\n $($tt)*)\n\n }\n\n}\n\n\n", "file_path": "common/ast/src/parser/util.rs", "rank": 16, "score": 235294.88035000203 }, { "content": "#[inline]\n\nfn get_null_mask(column: &ColumnRef) -> usize {\n\n if !column.is_const() && !column.only_null() && column.is_nullable() {\n\n usize::MAX\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/scalars/viewer.rs", "rank": 17, "score": 234212.36921653108 }, { "content": "#[inline]\n\nfn non_const_mask(column: &ColumnRef) -> usize {\n\n if !column.is_const() && !column.only_null() {\n\n usize::MAX\n\n } else {\n\n 0\n\n }\n\n}\n", "file_path": "common/datavalues/src/scalars/viewer.rs", "rank": 18, "score": 234212.36921653108 }, { "content": "/// QUOTE: (From arrow2::arrow::compute::comparison::primitive)\n\npub fn primitive_simd_op_boolean<T, F>(l: &ColumnRef, r: &ColumnRef, op: F) -> Result<BooleanColumn>\n\nwhere\n\n T: PrimitiveType + Simd8,\n\n F: Fn(T::Simd, T::Simd) -> u8,\n\n{\n\n debug_assert!(\n\n l.len() == r.len(),\n\n \"Size of columns must match to apply binary expression\"\n\n );\n\n\n\n let res = match (l.is_const(), r.is_const()) {\n\n (false, false) => {\n\n let lhs: &PrimitiveColumn<T> = Series::check_get(l)?;\n\n let lhs_chunks_iter = lhs.values().chunks_exact(8);\n\n let lhs_remainder = lhs_chunks_iter.remainder();\n\n\n\n let rhs: &PrimitiveColumn<T> = Series::check_get(r)?;\n\n let rhs_chunks_iter = rhs.values().chunks_exact(8);\n\n let rhs_remainder = rhs_chunks_iter.remainder();\n\n\n", "file_path": "common/functions/src/scalars/expressions/binary.rs", "rank": 19, "score": 233398.44026439646 }, { "content": "pub fn match_text(text: &'static str) -> impl FnMut(Input) -> IResult<&Token> {\n\n move |i| match i.0.get(0).filter(|token| token.text() == text) {\n\n Some(token) => Ok((i.slice(1..), token)),\n\n _ => Err(nom::Err::Error(Error::from_error_kind(\n\n i,\n\n ErrorKind::ExpectText(text),\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "common/ast/src/parser/util.rs", "rank": 20, "score": 230934.70144550543 }, { "content": "pub fn column_def(i: Input) -> IResult<ColumnDefinition> {\n\n #[derive(Clone)]\n\n enum ColumnConstraint<'a> {\n\n Nullable(bool),\n\n DefaultExpr(Box<Expr<'a>>),\n\n }\n\n\n\n let nullable = alt((\n\n value(ColumnConstraint::Nullable(true), rule! { NULL }),\n\n value(ColumnConstraint::Nullable(false), rule! { NOT ~ ^NULL }),\n\n ));\n\n let default_expr = map(\n\n rule! {\n\n DEFAULT ~ ^#subexpr(NOT_PREC)\n\n },\n\n |(_, default_expr)| ColumnConstraint::DefaultExpr(Box::new(default_expr)),\n\n );\n\n\n\n map(\n\n rule! 
{\n", "file_path": "common/ast/src/parser/statement.rs", "rank": 21, "score": 229813.8807299788 }, { "content": "pub fn col(name: &str) -> Expression {\n\n Expression::Column(name.to_string())\n\n}\n", "file_path": "common/planners/src/plan_expression_column.rs", "rank": 22, "score": 222138.96341800518 }, { "content": "#[inline]\n\npub fn string_to_date(date_str: impl AsRef<[u8]>) -> Option<NaiveDate> {\n\n let s = std::str::from_utf8(date_str.as_ref()).ok();\n\n s.and_then(|c| c.parse::<NaiveDate>().ok())\n\n}\n", "file_path": "common/functions/src/scalars/expressions/cast_from_string.rs", "rank": 23, "score": 221333.5875184884 }, { "content": "#[inline]\n\nfn intdiv_scalar<O>(l: impl AsPrimitive<f64>, r: impl AsPrimitive<f64>, ctx: &mut EvalContext) -> O\n\nwhere\n\n f64: AsPrimitive<O>,\n\n O: IntegerType + Zero,\n\n{\n\n let l = l.as_();\n\n let r = r.as_();\n\n if std::intrinsics::unlikely(r == 0.0) {\n\n ctx.set_error(ErrorCode::BadArguments(\"Division by zero\"));\n\n return O::zero();\n\n }\n\n (l / r).as_()\n\n}\n\n\n\npub struct ArithmeticIntDivFunction;\n\n\n\nimpl ArithmeticIntDivFunction {\n\n pub fn try_create_func(\n\n _display_name: &str,\n\n args: &[&DataTypeImpl],\n", "file_path": "common/functions/src/scalars/arithmetics/arithmetic_intdiv.rs", "rank": 24, "score": 220750.6817009413 }, { "content": "// put_uvarint encodes a uint64 into buf and returns the number of bytes written.\n\n// If the buffer is too small, put_uvarint will panic.\n\npub fn put_uvarint(mut buffer: impl AsMut<[u8]>, x: u64) -> usize {\n\n let mut i = 0;\n\n let mut mx = x;\n\n let buf = buffer.as_mut();\n\n while mx >= 0x80 {\n\n buf[i] = mx as u8 | 0x80;\n\n mx >>= 7;\n\n i += 1;\n\n }\n\n buf[i] = mx as u8;\n\n i + 1\n\n}\n", "file_path": "common/io/src/binary_write.rs", "rank": 25, "score": 220705.26493420475 }, { "content": "pub fn wrap_nullable(data_type: &DataTypeImpl) -> DataTypeImpl {\n\n if !data_type.can_inside_nullable() {\n\n return data_type.clone();\n\n }\n\n NullableType::new_impl(data_type.clone())\n\n}\n\n\n", "file_path": "common/datavalues/src/types/data_type.rs", "rank": 26, "score": 219110.52216025858 }, { "content": "pub fn remove_nullable(data_type: &DataTypeImpl) -> DataTypeImpl {\n\n if matches!(data_type.data_type_id(), TypeID::Nullable) {\n\n let nullable = data_type.as_any().downcast_ref::<NullableType>().unwrap();\n\n return nullable.inner_type().clone();\n\n }\n\n data_type.clone()\n\n}\n\n\n", "file_path": "common/datavalues/src/types/data_type.rs", "rank": 27, "score": 219110.52216025858 }, { "content": "#[inline]\n\npub fn aggregate_types(args: &[DataTypeImpl]) -> Result<DataTypeImpl> {\n\n match args.len() {\n\n 0 => Result::Err(ErrorCode::BadArguments(\"Can't aggregate empty args\")),\n\n 1 => Ok(args[0].clone()),\n\n _ => {\n\n let left = args[0].clone();\n\n let right = aggregate_types(&args[1..args.len()])?;\n\n merge_types(&left, &right)\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/types/type_coercion.rs", "rank": 28, "score": 214682.71567521285 }, { "content": "pub fn equal(lhs: &DataTypeImpl, rhs: &DataTypeImpl) -> bool {\n\n if lhs.data_type_id() != rhs.data_type_id() {\n\n return false;\n\n }\n\n\n\n use crate::prelude::TypeID::*;\n\n match lhs.data_type_id() {\n\n Boolean | UInt8 | UInt16 | UInt32 | UInt64 | Int8 | Int16 | Int32 | Int64 | Float32\n\n | Float64 | String | Date | Interval | Null | Variant | VariantArray | VariantObject => {\n\n true\n\n }\n\n\n\n Timestamp => {\n\n let lhs: &TimestampType = 
lhs.as_any().downcast_ref().unwrap();\n\n let rhs: &TimestampType = rhs.as_any().downcast_ref().unwrap();\n\n\n\n lhs.precision() == rhs.precision()\n\n }\n\n\n\n Nullable => {\n", "file_path": "common/datavalues/src/types/eq.rs", "rank": 29, "score": 213537.9152098058 }, { "content": "pub fn from_arrow_field(f: &ArrowField) -> DataTypeImpl {\n\n if let Some(custom_name) = f.metadata.get(ARROW_EXTENSION_NAME) {\n\n let metadata = f.metadata.get(ARROW_EXTENSION_META).cloned();\n\n match custom_name.as_str() {\n\n \"Date\" => return DateType::new_impl(),\n\n \"Timestamp\" => match metadata {\n\n Some(meta) => {\n\n let mut chars = meta.chars();\n\n let precision = chars.next().unwrap().to_digit(10).unwrap();\n\n return TimestampType::new_impl(precision as usize);\n\n }\n\n None => return TimestampType::new_impl(0),\n\n },\n\n \"Interval\" => return IntervalType::new_impl(metadata.unwrap().into()),\n\n \"Variant\" => return VariantType::new_impl(),\n\n \"VariantArray\" => return VariantArrayType::new_impl(),\n\n \"VariantObject\" => return VariantObjectType::new_impl(),\n\n _ => {}\n\n }\n\n }\n", "file_path": "common/datavalues/src/types/data_type.rs", "rank": 30, "score": 211900.9520318681 }, { "content": "/// Perform `lhs == rhs` operation on two arrays.\n\npub fn eq<T>(lhs: &PrimitiveColumn<T>, rhs: &PrimitiveColumn<T>) -> BooleanColumn\n\nwhere\n\n T: PrimitiveType + comparison::Simd8,\n\n T::Simd: comparison::Simd8PartialEq,\n\n{\n\n compare_op(lhs, rhs, |a, b| a.eq(b))\n\n}\n\n\n", "file_path": "common/datavalues/benches/eq.rs", "rank": 31, "score": 209592.30476167693 }, { "content": "pub fn from_arrow_type(dt: &ArrowType) -> DataTypeImpl {\n\n match dt {\n\n ArrowType::Null => DataTypeImpl::Null(NullType {}),\n\n ArrowType::UInt8 => DataTypeImpl::UInt8(UInt8Type::default()),\n\n ArrowType::UInt16 => DataTypeImpl::UInt16(UInt16Type::default()),\n\n ArrowType::UInt32 => DataTypeImpl::UInt32(UInt32Type::default()),\n\n ArrowType::UInt64 => DataTypeImpl::UInt64(UInt64Type::default()),\n\n ArrowType::Int8 => DataTypeImpl::Int8(Int8Type::default()),\n\n ArrowType::Int16 => DataTypeImpl::Int16(Int16Type::default()),\n\n ArrowType::Int32 => DataTypeImpl::Int32(Int32Type::default()),\n\n ArrowType::Int64 => DataTypeImpl::Int64(Int64Type::default()),\n\n ArrowType::Boolean => DataTypeImpl::Boolean(BooleanType::default()),\n\n ArrowType::Float32 => DataTypeImpl::Float32(Float32Type::default()),\n\n ArrowType::Float64 => DataTypeImpl::Float64(Float64Type::default()),\n\n\n\n // TODO support other list\n\n ArrowType::LargeList(f) => {\n\n let inner = from_arrow_field(f);\n\n DataTypeImpl::Array(ArrayType::create(inner))\n\n }\n", "file_path": "common/datavalues/src/types/data_type.rs", "rank": 32, "score": 208784.46393912073 }, { "content": "pub fn assert_numeric(data_type: &DataTypeImpl) -> Result<()> {\n\n if !data_type.data_type_id().is_numeric() {\n\n return Err(ErrorCode::IllegalDataType(format!(\n\n \"Expected a numeric type, but got {}\",\n\n data_type.name()\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "common/functions/src/scalars/function_common.rs", "rank": 33, "score": 208784.4639391207 }, { "content": "pub fn assert_string(data_type: &DataTypeImpl) -> Result<()> {\n\n if !data_type.data_type_id().is_string() {\n\n return Err(ErrorCode::IllegalDataType(format!(\n\n \"Expected a string type, but got {}\",\n\n data_type.name()\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "common/functions/src/scalars/function_common.rs", "rank": 34, "score": 208784.4639391207 }, { 
"content": "pub trait Column: Send + Sync {\n\n fn as_any(&self) -> &dyn Any;\n\n /// Type of data that column contains. It's an underlying physical type:\n\n /// Int32 for Date, Int64 for Timestamp, so on.\n\n fn data_type_id(&self) -> TypeID {\n\n self.data_type().data_type_id()\n\n }\n\n fn data_type(&self) -> DataTypeImpl;\n\n\n\n fn column_type_name(&self) -> String;\n\n\n\n fn is_nullable(&self) -> bool {\n\n false\n\n }\n\n\n\n fn is_null(&self) -> bool {\n\n false\n\n }\n\n\n\n fn is_const(&self) -> bool {\n", "file_path": "common/datavalues/src/columns/column.rs", "rank": 35, "score": 207855.32385999153 }, { "content": "/// Convert any `Expression` to an `Expression::Column`.\n\npub fn expr_as_column_expr(expr: &Expression) -> Result<Expression> {\n\n match expr {\n\n Expression::Column(_) => Ok(expr.clone()),\n\n _ => Ok(Expression::Column(expr.column_name())),\n\n }\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 36, "score": 207428.62590067097 }, { "content": "/// Collect all deeply nested `Expression::Column`'s. They are returned in order of\n\n/// appearance (depth first), with duplicates omitted.\n\npub fn find_column_exprs(exprs: &[Expression]) -> Vec<Expression> {\n\n find_exprs_in_exprs(exprs, &|nest_exprs| {\n\n matches!(nest_exprs, Expression::Column(_))\n\n })\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 37, "score": 207428.19322241502 }, { "content": "pub fn display_fmt<T: std::fmt::Display, I: IntoIterator<Item = T>>(\n\n iter: I,\n\n head: &str,\n\n len: usize,\n\n typeid: TypeID,\n\n f: &mut std::fmt::Formatter<'_>,\n\n) -> std::fmt::Result {\n\n let result = display_helper(iter);\n\n write!(\n\n f,\n\n \"{} \\t typeid: {:?}\\t len: {}\\t data: [{}]\",\n\n head,\n\n typeid,\n\n len,\n\n result.join(\", \")\n\n )\n\n}\n\n\n\nmacro_rules! fmt_dyn {\n\n ($column:expr, $ty:ty, $f:expr) => {{\n", "file_path": "common/datavalues/src/columns/column.rs", "rank": 38, "score": 206795.65627994295 }, { "content": "// typetag did not support generic impls, so we have to do this\n\npub fn create_primitive_datatype<T: PrimitiveType>() -> DataTypeImpl {\n\n match (T::SIGN, T::FLOATING, T::SIZE) {\n\n (false, false, 1) => DataTypeImpl::UInt8(UInt8Type { _t: PhantomData }),\n\n (false, false, 2) => DataTypeImpl::UInt16(UInt16Type { _t: PhantomData }),\n\n (false, false, 4) => DataTypeImpl::UInt32(UInt32Type { _t: PhantomData }),\n\n (false, false, 8) => DataTypeImpl::UInt64(UInt64Type { _t: PhantomData }),\n\n\n\n (true, false, 1) => DataTypeImpl::Int8(Int8Type { _t: PhantomData }),\n\n (true, false, 2) => DataTypeImpl::Int16(Int16Type { _t: PhantomData }),\n\n (true, false, 4) => DataTypeImpl::Int32(Int32Type { _t: PhantomData }),\n\n (true, false, 8) => DataTypeImpl::Int64(Int64Type { _t: PhantomData }),\n\n\n\n (true, true, 4) => DataTypeImpl::Float32(Float32Type { _t: PhantomData }),\n\n (true, true, 8) => DataTypeImpl::Float64(Float64Type { _t: PhantomData }),\n\n\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n\nmacro_rules! 
impl_numeric {\n", "file_path": "common/datavalues/src/types/type_primitive.rs", "rank": 39, "score": 205814.34252770513 }, { "content": "pub fn assert_date_or_timestamp(data_type: &DataTypeImpl) -> Result<()> {\n\n if !data_type.data_type_id().is_date_or_date_time() {\n\n return Err(ErrorCode::IllegalDataType(format!(\n\n \"Expected a date or timestamp type, but got {}\",\n\n data_type.name()\n\n )));\n\n }\n\n Ok(())\n\n}\n", "file_path": "common/functions/src/scalars/function_common.rs", "rank": 40, "score": 205807.63799242623 }, { "content": "/// Inject current tracing::Span info into tonic request meta\n\n/// before sending request to a tonic server.\n\n/// Then the tonic server will be able to chain a distributed tracing.\n\n///\n\n/// A tonic client should call this function just before sending out the request.\n\n///\n\n/// The global propagater must be installed, e.g. by calling: TODO\n\npub fn inject_span_to_tonic_request<T>(mes: impl tonic::IntoRequest<T>) -> tonic::Request<T> {\n\n let curr = tracing::Span::current();\n\n let cx = curr.context();\n\n\n\n let mut request = mes.into_request();\n\n\n\n global::get_text_map_propagator(|propagator| {\n\n propagator.inject_context(&cx, &mut MetadataMapInjector(request.metadata_mut()))\n\n });\n\n\n\n request\n\n}\n\n\n", "file_path": "common/tracing/src/tracing_to_jaeger.rs", "rank": 41, "score": 205581.50220316253 }, { "content": "/// Setup building environment:\n\n/// - Watch git HEAD to trigger a rebuild;\n\n/// - Generate vergen instruction to setup environment variables for building databend components. See: https://docs.rs/vergen/5.1.8/vergen/ ;\n\n/// - Generate databend environment variables, e.g., authors.\n\npub fn setup() {\n\n if Path::new(\".git/HEAD\").exists() {\n\n println!(\"cargo:rerun-if-changed=.git/HEAD\");\n\n }\n\n add_building_env_vars();\n\n}\n\n\n", "file_path": "common/building/src/lib.rs", "rank": 42, "score": 204689.0127160601 }, { "content": "pub fn format_data_type_sql(data_type: &DataTypeImpl) -> String {\n\n let notnull_type = remove_nullable(data_type);\n\n match data_type.is_nullable() {\n\n true => format!(\"{} NULL\", notnull_type.sql_name()),\n\n false => notnull_type.sql_name(),\n\n }\n\n}\n", "file_path": "common/datavalues/src/types/data_type.rs", "rank": 43, "score": 202961.29172416666 }, { "content": "fn databend_eq(lhs: &ColumnRef, rhs: &ColumnRef) -> Result<ColumnRef> {\n\n let mut validity: Option<Bitmap> = None;\n\n let (_, valid) = lhs.validity();\n\n validity = combine_validities_2(validity.clone(), valid.cloned());\n\n let lhs = Series::remove_nullable(lhs);\n\n\n\n let (_, valid) = rhs.validity();\n\n validity = combine_validities_2(validity.clone(), valid.cloned());\n\n let rhs = Series::remove_nullable(rhs);\n\n\n\n let lhs_type = remove_nullable(&lhs.data_type());\n\n let rhs_type = remove_nullable(&rhs.data_type());\n\n let lhs_id = lhs_type.data_type_id().to_physical_type();\n\n let rhs_id = rhs_type.data_type_id().to_physical_type();\n\n\n\n let col = with_match_physical_primitive_type_error!(lhs_id, |$L| {\n\n with_match_physical_primitive_type_error!(rhs_id, |$R| {\n\n let left: &<$L as Scalar>::ColumnType = unsafe { Series::static_cast(&lhs) };\n\n let right: &<$R as Scalar>::ColumnType = unsafe { Series::static_cast(&rhs) };\n\n\n", "file_path": "common/datavalues/benches/eq.rs", "rank": 44, "score": 202344.2416592075 }, { "content": "// coercion rules for compare operations. 
This is a superset of all numerical coercion rules.\n\npub fn compare_coercion(lhs_type: &DataTypeImpl, rhs_type: &DataTypeImpl) -> Result<DataTypeImpl> {\n\n let lhs_id = lhs_type.data_type_id();\n\n let rhs_id = rhs_type.data_type_id();\n\n\n\n if lhs_type.eq(rhs_type) {\n\n // same type => equality is possible\n\n return Ok(lhs_type.clone());\n\n }\n\n\n\n if lhs_id.is_numeric() && rhs_id.is_numeric() {\n\n return numerical_coercion(lhs_type, rhs_type, true);\n\n }\n\n\n\n // one of is nothing\n\n {\n\n if lhs_id == TypeID::Null {\n\n return Ok(wrap_nullable(rhs_type));\n\n }\n\n\n\n if rhs_id == TypeID::Null {\n", "file_path": "common/datavalues/src/types/type_coercion.rs", "rank": 45, "score": 201546.5257972664 }, { "content": "pub fn merge_types(lhs_type: &DataTypeImpl, rhs_type: &DataTypeImpl) -> Result<DataTypeImpl> {\n\n if lhs_type.is_nullable() || rhs_type.is_nullable() {\n\n let lhs_type = remove_nullable(lhs_type);\n\n let rhs_type = remove_nullable(rhs_type);\n\n let merge_types = merge_types(&lhs_type, &rhs_type)?;\n\n return Ok(wrap_nullable(&merge_types));\n\n }\n\n\n\n let lhs_id = lhs_type.data_type_id();\n\n let rhs_id = rhs_type.data_type_id();\n\n\n\n match (lhs_id, rhs_id) {\n\n (Null, _) => Ok(wrap_nullable(rhs_type)),\n\n (_, Null) => Ok(wrap_nullable(lhs_type)),\n\n\n\n (Array, Array) => {\n\n let a = lhs_type.as_any().downcast_ref::<ArrayType>().unwrap();\n\n let b = rhs_type.as_any().downcast_ref::<ArrayType>().unwrap();\n\n\n\n let typ = merge_types(a.inner_type(), b.inner_type())?;\n", "file_path": "common/datavalues/src/types/type_coercion.rs", "rank": 46, "score": 201546.5257972664 }, { "content": "#[inline]\n\npub fn string_to_timestamp(date_str: impl AsRef<[u8]>, tz: &Tz) -> Option<DateTime<Tz>> {\n\n let s = std::str::from_utf8(date_str.as_ref()).ok();\n\n s.and_then(|c| tz.datetime_from_str(c, \"%Y-%m-%d %H:%M:%S%.f\").ok())\n\n}\n\n\n", "file_path": "common/functions/src/scalars/expressions/cast_from_string.rs", "rank": 47, "score": 200677.29523177404 }, { "content": "fn databend_eq_simd(lhs: &ColumnRef, rhs: &ColumnRef) -> Result<ColumnRef> {\n\n let mut validity: Option<Bitmap> = None;\n\n let (_, valid) = lhs.validity();\n\n validity = combine_validities_2(validity.clone(), valid.cloned());\n\n let lhs = Series::remove_nullable(lhs);\n\n\n\n let (_, valid) = rhs.validity();\n\n validity = combine_validities_2(validity.clone(), valid.cloned());\n\n let rhs = Series::remove_nullable(rhs);\n\n\n\n let lhs_type = remove_nullable(&lhs.data_type());\n\n let rhs_type = remove_nullable(&rhs.data_type());\n\n let least_supertype = compare_coercion(&lhs_type, &rhs_type)?;\n\n\n\n let col0 = if lhs_type != least_supertype {\n\n cast(&lhs, &least_supertype)?\n\n } else {\n\n lhs\n\n };\n\n\n", "file_path": "common/datavalues/benches/eq.rs", "rank": 48, "score": 200553.4126001514 }, { "content": "#[inline]\n\nfn div_scalar(l: impl AsPrimitive<f64>, r: impl AsPrimitive<f64>, _ctx: &mut EvalContext) -> f64 {\n\n l.as_() / r.as_()\n\n}\n\n\n\npub struct ArithmeticDivFunction;\n\n\n\nimpl ArithmeticDivFunction {\n\n pub fn try_create_func(\n\n _display_name: &str,\n\n args: &[&DataTypeImpl],\n\n ) -> Result<Box<dyn Function>> {\n\n with_match_primitive_types_error!(args[0].data_type_id(), |$T| {\n\n with_match_primitive_types_error!(args[1].data_type_id(), |$D| {\n\n BinaryArithmeticFunction::<$T, $D, f64, _>::try_create_func(\n\n DataValueBinaryOperator::Div,\n\n Float64Type::new_impl(),\n\n div_scalar\n\n )\n\n })\n\n })\n", "file_path": 
"common/functions/src/scalars/arithmetics/arithmetic_div.rs", "rank": 49, "score": 200213.23768688508 }, { "content": "pub fn reduce_block_stats<T: Borrow<ColumnsStatistics>>(stats: &[T]) -> Result<ColumnsStatistics> {\n\n let len = stats.len();\n\n\n\n // transpose Vec<HashMap<_,(_,_)>> to HashMap<_, (_, Vec<_>)>\n\n let col_stat_list = stats.iter().fold(HashMap::new(), |acc, item| {\n\n item.borrow().iter().fold(\n\n acc,\n\n |mut acc: HashMap<ColumnId, Vec<&ColumnStatistics>>, (col_id, stats)| {\n\n let entry = acc.entry(*col_id);\n\n match entry {\n\n Entry::Occupied(_) => {\n\n entry.and_modify(|v| v.push(stats));\n\n }\n\n Entry::Vacant(_) => {\n\n entry.or_insert_with(|| vec![stats]);\n\n }\n\n }\n\n acc\n\n },\n\n )\n", "file_path": "query/src/storages/fuse/statistics/reducers.rs", "rank": 50, "score": 198700.05353787378 }, { "content": "pub fn init_query_logger(\n\n log_name: &str,\n\n dir: &str,\n\n) -> (Vec<WorkerGuard>, Arc<dyn Subscriber + Send + Sync>) {\n\n let mut guards = vec![];\n\n\n\n let rolling_appender = RollingFileAppender::new(Rotation::HOURLY, dir, log_name);\n\n let (rolling_writer, rolling_writer_guard) = tracing_appender::non_blocking(rolling_appender);\n\n let format = tracing_subscriber::fmt::format()\n\n .without_time()\n\n .with_target(false)\n\n .with_level(false)\n\n .compact();\n\n guards.push(rolling_writer_guard);\n\n\n\n let subscriber = tracing_subscriber::fmt()\n\n .with_writer(rolling_writer)\n\n .event_format(format)\n\n .finish();\n\n\n\n (guards, Arc::new(subscriber))\n\n}\n\n\n", "file_path": "common/tracing/src/logging.rs", "rank": 51, "score": 198604.60079528496 }, { "content": "pub fn set_env_config() {\n\n let mut config = Config::default();\n\n *config.git_mut().sha_kind_mut() = ShaKind::Short;\n\n\n\n if let Err(e) = vergen(config) {\n\n eprintln!(\"{}\", e);\n\n }\n\n}\n\n\n", "file_path": "common/building/src/lib.rs", "rank": 52, "score": 198604.60079528496 }, { "content": "pub fn unary_simd_op<L, O, F, const N: usize>(l: &ColumnRef, op: F) -> Result<PrimitiveColumn<O>>\n\nwhere\n\n L: PrimitiveType + SimdElement,\n\n O: PrimitiveType + SimdElement,\n\n F: Fn(Simd<L, N>) -> Simd<O, N>,\n\n LaneCount<N>: SupportedLaneCount,\n\n{\n\n let left: &PrimitiveColumn<L> = Series::check_get(l)?;\n\n let lhs_chunks = left.values().chunks_exact(N);\n\n let lhs_remainder = lhs_chunks.remainder();\n\n\n\n let mut values = Vec::<O>::with_capacity(l.len());\n\n lhs_chunks.for_each(|lhs| {\n\n let res = op(Simd::from_slice(lhs));\n\n values.extend_from_slice(res.as_array())\n\n });\n\n\n\n if !lhs_remainder.is_empty() {\n\n let lhs = from_incomplete_chunk(lhs_remainder, L::default());\n\n let res = op(lhs);\n\n values.extend_from_slice(&res.as_array()[0..lhs_remainder.len()])\n\n };\n\n\n\n Ok(PrimitiveColumn::<O>::new_from_vec(values))\n\n}\n", "file_path": "common/functions/src/scalars/expressions/unary.rs", "rank": 53, "score": 197318.7733626415 }, { "content": "// Get the latest tag:\n\n// git describe --tags --abbrev=0\n\n// v0.6.99-nightly\n\npub fn add_env_git_tag() {\n\n let r = run_script::run_script!(r#\"git describe --tags --abbrev=0\"#);\n\n let tag = match r {\n\n Ok((_, output, _)) => output,\n\n Err(e) => e.to_string(),\n\n };\n\n println!(\"cargo:rustc-env=VERGEN_GIT_SEMVER={}\", tag);\n\n}\n\n\n", "file_path": "common/building/src/lib.rs", "rank": 54, "score": 195804.93723127537 }, { "content": "pub fn set_panic_hook() {\n\n // Set a panic hook that records the panic as a `tracing` event at the\n\n // `ERROR` verbosity level.\n\n //\n\n 
// If we are currently in a span when the panic occurred, the logged event\n\n // will include the current span, allowing the context in which the panic\n\n // occurred to be recorded.\n\n std::panic::set_hook(Box::new(|panic| {\n\n let backtrace = Backtrace::force_capture();\n\n let backtrace = format!(\"{:?}\", backtrace);\n\n if let Some(location) = panic.location() {\n\n tracing::error!(\n\n message = %panic,\n\n backtrace = %backtrace,\n\n panic.file = location.file(),\n\n panic.line = location.line(),\n\n panic.column = location.column(),\n\n );\n\n } else {\n\n tracing::error!(message = %panic, backtrace = %backtrace);\n\n }\n\n }));\n\n}\n", "file_path": "common/tracing/src/panic_hook.rs", "rank": 55, "score": 195804.93723127537 }, { "content": "pub fn add_env_credits_info() {\n\n let metadata_command = cargo_metadata::MetadataCommand::new();\n\n\n\n let deps = match cargo_license::get_dependencies_from_cargo_lock(metadata_command, false, false)\n\n {\n\n Ok(v) => v,\n\n Err(err) => {\n\n tracing::error!(\"{:?}\", err);\n\n vec![]\n\n }\n\n };\n\n\n\n let names: Vec<String> = deps.iter().map(|x| (&x.name).to_string()).collect();\n\n let versions: Vec<String> = deps.iter().map(|x| x.version.to_string()).collect();\n\n let licenses: Vec<String> = deps\n\n .iter()\n\n .map(|x| match &x.license {\n\n None => \"UNKNOWN\".to_string(),\n\n Some(license) => license.to_string(),\n\n })\n", "file_path": "common/building/src/lib.rs", "rank": 56, "score": 195804.93723127537 }, { "content": "pub fn add_env_commit_authors() {\n\n let r = run_script::run_script!(\n\n // use email to uniq authors\n\n r#\"git shortlog HEAD -sne | awk '{$1=\"\"; sub(\" \", \" \\\"\"); print }' | awk -F'<' '!x[$1]++' | \\\n\n awk -F'<' '!x[$2]++' | awk -F'<' '{gsub(/ +$/, \"\\\",\", $1); print $1}' | sort | xargs\"#\n\n );\n\n let authors = match r {\n\n Ok((_, output, _)) => output,\n\n Err(e) => e.to_string(),\n\n };\n\n println!(\"cargo:rustc-env=DATABEND_COMMIT_AUTHORS={}\", authors);\n\n}\n\n\n", "file_path": "common/building/src/lib.rs", "rank": 57, "score": 195804.93723127537 }, { "content": "/// Initialize unit test tracing for metasrv\n\npub fn init_meta_ut_tracing() {\n\n static START: Once = Once::new();\n\n\n\n START.call_once(|| {\n\n let mut g = META_UT_LOG_GUARD.as_ref().lock().unwrap();\n\n *g = Some(do_init_meta_ut_tracing(\n\n \"unittest-meta\",\n\n \"_logs_unittest\",\n\n \"DEBUG\",\n\n ));\n\n });\n\n}\n\n\n\nstatic META_UT_LOG_GUARD: Lazy<Arc<Mutex<Option<Vec<WorkerGuard>>>>> =\n\n Lazy::new(|| Arc::new(Mutex::new(None)));\n\n\n\npub struct EventFormatter {}\n\n\n\nimpl<S, N> FormatEvent<S, N> for EventFormatter\n\nwhere\n", "file_path": "common/tracing/src/logging.rs", "rank": 58, "score": 195804.93723127537 }, { "content": "pub fn init_default_metrics_recorder() {\n\n static START: Once = Once::new();\n\n START.call_once(init_prometheus_recorder)\n\n}\n\n\n", "file_path": "common/metrics/src/recorder.rs", "rank": 59, "score": 195804.93723127537 }, { "content": "/// Init tracing for unittest.\n\n/// Write logs to file `unittest`.\n\npub fn init_default_ut_tracing() {\n\n static START: Once = Once::new();\n\n\n\n START.call_once(|| {\n\n let mut g = GLOBAL_UT_LOG_GUARD.as_ref().lock().unwrap();\n\n *g = Some(init_global_tracing(\"unittest\", \"_logs_unittest\", \"DEBUG\"));\n\n });\n\n}\n\n\n\nstatic GLOBAL_UT_LOG_GUARD: Lazy<Arc<Mutex<Option<Vec<WorkerGuard>>>>> =\n\n Lazy::new(|| Arc::new(Mutex::new(None)));\n\n\n", "file_path": "common/tracing/src/logging.rs", "rank": 60, "score": 
195804.93723127537 }, { "content": "pub fn try_into_table_info(\n\n hms_table: hms::Table,\n\n fields: Vec<hms::FieldSchema>,\n\n) -> Result<TableInfo> {\n\n let schema = Arc::new(try_into_schema(fields)?);\n\n let meta = TableMeta {\n\n schema,\n\n engine: HIVE_TABLE_ENGIE.to_owned(),\n\n created_on: Utc::now(),\n\n ..Default::default()\n\n };\n\n\n\n let table_info = TableInfo {\n\n ident: TableIdent {\n\n table_id: 0,\n\n seq: 0,\n\n },\n\n desc: \"\".to_owned(),\n\n name: hms_table.table_name.unwrap_or_default(),\n\n meta,\n\n };\n\n\n\n Ok(table_info)\n\n}\n\n\n", "file_path": "query/src/catalogs/hive/converters.rs", "rank": 61, "score": 195804.93723127537 }, { "content": "pub fn resolve_table(\n\n ctx: &QueryContext,\n\n object_name: &ObjectName,\n\n statement_name: &str,\n\n) -> Result<(String, String, String)> {\n\n let idents = &object_name.0;\n\n match idents.len() {\n\n 0 => Err(ErrorCode::SyntaxException(format!(\n\n \"table name must be specified in statement `{}`\",\n\n statement_name\n\n ))),\n\n 1 => Ok((\n\n ctx.get_current_catalog(),\n\n ctx.get_current_database(),\n\n idents[0].value.clone(),\n\n )),\n\n 2 => Ok((\n\n ctx.get_current_catalog(),\n\n idents[0].value.clone(),\n\n idents[1].value.clone(),\n", "file_path": "query/src/sql/statements/statement_common.rs", "rank": 62, "score": 195804.93723127537 }, { "content": "pub fn resolve_database(\n\n ctx: &QueryContext,\n\n name: &ObjectName,\n\n statement_name: &str,\n\n) -> Result<(String, String)> {\n\n let idents = &name.0;\n\n match idents.len() {\n\n 0 => Err(ErrorCode::SyntaxException(format!(\n\n \"database name must be specified in statement `{}`\",\n\n statement_name\n\n ))),\n\n 1 => Ok((ctx.get_current_catalog(), idents[0].value.clone())),\n\n 2 => Ok((idents[0].value.clone(), idents[1].value.clone())),\n\n _ => Err(ErrorCode::SyntaxException(format!(\n\n \"database name should be [`catalog`].`db` in statement {}\",\n\n statement_name\n\n ))),\n\n }\n\n}\n", "file_path": "query/src/sql/statements/statement_common.rs", "rank": 63, "score": 195804.93723127537 }, { "content": "pub fn add_building_env_vars() {\n\n set_env_config();\n\n add_env_git_tag();\n\n add_env_commit_authors();\n\n add_env_credits_info();\n\n}\n\n\n", "file_path": "common/building/src/lib.rs", "rank": 64, "score": 195804.93723127537 }, { "content": "/// Coercion rule for numerical types: The type that both lhs and rhs\n\n/// can be casted to for numerical calculation, while maintaining\n\n/// maximum precision\n\npub fn numerical_coercion(\n\n lhs_type: &DataTypeImpl,\n\n rhs_type: &DataTypeImpl,\n\n allow_overflow: bool,\n\n) -> Result<DataTypeImpl> {\n\n let lhs_id = lhs_type.data_type_id();\n\n let rhs_id = rhs_type.data_type_id();\n\n\n\n let has_float = lhs_id.is_floating() || rhs_id.is_floating();\n\n let has_integer = lhs_id.is_integer() || rhs_id.is_integer();\n\n let has_signed = lhs_id.is_signed_numeric() || rhs_id.is_signed_numeric();\n\n\n\n let size_of_lhs = lhs_id.numeric_byte_size()?;\n\n let size_of_rhs = rhs_id.numeric_byte_size()?;\n\n\n\n let max_size_of_unsigned_integer = cmp::max(\n\n if lhs_id.is_signed_numeric() {\n\n 0\n\n } else {\n\n size_of_lhs\n", "file_path": "common/datavalues/src/types/type_coercion.rs", "rank": 65, "score": 195804.93723127537 }, { "content": "pub fn eval_aggr(\n\n name: &str,\n\n params: Vec<DataValue>,\n\n columns: &[ColumnWithField],\n\n rows: usize,\n\n) -> Result<ColumnRef> {\n\n let factory = AggregateFunctionFactory::instance();\n\n let arguments = columns.iter().map(|c| 
c.field().clone()).collect();\n\n let cols: Vec<ColumnRef> = columns.iter().map(|c| c.column().clone()).collect();\n\n\n\n let func = factory.get(name, params, arguments)?;\n\n let data_type = func.return_type()?;\n\n\n\n let eval = EvalAggr::new(func.clone());\n\n func.accumulate(eval.addr, &cols, None, rows)?;\n\n let mut builder = data_type.create_mutable(1024);\n\n func.merge_result(eval.addr, builder.as_mut())?;\n\n Ok(builder.to_column())\n\n}\n", "file_path": "common/functions/src/aggregates/aggregator_common.rs", "rank": 66, "score": 195804.93723127537 }, { "content": "#[inline]\n\nfn sub_scalar<O>(l: impl AsPrimitive<O>, r: impl AsPrimitive<O>, _ctx: &mut EvalContext) -> O\n\nwhere O: PrimitiveType + Sub<Output = O> {\n\n l.as_() - r.as_()\n\n}\n\n\n", "file_path": "common/functions/src/scalars/arithmetics/arithmetic_minus.rs", "rank": 67, "score": 195613.28264786093 }, { "content": "#[inline]\n\nfn mul_scalar<O>(l: impl AsPrimitive<O>, r: impl AsPrimitive<O>, _ctx: &mut EvalContext) -> O\n\nwhere O: PrimitiveType + Mul<Output = O> {\n\n l.as_() * r.as_()\n\n}\n\n\n", "file_path": "common/functions/src/scalars/arithmetics/arithmetic_mul.rs", "rank": 68, "score": 195613.28264786093 }, { "content": "#[inline]\n\nfn add_scalar<O>(l: impl AsPrimitive<O>, r: impl AsPrimitive<O>, _ctx: &mut EvalContext) -> O\n\nwhere O: PrimitiveType + Add<Output = O> {\n\n l.as_() + r.as_()\n\n}\n\n\n", "file_path": "common/functions/src/scalars/arithmetics/arithmetic_plus.rs", "rank": 69, "score": 195613.28264786093 }, { "content": "pub fn optimize(expression: SExpr, _ctx: OptimizeContext) -> Result<SExpr> {\n\n let mut heuristic = HeuristicOptimizer::create()?;\n\n let s_expr = heuristic.optimize(expression)?;\n\n // TODO: enable cascades optimizer\n\n // let mut cascades = CascadesOptimizer::create(ctx);\n\n // cascades.optimize(s_expr)\n\n\n\n Ok(s_expr)\n\n}\n", "file_path": "query/src/sql/optimizer/mod.rs", "rank": 70, "score": 193958.64358757256 }, { "content": "/// Rebuilds an `expr` with columns that refer to aliases replaced by the\n\n/// alias' underlying `expr`.\n\npub fn resolve_aliases_to_exprs(\n\n expr: &Expression,\n\n aliases: &HashMap<String, Expression>,\n\n) -> Result<Expression> {\n\n clone_with_replacement(expr, &|nest_exprs| match nest_exprs {\n\n Expression::Column(name) => {\n\n if let Some(aliased_expr) = aliases.get(name) {\n\n Ok(Some(aliased_expr.clone()))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n _ => Ok(None),\n\n })\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 71, "score": 193156.7002041515 }, { "content": "/// convert expr to Verifiable Expression\n\n/// Rules: (section 5.2 of http://vldb.org/pvldb/vol14/p3083-edara.pdf)\n\npub fn build_verifiable_expr(\n\n expr: &Expression,\n\n schema: &DataSchemaRef,\n\n stat_columns: &mut StatColumns,\n\n) -> Expression {\n\n let unhandled = lit(true);\n\n\n\n let (exprs, op) = match expr {\n\n Expression::Literal { .. 
} => return expr.clone(),\n\n Expression::ScalarFunction { op, args } => (args.clone(), op.clone()),\n\n Expression::BinaryExpression { left, op, right } => match op.to_lowercase().as_str() {\n\n \"and\" => {\n\n let left = build_verifiable_expr(left, schema, stat_columns);\n\n let right = build_verifiable_expr(right, schema, stat_columns);\n\n return left.and(right);\n\n }\n\n \"or\" => {\n\n let left = build_verifiable_expr(left, schema, stat_columns);\n\n let right = build_verifiable_expr(right, schema, stat_columns);\n\n return left.or(right);\n", "file_path": "query/src/storages/index/range_filter.rs", "rank": 72, "score": 193150.22520822613 }, { "content": "// An role can be granted with multiple roles, find all the related roles in a DFS manner\n\npub fn find_all_related_roles(\n\n cache: &HashMap<String, RoleInfo>,\n\n role_identities: &[String],\n\n) -> Vec<RoleInfo> {\n\n let mut visited: HashSet<String> = HashSet::new();\n\n let mut result: Vec<RoleInfo> = vec![];\n\n let mut q: VecDeque<String> = role_identities.iter().cloned().collect();\n\n while let Some(role_identity) = q.pop_front() {\n\n if visited.contains(&role_identity) {\n\n continue;\n\n }\n\n let cache_key = role_identity.to_string();\n\n visited.insert(role_identity);\n\n let role = match cache.get(&cache_key) {\n\n None => continue,\n\n Some(role) => role,\n\n };\n\n result.push(role.clone());\n\n for related_role in role.grants.roles() {\n\n q.push_back(related_role);\n\n }\n\n }\n\n result\n\n}\n", "file_path": "query/src/users/role_cache_mgr.rs", "rank": 73, "score": 193150.22520822613 }, { "content": "#[inline]\n\npub fn numerical_arithmetic_coercion(\n\n op: &DataValueBinaryOperator,\n\n lhs_type: &DataTypeImpl,\n\n rhs_type: &DataTypeImpl,\n\n) -> Result<DataTypeImpl> {\n\n let lhs_id = lhs_type.data_type_id();\n\n let rhs_id = rhs_type.data_type_id();\n\n\n\n // error on any non-numeric type\n\n if !lhs_id.is_numeric() || !rhs_id.is_numeric() {\n\n return Result::Err(ErrorCode::BadDataValueType(format!(\n\n \"DataValue Error: Unsupported ({:?}) {} ({:?})\",\n\n lhs_type, op, rhs_type\n\n )));\n\n };\n\n\n\n let has_signed = lhs_id.is_signed_numeric() || rhs_id.is_signed_numeric();\n\n let has_float = lhs_id.is_floating() || rhs_id.is_floating();\n\n let max_size = cmp::max(lhs_id.numeric_byte_size()?, rhs_id.numeric_byte_size()?);\n\n\n", "file_path": "common/datavalues/src/types/type_coercion.rs", "rank": 74, "score": 193150.22520822613 }, { "content": "pub fn cast_from_timestamp(\n\n column: &ColumnRef,\n\n from_type: &DataTypeImpl,\n\n data_type: &DataTypeImpl,\n\n cast_options: &CastOptions,\n\n func_ctx: &FunctionContext,\n\n) -> Result<(ColumnRef, Option<Bitmap>)> {\n\n let c = Series::remove_nullable(column);\n\n let c: &Int64Column = Series::check_get(&c)?;\n\n let size = c.len();\n\n\n\n let date_time64 = from_type.as_any().downcast_ref::<TimestampType>().unwrap();\n\n\n\n match data_type.data_type_id() {\n\n TypeID::String => {\n\n let mut builder = MutableStringColumn::with_capacity(size);\n\n let tz = func_ctx.tz;\n\n for v in c.iter() {\n\n let s = timestamp_to_string(\n\n tz.timestamp(*v / 1_000_000, (*v % 1_000_000 * 1_000) as u32),\n", "file_path": "common/functions/src/scalars/expressions/cast_from_datetimes.rs", "rank": 75, "score": 193150.22520822613 }, { "content": "pub fn cast_to_variant(\n\n column: &ColumnRef,\n\n from_type: &DataTypeImpl,\n\n data_type: &DataTypeImpl,\n\n _func_ctx: &FunctionContext,\n\n) -> Result<(ColumnRef, Option<Bitmap>)> {\n\n let column = 
Series::remove_nullable(column);\n\n let size = column.len();\n\n\n\n if data_type.data_type_id() == TypeID::VariantArray {\n\n return Err(ErrorCode::BadDataValueType(format!(\n\n \"Expression type does not match column data type, expecting ARRAY but got {}\",\n\n from_type.data_type_id()\n\n )));\n\n } else if data_type.data_type_id() == TypeID::VariantObject {\n\n return Err(ErrorCode::BadDataValueType(format!(\n\n \"Expression type does not match column data type, expecting OBJECT but got {}\",\n\n from_type.data_type_id()\n\n )));\n\n }\n", "file_path": "common/functions/src/scalars/expressions/cast_with_type.rs", "rank": 76, "score": 193150.22520822613 }, { "content": "pub fn cast_from_variant(\n\n column: &ColumnRef,\n\n data_type: &DataTypeImpl,\n\n func_ctx: &FunctionContext,\n\n) -> Result<(ColumnRef, Option<Bitmap>)> {\n\n let column = Series::remove_nullable(column);\n\n let json_column: &VariantColumn = if column.is_const() {\n\n let const_column: &ConstColumn = Series::check_get(&column)?;\n\n Series::check_get(const_column.inner())?\n\n } else {\n\n Series::check_get(&column)?\n\n };\n\n let size = json_column.len();\n\n let mut bitmap = new_mutable_bitmap(size, true);\n\n\n\n with_match_primitive_type_id!(data_type.data_type_id(), |$T| {\n\n let mut builder = ColumnBuilder::<$T>::with_capacity(size);\n\n\n\n for (row, value) in json_column.iter().enumerate() {\n\n match value.as_ref() {\n", "file_path": "common/functions/src/scalars/expressions/cast_from_variant.rs", "rank": 77, "score": 193150.22520822613 }, { "content": "#[test]\n\npub fn test_format_field_name() {\n\n use databend_query::sql::exec::decode_field_name;\n\n use databend_query::sql::exec::format_field_name;\n\n let display_name = \"column_name123名字\".to_string();\n\n let index = 12321;\n\n let field_name = format_field_name(display_name.as_str(), index);\n\n let (decoded_name, decoded_index) = decode_field_name(field_name.as_str()).unwrap();\n\n assert!(decoded_name == display_name && decoded_index == index);\n\n}\n", "file_path": "query/tests/it/sql/exec/mod.rs", "rank": 78, "score": 193150.22520822613 }, { "content": "pub fn codegen_arithmetic_type() {\n\n use DataValueBinaryOperator::*;\n\n use DataValueUnaryOperator::*;\n\n\n\n let dest = Path::new(\"common/datavalues/src/types\");\n\n let path = dest.join(\"arithmetics_type.rs\");\n\n\n\n let mut file = File::create(&path).expect(\"open\");\n\n // Write the head.\n\n writeln!(\n\n file,\n\n \"// Copyright 2021 Datafuse Labs.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \\\"License\\\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \\\"AS IS\\\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n// This code is generated by common/codegen. 
DO NOT EDIT.\n\nuse crate::PrimitiveType;\n\nuse crate::IntegerType;\n\n\n", "file_path": "common/codegen/src/writes/arithmetics_type.rs", "rank": 79, "score": 193150.22520822613 }, { "content": "pub fn construct_numeric_type(\n\n is_signed: bool,\n\n is_floating: bool,\n\n byte_size: usize,\n\n) -> Result<DataTypeImpl> {\n\n match (is_signed, is_floating, byte_size) {\n\n (false, false, 1) => Ok(UInt8Type::new_impl()),\n\n (false, false, 2) => Ok(UInt16Type::new_impl()),\n\n (false, false, 4) => Ok(UInt32Type::new_impl()),\n\n (false, false, 8) => Ok(UInt64Type::new_impl()),\n\n (false, true, 4) => Ok(Float32Type::new_impl()),\n\n (false, true, 8) => Ok(Float64Type::new_impl()),\n\n (true, false, 1) => Ok(Int8Type::new_impl()),\n\n (true, false, 2) => Ok(Int16Type::new_impl()),\n\n (true, false, 4) => Ok(Int32Type::new_impl()),\n\n (true, false, 8) => Ok(Int64Type::new_impl()),\n\n (true, true, 1) => Ok(Float32Type::new_impl()),\n\n (true, true, 2) => Ok(Float32Type::new_impl()),\n\n (true, true, 4) => Ok(Float32Type::new_impl()),\n\n (true, true, 8) => Ok(Float64Type::new_impl()),\n", "file_path": "common/datavalues/src/types/type_coercion.rs", "rank": 80, "score": 193150.22520822613 }, { "content": "pub fn cast_with_type(\n\n column: &ColumnRef,\n\n from_type: &DataTypeImpl,\n\n target_type: &DataTypeImpl,\n\n cast_options: &CastOptions,\n\n func_ctx: &FunctionContext,\n\n) -> Result<ColumnRef> {\n\n // they are pyhsically the same type\n\n if &column.data_type() == target_type {\n\n return Ok(column.clone());\n\n }\n\n\n\n if target_type.data_type_id() == TypeID::Null {\n\n return Ok(Arc::new(NullColumn::new(column.len())));\n\n }\n\n\n\n if from_type.data_type_id() == TypeID::Null {\n\n //all is null\n\n if target_type.is_nullable() {\n\n return target_type.create_constant_column(&DataValue::Null, column.len());\n", "file_path": "common/functions/src/scalars/expressions/cast_with_type.rs", "rank": 81, "score": 193150.22520822613 }, { "content": "pub fn cast_from_date(\n\n column: &ColumnRef,\n\n from_type: &DataTypeImpl,\n\n data_type: &DataTypeImpl,\n\n cast_options: &CastOptions,\n\n func_ctx: &FunctionContext,\n\n) -> Result<(ColumnRef, Option<Bitmap>)> {\n\n let c = Series::remove_nullable(column);\n\n let c: &Int32Column = Series::check_get(&c)?;\n\n let size = c.len();\n\n\n\n match data_type.data_type_id() {\n\n TypeID::String => {\n\n let mut builder = ColumnBuilder::<Vu8>::with_capacity(size);\n\n\n\n for v in c.iter() {\n\n let utc = \"UTC\".parse::<Tz>().unwrap();\n\n let s = timestamp_to_string(utc.timestamp(*v as i64 * 24 * 3600, 0_u32), DATE_FMT);\n\n builder.append(s.as_bytes());\n\n }\n", "file_path": "common/functions/src/scalars/expressions/cast_from_datetimes.rs", "rank": 82, "score": 193150.22520822613 }, { "content": "pub fn cast_from_string(\n\n column: &ColumnRef,\n\n from_type: &DataTypeImpl,\n\n data_type: &DataTypeImpl,\n\n cast_options: &CastOptions,\n\n func_ctx: &FunctionContext,\n\n) -> Result<(ColumnRef, Option<Bitmap>)> {\n\n let str_column = Series::remove_nullable(column);\n\n let str_column: &StringColumn = Series::check_get(&str_column)?;\n\n let size = str_column.len();\n\n let mut bitmap = new_mutable_bitmap(size, true);\n\n\n\n match data_type.data_type_id() {\n\n TypeID::Date => {\n\n let mut builder = ColumnBuilder::<i32>::with_capacity(size);\n\n\n\n for (row, v) in str_column.iter().enumerate() {\n\n if let Some(d) = string_to_date(v) {\n\n builder.append((d.num_days_from_ce() - EPOCH_DAYS_FROM_CE) as i32);\n\n } else {\n", "file_path": 
"common/functions/src/scalars/expressions/cast_from_string.rs", "rank": 83, "score": 193150.22520822613 }, { "content": "// path_as_root set to true when we create external stage\n\n// path_as_root set to false when we copy from external stage\n\npub fn parse_stage_storage(\n\n location: &str,\n\n credential_options: &BTreeMap<String, String>,\n\n encryption_options: &BTreeMap<String, String>,\n\n) -> Result<(StageStorage, String)> {\n\n // Parse uri.\n\n // 's3://<bucket>[/<path>]'\n\n let uri = location.parse::<http::Uri>().map_err(|_e| {\n\n ErrorCode::SyntaxException(\n\n \"File location uri must be specified, for example: 's3://<bucket>[/<path>]'\",\n\n )\n\n })?;\n\n let bucket = uri\n\n .host()\n\n .ok_or_else(|| {\n\n ErrorCode::SyntaxException(\n\n \"File location uri must be specified, for example: 's3://<bucket>[/<path>]'\",\n\n )\n\n })?\n\n .to_string();\n", "file_path": "query/src/sql/statements/statement_common.rs", "rank": 84, "score": 193150.22520822613 }, { "content": "pub fn validate_function_arg(\n\n name: &str,\n\n args_len: usize,\n\n variadic_arguments: Option<(usize, usize)>,\n\n num_arguments: usize,\n\n) -> Result<()> {\n\n match variadic_arguments {\n\n Some((start, end)) => {\n\n return if args_len < start || args_len > end {\n\n Err(ErrorCode::NumberArgumentsNotMatch(format!(\n\n \"Function `{}` expect to have [{}, {}] arguments, but got {}\",\n\n name, start, end, args_len\n\n )))\n\n } else {\n\n Ok(())\n\n };\n\n }\n\n None => {\n\n return if num_arguments != args_len {\n\n Err(ErrorCode::NumberArgumentsNotMatch(format!(\n\n \"Function `{}` expect to have {} arguments, but got {}\",\n\n name, num_arguments, args_len\n\n )))\n\n } else {\n\n Ok(())\n\n };\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_validator.rs", "rank": 85, "score": 193150.22520822613 }, { "content": "/// Parse a SQL string into `Statement`s.\n\npub fn parse_sql<'a>(\n\n sql_tokens: &'a [Token<'a>],\n\n backtrace: &'a Backtrace<'a>,\n\n) -> Result<Vec<Statement<'a>>> {\n\n match statements(Input(sql_tokens, backtrace)) {\n\n Ok((rest, stmts)) if rest[0].kind == TokenKind::EOI => Ok(stmts),\n\n Ok((rest, _)) => Err(ErrorCode::SyntaxException(\n\n rest[0].display_error(\"unable to parse rest of the sql\".to_string()),\n\n )),\n\n Err(nom::Err::Error(err) | nom::Err::Failure(err)) => {\n\n Err(ErrorCode::SyntaxException(err.display_error(())))\n\n }\n\n Err(nom::Err::Incomplete(_)) => unreachable!(),\n\n }\n\n}\n", "file_path": "common/ast/src/parser/mod.rs", "rank": 86, "score": 192013.5260238416 }, { "content": "pub fn next_port() -> u32 {\n\n 29000u32 + (GlobalSequence::next() as u32)\n\n}\n\n\n\npub struct MetaSrvTestContext {\n\n // /// To hold a per-case logging guard\n\n // logging_guard: (WorkerGuard, DefaultGuard),\n\n pub config: configs::Config,\n\n\n\n pub meta_node: Option<Arc<MetaNode>>,\n\n\n\n pub grpc_srv: Option<Box<GrpcServer>>,\n\n}\n\n\n\nimpl MetaSrvTestContext {\n\n /// Create a new Config for test, with unique port assigned\n\n pub fn new(id: u64) -> MetaSrvTestContext {\n\n let config_id = next_port();\n\n\n\n let mut config = configs::Config::empty();\n", "file_path": "metasrv/tests/it/tests/service.rs", "rank": 87, "score": 192013.5260238416 }, { "content": "fn eval_trunc(columns: &ColumnsWithField) -> Result<ColumnRef> {\n\n let mut ctx = EvalContext::default();\n\n match columns.len() {\n\n 1 => {\n\n with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {\n\n let col = scalar_unary_op::<$S, f64, 
_>(columns[0].column(), trunc, &mut ctx)?;\n\n Ok(Arc::new(col))\n\n },{\n\n unreachable!()\n\n })\n\n }\n\n\n\n _ => {\n\n with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {\n\n with_match_primitive_type_id!(columns[1].data_type().data_type_id(), |$T| {\n\n let col = scalar_binary_op::<$S, $T, f64, _>(\n\n columns[0].column(),\n\n columns[1].column(),\n\n trunc_to,\n\n &mut ctx,\n", "file_path": "common/functions/src/scalars/maths/round.rs", "rank": 88, "score": 191321.57846137753 }, { "content": "fn eval_round(columns: &ColumnsWithField) -> Result<ColumnRef> {\n\n let mut ctx = EvalContext::default();\n\n match columns.len() {\n\n 1 => {\n\n with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {\n\n let col = scalar_unary_op::<$S, f64, _>(columns[0].column(), round, &mut ctx)?;\n\n Ok(Arc::new(col))\n\n },{\n\n unreachable!()\n\n })\n\n }\n\n\n\n _ => {\n\n with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {\n\n with_match_primitive_type_id!(columns[1].data_type().data_type_id(), |$T| {\n\n let col = scalar_binary_op::<$S, $T, f64, _>(\n\n columns[0].column(),\n\n columns[1].column(),\n\n round_to,\n\n &mut ctx\n", "file_path": "common/functions/src/scalars/maths/round.rs", "rank": 89, "score": 191321.57846137753 }, { "content": "// cast using arrow's cast compute\n\npub fn arrow_cast_compute(\n\n column: &ColumnRef,\n\n from_type: &DataTypeImpl,\n\n data_type: &DataTypeImpl,\n\n cast_options: &CastOptions,\n\n func_ctx: &FunctionContext,\n\n) -> Result<(ColumnRef, Option<Bitmap>)> {\n\n if data_type.data_type_id().is_variant() {\n\n return cast_to_variant(column, from_type, data_type, func_ctx);\n\n }\n\n\n\n let arrow_array = column.as_arrow_array();\n\n let arrow_options = cast_options.as_arrow();\n\n let result = cast::cast(arrow_array.as_ref(), &data_type.arrow_type(), arrow_options)?;\n\n let result: ArrayRef = Arc::from(result);\n\n let bitmap = result.validity().cloned();\n\n Ok((result.into_column(), bitmap))\n\n}\n\n\n", "file_path": "common/functions/src/scalars/expressions/cast_with_type.rs", "rank": 90, "score": 190636.06941754158 }, { "content": "#[allow(clippy::borrowed_box)]\n\npub fn test_eval_with_type(\n\n op: &str,\n\n rows_size: usize,\n\n arguments: &[ColumnWithField],\n\n arguments_type: &[&DataTypeImpl],\n\n) -> Result<ColumnRef> {\n\n let func = FunctionFactory::instance().get(op, arguments_type)?;\n\n func.return_type();\n\n func.eval(FunctionContext::default(), arguments, rows_size)\n\n}\n", "file_path": "common/functions/tests/it/scalars/scalar_function_test.rs", "rank": 91, "score": 190629.4915733163 }, { "content": "pub fn get_sort_descriptions(\n\n schema: &DataSchemaRef,\n\n exprs: &[Expression],\n\n) -> Result<Vec<SortColumnDescription>> {\n\n let mut sort_columns_descriptions = vec![];\n\n for x in exprs {\n\n match *x {\n\n Expression::Sort {\n\n ref expr,\n\n asc,\n\n nulls_first,\n\n ..\n\n } => {\n\n let column_name = expr.to_data_field(schema)?.name().clone();\n\n sort_columns_descriptions.push(SortColumnDescription {\n\n column_name,\n\n asc,\n\n nulls_first,\n\n });\n\n }\n", "file_path": "query/src/pipelines/transforms/transform_sort_partial.rs", "rank": 92, "score": 190629.4915733163 }, { "content": "#[inline]\n\npub fn build_regexp_from_pattern(\n\n fn_name: &str,\n\n pat: &[u8],\n\n mt: Option<&[u8]>,\n\n) -> Result<BytesRegex> {\n\n let pattern = match pat.is_empty() {\n\n true => \"^$\",\n\n false => simdutf8::basic::from_utf8(pat).map_err(|e| {\n\n 
ErrorCode::BadArguments(format!(\n\n \"Unable to convert the {} pattern to string: {}\",\n\n fn_name, e\n\n ))\n\n })?,\n\n };\n\n // the default match type value is 'i', if it is empty\n\n let mt = match mt {\n\n Some(mt) => {\n\n if mt.is_empty() {\n\n \"i\".as_bytes()\n\n } else {\n", "file_path": "common/functions/src/scalars/strings/regexp_like.rs", "rank": 93, "score": 190629.4915733163 }, { "content": "pub fn extract_value_by_path(\n\n column: &ColumnRef,\n\n path_keys: Vec<Vec<DataValue>>,\n\n input_rows: usize,\n\n ignore_case: bool,\n\n) -> Result<ColumnRef> {\n\n let column: &VariantColumn = if column.is_const() {\n\n let const_column: &ConstColumn = Series::check_get(column)?;\n\n Series::check_get(const_column.inner())?\n\n } else {\n\n Series::check_get(column)?\n\n };\n\n\n\n let mut builder = NullableColumnBuilder::<VariantValue>::with_capacity(input_rows);\n\n for path_key in path_keys.iter() {\n\n if path_key.is_empty() {\n\n for _ in 0..column.len() {\n\n builder.append_null();\n\n }\n\n continue;\n", "file_path": "common/functions/src/scalars/semi_structureds/get.rs", "rank": 94, "score": 190629.4915733163 }, { "content": "#[inline]\n\npub fn numerical_unary_arithmetic_coercion(\n\n op: &DataValueUnaryOperator,\n\n val_type: &DataTypeImpl,\n\n) -> Result<DataTypeImpl> {\n\n let type_id = val_type.data_type_id();\n\n // error on any non-numeric type\n\n if !type_id.is_numeric() {\n\n return Result::Err(ErrorCode::BadDataValueType(format!(\n\n \"DataValue Error: Unsupported ({:?})\",\n\n type_id\n\n )));\n\n };\n\n\n\n match op {\n\n DataValueUnaryOperator::Negate => {\n\n let has_float = type_id.is_floating();\n\n let has_signed = type_id.is_signed_numeric();\n\n let numeric_size = type_id.numeric_byte_size()?;\n\n let max_size = if has_signed {\n\n numeric_size\n\n } else {\n\n next_size(numeric_size)\n\n };\n\n construct_numeric_type(true, has_float, max_size)\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/types/type_coercion.rs", "rank": 95, "score": 190629.4915733163 }, { "content": "#[inline]\n\npub fn validate_regexp_arguments(\n\n fn_name: &str,\n\n pos: i64,\n\n occur: Option<i64>,\n\n ro: Option<i64>,\n\n mt: Option<&[u8]>,\n\n) -> Result<()> {\n\n if pos < 1 {\n\n return Err(ErrorCode::BadArguments(format!(\n\n \"Incorrect arguments to {}: position must be positive, but got {}\",\n\n fn_name, pos\n\n )));\n\n }\n\n if let Some(occur) = occur {\n\n if occur < 1 {\n\n return Err(ErrorCode::BadArguments(format!(\n\n \"Incorrect arguments to {}: occurrence must be positive, but got {}\",\n\n fn_name, occur\n\n )));\n\n }\n", "file_path": "common/functions/src/scalars/strings/regexp_instr.rs", "rank": 96, "score": 190629.4915733163 }, { "content": "fn data_type_enum_create(ty: &DataTypeImpl, values: &[DataValue]) -> Result<ColumnRef> {\n\n ty.create_column(values)\n\n}\n\n\n\ncriterion_group!(benches, add_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "common/datavalues/benches/data_type.rs", "rank": 97, "score": 189986.91567261558 }, { "content": "pub fn wrap_cast_if_needed(scalar: Scalar, target_type: &DataTypeImpl) -> Scalar {\n\n if scalar.data_type() != *target_type {\n\n let cast = CastExpr {\n\n from_type: scalar.data_type(),\n\n argument: Box::new(scalar),\n\n target_type: target_type.clone(),\n\n };\n\n cast.into()\n\n } else {\n\n scalar\n\n }\n\n}\n", "file_path": "query/src/sql/planner/binder/scalar_common.rs", "rank": 98, "score": 189259.49450080685 }, { "content": "/// Returns fixed seedable RNG\n\npub fn seedable_rng() -> 
StdRng {\n\n StdRng::seed_from_u64(42)\n\n}\n", "file_path": "common/datavalues/benches/if_else_then.rs", "rank": 99, "score": 189213.862459832 } ]
Rust
src/crawler.rs
Ayush1325/webcrawler-woc
086941224ead6f814bd02442021db8f742e5c874
/*! Module Containing the Crawler functions. */ use crate::extractors::links; use futures::{stream, StreamExt}; use links::Link; use reqwest::Url; use std::time::Duration; use std::{collections::HashSet, sync::Arc}; use tokio::sync::mpsc; fn init_reqwest_client(timeout: u64) -> Result<reqwest::Client, String> { let client_builder = reqwest::ClientBuilder::new().timeout(Duration::new(timeout, 0)); match client_builder.build() { Ok(x) => Ok(x), Err(_) => Err("Could not build http client".to_string()), } } fn init_dns_resolver() -> Result<trust_dns_resolver::TokioAsyncResolver, String> { match trust_dns_resolver::TokioAsyncResolver::tokio_from_system_conf() { Ok(x) => Ok(x), Err(_) => Err("Could not build dns resolver".to_string()), } } pub async fn crawl_with_depth( origin_url: Link, crawl_depth: usize, whitelist: Option<HashSet<url::Host>>, blacklist: Option<HashSet<url::Host>>, word_list: HashSet<String>, tx_output: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, task_limit: usize, timeout: u64, ) -> Result<(), String> { let mut to_crawl: HashSet<Url> = HashSet::new(); let mut crawled: HashSet<Url> = HashSet::new(); let mut dont_crawl: HashSet<Url> = HashSet::new(); let word_list = Arc::new(word_list); let client = init_reqwest_client(timeout)?; let resolver = init_dns_resolver()?; to_crawl.insert(origin_url.url); for _ in 0..crawl_depth { println!("Crawling {} URls", to_crawl.len()); let (tx_cralwer, mut rx_crawler) = mpsc::channel::<Link>(task_limit); to_crawl.iter().cloned().for_each(|x| { let tx_clone = tx_cralwer.clone(); let tx_selenium_clone = tx_selenium.clone(); let client_clone = client.clone(); let resolver_clone = resolver.clone(); let word_list_clone = word_list.clone(); tokio::spawn(async move { crawl_page( x, client_clone, tx_clone, tx_selenium_clone, task_limit, resolver_clone, word_list_clone, ) .await }); }); to_crawl.clear(); drop(tx_cralwer); while let Some(link) = rx_crawler.recv().await { if link.crawled { crawled.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } else { let should_crawl = link.should_crawl(&whitelist, &blacklist); if should_crawl && !crawled.contains(&link.url) { to_crawl.insert(link.url); } else if !should_crawl && !dont_crawl.contains(&link.url) { dont_crawl.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } } } } stream::iter(to_crawl) .map(|x| links::Link::new_from_url(&x)) .for_each_concurrent(task_limit, |x| async { let _ = tx_output.send(x).await; }) .await; Ok(()) } pub async fn crawl_no_depth( origin_url: Link, whitelist: Option<HashSet<url::Host>>, blacklist: Option<HashSet<url::Host>>, word_list: HashSet<String>, tx_output: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, task_limit: usize, timeout: u64, ) -> Result<(), String> { let mut to_crawl: HashSet<Url> = HashSet::new(); let mut crawled: HashSet<Url> = HashSet::new(); let mut dont_crawl: HashSet<Url> = HashSet::new(); let word_list = Arc::new(word_list); let client = init_reqwest_client(timeout)?; let resolver = init_dns_resolver()?; to_crawl.insert(origin_url.url.clone()); let mut first_crawl = true; while !to_crawl.is_empty() { println!("Crawling {} URls", to_crawl.len()); let (tx_cralwer, mut rx_crawler) = mpsc::channel::<Link>(task_limit); if first_crawl { let tx_clone = tx_cralwer.clone(); let client_clone = client.clone(); let url = origin_url.url.clone(); tokio::spawn(async move { crawl_sitemaps(url, tx_clone, 
task_limit, client_clone).await; }); first_crawl = false; } to_crawl.iter().cloned().for_each(|x| { let tx_clone = tx_cralwer.clone(); let tx_selenium_clone = tx_selenium.clone(); let client_clone = client.clone(); let resolver_clone = resolver.clone(); let word_list_clone = word_list.clone(); tokio::spawn(async move { crawl_page( x, client_clone, tx_clone, tx_selenium_clone, task_limit, resolver_clone, word_list_clone, ) .await }); }); to_crawl.clear(); drop(tx_cralwer); while let Some(link) = rx_crawler.recv().await { if link.crawled { crawled.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } else { let should_crawl = link.should_crawl(&whitelist, &blacklist); if should_crawl && !crawled.contains(&link.url) { to_crawl.insert(link.url); } else if !should_crawl && !dont_crawl.contains(&link.url) { dont_crawl.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } } } } Ok(()) } async fn crawl_page( url: Url, client: reqwest::Client, tx: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, limit: usize, resolver: trust_dns_resolver::TokioAsyncResolver, word_list: Arc<HashSet<String>>, ) { let mut link = links::Link::new_from_url(&url); let resp = match get_page(url.as_str(), &client).await { Ok(x) => x, Err(_) => { link.crawled = true; let _ = tx.send(link.clone()).await; return; } }; link.update_from_response(&resp); if let Some(host) = &link.host { let host = host.to_string(); let ipv4 = links::resolve_ipv4(&resolver, &host).await; let ipv6 = links::resolve_ipv6(&resolver, &host).await; link.update_dns(ipv4, ipv6); }; let is_html = link.check_mime_from_list(&[mime::TEXT_HTML, mime::TEXT_HTML_UTF_8]); if is_html { let html = match resp.text().await { Ok(x) => x, Err(_) => { return; } }; if links::check_words_html(&html, word_list) { link.contains_words = true; let _ = tx_selenium.send(link.url.to_string()).await; } let links = links::get_links_from_html(&html, url.as_str()); let tx_ref = &tx; stream::iter(links) .for_each_concurrent(limit, |x| async move { let _ = tx_ref.send(x).await; }) .await; } if let Err(_) = tx.send(link).await { return; } } async fn crawl_sitemaps(url: Url, tx: mpsc::Sender<Link>, limit: usize, client: reqwest::Client) { let mut robottxt_url = url.clone(); robottxt_url.set_path("robots.txt"); let robottxt = match get_page(robottxt_url.as_str(), &client).await { Ok(x) => match x.text().await { Ok(x) => x, Err(_) => return, }, Err(_) => return, }; let url_str = url.to_string(); robottxt .lines() .filter(|x| x.contains("Sitemap")) .filter_map(|x| x[9..].split_whitespace().next()) .map(|x| x.trim()) .filter_map(|x| links::normalize_url(x, &url_str)) .for_each(|x| { let tx_clone = tx.clone(); let client_clone = client.clone(); let limit_clone = limit.clone(); tokio::spawn(async move { crawl_sitemap(x.url, tx_clone, limit_clone, client_clone).await; }); }); } async fn crawl_sitemap(url: Url, tx: mpsc::Sender<Link>, limit: usize, client: reqwest::Client) { let mut link = links::Link::new_from_url(&url); let resp = match get_page(url.as_str(), &client).await { Ok(x) => x, Err(_) => return, }; link.update_from_response(&resp); let text = match resp.text().await { Ok(x) => x, Err(_) => return, }; let links = match link.content_type { Some(x) => match (x.type_(), x.subtype()) { (mime::TEXT, mime::PLAIN) => links::get_links_from_text(&text, url.as_str()), _ => return, }, None => return, }; let tx_ref = &tx; stream::iter(links) 
.for_each_concurrent(limit, |x| async { let _ = tx_ref.send(x).await; }) .await; } async fn get_page( url: &str, client: &reqwest::Client, ) -> Result<reqwest::Response, reqwest::Error> { let resp = client.get(url).send().await?; resp.error_for_status() }
/*! Module Containing the Crawler functions. */ use crate::extractors::links; use futures::{stream, StreamExt}; use links::Link; use reqwest::Url; use std::time::Duration; use std::{collections::HashSet, sync::Arc}; use tokio::sync::mpsc; fn init_reqwest_client(timeout: u64) -> Result<reqwest::Client, String> { let client_builder = reqwest::ClientBuilder::new().timeout(Duration::new(timeout, 0)); match client_builder.build() { Ok(x) => Ok(x), Err(_) => Err("Could not build http client".to_string()), } } fn init_dns_resolver() -> Result<trust_dns_resolver::TokioAsyncResolver, String> { match trust_dns_resolver::TokioAsyncResolver::tokio_from_system_conf() { Ok(x) => Ok(x), Err(_) => Err("Could not build dns resolver".to_string()), } } pub async fn crawl_with_depth( origin_url: Link, crawl_depth: usize, whitelist: Option<HashSet<url::Host>>, blacklist: Option<HashSet<url::Host>>, word_list: HashSet<String>, tx_output: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, task_limit: usize, timeout: u64, ) -> Result<(), String> { let mut to_crawl: HashSet<Url> = HashSet::new(); let mut crawled: HashSet<Url> = HashSet::new(); let mut dont_crawl: HashSet<Url> = HashSet::new(); let word_list = Arc::new(word_list); let client = init_reqwest_client(timeout)?; let resolver = init_dns_resolver()?; to_crawl.insert(origin_url.url); for _ in 0..crawl_depth { println!("Crawling {} URls", to_crawl.len()); let (tx_cralwer, mut rx_crawler) = mpsc::channel::<Link>(task_limit); to_crawl.iter().cloned().for_each(|x| { let tx_clone = tx_cralwer.clone(); let tx_selenium_clone = tx_selenium.clone(); let client_clone = client.clone(); let resolver_clone = resolver.clone(); let word_list_clone = word_list.clone(); tokio::spawn(async move { crawl_page( x, client_clone, tx_clone, tx_selenium_clone, task_limit, resolver_clone, word_list_clone, ) .await }); }); to_crawl.clear(); drop(tx_cralwer); while let Some(link) = rx_crawler.recv().await { if link.crawled { crawled.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } else { let should_crawl = link.should_crawl(&whitelist, &blacklist); if should_crawl && !crawled.contains(&link.url) { to_crawl.insert(link.url); } else if !should_crawl && !dont_crawl.contains(&link.url) { dont_crawl.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } } } } stream::iter(to_crawl) .map(|x| links::Link::new_from_url(&x)) .for_each_concurrent(task_limit, |x| async { let _ = tx_output.send(x).await; }) .await; Ok(()) }
async fn crawl_page( url: Url, client: reqwest::Client, tx: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, limit: usize, resolver: trust_dns_resolver::TokioAsyncResolver, word_list: Arc<HashSet<String>>, ) { let mut link = links::Link::new_from_url(&url); let resp = match get_page(url.as_str(), &client).await { Ok(x) => x, Err(_) => { link.crawled = true; let _ = tx.send(link.clone()).await; return; } }; link.update_from_response(&resp); if let Some(host) = &link.host { let host = host.to_string(); let ipv4 = links::resolve_ipv4(&resolver, &host).await; let ipv6 = links::resolve_ipv6(&resolver, &host).await; link.update_dns(ipv4, ipv6); }; let is_html = link.check_mime_from_list(&[mime::TEXT_HTML, mime::TEXT_HTML_UTF_8]); if is_html { let html = match resp.text().await { Ok(x) => x, Err(_) => { return; } }; if links::check_words_html(&html, word_list) { link.contains_words = true; let _ = tx_selenium.send(link.url.to_string()).await; } let links = links::get_links_from_html(&html, url.as_str()); let tx_ref = &tx; stream::iter(links) .for_each_concurrent(limit, |x| async move { let _ = tx_ref.send(x).await; }) .await; } if let Err(_) = tx.send(link).await { return; } } async fn crawl_sitemaps(url: Url, tx: mpsc::Sender<Link>, limit: usize, client: reqwest::Client) { let mut robottxt_url = url.clone(); robottxt_url.set_path("robots.txt"); let robottxt = match get_page(robottxt_url.as_str(), &client).await { Ok(x) => match x.text().await { Ok(x) => x, Err(_) => return, }, Err(_) => return, }; let url_str = url.to_string(); robottxt .lines() .filter(|x| x.contains("Sitemap")) .filter_map(|x| x[9..].split_whitespace().next()) .map(|x| x.trim()) .filter_map(|x| links::normalize_url(x, &url_str)) .for_each(|x| { let tx_clone = tx.clone(); let client_clone = client.clone(); let limit_clone = limit.clone(); tokio::spawn(async move { crawl_sitemap(x.url, tx_clone, limit_clone, client_clone).await; }); }); } async fn crawl_sitemap(url: Url, tx: mpsc::Sender<Link>, limit: usize, client: reqwest::Client) { let mut link = links::Link::new_from_url(&url); let resp = match get_page(url.as_str(), &client).await { Ok(x) => x, Err(_) => return, }; link.update_from_response(&resp); let text = match resp.text().await { Ok(x) => x, Err(_) => return, }; let links = match link.content_type { Some(x) => match (x.type_(), x.subtype()) { (mime::TEXT, mime::PLAIN) => links::get_links_from_text(&text, url.as_str()), _ => return, }, None => return, }; let tx_ref = &tx; stream::iter(links) .for_each_concurrent(limit, |x| async { let _ = tx_ref.send(x).await; }) .await; } async fn get_page( url: &str, client: &reqwest::Client, ) -> Result<reqwest::Response, reqwest::Error> { let resp = client.get(url).send().await?; resp.error_for_status() }
pub async fn crawl_no_depth( origin_url: Link, whitelist: Option<HashSet<url::Host>>, blacklist: Option<HashSet<url::Host>>, word_list: HashSet<String>, tx_output: mpsc::Sender<Link>, tx_selenium: mpsc::Sender<String>, task_limit: usize, timeout: u64, ) -> Result<(), String> { let mut to_crawl: HashSet<Url> = HashSet::new(); let mut crawled: HashSet<Url> = HashSet::new(); let mut dont_crawl: HashSet<Url> = HashSet::new(); let word_list = Arc::new(word_list); let client = init_reqwest_client(timeout)?; let resolver = init_dns_resolver()?; to_crawl.insert(origin_url.url.clone()); let mut first_crawl = true; while !to_crawl.is_empty() { println!("Crawling {} URls", to_crawl.len()); let (tx_cralwer, mut rx_crawler) = mpsc::channel::<Link>(task_limit); if first_crawl { let tx_clone = tx_cralwer.clone(); let client_clone = client.clone(); let url = origin_url.url.clone(); tokio::spawn(async move { crawl_sitemaps(url, tx_clone, task_limit, client_clone).await; }); first_crawl = false; } to_crawl.iter().cloned().for_each(|x| { let tx_clone = tx_cralwer.clone(); let tx_selenium_clone = tx_selenium.clone(); let client_clone = client.clone(); let resolver_clone = resolver.clone(); let word_list_clone = word_list.clone(); tokio::spawn(async move { crawl_page( x, client_clone, tx_clone, tx_selenium_clone, task_limit, resolver_clone, word_list_clone, ) .await }); }); to_crawl.clear(); drop(tx_cralwer); while let Some(link) = rx_crawler.recv().await { if link.crawled { crawled.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } else { let should_crawl = link.should_crawl(&whitelist, &blacklist); if should_crawl && !crawled.contains(&link.url) { to_crawl.insert(link.url); } else if !should_crawl && !dont_crawl.contains(&link.url) { dont_crawl.insert(link.url.clone()); if let Err(_) = tx_output.send(link).await { return Err("Output Connection Closed".to_string()); } } } } } Ok(()) }
function_block-full_function
[ { "content": "/// Helper function to parse url in a page.\n\n/// Converts relative urls to full urls.\n\n/// Also removes javascript urls and other false urls.\n\npub fn normalize_url(url: &str, base_url: &str) -> Option<Link> {\n\n if url.starts_with(\"#\") {\n\n // Checks for internal links.\n\n // Maybe will make it optioanl to ignore them.\n\n return None;\n\n }\n\n\n\n match Link::new_from_str(&url) {\n\n Some(x) => Some(x),\n\n None => Link::new_relative(&url, base_url),\n\n }\n\n}\n\n\n\n/// Function to get IPV4 DNS\n\npub async fn resolve_ipv4(\n\n resolver: &trust_dns_resolver::TokioAsyncResolver,\n\n query: &str,\n\n) -> Option<Ipv4Addr> {\n\n match resolver.ipv4_lookup(query).await {\n\n Ok(x) => match x.iter().next() {\n", "file_path": "src/extractors/links.rs", "rank": 2, "score": 93242.6964232949 }, { "content": "/// Function to get links from a text file containing link in each line\n\npub fn get_links_from_text(text: &str, url: &str) -> HashSet<Link> {\n\n text.lines()\n\n .map(|x| x.trim())\n\n .filter_map(|x| normalize_url(x, url))\n\n .collect()\n\n}\n\n\n", "file_path": "src/extractors/links.rs", "rank": 3, "score": 84330.19540414367 }, { "content": "/// Function to get links from a htmp Document.\n\n/// Gets links wraped in a tags\n\npub fn get_links_from_html(html: &str, url: &str) -> HashSet<Link> {\n\n Document::from(html)\n\n .find(Name(\"a\"))\n\n .filter_map(|x| x.attr(\"href\"))\n\n .filter_map(|x| normalize_url(x, url))\n\n .collect()\n\n}\n\n\n", "file_path": "src/extractors/links.rs", "rank": 4, "score": 84327.57753835594 }, { "content": "/// Function to check if a word from a list is present in a page.\n\npub fn check_words_html(html: &str, word_list: Arc<HashSet<String>>) -> bool {\n\n word_list\n\n .iter()\n\n .find(|x| html.contains(x.as_str()))\n\n .is_some()\n\n}\n\n\n", "file_path": "src/extractors/links.rs", "rank": 5, "score": 70679.91274829395 }, { "content": " };\n\n Link {\n\n url: url.to_owned(),\n\n headers: headers.to_owned(),\n\n content_type,\n\n host,\n\n ipv4,\n\n ipv6,\n\n crawled,\n\n link_type,\n\n contains_words,\n\n }\n\n }\n\n\n\n /// Creates a new link from string url.\n\n /// Returns None if Url cannot be parsed.\n\n pub fn new_from_str(url: &str) -> Option<Self> {\n\n let parsed_url = match Url::parse(url) {\n\n Ok(x) => x,\n\n Err(_) => return None,\n", "file_path": "src/extractors/links.rs", "rank": 21, "score": 19272.787949859467 }, { "content": " /// Checks the whitelist and blacklist\n\n pub fn should_crawl(\n\n &self,\n\n whitelist_host: &Option<HashSet<url::Host>>,\n\n blacklist_host: &Option<HashSet<url::Host>>,\n\n ) -> bool {\n\n if let Some(x) = whitelist_host {\n\n return self.check_host(x, false);\n\n }\n\n if let Some(x) = blacklist_host {\n\n return !self.check_host(x, true);\n\n }\n\n false\n\n }\n\n\n\n /// Function to check if host is present in a list of hosts.\n\n /// Mostly for whitelist and blacklist.\n\n fn check_host(&self, required_host: &HashSet<url::Host>, default: bool) -> bool {\n\n match &self.host {\n\n Some(x) => required_host.contains(x),\n", "file_path": "src/extractors/links.rs", "rank": 22, "score": 19271.0600660532 }, { "content": "mod opt_headermap {\n\n use reqwest::header::HeaderMap;\n\n use serde::{Deserializer, Serializer};\n\n\n\n pub fn serialize<S>(value: &Option<HeaderMap>, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n match value {\n\n Some(x) => http_serde::header_map::serialize(x, serializer),\n\n None => serializer.serialize_none(),\n\n }\n\n 
}\n\n\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<HeaderMap>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n match http_serde::header_map::deserialize(deserializer) {\n\n Ok(x) => Ok(Some(x)),\n\n Err(_) => Ok(None),\n\n }\n\n }\n\n}\n\n\n\n/// Function to get links from a htmp Document.\n\n/// Gets links wraped in a tags\n", "file_path": "src/extractors/links.rs", "rank": 23, "score": 19270.74823725988 }, { "content": " Some(x) => Some(x.to_owned()),\n\n None => None,\n\n },\n\n Err(_) => None,\n\n }\n\n}\n\n\n\n/// Function to get IPV6 DNS\n\npub async fn resolve_ipv6(\n\n resolver: &trust_dns_resolver::TokioAsyncResolver,\n\n query: &str,\n\n) -> Option<Ipv6Addr> {\n\n match resolver.ipv6_lookup(query).await {\n\n Ok(x) => match x.iter().next() {\n\n Some(x) => Some(x.to_owned()),\n\n None => None,\n\n },\n\n Err(_) => None,\n\n }\n\n}\n", "file_path": "src/extractors/links.rs", "rank": 24, "score": 19270.476486565374 }, { "content": "\n\nimpl fmt::Display for Link {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let json = match serde_json::to_string_pretty(self) {\n\n Ok(x) => x,\n\n Err(_) => return Err(fmt::Error),\n\n };\n\n write!(f, \"{}\", json)\n\n }\n\n}\n\n\n\n/// Implementation of serializer and deserializer for Option<Mime> type.\n\n/// Calls the methods from hyper_serde\n\nmod opt_mime {\n\n use mime::Mime;\n\n use serde::{Deserializer, Serializer};\n\n\n\n pub fn serialize<S>(value: &Option<Mime>, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n", "file_path": "src/extractors/links.rs", "rank": 25, "score": 19269.3390889886 }, { "content": " if let Ok(mime_type) = mime_str {\n\n let mime_type = mime_type.parse::<Mime>();\n\n if let Ok(t) = mime_type {\n\n return Some(t);\n\n }\n\n }\n\n None\n\n }\n\n\n\n /// Function to check if the mime is present in a list.\n\n pub fn check_mime_from_list(&self, required_mime: &[Mime]) -> bool {\n\n if let Some(c) = &self.content_type {\n\n return required_mime.iter().any(|x| x == c);\n\n }\n\n false\n\n }\n\n\n\n /// Function to get the LinkType\n\n fn get_link_type(url: &Url) -> LinkType {\n\n match url.scheme() {\n", "file_path": "src/extractors/links.rs", "rank": 26, "score": 19267.94882597188 }, { "content": " false,\n\n Self::get_link_type(url),\n\n false,\n\n )\n\n }\n\n\n\n /// Creates a new Link if realtive url is supplied.\n\n /// Need the relative url and the base url.\n\n pub fn new_relative(url: &str, base_url: &str) -> Option<Self> {\n\n let base_url_parsed = match Url::parse(base_url) {\n\n Ok(x) => x,\n\n Err(_) => return None,\n\n };\n\n match base_url_parsed.join(url) {\n\n Ok(x) => Self::new_from_str(x.as_str()),\n\n Err(_) => None,\n\n }\n\n }\n\n\n\n /// Checks if a url should be crawled.\n", "file_path": "src/extractors/links.rs", "rank": 27, "score": 19267.85646349468 }, { "content": "\n\nimpl Link {\n\n /// Creates a new Link.\n\n /// Does not assume any argument.\n\n pub fn new(\n\n url: &Url,\n\n headers: &Option<reqwest::header::HeaderMap>,\n\n ipv4: Option<Ipv4Addr>,\n\n ipv6: Option<Ipv6Addr>,\n\n crawled: bool,\n\n link_type: LinkType,\n\n contains_words: bool,\n\n ) -> Self {\n\n let host = match url.host() {\n\n Some(x) => Some(x.to_owned()),\n\n None => None,\n\n };\n\n let content_type = match headers {\n\n Some(x) => Self::get_mime(x),\n\n None => None,\n", "file_path": "src/extractors/links.rs", "rank": 28, "score": 19267.302278435072 }, { "content": " None => default,\n\n }\n\n }\n\n\n\n /// Function to update ipv4 and ipv6 
dns.\n\n pub fn update_dns(&mut self, ipv4: Option<Ipv4Addr>, ipv6: Option<Ipv6Addr>) {\n\n self.ipv4 = ipv4;\n\n self.ipv6 = ipv6;\n\n }\n\n\n\n /// Function to updated Link from http response.\n\n pub fn update_from_response(&mut self, response: &reqwest::Response) {\n\n self.content_type = Self::get_mime(response.headers());\n\n self.headers = Some(response.headers().to_owned());\n\n self.crawled = true;\n\n }\n\n\n\n /// Function to get page mime type from http header.\n\n fn get_mime(header: &reqwest::header::HeaderMap) -> Option<Mime> {\n\n let mime_str = header.get(reqwest::header::CONTENT_TYPE)?.to_str();\n", "file_path": "src/extractors/links.rs", "rank": 29, "score": 19266.452292539612 }, { "content": " {\n\n match value {\n\n Some(x) => hyper_serde::serialize(x, serializer),\n\n None => serializer.serialize_none(),\n\n }\n\n }\n\n\n\n pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Mime>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n match hyper_serde::deserialize(deserializer) {\n\n Ok(x) => Ok(Some(x)),\n\n Err(_) => Ok(None),\n\n }\n\n }\n\n}\n\n\n\n/// Implementation of serializer and deserializer for Option<reqwest::header::HeaderMap> type.\n\n/// Calls the methods from http_serde\n", "file_path": "src/extractors/links.rs", "rank": 30, "score": 19265.86605377478 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct Link {\n\n pub url: Url,\n\n #[serde(skip)]\n\n pub host: Option<url::Host>,\n\n #[serde(with = \"opt_mime\", skip_serializing_if = \"Option::is_none\")]\n\n pub content_type: Option<Mime>,\n\n #[serde(with = \"opt_headermap\", skip_serializing_if = \"Option::is_none\")]\n\n headers: Option<reqwest::header::HeaderMap>,\n\n #[serde(skip)]\n\n pub crawled: bool,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n ipv4: Option<Ipv4Addr>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n ipv6: Option<Ipv6Addr>,\n\n pub link_type: LinkType,\n\n pub contains_words: bool,\n\n}\n", "file_path": "src/extractors/links.rs", "rank": 31, "score": 19265.387425245226 }, { "content": "//! 
Submodule containg functins realated to Links.\n\nuse mime::Mime;\n\nuse reqwest::Url;\n\nuse select::{document::Document, predicate::Name};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{\n\n collections::HashSet, fmt, hash::Hash, hash::Hasher, net::Ipv4Addr, net::Ipv6Addr, sync::Arc,\n\n};\n\n\n\n#[derive(Clone, Copy, Debug, Serialize, Deserialize)]\n\npub enum LinkType {\n\n Mail,\n\n PhoneNo,\n\n Other,\n\n}\n\n\n\nimpl std::default::Default for LinkType {\n\n fn default() -> Self {\n\n Self::Other\n\n }\n", "file_path": "src/extractors/links.rs", "rank": 32, "score": 19263.736846630687 }, { "content": " \"mailto\" => LinkType::Mail,\n\n \"tel\" => LinkType::PhoneNo,\n\n _ => LinkType::Other,\n\n }\n\n }\n\n}\n\n\n\nimpl PartialEq for Link {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.url == other.url\n\n }\n\n}\n\n\n\nimpl Eq for Link {}\n\n\n\nimpl Hash for Link {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.url.hash(state);\n\n }\n\n}\n", "file_path": "src/extractors/links.rs", "rank": 33, "score": 19263.074186805065 }, { "content": " };\n\n Some(Self::new(\n\n &parsed_url,\n\n &None,\n\n None,\n\n None,\n\n false,\n\n Self::get_link_type(&parsed_url),\n\n false,\n\n ))\n\n }\n\n\n\n /// Creates a new Link form Url url.\n\n /// Assumes other things\n\n pub fn new_from_url(url: &Url) -> Self {\n\n Self::new(\n\n url,\n\n &None,\n\n None,\n\n None,\n", "file_path": "src/extractors/links.rs", "rank": 34, "score": 19262.945558390074 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::{collections::HashSet, sync::Arc};\n\n\n\n #[test]\n\n fn get_words() {\n\n let html = \"This is a sample page which does not work\";\n\n let mut word_list = HashSet::new();\n\n\n\n assert!(!check_words_html(html, Arc::new(word_list.clone())));\n\n\n\n word_list.insert(\"sample\".to_string());\n\n assert!(check_words_html(html, Arc::new(word_list)))\n\n }\n\n}\n", "file_path": "src/extractors/links.rs", "rank": 35, "score": 19261.47827738895 }, { "content": " timeout: u64,\n\n) -> Result<(), String> {\n\n let origin_url = match Link::new_from_str(origin_url.as_str()) {\n\n Some(x) => x,\n\n None => return Err(\"Invalid Url\".to_string()),\n\n };\n\n\n\n let whitelist = match whitelist {\n\n Some(x) => match file_handler::read_hosts(x).await {\n\n Ok(y) => Some(y),\n\n Err(_) => return Err(\"Error in reading Whitelist\".to_string()),\n\n },\n\n None => None,\n\n };\n\n\n\n let blacklist = match blacklist {\n\n Some(x) => match file_handler::read_hosts(x).await {\n\n Ok(y) => Some(y),\n\n Err(_) => return Err(\"Error in reading Blacklist\".to_string()),\n\n },\n", "file_path": "src/cli.rs", "rank": 36, "score": 22.86097365169585 }, { "content": "//! 
Module containing functions related to File IO.\n\nuse std::collections::HashSet;\n\nuse std::path::PathBuf;\n\nuse tokio::fs::File;\n\nuse tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader, BufWriter};\n\nuse tokio::sync::mpsc::Receiver;\n\n\n\nuse crate::extractors::links;\n\n\n\n/// Function to Read and return a list of hosts from a file.\n\n/// Used from reading whitelist and blacklist.\n\npub async fn read_hosts(\n\n file_path: PathBuf,\n\n) -> Result<HashSet<url::Host>, Box<dyn std::error::Error>> {\n\n use url::Host;\n\n\n\n let file = File::open(file_path).await?;\n\n let reader = BufReader::new(file);\n\n let mut list = reader.lines();\n\n let mut hosts = HashSet::new();\n", "file_path": "src/file_handler.rs", "rank": 37, "score": 17.615245004502498 }, { "content": " let handle = futures::future::try_join_all(handlers).await;\n\n match handle {\n\n Err(x) => Err(x.to_string()),\n\n Ok(_) => Ok(()),\n\n }\n\n}\n\n\n\n/// Function to write to Standard Output.\n\nasync fn write_standard_output(mut rx: mpsc::Receiver<Link>) -> Result<(), std::io::Error> {\n\n while let Some(link) = rx.recv().await {\n\n println!(\"{},\", link);\n\n }\n\n println!(\"\");\n\n Ok(())\n\n}\n", "file_path": "src/cli.rs", "rank": 38, "score": 17.153837291226107 }, { "content": " };\n\n match handler {\n\n Ok(_) => Ok(()),\n\n Err(_) => Err(\"Something went wrong in the Crawler\".to_string()),\n\n }\n\n}\n\n\n\n/// Function to handle selenium.\n\n/// Uses the chrome/chromium browser for now.\n\nasync fn handle_selenium(\n\n file_path: Option<PathBuf>,\n\n flag: bool,\n\n mut rx: mpsc::Receiver<String>,\n\n) -> Result<(), thirtyfour::error::WebDriverError> {\n\n use thirtyfour::prelude::*;\n\n use tokio::fs;\n\n\n\n if flag {\n\n if let Some(file_path) = file_path {\n\n let mut caps = DesiredCapabilities::chrome();\n", "file_path": "src/cli.rs", "rank": 39, "score": 15.818130497595615 }, { "content": " task_limit,\n\n timeout,\n\n )\n\n .await\n\n }\n\n\n\n Some(x) => {\n\n crate::crawler::crawl_with_depth(\n\n origin_url,\n\n x,\n\n whitelist,\n\n blacklist,\n\n word_list,\n\n tx_output,\n\n tx_selenium,\n\n task_limit,\n\n timeout,\n\n )\n\n .await\n\n }\n", "file_path": "src/cli.rs", "rank": 40, "score": 15.432875605790771 }, { "content": " None => None,\n\n };\n\n\n\n let word_list = match search_words {\n\n Some(x) => match file_handler::read_words(x).await {\n\n Ok(x) => x,\n\n Err(_) => return Err(\"Error in reading Word List\".to_string()),\n\n },\n\n None => HashSet::new(),\n\n };\n\n\n\n let handler = match depth {\n\n None => {\n\n crate::crawler::crawl_no_depth(\n\n origin_url,\n\n whitelist,\n\n blacklist,\n\n word_list,\n\n tx_output,\n\n tx_selenium,\n", "file_path": "src/cli.rs", "rank": 41, "score": 15.312813073638726 }, { "content": " while let Ok(Some(x)) = list.next_line().await {\n\n if let Ok(host) = Host::parse(&x) {\n\n hosts.insert(host);\n\n }\n\n }\n\n\n\n Ok(hosts)\n\n}\n\n\n\n/// Function to read and return a list of words from a file.\n\n/// Each line contains only one word.\n\n/// Used for reading Search Words.\n\npub async fn read_words(file_path: PathBuf) -> Result<HashSet<String>, Box<dyn std::error::Error>> {\n\n let file = File::open(file_path).await?;\n\n let reader = BufReader::new(file);\n\n let mut list = reader.lines();\n\n let mut words = HashSet::new();\n\n while let Ok(Some(x)) = list.next_line().await {\n\n words.insert(x);\n\n }\n", "file_path": "src/file_handler.rs", "rank": 42, "score": 15.254675969822697 }, { "content": "\n\n Ok(words)\n\n}\n\n\n\n/// 
Function to write links to files.\n\n/// Seperate files for crawled, not crawled, emails and phone no.\n\npub async fn write_links(\n\n folder_path: PathBuf,\n\n mut rx: Receiver<links::Link>,\n\n) -> Result<(), std::io::Error> {\n\n const CRAWLED_FILE_NAME: &str = r#\"crawled.json\"#;\n\n const NOT_CRAWLED_FILE_NAME: &str = r#\"not_crawled.json\"#;\n\n const MAIL_FILE_NAME: &str = r#\"emails.json\"#;\n\n const TEL_FILE_NAME: &str = r#\"phone_nos.json\"#;\n\n\n\n let mut crawled_writer = init_writer(CRAWLED_FILE_NAME, &folder_path).await?;\n\n let mut not_crawled_writer = init_writer(NOT_CRAWLED_FILE_NAME, &folder_path).await?;\n\n let mut mail_writer = init_writer(MAIL_FILE_NAME, &folder_path).await?;\n\n let mut tel_writer = init_writer(TEL_FILE_NAME, &folder_path).await?;\n\n\n", "file_path": "src/file_handler.rs", "rank": 43, "score": 14.390457660113569 }, { "content": " launch_crawler(\n\n url,\n\n depth,\n\n task_limit,\n\n tx_output,\n\n tx_selenium,\n\n whitelist,\n\n blacklist,\n\n search_words,\n\n timeout,\n\n )\n\n .await\n\n });\n\n\n\n let output_folder_clone = output_folder.clone();\n\n let selenium_handler =\n\n tokio::spawn(\n\n async move { handle_selenium(output_folder_clone, selenium, rx_selenium).await },\n\n );\n\n\n", "file_path": "src/cli.rs", "rank": 44, "score": 14.349102418429 }, { "content": " let returns =\n\n futures::future::try_join3(output_handler, crawler_handler, selenium_handler).await;\n\n\n\n if let Err(x) = returns {\n\n println!(\"Error : {}\", x);\n\n }\n\n\n\n println!(\"Time Taken: {} seconds\", start_time.elapsed().as_secs());\n\n}\n\n\n\n/// Funtion to launch the crawler. Fires off the correct crawler method depending on the arguments.\n\nasync fn launch_crawler(\n\n origin_url: String,\n\n depth: Option<usize>,\n\n task_limit: usize,\n\n tx_output: mpsc::Sender<Link>,\n\n tx_selenium: mpsc::Sender<String>,\n\n whitelist: Option<PathBuf>,\n\n blacklist: Option<PathBuf>,\n\n search_words: Option<PathBuf>,\n", "file_path": "src/cli.rs", "rank": 45, "score": 14.141406633551306 }, { "content": " wordlist is found in the page\n\n --verbose Output the link to standard output\n\n -V, --version Prints version information\n\n\n\nOPTIONS:\n\n -b, --blacklist <blacklist> Path of file containing list of domains not to be crawled\n\n -d, --depth <depth> Gives numeric depth for crawl\n\n -o, --output-folder <output-folder> Path to the output folder\n\n -s, --search-words <search-words> Path to file containing words to search for in the page\n\n --task-limit <task-limit> Limits the number of parallel tasks [default: 1000]\n\n -t, --timeout <timeout> Timout for http requests [default: 10]\n\n -w, --whitelist <whitelist> Path of file containing list of domains to be crawled\n\n```\n\n*/\n\nmod cli;\n\nmod crawler;\n\nmod extractors;\n\nmod file_handler;\n\n\n\n#[tokio::main]\n", "file_path": "src/main.rs", "rank": 46, "score": 13.349410493452426 }, { "content": " let (tx_output, rx_output) = mpsc::channel(task_limit);\n\n let (tx_selenium, rx_selenium) = mpsc::channel(task_limit);\n\n\n\n let output_folder = opts.output_folder.clone();\n\n let verbose = opts.verbose;\n\n let url = opts.url.clone();\n\n let depth = opts.depth;\n\n let whitelist = opts.whitelist;\n\n let blacklist = opts.blacklist;\n\n let search_words = opts.search_words;\n\n let timeout = opts.timeout;\n\n let selenium = opts.selenium;\n\n\n\n let output_folder_clone = output_folder.clone();\n\n\n\n let output_handler = tokio::spawn(async move {\n\n handle_output(output_folder_clone, verbose, 
rx_output, task_limit).await\n\n });\n\n\n\n let crawler_handler = tokio::spawn(async move {\n", "file_path": "src/cli.rs", "rank": 47, "score": 13.236307327470993 }, { "content": " }\n\n let _ = driver.quit().await;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n/// Funtion to handle the output. Handles both console and file output.\n\n/// Outputs in JSON format.\n\nasync fn handle_output(\n\n file_path: Option<PathBuf>,\n\n verbose: bool,\n\n mut rx: mpsc::Receiver<Link>,\n\n task_limit: usize,\n\n) -> Result<(), String> {\n\n let mut senders = Vec::new();\n\n let mut handlers = Vec::new();\n\n if let Some(x) = file_path {\n\n let (tx, rx) = mpsc::channel::<Link>(task_limit);\n\n senders.push(tx);\n", "file_path": "src/cli.rs", "rank": 48, "score": 12.827226686105927 }, { "content": " while let Some(link) = rx.recv().await {\n\n let temp = format!(\"{}\", link);\n\n match link.link_type {\n\n links::LinkType::Mail => write_json(&mut mail_writer, &temp).await?,\n\n links::LinkType::PhoneNo => write_json(&mut tel_writer, &temp).await?,\n\n links::LinkType::Other => {\n\n if link.crawled {\n\n write_json(&mut crawled_writer, &temp).await?;\n\n } else {\n\n write_json(&mut not_crawled_writer, &temp).await?;\n\n }\n\n }\n\n };\n\n }\n\n\n\n clean_writer(&mut crawled_writer).await?;\n\n clean_writer(&mut not_crawled_writer).await?;\n\n clean_writer(&mut mail_writer).await?;\n\n clean_writer(&mut tel_writer).await?;\n\n\n", "file_path": "src/file_handler.rs", "rank": 49, "score": 12.1027165205899 }, { "content": " Ok(())\n\n}\n\n\n\n/// Intialize BufWriter\n\nasync fn init_writer(\n\n file_name: &str,\n\n folder_path: &PathBuf,\n\n) -> Result<BufWriter<File>, std::io::Error> {\n\n let mut file_path = folder_path.clone();\n\n file_path.push(file_name);\n\n let mut writer = BufWriter::new(File::create(file_path).await?);\n\n writer.write(b\"[\\n\").await?;\n\n Ok(writer)\n\n}\n\n\n\n/// Write Json Link to a file.\n\nasync fn write_json(writer: &mut BufWriter<File>, json: &str) -> Result<(), std::io::Error> {\n\n writer.write(json.as_bytes()).await?;\n\n writer.write(b\",\\n\").await?;\n\n Ok(())\n\n}\n\n\n\n/// Clean up after writers.\n\nasync fn clean_writer(writer: &mut BufWriter<File>) -> Result<(), std::io::Error> {\n\n writer.write(b\"{}\\n]\").await?;\n\n writer.flush().await?;\n\n Ok(())\n\n}\n", "file_path": "src/file_handler.rs", "rank": 50, "score": 11.868363090433192 }, { "content": " caps.add_chrome_arg(\"--enable-automation\")?;\n\n let driver = WebDriver::new(\"http://localhost:4444/wd/hub\", &caps).await?;\n\n // driver.fullscreen_window().await?;\n\n let mut file_name = 1;\n\n let mut file_path = file_path.clone();\n\n file_path.push(\"screenshots\");\n\n if fs::create_dir_all(&file_path).await.is_err() {\n\n return Err(thirtyfour::error::WebDriverError::FatalError(\n\n \"\".to_string(),\n\n ));\n\n }\n\n\n\n while let Some(link) = rx.recv().await {\n\n if driver.get(link.as_str()).await.is_ok() {\n\n let mut img_path = file_path.clone();\n\n img_path.push(file_name.to_string());\n\n let _ = driver.fullscreen_window().await;\n\n let _ = driver.screenshot(&img_path).await;\n\n file_name += 1;\n\n }\n", "file_path": "src/cli.rs", "rank": 51, "score": 10.341111829763129 }, { "content": " let handler = tokio::spawn(async move { file_handler::write_links(x, rx).await });\n\n handlers.push(handler);\n\n }\n\n if verbose {\n\n let (tx, rx) = mpsc::channel::<Link>(task_limit);\n\n senders.push(tx);\n\n let handler = tokio::spawn(async move { write_standard_output(rx).await });\n\n 
handlers.push(handler);\n\n }\n\n while let Some(link) = rx.recv().await {\n\n for i in &senders {\n\n if let Err(_) = i.send(link.clone()).await {\n\n let _ = futures::future::try_join_all(handlers).await;\n\n return Err(\"Something Wrong with IO\".to_string());\n\n }\n\n }\n\n }\n\n\n\n senders.clear();\n\n\n", "file_path": "src/cli.rs", "rank": 52, "score": 10.165856354021455 }, { "content": "/*!\n\nModule Containing the Command Line part of the Crawler.\n\n*/\n\nuse crate::extractors::links::Link;\n\nuse crate::file_handler;\n\nuse clap::Clap;\n\nuse std::{collections::HashSet, path::PathBuf, time::Instant};\n\nuse tokio::sync::mpsc;\n\n\n\n#[derive(Clap, Clone)]\n\n#[clap(version = \"1.0\", author = \"Ayush Singh <ayushsingh1325@gmail.com>\")]\n", "file_path": "src/cli.rs", "rank": 53, "score": 9.945799584366268 }, { "content": "/*!\n\nModule Containg functions related to different Extractors.\n\nCurrently contains only links.\n\n*/\n\npub mod links;\n", "file_path": "src/extractors/mod.rs", "rank": 54, "score": 9.6723493770739 }, { "content": "async fn main() {\n\n cli::entry().await;\n\n // test().await;\n\n}\n\n\n\n#[allow(dead_code)]\n\n#[cfg(debug_assertions)]\n\nasync fn test() {\n\n use url::Url;\n\n\n\n let t = Url::parse(\"tel:+6494461709\").unwrap();\n\n println!(\"{}\", t.scheme() == \"tel\");\n\n}\n", "file_path": "src/main.rs", "rank": 55, "score": 9.50885617586492 }, { "content": " verbose: bool,\n\n /// Timout for http requests.\n\n #[clap(short, long, default_value = \"10\")]\n\n timeout: u64,\n\n /// Flag for taking screenshots using Selenium.\n\n /// Takes screenshot if a word from wordlist is found in the page.\n\n #[clap(long)]\n\n selenium: bool,\n\n /// Limits the number of parallel tasks.\n\n #[clap(long, default_value = \"1000\")]\n\n task_limit: usize,\n\n}\n\n\n\n/// Funtion that servers as the entry point to the Command Line Tool.\n\n/// It parses the arguments and fires off the methods.\n\npub async fn entry() {\n\n let start_time = Instant::now();\n\n let opts = CLI::parse();\n\n println!(\"Started\");\n\n let task_limit = opts.task_limit;\n", "file_path": "src/cli.rs", "rank": 56, "score": 9.49649054161249 }, { "content": "/*!\n\n# Introduction\n\n- The project basically crawls the webpage and collects as much information as possible,\n\nlike external links, mails, etc. Like a web crawler used by search engines but specific for\n\na domain and url.\n\n- It is a project for WOC.\n\n# CLI Usage\n\n```\n\nwebcrawler 1.0\n\nAyush Singh <ayushsingh1325@gmail.com>\n\n\n\nUSAGE:\n\n webcrawler [FLAGS] [OPTIONS] <url>\n\n\n\nARGS:\n\n <url> Seed url for crawler\n\n\n\nFLAGS:\n\n -h, --help Prints help information\n\n --selenium Flag for taking screenshots using Selenium. Takes screenshot if a word from\n", "file_path": "src/main.rs", "rank": 57, "score": 9.101581978954469 } ]
Rust
rust/src/storage/gcs/client.rs
Smurphy000/delta-rs
9196ff49bc147b36339b4901624229473092b024
use super::{util, GCSClientError, GCSObject}; use futures::Stream; use std::convert::{TryFrom, TryInto}; use std::path::PathBuf; use std::sync::Arc; use tame_gcs::objects::{self, Object}; use tame_oauth::gcp as oauth; use log::debug; pub struct GCSStorageBackend { pub client: reqwest::Client, pub cred_path: PathBuf, pub auth: Arc<oauth::ServiceAccountAccess>, } impl std::fmt::Debug for GCSStorageBackend { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { f.debug_struct("GCSStorageBackend {...}").finish() } } impl TryFrom<PathBuf> for GCSStorageBackend { type Error = GCSClientError; fn try_from(cred_path: PathBuf) -> Result<Self, Self::Error> { let client = reqwest::Client::builder().build()?; let cred_contents = std::fs::read_to_string(&cred_path)?; let svc_account_info = oauth::ServiceAccountInfo::deserialize(cred_contents)?; let svc_account_access = oauth::ServiceAccountAccess::new(svc_account_info)?; Ok(Self { client, cred_path, auth: std::sync::Arc::new(svc_account_access), }) } } impl GCSStorageBackend { pub async fn metadata<'a>( &self, path: GCSObject<'a>, ) -> Result<objects::Metadata, GCSClientError> { debug!("creating request"); let get_meta_request = Object::get(&path, None)?; debug!("executing request"); let response = util::execute::<_, objects::GetObjectResponse>(self, get_meta_request).await?; debug!("returning meta"); Ok(response.metadata) } pub async fn download<'a>(&self, path: GCSObject<'a>) -> Result<bytes::Bytes, GCSClientError> { let download_request = Object::download(&path, None)?; let response = util::execute::<_, objects::DownloadObjectResponse>(self, download_request) .await .map_err(util::check_object_not_found)?; Ok(response.consume()) } pub fn list<'a>( &'a self, uri: GCSObject<'a>, ) -> impl Stream<Item = Result<objects::Metadata, GCSClientError>> + 'a { let mut page_token: Option<String> = None; async_stream::try_stream! 
{ loop { let list_request_opts = Some(objects::ListOptional { prefix: Some(uri.path.as_ref()), page_token: page_token.as_deref(), standard_params: tame_gcs::common::StandardQueryParameters { fields: Some("items(name, updated"), ..Default::default() }, ..Default::default() }); let list_request = Object::list(&uri.bucket, list_request_opts)?; let list_response = util::execute::<_, objects::ListResponse>( self, list_request).await?; for object_meta in list_response.objects { yield object_meta } page_token = list_response.page_token; if page_token.is_none() { break; } } } } pub async fn insert<'a, 'b>( &self, uri: GCSObject<'a>, content: Vec<u8>, ) -> Result<(), GCSClientError> { let content_len = content.len().try_into().unwrap(); let content_body = std::io::Cursor::new(content); let insert_request = Object::insert_simple(&uri, content_body, content_len, None)?; let _response = util::execute::<_, objects::InsertResponse>(self, insert_request).await?; Ok(()) } pub async fn rename<'a>( &self, src: GCSObject<'a>, dst: GCSObject<'a>, ) -> Result<(), GCSClientError> { let mut rewrite_token = None; loop { let metadata = None; let precondition = Some(objects::RewriteObjectOptional { destination_conditionals: Some(tame_gcs::common::Conditionals { if_generation_match: Some(0), ..Default::default() }), ..Default::default() }); let rewrite_http_request = Object::rewrite(&src, &dst, rewrite_token, metadata, precondition)?; let response = util::execute::<_, objects::RewriteObjectResponse>(self, rewrite_http_request) .await .map_err(util::check_precondition_status)?; rewrite_token = response.rewrite_token; if rewrite_token.is_none() { break; } } self.delete(src).await } pub async fn delete<'a>(&self, uri: GCSObject<'a>) -> Result<(), GCSClientError> { let delete_request = Object::delete(&uri, None)?; let _response = util::execute::<_, objects::DeleteObjectResponse>(self, delete_request).await?; Ok(()) } }
use super::{util, GCSClientError, GCSObject}; use futures::Stream; use std::convert::{TryFrom, TryInto}; use std::path::PathBuf; use std::sync::Arc; use tame_gcs::objects::{self, Object}; use tame_oauth::gcp as oauth; use log::debug; pub struct GCSStorageBackend { pub client: reqwest::Client, pub cred_path: PathBuf, pub auth: Arc<oauth::ServiceAccoun
..Default::default() }); let list_request = Object::list(&uri.bucket, list_request_opts)?; let list_response = util::execute::<_, objects::ListResponse>( self, list_request).await?; for object_meta in list_response.objects { yield object_meta } page_token = list_response.page_token; if page_token.is_none() { break; } } } } pub async fn insert<'a, 'b>( &self, uri: GCSObject<'a>, content: Vec<u8>, ) -> Result<(), GCSClientError> { let content_len = content.len().try_into().unwrap(); let content_body = std::io::Cursor::new(content); let insert_request = Object::insert_simple(&uri, content_body, content_len, None)?; let _response = util::execute::<_, objects::InsertResponse>(self, insert_request).await?; Ok(()) } pub async fn rename<'a>( &self, src: GCSObject<'a>, dst: GCSObject<'a>, ) -> Result<(), GCSClientError> { let mut rewrite_token = None; loop { let metadata = None; let precondition = Some(objects::RewriteObjectOptional { destination_conditionals: Some(tame_gcs::common::Conditionals { if_generation_match: Some(0), ..Default::default() }), ..Default::default() }); let rewrite_http_request = Object::rewrite(&src, &dst, rewrite_token, metadata, precondition)?; let response = util::execute::<_, objects::RewriteObjectResponse>(self, rewrite_http_request) .await .map_err(util::check_precondition_status)?; rewrite_token = response.rewrite_token; if rewrite_token.is_none() { break; } } self.delete(src).await } pub async fn delete<'a>(&self, uri: GCSObject<'a>) -> Result<(), GCSClientError> { let delete_request = Object::delete(&uri, None)?; let _response = util::execute::<_, objects::DeleteObjectResponse>(self, delete_request).await?; Ok(()) } }
tAccess>, } impl std::fmt::Debug for GCSStorageBackend { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { f.debug_struct("GCSStorageBackend {...}").finish() } } impl TryFrom<PathBuf> for GCSStorageBackend { type Error = GCSClientError; fn try_from(cred_path: PathBuf) -> Result<Self, Self::Error> { let client = reqwest::Client::builder().build()?; let cred_contents = std::fs::read_to_string(&cred_path)?; let svc_account_info = oauth::ServiceAccountInfo::deserialize(cred_contents)?; let svc_account_access = oauth::ServiceAccountAccess::new(svc_account_info)?; Ok(Self { client, cred_path, auth: std::sync::Arc::new(svc_account_access), }) } } impl GCSStorageBackend { pub async fn metadata<'a>( &self, path: GCSObject<'a>, ) -> Result<objects::Metadata, GCSClientError> { debug!("creating request"); let get_meta_request = Object::get(&path, None)?; debug!("executing request"); let response = util::execute::<_, objects::GetObjectResponse>(self, get_meta_request).await?; debug!("returning meta"); Ok(response.metadata) } pub async fn download<'a>(&self, path: GCSObject<'a>) -> Result<bytes::Bytes, GCSClientError> { let download_request = Object::download(&path, None)?; let response = util::execute::<_, objects::DownloadObjectResponse>(self, download_request) .await .map_err(util::check_object_not_found)?; Ok(response.consume()) } pub fn list<'a>( &'a self, uri: GCSObject<'a>, ) -> impl Stream<Item = Result<objects::Metadata, GCSClientError>> + 'a { let mut page_token: Option<String> = None; async_stream::try_stream! { loop { let list_request_opts = Some(objects::ListOptional { prefix: Some(uri.path.as_ref()), page_token: page_token.as_deref(), standard_params: tame_gcs::common::StandardQueryParameters { fields: Some("items(name, updated"), ..Default::default() },
random
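The middle span above completes the TryFrom<PathBuf> constructor started in the prefix: the credentials file pointed to by SERVICE_ACCOUNT is read, deserialized into a service-account descriptor, and stored behind an Arc next to a reqwest client. The sketch below reproduces that fallible-constructor-from-a-path shape with the standard library only; CredentialFile is a made-up type standing in for tame-oauth's ServiceAccountInfo/ServiceAccountAccess.

use std::convert::TryFrom;
use std::path::PathBuf;
use std::sync::Arc;

/// Made-up stand-in for a parsed service-account credential.
#[derive(Debug)]
struct CredentialFile {
    raw: Arc<String>,
}

impl TryFrom<PathBuf> for CredentialFile {
    type Error = std::io::Error;

    /// Read and keep the credentials, failing early if the file is unreadable --
    /// the same shape as `GCSStorageBackend::try_from(cred_path)` above.
    fn try_from(path: PathBuf) -> Result<Self, Self::Error> {
        let contents = std::fs::read_to_string(&path)?;
        Ok(Self {
            raw: Arc::new(contents),
        })
    }
}

fn main() -> Result<(), std::io::Error> {
    // Mirrors `std::env::var("SERVICE_ACCOUNT").map(PathBuf::from)` in the backend's
    // constructor; the fallback file name here is purely illustrative.
    let path = std::env::var("SERVICE_ACCOUNT")
        .map(PathBuf::from)
        .unwrap_or_else(|_| PathBuf::from("credentials.json"));
    let creds = CredentialFile::try_from(path)?;
    println!("loaded {} bytes of credentials", creds.raw.len());
    Ok(())
}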
[ { "content": "pub fn check_object_not_found(err: GCSClientError) -> GCSClientError {\n\n match err {\n\n GCSClientError::GCSError {\n\n source: tame_gcs::error::Error::HttpStatus(HttpStatusError(StatusCode::NOT_FOUND)),\n\n } => GCSClientError::NotFound,\n\n err => err,\n\n }\n\n}\n\n\n", "file_path": "rust/src/storage/gcs/util.rs", "rank": 0, "score": 127636.96584654566 }, { "content": "pub fn check_precondition_status(err: GCSClientError) -> GCSClientError {\n\n match err {\n\n GCSClientError::GCSError {\n\n source:\n\n tame_gcs::error::Error::HttpStatus(HttpStatusError(StatusCode::PRECONDITION_FAILED)),\n\n } => GCSClientError::PreconditionFailed,\n\n err => err,\n\n }\n\n}\n", "file_path": "rust/src/storage/gcs/util.rs", "rank": 1, "score": 103341.63772542663 }, { "content": "#[async_trait::async_trait]\n\npub trait LockClient: Send + Sync + Debug {\n\n /// Attempts to acquire lock. If successful, returns the lock.\n\n /// Otherwise returns [`Option::None`] which is retryable action.\n\n /// Visit implementation docs for more details.\n\n async fn try_acquire_lock(&self, data: &str) -> Result<Option<LockItem>, StorageError>;\n\n\n\n /// Returns current lock from DynamoDB (if any).\n\n async fn get_lock(&self) -> Result<Option<LockItem>, StorageError>;\n\n\n\n /// Update data in the upstream lock of the current user still has it.\n\n /// The returned lock will have a new `rvn` so it'll increase the lease duration\n\n /// as this method is usually called when the work with a lock is extended.\n\n async fn update_data(&self, lock: &LockItem) -> Result<LockItem, StorageError>;\n\n\n\n /// Releases the given lock if the current user still has it, returning true if the lock was\n\n /// successfully released, and false if someone else already stole the lock\n\n async fn release_lock(&self, lock: &LockItem) -> Result<bool, StorageError>;\n\n}\n\n\n\nconst DEFAULT_MAX_RETRY_ACQUIRE_LOCK_ATTEMPTS: u32 = 10_000;\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 2, "score": 73952.65342923111 }, { "content": "struct ParquetBuffer {\n\n writer: ArrowWriter<InMemoryWriteableCursor>,\n\n cursor: InMemoryWriteableCursor,\n\n}\n\n\n\nimpl ParquetBuffer {\n\n fn try_new(schema: arrow::datatypes::SchemaRef) -> Result<Self, DeltaTableError> {\n\n // Initialize writer properties for the underlying arrow writer\n\n let writer_properties = WriterProperties::builder()\n\n // NOTE: Consider extracting config for writer properties and setting more than just compression\n\n .set_compression(Compression::SNAPPY)\n\n .build();\n\n\n\n let cursor = InMemoryWriteableCursor::default();\n\n let writer = ArrowWriter::try_new(cursor.clone(), schema, Some(writer_properties))?;\n\n\n\n Ok(Self { writer, cursor })\n\n }\n\n\n\n fn write_batch(&mut self, batch: &RecordBatch) -> Result<(), DeltaTableError> {\n", "file_path": "rust/src/writer.rs", "rank": 3, "score": 63467.76760893244 }, { "content": "#[pyclass]\n\nstruct RawDeltaTable {\n\n _table: deltalake::DeltaTable,\n\n}\n\n\n", "file_path": "python/src/lib.rs", "rank": 4, "score": 62047.61796638215 }, { "content": "#[pyclass]\n\nstruct RawDeltaTableMetaData {\n\n #[pyo3(get)]\n\n id: String,\n\n #[pyo3(get)]\n\n name: Option<String>,\n\n #[pyo3(get)]\n\n description: Option<String>,\n\n #[pyo3(get)]\n\n partition_columns: Vec<String>,\n\n #[pyo3(get)]\n\n created_time: deltalake::DeltaDataTypeTimestamp,\n\n #[pyo3(get)]\n\n configuration: HashMap<String, Option<String>>,\n\n}\n\n\n\n#[pymethods]\n\nimpl RawDeltaTable {\n\n #[new]\n\n fn new(table_uri: &str, 
version: Option<deltalake::DeltaDataTypeLong>) -> PyResult<Self> {\n\n let table = match version {\n", "file_path": "python/src/lib.rs", "rank": 5, "score": 59523.46451647334 }, { "content": "pub fn setup() {\n\n std::env::set_var(\"AWS_REGION\", \"us-east-2\");\n\n std::env::set_var(\"AWS_ACCESS_KEY_ID\", \"test\");\n\n std::env::set_var(\"AWS_SECRET_ACCESS_KEY\", \"test\");\n\n std::env::set_var(\"AWS_ENDPOINT_URL\", ENDPOINT);\n\n}\n\n\n", "file_path": "rust/tests/s3_common/mod.rs", "rank": 6, "score": 56665.17655794023 }, { "content": "pub trait DeltaDataframe {\n\n fn from_loaded_delta_table(table: delta::DeltaTable) -> Result<DataFrame, DeltaTableError>;\n\n fn from_delta_table(path: &str) -> Result<DataFrame, DeltaTableError>;\n\n fn from_delta_table_with_version(\n\n path: &str,\n\n version: delta::DeltaDataTypeVersion,\n\n ) -> Result<DataFrame, DeltaTableError>;\n\n}\n\n\n\nimpl DeltaDataframe for DataFrame {\n\n fn from_loaded_delta_table(\n\n delta_table: delta::DeltaTable,\n\n ) -> Result<DataFrame, DeltaTableError> {\n\n let mut batches = vec![];\n\n let mut schema = None;\n\n let table_path = Path::new(&delta_table.table_path);\n\n\n\n for fname in delta_table.get_files() {\n\n let fpath = table_path.join(fname);\n\n let file = File::open(&fpath).map_err(|e| DeltaTableError::MissingDataFile {\n", "file_path": "rust/src/delta_dataframe.rs", "rank": 7, "score": 56665.17655794023 }, { "content": "pub fn create_add(\n\n partition_values: &HashMap<String, Option<String>>,\n\n path: String,\n\n size: i64,\n\n record_batch: &RecordBatch,\n\n) -> Result<Add, DeltaWriterError> {\n\n let stats = Stats {\n\n num_records: record_batch.num_rows() as i64,\n\n // TODO: calculate additional stats\n\n // look at https://github.com/apache/arrow/blob/master/rust/arrow/src/compute/kernels/aggregate.rs for pulling these stats\n\n min_values: HashMap::new(),\n\n max_values: HashMap::new(),\n\n null_count: HashMap::new(),\n\n };\n\n let stats_string = serde_json::to_string(&stats).unwrap();\n\n\n\n let modification_time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n\n let modification_time = modification_time.as_millis() as i64;\n\n\n\n let add = Add {\n", "file_path": "rust/tests/write_exploration.rs", "rank": 8, "score": 56665.17655794023 }, { "content": "pub fn extract_partition_values(\n\n metadata: &DeltaTableMetaData,\n\n record_batch: &RecordBatch,\n\n) -> Result<HashMap<String, Option<String>>, DeltaWriterError> {\n\n let partition_cols = metadata.partition_columns.as_slice();\n\n\n\n let mut partition_values = HashMap::new();\n\n\n\n for col_name in partition_cols.iter() {\n\n let arrow_schema = record_batch.schema();\n\n\n\n let i = arrow_schema.index_of(col_name)?;\n\n let col = record_batch.column(i);\n\n\n\n let partition_string = stringified_partition_value(col)?;\n\n\n\n partition_values.insert(col_name.clone(), partition_string);\n\n }\n\n\n\n Ok(partition_values)\n\n}\n\n\n", "file_path": "rust/tests/write_exploration.rs", "rank": 9, "score": 55455.39515161113 }, { "content": "struct AcquireLockState<'a> {\n\n client: &'a DynamoDbLockClient,\n\n cached_lock: Option<LockItem>,\n\n started: Instant,\n\n timeout_in: Duration,\n\n}\n\n\n\nimpl<'a> AcquireLockState<'a> {\n\n /// If lock is expirable (lease_duration is set) then this function returns `true`\n\n /// if the elapsed time sine `started` is reached `timeout_in`.\n\n fn has_timed_out(&self) -> bool {\n\n self.started.elapsed() > self.timeout_in && {\n\n let non_expirable = if let Some(ref cached_lock) = 
self.cached_lock {\n\n cached_lock.lease_duration.is_none()\n\n } else {\n\n false\n\n };\n\n !non_expirable\n\n }\n\n }\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 10, "score": 54832.57739499865 }, { "content": "pub fn record_batch_from_json_buffer(\n\n arrow_schema_ref: Arc<ArrowSchema>,\n\n json_buffer: &[Value],\n\n) -> Result<RecordBatch, DeltaWriterError> {\n\n let row_count = json_buffer.len();\n\n let mut value_ter = InMemValueIter::from_vec(json_buffer);\n\n let decoder = Decoder::new(arrow_schema_ref.clone(), row_count, None);\n\n let batch = decoder.next_batch(&mut value_ter)?;\n\n\n\n // handle none\n\n let batch = batch.unwrap();\n\n\n\n Ok(batch)\n\n}\n\n\n", "file_path": "rust/tests/write_exploration.rs", "rank": 11, "score": 54332.17376840713 }, { "content": "pub fn region() -> Region {\n\n Region::Custom {\n\n name: \"custom\".to_string(),\n\n endpoint: ENDPOINT.to_string(),\n\n }\n\n}\n\n\n", "file_path": "rust/tests/s3_common/mod.rs", "rank": 12, "score": 52741.390318467806 }, { "content": "/// Returns rust crate version, can be use used in language bindings to expose Rust core version\n\npub fn crate_version() -> &'static str {\n\n env!(\"CARGO_PKG_VERSION\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use pretty_assertions::assert_eq;\n\n use std::{collections::HashMap, fs::File, path::Path};\n\n\n\n #[test]\n\n fn state_records_new_txn_version() {\n\n let mut app_transaction_version = HashMap::new();\n\n app_transaction_version.insert(\"abc\".to_string(), 1);\n\n app_transaction_version.insert(\"xyz\".to_string(), 1);\n\n\n\n let mut state = DeltaTableState {\n\n files: vec![],\n\n commit_infos: vec![],\n\n tombstones: vec![],\n", "file_path": "rust/src/delta.rs", "rank": 13, "score": 50582.48553368445 }, { "content": "pub fn setup_dynamodb(key: &str) {\n\n std::env::set_var(\"AWS_S3_LOCKING_PROVIDER\", \"dynamodb\");\n\n std::env::set_var(\"DYNAMO_LOCK_TABLE_NAME\", \"test_table\");\n\n std::env::set_var(\"DYNAMO_LOCK_PARTITION_KEY_VALUE\", key);\n\n std::env::set_var(\"DYNAMO_LOCK_REFRESH_PERIOD_MILLIS\", \"100\");\n\n std::env::set_var(\"DYNAMO_LOCK_ADDITIONAL_TIME_TO_WAIT_MILLIS\", \"100\");\n\n}\n\n\n\npub async fn cleanup_dir_except(path: &str, ignore_files: Vec<String>) {\n\n setup();\n\n let client = S3Client::new(region());\n\n let dir = deltalake::parse_uri(path).unwrap().into_s3object().unwrap();\n\n\n\n for obj in list_objects(&client, dir.bucket, dir.key).await {\n\n let name = obj.split(\"/\").last().unwrap().to_string();\n\n if !ignore_files.contains(&name) && !name.starts_with(\".\") {\n\n let req = DeleteObjectRequest {\n\n bucket: dir.bucket.to_string(),\n\n key: obj,\n\n ..Default::default()\n", "file_path": "rust/tests/s3_common/mod.rs", "rank": 14, "score": 48408.783617316934 }, { "content": "#[async_trait::async_trait]\n\npub trait StorageBackend: Send + Sync + Debug {\n\n /// Create a new path by appending `path_to_join` as a new component to `path`.\n\n #[inline]\n\n fn join_path(&self, path: &str, path_to_join: &str) -> String {\n\n let normalized_path = path.trim_end_matches('/');\n\n format!(\"{}/{}\", normalized_path, path_to_join)\n\n }\n\n\n\n /// More efficient path join for multiple path components. 
Use this method if you need to\n\n /// combine more than two path components.\n\n #[inline]\n\n fn join_paths(&self, paths: &[&str]) -> String {\n\n paths\n\n .iter()\n\n .map(|s| s.trim_end_matches('/'))\n\n .collect::<Vec<_>>()\n\n .join(\"/\")\n\n }\n\n\n\n /// Returns trimed path with trailing path separator removed.\n", "file_path": "rust/src/storage/mod.rs", "rank": 15, "score": 46643.34083069462 }, { "content": "pub fn create_remove(path: String) -> Remove {\n\n let deletion_timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n\n let deletion_timestamp = deletion_timestamp.as_millis() as i64;\n\n\n\n Remove {\n\n path,\n\n deletion_timestamp: deletion_timestamp,\n\n data_change: true,\n\n extended_file_metadata: Some(false),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "rust/tests/write_exploration.rs", "rank": 16, "score": 46643.34083069462 }, { "content": "#[inline]\n\npub fn get_version() -> Result<Version, String> {\n\n imp::get_version()\n\n}\n", "file_path": "glibc_version/src/lib.rs", "rank": 17, "score": 46643.34083069462 }, { "content": "/// Parses the URI and returns a variant of the Uri enum for the appropriate storage backend based\n\n/// on scheme.\n\npub fn parse_uri<'a>(path: &'a str) -> Result<Uri<'a>, UriError> {\n\n let parts: Vec<&'a str> = path.split(\"://\").collect();\n\n\n\n if parts.len() == 1 {\n\n return Ok(Uri::LocalPath(parts[0]));\n\n }\n\n\n\n match parts[0] {\n\n \"s3\" => {\n\n cfg_if::cfg_if! {\n\n if #[cfg(any(feature = \"s3\", feature = \"s3-rustls\"))] {\n\n let mut path_parts = parts[1].splitn(2, '/');\n\n let bucket = match path_parts.next() {\n\n Some(x) => x,\n\n None => {\n\n return Err(UriError::MissingObjectBucket);\n\n }\n\n };\n\n let key = match path_parts.next() {\n\n Some(x) => x,\n", "file_path": "rust/src/storage/mod.rs", "rank": 18, "score": 38410.38249773686 }, { "content": "#[inline]\n\npub fn atomic_rename(from: &str, to: &str, swap: bool) -> Result<(), StorageError> {\n\n imp::atomic_rename(from, to, swap)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::File;\n\n use std::io::Write;\n\n use std::path::{Path, PathBuf};\n\n\n\n #[test]\n\n fn test_atomic_rename() {\n\n let tmp_dir = tempdir::TempDir::new_in(\".\", \"test_atomic_rename\").unwrap();\n\n let a = create_file(&tmp_dir.path(), \"a\");\n\n let b = create_file(&tmp_dir.path(), \"b\");\n\n let c = &tmp_dir.path().join(\"c\");\n\n\n\n // unsuccessful move not_exists to C, not_exists is missing\n\n match atomic_rename(\"not_exists\", c.to_str().unwrap(), false) {\n", "file_path": "rust/src/storage/file/rename.rs", "rank": 19, "score": 36850.33403995465 }, { "content": "/// Dynamically construct a Storage backend trait object based on scheme for provided URI\n\npub fn get_backend_for_uri(uri: &str) -> Result<Box<dyn StorageBackend>, StorageError> {\n\n match parse_uri(uri)? 
{\n\n Uri::LocalPath(root) => Ok(Box::new(file::FileStorageBackend::new(root))),\n\n #[cfg(any(feature = \"s3\", feature = \"s3-rustls\"))]\n\n Uri::S3Object(_) => Ok(Box::new(s3::S3StorageBackend::new()?)),\n\n #[cfg(feature = \"azure\")]\n\n Uri::AdlsGen2Object(obj) => Ok(Box::new(azure::AdlsGen2Backend::new(obj.file_system)?)),\n\n #[cfg(feature = \"gcs\")]\n\n Uri::GCSObject(_) => Ok(Box::new(gcs::GCSStorageBackend::new()?)),\n\n }\n\n}\n", "file_path": "rust/src/storage/mod.rs", "rank": 20, "score": 36270.40441970233 }, { "content": "fn create_s3_client(region: Region) -> Result<S3Client, StorageError> {\n\n if std::env::var(AWS_WEB_IDENTITY_TOKEN_FILE).is_ok() {\n\n let provider = get_web_identity_provider()?;\n\n Ok(S3Client::new_with(HttpClient::new()?, provider, region))\n\n } else if std::env::var(AWS_S3_ASSUME_ROLE_ARN).is_ok() {\n\n let provider = get_sts_assume_role_provider(region.clone())?;\n\n Ok(S3Client::new_with(HttpClient::new()?, provider, region))\n\n } else {\n\n Ok(S3Client::new(region))\n\n }\n\n}\n\n\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 21, "score": 36056.96631649824 }, { "content": "use std::fmt;\n\n\n\n/// Struct describing an object stored in GCS.\n\n#[derive(Debug)]\n\npub struct GCSObject<'a> {\n\n /// The bucket where the object is stored.\n\n pub bucket: tame_gcs::BucketName<'a>,\n\n /// The path of the object within the bucket.\n\n pub path: tame_gcs::ObjectName<'a>,\n\n}\n\n\n\nimpl<'a> GCSObject<'a> {\n\n //// Create a new GCSObject from a bucket and path.\n\n pub(crate) fn new(bucket: &'a str, path: &'a str) -> Self {\n\n // We do not validate the input strings here\n\n // as it is expected that they are correctly parsed and validated a level up in the\n\n // storage module\n\n GCSObject {\n\n bucket: tame_gcs::BucketName::non_validated(bucket),\n\n path: tame_gcs::ObjectName::non_validated(path),\n", "file_path": "rust/src/storage/gcs/object.rs", "rank": 22, "score": 35466.781446714835 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for GCSObject<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"gs://{}/{}\", self.bucket, self.path)\n\n }\n\n}\n\n\n\nimpl<'a> AsRef<tame_gcs::BucketName<'a>> for GCSObject<'a> {\n\n fn as_ref(&self) -> &tame_gcs::BucketName<'a> {\n\n &self.bucket\n\n }\n\n}\n\n\n\nimpl<'a> AsRef<tame_gcs::ObjectName<'a>> for GCSObject<'a> {\n\n fn as_ref(&self) -> &tame_gcs::ObjectName<'a> {\n\n &self.path\n\n }\n\n}\n", "file_path": "rust/src/storage/gcs/object.rs", "rank": 23, "score": 35457.91776379713 }, { "content": "pub fn cleanup_dir_except<P: AsRef<Path>>(path: P, ignore_files: Vec<String>) {\n\n for p in fs::read_dir(path).unwrap() {\n\n if let Ok(d) = p {\n\n let path = d.path();\n\n let name = d.path().file_name().unwrap().to_str().unwrap().to_string();\n\n\n\n if !ignore_files.contains(&name) && !name.starts_with(\".\") {\n\n fs::remove_file(&path).unwrap();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rust/tests/fs_common/mod.rs", "rank": 32, "score": 34826.8944968079 }, { "content": "fn try_create_lock_client(region: Region) -> Result<Option<Box<dyn LockClient>>, StorageError> {\n\n let dispatcher = HttpClient::new()?;\n\n\n\n match std::env::var(\"AWS_S3_LOCKING_PROVIDER\") {\n\n Ok(p) if p.to_lowercase() == \"dynamodb\" => {\n\n let client = match std::env::var(\"AWS_WEB_IDENTITY_TOKEN_FILE\") {\n\n Ok(_) => rusoto_dynamodb::DynamoDbClient::new_with(\n\n dispatcher,\n\n get_web_identity_provider()?,\n\n region,\n\n ),\n\n Err(_) => 
rusoto_dynamodb::DynamoDbClient::new(region),\n\n };\n\n let client =\n\n dynamodb_lock::DynamoDbLockClient::new(client, dynamodb_lock::Options::default());\n\n Ok(Some(Box::new(client)))\n\n }\n\n _ => Ok(None),\n\n }\n\n}\n\n\n\n/// Abstraction over a distributive lock provider\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 33, "score": 33228.56728140278 }, { "content": "class StructType(DataType):\n\n \"\"\"Concrete class for struct data types.\"\"\"\n\n\n\n fields: List[\"Field\"]\n\n type: str\n\n\n\n def __init__(self, fields: List[\"Field\"]):\n\n super().__init__(\"struct\")\n\n self.fields = fields\n\n\n\n def __str__(self) -> str:\n\n field_strs = [str(f) for f in self.fields]\n", "file_path": "python/deltalake/schema.py", "rank": 34, "score": 30369.79309946432 }, { "content": "fn get_last_checkpoint_version(log_path: &PathBuf) -> i64 {\n\n let last_checkpoint_path = log_path.join(\"_last_checkpoint\");\n\n assert!(last_checkpoint_path.as_path().exists());\n\n\n\n let last_checkpoint_content = fs::read_to_string(last_checkpoint_path.as_path()).unwrap();\n\n let last_checkpoint_content: serde_json::Value =\n\n serde_json::from_str(last_checkpoint_content.trim()).unwrap();\n\n\n\n last_checkpoint_content\n\n .get(\"version\")\n\n .unwrap()\n\n .as_i64()\n\n .unwrap()\n\n}\n\n\n", "file_path": "rust/tests/checkpoint_writer_test.rs", "rank": 35, "score": 19577.47240278698 }, { "content": "use rusoto_core::Region;\n\nuse rusoto_s3::{DeleteObjectRequest, ListObjectsV2Request, S3Client, S3};\n\n\n\npub const ENDPOINT: &str = \"http://localhost:4566\";\n\n\n", "file_path": "rust/tests/s3_common/mod.rs", "rank": 36, "score": 16.408292614201848 }, { "content": "//! Google Cloud Storage backend.\n\n//!\n\n//! This module is gated behind the \"gcs\" feature. Its usage also requires\n\n//! the `SERVICE_ACCOUNT` environment variables to be set to the path of\n\n//! credentials with permission to read from the bucket.\n\n\n\nmod client;\n\nmod error;\n\nmod object;\n\nmod util;\n\n\n\n// Exports\n\npub(crate) use client::GCSStorageBackend;\n\npub(crate) use error::GCSClientError;\n\npub(crate) use object::GCSObject;\n\n\n\nuse futures::Stream;\n\nuse std::convert::TryInto;\n\nuse std::pin::Pin;\n\n\n", "file_path": "rust/src/storage/gcs/mod.rs", "rank": 37, "score": 16.08504098802233 }, { "content": "use super::{GCSClientError, GCSStorageBackend};\n\n/// This code is largely duplicated from https://github.com/EmbarkStudios/gsutil\n\nuse bytes::BufMut;\n\nuse futures::StreamExt;\n\nuse std::convert::TryInto;\n\nuse std::iter::Iterator;\n\nuse tame_gcs::http;\n\nuse tame_oauth::gcp as oauth;\n\n\n\nasync fn get_token(backend: &GCSStorageBackend) -> Result<tame_oauth::Token, GCSClientError> {\n\n Ok(\n\n match backend.auth.get_token(&[tame_gcs::Scopes::ReadWrite])? {\n\n oauth::TokenOrRequest::Token(token) => token,\n\n oauth::TokenOrRequest::Request {\n\n request,\n\n scope_hash,\n\n ..\n\n } => {\n\n let (parts, body) = request.into_parts();\n\n let read_body = std::io::Cursor::new(body);\n", "file_path": "rust/src/storage/gcs/util.rs", "rank": 38, "score": 14.721182206320755 }, { "content": "//! AWS S3 storage backend. 
It only supports a single writer and is not multi-writer safe.\n\n\n\nuse std::convert::TryFrom;\n\nuse std::fmt::Debug;\n\nuse std::{fmt, pin::Pin};\n\n\n\nuse chrono::{DateTime, FixedOffset, Utc};\n\nuse futures::Stream;\n\nuse log::debug;\n\nuse rusoto_core::{HttpClient, Region, RusotoError};\n\nuse rusoto_credential::AutoRefreshingProvider;\n\nuse rusoto_s3::{\n\n CopyObjectRequest, Delete, DeleteObjectRequest, DeleteObjectsRequest, GetObjectRequest,\n\n HeadObjectRequest, ListObjectsV2Request, ObjectIdentifier, PutObjectRequest, S3Client, S3,\n\n};\n\nuse rusoto_sts::{StsAssumeRoleSessionCredentialsProvider, StsClient, WebIdentityProvider};\n\nuse serde::{Deserialize, Serialize};\n\nuse tokio::io::AsyncReadExt;\n\n\n\nuse super::{parse_uri, ObjectMeta, StorageBackend, StorageError};\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 39, "score": 13.5933594064947 }, { "content": "/// Error enum that represents an issue encountered\n\n/// during interaction with the GCS service\n\n#[derive(thiserror::Error, Debug)]\n\npub enum GCSClientError {\n\n #[error(\"Authentication error: {source}\")]\n\n AuthError {\n\n #[from]\n\n source: tame_oauth::Error,\n\n },\n\n\n\n #[error(\"Error interacting with GCS: {source}\")]\n\n GCSError {\n\n #[from]\n\n source: tame_gcs::Error,\n\n },\n\n\n\n #[error(\"Reqwest error: {source}\")]\n\n ReqwestError {\n\n #[from]\n\n source: reqwest::Error,\n", "file_path": "rust/src/storage/gcs/error.rs", "rank": 40, "score": 11.848251010557822 }, { "content": "//! Object storage backend abstraction layer for Delta Table transaction logs and data\n\n\n\nuse std::fmt::Debug;\n\nuse std::pin::Pin;\n\n\n\nuse chrono::{DateTime, Utc};\n\nuse futures::Stream;\n\n\n\n#[cfg(feature = \"azure\")]\n\nuse azure_core::errors::AzureError;\n\n#[cfg(feature = \"azure\")]\n\nuse std::error::Error;\n\n\n\n#[cfg(feature = \"azure\")]\n\npub mod azure;\n\npub mod file;\n\n#[cfg(any(feature = \"gcs\"))]\n\npub mod gcs;\n\n#[cfg(any(feature = \"s3\", feature = \"s3-rustls\"))]\n\npub mod s3;\n", "file_path": "rust/src/storage/mod.rs", "rank": 41, "score": 11.83686160896464 }, { "content": "#![recursion_limit = \"1024\"]\n\n\n\nextern crate deltalake;\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\n#[macro_use]\n\nextern crate rutie;\n\n\n\nuse deltalake::DeltaTable;\n\nuse rutie::{AnyObject, Array, Class, Integer, Object, RString};\n\nuse std::sync::Arc;\n\n\n\npub struct TableData {\n\n table_uri: String,\n\n actual: Arc<DeltaTable>,\n\n}\n\n\n\nimpl TableData {\n", "file_path": "ruby/src/lib.rs", "rank": 42, "score": 11.834225889114808 }, { "content": "//! The Azure Data Lake Storage Gen2 storage backend. It currently only supports read operations.\n\n//!\n\n//! This module is gated behind the \"azure\" feature. Its usage also requires\n\n//! the `AZURE_STORAGE_ACCOUNT` and `AZURE_STORAGE_KEY` environment variables\n\n//! 
to be set to the name and key of the Azure Storage Account, respectively.\n\n\n\nuse std::error::Error;\n\nuse std::sync::Arc;\n\nuse std::{env, fmt, pin::Pin};\n\n\n\nuse azure_core::errors::AzureError;\n\nuse azure_core::prelude::*;\n\nuse azure_storage::clients::{\n\n AsBlobClient, AsContainerClient, AsStorageClient, ContainerClient, StorageAccountClient,\n\n};\n\nuse futures::stream::{Stream, TryStreamExt};\n\nuse log::debug;\n\n\n\nuse super::{parse_uri, ObjectMeta, StorageBackend, StorageError, UriError};\n\n\n", "file_path": "rust/src/storage/azure.rs", "rank": 43, "score": 11.781003832265966 }, { "content": "\n\n fn try_from(obj: rusoto_s3::Object) -> Result<Self, Self::Error> {\n\n Ok(ObjectMeta {\n\n path: obj.key.ok_or_else(|| {\n\n StorageError::S3Generic(\"S3 Object missing key attribute\".to_string())\n\n })?,\n\n modified: parse_obj_last_modified_time(&obj.last_modified)?,\n\n })\n\n }\n\n}\n\n\n\n/// Struct describing an object stored in S3.\n\n#[derive(Debug, PartialEq)]\n\npub struct S3Object<'a> {\n\n /// The bucket where the object is stored.\n\n pub bucket: &'a str,\n\n /// The key of the object within the bucket.\n\n pub key: &'a str,\n\n}\n\n\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 44, "score": 11.580471096628463 }, { "content": "//! Distributed lock backed by Dynamodb.\n\n//! Adapted from https://github.com/awslabs/amazon-dynamodb-lock-client.\n\n\n\nuse std::collections::HashMap;\n\nuse std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};\n\n\n\nuse crate::storage::s3::{LockClient, LockItem, StorageError};\n\nuse maplit::hashmap;\n\nuse rusoto_core::RusotoError;\n\nuse rusoto_dynamodb::*;\n\nuse uuid::Uuid;\n\n\n\nmod options {\n\n /// Environment variable for `partition_key_value` option.\n\n pub const PARTITION_KEY_VALUE: &str = \"DYNAMO_LOCK_PARTITION_KEY_VALUE\";\n\n /// Environment variable for `table_name` option.\n\n pub const TABLE_NAME: &str = \"DYNAMO_LOCK_TABLE_NAME\";\n\n /// Environment variable for `owner_name` option.\n\n pub const OWNER_NAME: &str = \"DYNAMO_LOCK_OWNER_NAME\";\n\n /// Environment variable for `lease_duration` option.\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 45, "score": 11.326675980908917 }, { "content": "\n\n Ok(builder.body(buffer.freeze())?)\n\n}\n\n\n\n/// Executes a GCS request via a reqwest client and returns the parsed response/API error\n\npub async fn execute<B, R>(\n\n ctx: &GCSStorageBackend,\n\n mut req: http::Request<B>,\n\n) -> Result<R, GCSClientError>\n\nwhere\n\n R: tame_gcs::ApiResponse<bytes::Bytes>,\n\n B: std::io::Read + Send + 'static,\n\n{\n\n // First, get our oauth token, which can mean we have to do an additional\n\n // request if we've never retrieved one yet, or the one we are using has expired\n\n let token = get_token(ctx).await?;\n\n\n\n // Add the authorization token, note that the tame-oauth crate will automatically\n\n // set the HeaderValue correctly, in the GCP case this is usually \"Bearer <token>\"\n\n req.headers_mut()\n", "file_path": "rust/src/storage/gcs/util.rs", "rank": 46, "score": 10.656232559071912 }, { "content": " pub const LEASE_DURATION: &str = \"DYNAMO_LOCK_LEASE_DURATION\";\n\n /// Environment variable for `refresh_period` option.\n\n pub const REFRESH_PERIOD_MILLIS: &str = \"DYNAMO_LOCK_REFRESH_PERIOD_MILLIS\";\n\n /// Environment variable for `additional_time_to_wait_for_lock` option.\n\n pub const ADDITIONAL_TIME_TO_WAIT_MILLIS: &str = \"DYNAMO_LOCK_ADDITIONAL_TIME_TO_WAIT_MILLIS\";\n\n}\n\n\n\n/// Configuration options for 
[`DynamoDbLockClient`].\n\n#[derive(Clone, Debug)]\n\npub struct Options {\n\n /// Partition key value of DynamoDB table,\n\n /// should be the same among the clients which work with the lock.\n\n pub partition_key_value: String,\n\n /// The DynamoDB table name, should be the same among the clients which work with the lock.\n\n /// The table has to be created if it not exists before using it with DynamoDB locking API.\n\n pub table_name: String,\n\n /// Owner name, should be unique among the clients which work with the lock.\n\n pub owner_name: String,\n\n /// The amount of time (in seconds) that the owner has for the acquired lock.\n\n pub lease_duration: u64,\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 47, "score": 10.581201524958388 }, { "content": "use log::debug;\n\n\n\nuse super::{parse_uri, ObjectMeta, StorageBackend, StorageError};\n\n\n\nimpl GCSStorageBackend {\n\n pub(crate) fn new() -> Result<Self, StorageError> {\n\n let cred_path = std::env::var(\"SERVICE_ACCOUNT\")\n\n .map(std::path::PathBuf::from)\n\n .map_err(|_err| {\n\n StorageError::GCSConfig(\n\n \"SERVICE_ACCOUNT environment variable must be set\".to_string(),\n\n )\n\n })?;\n\n\n\n Ok(cred_path.try_into()?)\n\n }\n\n}\n\n\n\nimpl From<tame_gcs::objects::Metadata> for ObjectMeta {\n\n fn from(metadata: tame_gcs::objects::Metadata) -> ObjectMeta {\n", "file_path": "rust/src/storage/gcs/mod.rs", "rank": 48, "score": 10.219750363862099 }, { "content": "impl<'a> fmt::Display for S3Object<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"s3://{}/{}\", self.bucket, self.key)\n\n }\n\n}\n\n\n\n/// An S3 implementation of the `StorageBackend` trait\n\npub struct S3StorageBackend {\n\n client: rusoto_s3::S3Client,\n\n lock_client: Option<Box<dyn LockClient>>,\n\n}\n\n\n\nimpl S3StorageBackend {\n\n /// Creates a new S3StorageBackend.\n\n pub fn new() -> Result<Self, StorageError> {\n\n let region = if let Ok(url) = std::env::var(\"AWS_ENDPOINT_URL\") {\n\n Region::Custom {\n\n name: std::env::var(\"AWS_REGION\").unwrap_or_else(|_| \"custom\".to_string()),\n\n endpoint: url,\n\n }\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 49, "score": 10.211320192004859 }, { "content": "use rusoto_core::Region;\n\nuse rusoto_s3::{GetObjectRequest, PutObjectRequest, S3Client, S3};\n\n\n\n#[tokio::test]\n\nasync fn lambda_checkpoint_smoke_test() {\n\n std::env::set_var(\"AWS_REGION\", \"us-east-2\");\n\n std::env::set_var(\"AWS_ACCESS_KEY_ID\", \"test\");\n\n std::env::set_var(\"AWS_SECRET_ACCESS_KEY\", \"test\");\n\n\n\n // CI sets the endpoint URL differently.\n\n // Set to localhost if not present.\n\n if let Err(_) = std::env::var(\"AWS_ENDPOINT_URL\") {\n\n std::env::set_var(\"AWS_ENDPOINT_URL\", \"http://localhost:4566\");\n\n }\n\n let region = Region::Custom {\n\n name: \"custom\".to_string(),\n\n endpoint: std::env::var(\"AWS_ENDPOINT_URL\").unwrap(),\n\n };\n\n let client = S3Client::new(region);\n\n\n", "file_path": "aws/delta-checkpoint/tests/lambda_checkpoint.rs", "rank": 50, "score": 10.102460804490171 }, { "content": "pub mod checkpoints;\n\nmod delta;\n\npub mod delta_arrow;\n\npub mod partitions;\n\nmod schema;\n\npub mod storage;\n\npub mod writer;\n\n\n\n#[cfg(feature = \"datafusion-ext\")]\n\npub mod delta_datafusion;\n\n\n\n#[cfg(feature = \"rust-dataframe-ext\")]\n\nmod delta_dataframe;\n\n\n\npub use self::delta::*;\n\npub use self::partitions::*;\n\npub use self::schema::*;\n\npub use self::storage::{\n\n get_backend_for_uri, parse_uri, StorageBackend, 
StorageError, Uri, UriError,\n\n};\n", "file_path": "rust/src/lib.rs", "rank": 51, "score": 9.890239034101917 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Default for DeltaTransactionOptions {\n\n fn default() -> Self {\n\n Self {\n\n max_retry_commit_attempts: DEFAULT_DELTA_MAX_RETRY_COMMIT_ATTEMPTS,\n\n }\n\n }\n\n}\n\n\n\n/// Object representing a delta transaction.\n\n/// Clients that do not need to mutate action content in case a transaction conflict is encountered\n\n/// may use the `commit` method and rely on optimistic concurrency to determine the\n\n/// appropriate Delta version number for a commit. A good example of this type of client is an\n\n/// append only client that does not need to maintain transaction state with external systems.\n\n/// Clients that may need to do conflict resolution if the Delta version changes should use\n\n/// the `prepare_commit` and `try_commit_transaction` methods and manage the Delta version\n\n/// themselves so that they can resolve data conflicts that may occur between Delta versions.\n", "file_path": "rust/src/delta.rs", "rank": 52, "score": 9.848403108214452 }, { "content": " ) -> Result<\n\n Pin<Box<dyn Stream<Item = Result<ObjectMeta, StorageError>> + Send + 'a>>,\n\n StorageError,\n\n > {\n\n let uri = parse_uri(path)?.into_s3object()?;\n\n\n\n /// This enum is used to represent 3 states in our object metadata streaming logic:\n\n /// * Value(None): the initial state, prior to performing any s3 list call.\n\n /// * Value(Some(String)): s3 list call returned us a continuation token to be used in\n\n /// subsequent list call after we got through the current page.\n\n /// * End: previous s3 list call reached end of page, we should not perform more s3 list\n\n /// call going forward.\n\n enum ContinuationToken {\n\n Value(Option<String>),\n\n End,\n\n }\n\n\n\n struct ListContext {\n\n client: rusoto_s3::S3Client,\n\n obj_iter: std::vec::IntoIter<rusoto_s3::Object>,\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 53, "score": 9.75173665621039 }, { "content": "//! 
Delta Table partition handling logic.\n\n\n\nuse std::convert::TryFrom;\n\n\n\nuse crate::DeltaTableError;\n\n\n\n/// A Enum used for selecting the partition value operation when filtering a DeltaTable partition.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum PartitionValue<T> {\n\n /// The partition value with the equal operator\n\n Equal(T),\n\n /// The partition value with the not equal operator\n\n NotEqual(T),\n\n /// The partition values with the in operator\n\n In(Vec<T>),\n\n /// The partition values with the not in operator\n\n NotIn(Vec<T>),\n\n}\n\n\n\n/// A Struct used for filtering a DeltaTable partition by key and value.\n", "file_path": "rust/src/partitions.rs", "rank": 54, "score": 9.553913336549698 }, { "content": " };\n\n client.delete_object(req).await.unwrap();\n\n }\n\n }\n\n}\n\n\n\nasync fn list_objects(client: &S3Client, bucket: &str, prefix: &str) -> Vec<String> {\n\n let mut list = Vec::new();\n\n let result = client\n\n .list_objects_v2(ListObjectsV2Request {\n\n bucket: bucket.to_string(),\n\n prefix: Some(prefix.to_string()),\n\n ..Default::default()\n\n })\n\n .await\n\n .unwrap();\n\n\n\n if let Some(contents) = result.contents {\n\n for obj in contents {\n\n list.push(obj.key.unwrap());\n\n }\n\n }\n\n\n\n list\n\n}\n", "file_path": "rust/tests/s3_common/mod.rs", "rank": 55, "score": 9.500423092107074 }, { "content": "/// to use it for managing distributed locks.\n\npub struct DynamoDbLockClient {\n\n client: DynamoDbClient,\n\n opts: Options,\n\n}\n\n\n\nimpl std::fmt::Debug for DynamoDbLockClient {\n\n fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n write!(fmt, \"DynamoDbLockClient\")\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl LockClient for DynamoDbLockClient {\n\n async fn try_acquire_lock(&self, data: &str) -> Result<Option<LockItem>, StorageError> {\n\n Ok(self.try_acquire_lock(Some(data)).await?)\n\n }\n\n\n\n async fn get_lock(&self) -> Result<Option<LockItem>, StorageError> {\n\n Ok(self.get_lock().await?)\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 56, "score": 9.477980282740795 }, { "content": " }\n\n}\n\n\n\n/// Action used to increase the version of the Delta protocol required to read or write to the\n\n/// table.\n\n#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Protocol {\n\n /// Minimum version of the Delta read protocol a client must implement to correctly read the\n\n /// table.\n\n pub min_reader_version: DeltaDataTypeInt,\n\n /// Minimum version of the Delta write protocol a client must implement to correctly read the\n\n /// table.\n\n pub min_writer_version: DeltaDataTypeInt,\n\n}\n\n\n\nimpl Protocol {\n\n fn from_parquet_record(record: &parquet::record::Row) -> Result<Self, ActionError> {\n\n let mut re = Self {\n\n ..Default::default()\n", "file_path": "rust/src/action.rs", "rank": 57, "score": 9.450992228113225 }, { "content": " ///\n\n /// and will panic if both are unset. 
This also implies that the backend is\n\n /// only valid for a single Storage Account.\n\n pub fn new(container: &str) -> Result<Self, StorageError> {\n\n let http_client: Arc<Box<dyn HttpClient>> = Arc::new(Box::new(reqwest::Client::new()));\n\n\n\n let account_name = env::var(\"AZURE_STORAGE_ACCOUNT\").map_err(|_| {\n\n StorageError::AzureConfig(\"AZURE_STORAGE_ACCOUNT must be set\".to_string())\n\n })?;\n\n\n\n let storage_account_client = if let Ok(sas) = env::var(\"AZURE_STORAGE_SAS\") {\n\n debug!(\"Authenticating to Azure using SAS token\");\n\n StorageAccountClient::new_sas_token(http_client.clone(), &account_name, &sas)\n\n } else if let Ok(key) = env::var(\"AZURE_STORAGE_KEY\") {\n\n debug!(\"Authenticating to Azure using access key\");\n\n StorageAccountClient::new_access_key(http_client.clone(), &account_name, &key)\n\n } else {\n\n return Err(StorageError::AzureConfig(\n\n \"Either AZURE_STORAGE_SAS or AZURE_STORAGE_KEY must be set\".to_string(),\n\n ));\n", "file_path": "rust/src/storage/azure.rs", "rank": 58, "score": 9.382417835029493 }, { "content": " match self {\n\n ColumnValueStat::Column(m) => Some(m),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns the serde_json representation of the ColumnValueStat.\n\n pub fn as_value(&self) -> Option<&serde_json::Value> {\n\n match self {\n\n ColumnValueStat::Value(v) => Some(v),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n/// Struct used to represent nullCount in add action statistics.\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]\n\n#[serde(untagged)]\n\npub enum ColumnCountStat {\n\n /// Composite HashMap representation of statistics.\n", "file_path": "rust/src/action.rs", "rank": 59, "score": 9.381405571267658 }, { "content": " }\n\n}\n\n\n\n/// A storage backend backed by an Azure Data Lake Storage Gen2 account.\n\n///\n\n/// This uses the `dfs.core.windows.net` endpoint.\n\n#[derive(Debug)]\n\npub struct AdlsGen2Backend {\n\n account: String,\n\n container_client: Arc<ContainerClient>,\n\n}\n\n\n\nimpl AdlsGen2Backend {\n\n /// Create a new [`AdlsGen2Backend`].\n\n ///\n\n /// This currently requires the `AZURE_STORAGE_ACCOUNT` and one of the\n\n /// following environment variables to be set:\n\n ///\n\n /// - `AZURE_STORAGE_SAS`\n\n /// - `AZURE_STORAGE_KEY`\n", "file_path": "rust/src/storage/azure.rs", "rank": 60, "score": 9.239704082031615 }, { "content": "#![allow(non_snake_case, non_camel_case_types)]\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n/// Type alias for a string expected to match a GUID/UUID format\n\npub type Guid = String;\n\n/// Type alias for i64/Delta long\n\npub type DeltaDataTypeLong = i64;\n\n/// Type alias representing the expected type (i64) of a Delta table version.\n\npub type DeltaDataTypeVersion = DeltaDataTypeLong;\n\n/// Type alias representing the expected type (i64/ms since Unix epoch) of a Delta timestamp.\n\npub type DeltaDataTypeTimestamp = DeltaDataTypeLong;\n\n/// Type alias for i32/Delta int\n\npub type DeltaDataTypeInt = i32;\n\n\n\n/// Represents a struct field defined in the Delta table schema.\n\n// https://github.com/delta-io/delta/blob/master/PROTOCOL.md#Schema-Serialization-Format\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Default, Clone)]\n\npub struct SchemaTypeStruct {\n", "file_path": "rust/src/schema.rs", "rank": 61, "score": 8.93681938901111 }, { "content": "#[cfg(feature = \"s3\")]\n\n#[allow(dead_code)]\n\nmod s3_common;\n\n\n\n#[cfg(feature = \"s3\")]\n\nmod s3 {\n\n\n\n use crate::s3_common;\n\n use 
deltalake::storage::s3::{dynamodb_lock, S3StorageBackend};\n\n use deltalake::{StorageBackend, StorageError};\n\n use rusoto_core::credential::ChainProvider;\n\n use rusoto_core::request::DispatchSignedRequestFuture;\n\n use rusoto_core::signature::SignedRequest;\n\n use rusoto_core::{DispatchSignedRequest, HttpClient};\n\n use rusoto_s3::S3Client;\n\n use serial_test::serial;\n\n use std::sync::{Arc, Mutex};\n\n use tokio::task::JoinHandle;\n\n use tokio::time::Duration;\n\n\n", "file_path": "rust/tests/repair_s3_rename_test.rs", "rank": 62, "score": 8.60422650397113 }, { "content": " None => false,\n\n Some(lease_duration) => {\n\n now_millis() - self.lookup_time > (lease_duration as u128) * 1000\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Error returned by the [`DynamoDbLockClient`] API.\n\n#[derive(thiserror::Error, Debug)]\n\npub enum DynamoError {\n\n /// Error caused by the DynamoDB table not being created.\n\n #[error(\"Dynamo table not found\")]\n\n TableNotFound,\n\n\n\n /// Error that indicates the condition in the DynamoDB operation could not be evaluated.\n\n /// Mostly used by [`DynamoDbLockClient::acquire_lock`] to handle unsuccessful retries\n\n /// of acquiring the lock.\n\n #[error(\"Conditional check failed\")]\n\n ConditionalCheckFailed,\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 63, "score": 8.583546880935547 }, { "content": " } else {\n\n Region::default()\n\n };\n\n\n\n let client = create_s3_client(region.clone())?;\n\n let lock_client = try_create_lock_client(region)?;\n\n\n\n Ok(Self {\n\n client,\n\n lock_client,\n\n })\n\n }\n\n\n\n /// Creates a new S3StorageBackend with given s3 and lock clients.\n\n pub fn new_with(client: rusoto_s3::S3Client, lock_client: Option<Box<dyn LockClient>>) -> Self {\n\n Self {\n\n client,\n\n lock_client,\n\n }\n\n }\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 64, "score": 8.539781775540057 }, { "content": "use uuid::Uuid;\n\n\n\npub mod dynamodb_lock;\n\n\n\nconst AWS_S3_ASSUME_ROLE_ARN: &str = \"AWS_S3_ASSUME_ROLE_ARN\";\n\nconst AWS_S3_ROLE_SESSION_NAME: &str = \"AWS_S3_ROLE_SESSION_NAME\";\n\nconst AWS_WEB_IDENTITY_TOKEN_FILE: &str = \"AWS_WEB_IDENTITY_TOKEN_FILE\";\n\n\n\nimpl From<RusotoError<rusoto_s3::GetObjectError>> for StorageError {\n\n fn from(error: RusotoError<rusoto_s3::GetObjectError>) -> Self {\n\n match error {\n\n RusotoError::Service(rusoto_s3::GetObjectError::NoSuchKey(_)) => StorageError::NotFound,\n\n _ => StorageError::S3Get { source: error },\n\n }\n\n }\n\n}\n\n\n\nimpl From<RusotoError<rusoto_s3::HeadObjectError>> for StorageError {\n\n fn from(error: RusotoError<rusoto_s3::HeadObjectError>) -> Self {\n\n match error {\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 65, "score": 8.314456439335991 }, { "content": " .insert(http::header::AUTHORIZATION, token.try_into()?);\n\n\n\n let request = convert_request(req, &ctx.client).await?;\n\n let response = ctx.client.execute(request).await?;\n\n let response = convert_response(response).await?;\n\n\n\n Ok(R::try_from_parts(response)?)\n\n}\n\n\n\nuse http::status::StatusCode;\n\nuse tame_gcs::error::HttpStatusError;\n", "file_path": "rust/src/storage/gcs/util.rs", "rank": 66, "score": 8.240162042913903 }, { "content": " /// Converts the URI to an S3Object. 
Returns UriError if the URI is not valid for the S3\n\n /// backend.\n\n #[cfg(any(feature = \"s3\", feature = \"s3-rustls\"))]\n\n pub fn into_s3object(self) -> Result<s3::S3Object<'a>, UriError> {\n\n match self {\n\n Uri::S3Object(x) => Ok(x),\n\n #[cfg(feature = \"azure\")]\n\n Uri::AdlsGen2Object(x) => Err(UriError::ExpectedS3Uri(x.to_string())),\n\n #[cfg(feature = \"gcs\")]\n\n Uri::GCSObject(x) => Err(UriError::ExpectedS3Uri(x.to_string())),\n\n Uri::LocalPath(x) => Err(UriError::ExpectedS3Uri(x.to_string())),\n\n }\n\n }\n\n\n\n /// Converts the URI to an AdlsGen2Object. Returns UriError if the URI is not valid for the\n\n /// Azure backend.\n\n #[cfg(feature = \"azure\")]\n\n pub fn into_adlsgen2_object(self) -> Result<azure::AdlsGen2Object<'a>, UriError> {\n\n match self {\n\n Uri::AdlsGen2Object(x) => Ok(x),\n", "file_path": "rust/src/storage/mod.rs", "rank": 67, "score": 8.035874652389932 }, { "content": " std::thread::sleep(std::time::Duration::from_secs(40));\n\n\n\n // verify the checkpoint was created\n\n let request = GetObjectRequest {\n\n bucket: \"delta-checkpoint\".to_string(),\n\n key: \"checkpoint-test/_delta_log/00000000000000000010.checkpoint.parquet\".to_string(),\n\n ..Default::default()\n\n };\n\n let result = client.get_object(request).await;\n\n\n\n let _ = result.unwrap();\n\n\n\n // verify the _last_checkpoint file was created\n\n let request = GetObjectRequest {\n\n bucket: \"delta-checkpoint\".to_string(),\n\n key: \"checkpoint-test/_delta_log/_last_checkpoint\".to_string(),\n\n ..Default::default()\n\n };\n\n let result = client.get_object(request).await;\n\n\n\n assert!(result.is_ok());\n\n}\n", "file_path": "aws/delta-checkpoint/tests/lambda_checkpoint.rs", "rank": 68, "score": 8.031892697865594 }, { "content": " record\n\n );\n\n }\n\n }\n\n }\n\n\n\n Ok(re)\n\n }\n\n}\n\n\n\n/// Action used by streaming systems to track progress using application-specific versions to\n\n/// enable idempotency.\n\n#[derive(Serialize, Deserialize, Debug, Default, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Txn {\n\n /// A unique identifier for the application performing the transaction.\n\n pub app_id: String,\n\n /// An application-specific numeric identifier for this transaction.\n\n pub version: DeltaDataTypeVersion,\n\n /// The time when this transaction action was created in milliseconds since the Unix epoch.\n", "file_path": "rust/src/action.rs", "rank": 69, "score": 7.971629905363143 }, { "content": " };\n\n\n\n Ok(Self {\n\n account: account_name,\n\n container_client: storage_account_client\n\n .as_storage_client()\n\n .as_container_client(container),\n\n })\n\n }\n\n\n\n fn validate_container<'a>(&self, obj: &AdlsGen2Object<'a>) -> Result<(), StorageError> {\n\n if obj.file_system != self.container_client.container_name() {\n\n Err(StorageError::Uri {\n\n source: UriError::ContainerMismatch {\n\n expected: self.container_client.container_name().to_string(),\n\n got: obj.file_system.to_string(),\n\n },\n\n })\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/src/storage/azure.rs", "rank": 70, "score": 7.938830417870983 }, { "content": " // S3 has a maximum of 1000 files to delete\n\n let chunks = s3_objects.chunks(1000);\n\n for chunk in chunks {\n\n let delete = Delete {\n\n objects: chunk\n\n .iter()\n\n .map(|obj| ObjectIdentifier {\n\n key: obj.key.to_string(),\n\n ..Default::default()\n\n })\n\n .collect(),\n\n ..Default::default()\n\n };\n\n let delete_req = DeleteObjectsRequest {\n\n bucket: bucket.to_string(),\n\n 
delete,\n\n ..Default::default()\n\n };\n\n self.client.delete_objects(delete_req).await?;\n\n }\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 71, "score": 7.856219211999663 }, { "content": "/// An object on an Azure Data Lake Storage Gen2 account.\n\n#[derive(Debug, PartialEq)]\n\npub struct AdlsGen2Object<'a> {\n\n /// The storage account name.\n\n pub account_name: &'a str,\n\n /// The container, or filesystem, of the object.\n\n pub file_system: &'a str,\n\n /// The path of the object on the filesystem.\n\n pub path: &'a str,\n\n}\n\n\n\nimpl<'a> fmt::Display for AdlsGen2Object<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n // This URI syntax is documented at\n\n // https://docs.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-introduction-abfs-uri\n\n write!(\n\n f,\n\n \"abfss://{}@{}.dfs.core.windows.net/{}\",\n\n self.file_system, self.account_name, self.path\n\n )\n", "file_path": "rust/src/storage/azure.rs", "rank": 72, "score": 7.754785166795308 }, { "content": " ..Default::default()\n\n })\n\n .await?;\n\n\n\n self.client\n\n .delete_object(DeleteObjectRequest {\n\n bucket: src.bucket.to_string(),\n\n key: src.key.to_string(),\n\n ..Default::default()\n\n })\n\n .await?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Default for S3StorageBackend {\n\n fn default() -> Self {\n\n Self::new().unwrap()\n\n }\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 73, "score": 7.697902224826599 }, { "content": "//! Local file storage backend. This backend read and write objects from local filesystem.\n\n//!\n\n//! The local file storage backend is multi-writer safe.\n\n\n\nuse std::path::{Path, PathBuf};\n\nuse std::pin::Pin;\n\n\n\nuse chrono::DateTime;\n\nuse futures::{Stream, TryStreamExt};\n\nuse tokio::fs;\n\nuse tokio::io::AsyncWriteExt;\n\nuse tokio_stream::wrappers::ReadDirStream;\n\n\n\nuse super::{ObjectMeta, StorageBackend, StorageError};\n\nuse uuid::Uuid;\n\n\n\nmod rename;\n\n\n\n/// Multi-writer support for different platforms:\n\n///\n", "file_path": "rust/src/storage/file/mod.rs", "rank": 74, "score": 7.665653003085112 }, { "content": " },\n\n}\n\n\n\n/// Enum with variants representing each supported storage backend.\n\n#[derive(Debug)]\n\npub enum Uri<'a> {\n\n /// URI for local file system backend.\n\n LocalPath(&'a str),\n\n /// URI for S3 backend.\n\n #[cfg(any(feature = \"s3\", feature = \"s3-rustls\"))]\n\n S3Object(s3::S3Object<'a>),\n\n /// URI for Azure backend.\n\n #[cfg(feature = \"azure\")]\n\n AdlsGen2Object(azure::AdlsGen2Object<'a>),\n\n /// URI for GCS backend\n\n #[cfg(feature = \"gcs\")]\n\n GCSObject(gcs::GCSObject<'a>),\n\n}\n\n\n\nimpl<'a> Uri<'a> {\n", "file_path": "rust/src/storage/mod.rs", "rank": 75, "score": 7.661009988439713 }, { "content": "use parquet::{\n\n arrow::ArrowWriter,\n\n basic::Compression,\n\n errors::ParquetError,\n\n file::{properties::WriterProperties, writer::InMemoryWriteableCursor},\n\n};\n\nuse serde_json::{json, Value};\n\nuse std::collections::HashMap;\n\nuse std::fs;\n\nuse std::fs::File;\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n\nuse std::sync::Arc;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\nuse uuid::Uuid;\n\n\n\n#[derive(thiserror::Error, Debug)]\n\npub enum DeltaWriterError {\n\n #[error(\"Partition column contains more than one value\")]\n\n NonDistinctPartitionValue,\n", "file_path": "rust/tests/write_exploration.rs", "rank": 76, "score": 7.5374043602172005 }, { "content": " /// The size of this file in bytes\n\n pub size: DeltaDataTypeLong,\n\n /// A map from 
partition column to value for this file\n\n pub partition_values: HashMap<String, Option<String>>,\n\n /// Partition values stored in raw parquet struct format. In this struct, the column names\n\n /// correspond to the partition columns and the values are stored in their corresponding data\n\n /// type. This is a required field when the table is partitioned and the table property\n\n /// delta.checkpoint.writeStatsAsStruct is set to true. If the table is not partitioned, this\n\n /// column can be omitted.\n\n ///\n\n /// This field is only available in add action records read from checkpoints\n\n #[serde(skip_serializing, skip_deserializing)]\n\n pub partition_values_parsed: Option<parquet::record::Row>,\n\n /// The time this file was created, as milliseconds since the epoch\n\n pub modification_time: DeltaDataTypeTimestamp,\n\n /// When false the file must already be present in the table or the records in the added file\n\n /// must be contained in one or more remove actions in the same version\n\n ///\n\n /// streaming queries that are tailing the transaction log can use this flag to skip actions\n\n /// that would not affect the final results.\n", "file_path": "rust/src/action.rs", "rank": 77, "score": 7.471025482024899 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parse_azure_object_uri() {\n\n let uri = parse_uri(\"abfss://fs@sa.dfs.core.windows.net/foo\").unwrap();\n\n assert_eq!(uri.path(), \"foo\");\n\n assert_eq!(\n\n uri.into_adlsgen2_object().unwrap(),\n\n AdlsGen2Object {\n\n account_name: \"sa\",\n\n file_system: \"fs\",\n\n path: \"foo\",\n\n }\n\n );\n\n }\n\n}\n", "file_path": "rust/src/storage/azure.rs", "rank": 78, "score": 7.449262755334269 }, { "content": "use std::fmt;\n\nuse std::io::{BufRead, BufReader, Cursor};\n\nuse std::time::{Duration, SystemTime, UNIX_EPOCH};\n\nuse std::{cmp::Ordering, collections::HashSet};\n\nuse uuid::Uuid;\n\n\n\nuse crate::action::Stats;\n\n\n\nuse super::action;\n\nuse super::action::{Action, DeltaOperation};\n\nuse super::partitions::{DeltaTablePartition, PartitionFilter};\n\nuse super::schema::*;\n\nuse super::storage;\n\nuse super::storage::{parse_uri, StorageBackend, StorageError, UriError};\n\n\n\n/// Metadata for a checkpoint file\n\n#[derive(Serialize, Deserialize, Debug, Default, Clone, Copy)]\n\npub struct CheckPoint {\n\n /// Delta table version\n\n version: DeltaDataTypeVersion, // 20 digits decimals\n", "file_path": "rust/src/delta.rs", "rank": 79, "score": 7.37241290503348 }, { "content": "\n\n Ok(())\n\n }\n\n}\n\n\n\n/// A lock that has been successfully acquired\n\n#[derive(Clone, Debug)]\n\npub struct LockItem {\n\n /// The name of the owner that owns this lock.\n\n pub owner_name: String,\n\n /// Current version number of the lock in DynamoDB. 
This is what tells the lock client\n\n /// when the lock is stale.\n\n pub record_version_number: String,\n\n /// The amount of time (in seconds) that the owner has this lock for.\n\n /// If lease_duration is None then the lock is non-expirable.\n\n pub lease_duration: Option<u64>,\n\n /// Tells whether or not the lock was marked as released when loaded from DynamoDB.\n\n pub is_released: bool,\n\n /// Optional data associated with this lock.\n\n pub data: Option<String>,\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 80, "score": 7.2662494039634336 }, { "content": " let new_request = http::Request::from_parts(parts, read_body);\n\n\n\n let req = convert_request(new_request, &backend.client).await?;\n\n let res = backend.client.execute(req).await?;\n\n let response = convert_response(res).await?;\n\n backend.auth.parse_token_response(scope_hash, response)?\n\n }\n\n },\n\n )\n\n}\n\n\n\n/// Converts a vanilla `http::Request` into a `reqwest::Request`\n\nasync fn convert_request<B>(\n\n req: http::Request<B>,\n\n client: &reqwest::Client,\n\n) -> Result<reqwest::Request, GCSClientError>\n\nwhere\n\n B: std::io::Read + Send + 'static,\n\n{\n\n let (parts, mut body) = req.into_parts();\n", "file_path": "rust/src/storage/gcs/util.rs", "rank": 81, "score": 7.257564615342083 }, { "content": " pub const PK_EXISTS_AND_RVN_MATCHES: &str = \"attribute_exists(#pk) AND #rvn = :rvn\";\n\n\n\n /// The expression that checks whether the lock record exists,\n\n /// its record version number matches with the given one\n\n /// and its owner name matches with the given one.\n\n pub const PK_EXISTS_AND_OWNER_RVN_MATCHES: &str =\n\n \"attribute_exists(#pk) AND #rvn = :rvn AND #on = :on\";\n\n}\n\n\n\nmod vars {\n\n pub const PK_PATH: &str = \"#pk\";\n\n pub const RVN_PATH: &str = \"#rvn\";\n\n pub const RVN_VALUE: &str = \":rvn\";\n\n pub const IS_RELEASED_PATH: &str = \"#ir\";\n\n pub const IS_RELEASED_VALUE: &str = \":ir\";\n\n pub const OWNER_NAME_PATH: &str = \"#on\";\n\n pub const OWNER_NAME_VALUE: &str = \":on\";\n\n}\n\n\n\n/// Provides a simple library for using DynamoDB's consistent read/write feature\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 82, "score": 7.182828313212839 }, { "content": "}\n\n\n\n/// The OutputMode used in streaming operations.\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub enum OutputMode {\n\n /// Only new rows will be written when new data is available.\n\n Append,\n\n /// The full output (all rows) will be written whenever new data is available.\n\n Complete,\n\n /// Only rows with updates will be written when new or changed data is available.\n\n Update,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use parquet::file::reader::{FileReader, SerializedFileReader};\n\n use std::fs::File;\n\n\n\n #[test]\n", "file_path": "rust/src/action.rs", "rank": 83, "score": 7.149407615187351 }, { "content": " }\n\n\n\n async fn update_data(&self, lock: &LockItem) -> Result<LockItem, StorageError> {\n\n Ok(self.update_data(lock).await?)\n\n }\n\n\n\n async fn release_lock(&self, lock: &LockItem) -> Result<bool, StorageError> {\n\n Ok(self.release_lock(lock).await?)\n\n }\n\n}\n\n\n\nimpl DynamoDbLockClient {\n\n /// Creates new DynamoDB lock client\n\n pub fn new(client: DynamoDbClient, opts: Options) -> Self {\n\n Self { client, opts }\n\n }\n\n\n\n /// Attempts to acquire lock. 
If successful, returns the lock.\n\n /// Otherwise returns [`Option::None`] when the lock is stolen by someone else or max\n\n /// provisioned throughput for a table is exceeded. Both are retryable actions.\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 84, "score": 7.050322630578041 }, { "content": " }\n\n }\n\n _ => { /* noop */ }\n\n }\n\n } else {\n\n let next_context = context.get_mut(&path[0]).and_then(|v| v.as_object_mut());\n\n if let Some(next_context) = next_context {\n\n apply_stats_conversion(next_context, &path[1..], data_type);\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use lazy_static::lazy_static;\n\n\n\n #[test]\n\n fn typed_partition_value_from_string_test() {\n\n let string_value: Value = \"Hello World!\".into();\n", "file_path": "rust/src/checkpoints.rs", "rank": 85, "score": 7.045455848612784 }, { "content": " .as_blob_client(obj.path)\n\n .get()\n\n .execute()\n\n .await\n\n .map_err(to_storage_err)?\n\n .data)\n\n }\n\n\n\n async fn list_objs<'a>(\n\n &'a self,\n\n path: &'a str,\n\n ) -> Result<\n\n Pin<Box<dyn Stream<Item = Result<ObjectMeta, StorageError>> + Send + 'a>>,\n\n StorageError,\n\n > {\n\n debug!(\"Listing objects under {}\", path);\n\n let obj = parse_uri(path)?.into_adlsgen2_object()?;\n\n self.validate_container(&obj)?;\n\n\n\n let stream = self\n", "file_path": "rust/src/storage/azure.rs", "rank": 86, "score": 6.99477783582638 }, { "content": " }\n\n\n\n /// Converts the URI to an str representing a local file system path. Returns UriError if the\n\n /// URI is not valid for the file storage backend.\n\n pub fn into_localpath(self) -> Result<&'a str, UriError> {\n\n match self {\n\n Uri::LocalPath(x) => Ok(x),\n\n #[cfg(any(feature = \"s3\", feature = \"s3-rustls\"))]\n\n Uri::S3Object(x) => Err(UriError::ExpectedSLocalPathUri(format!(\"{}\", x))),\n\n #[cfg(feature = \"azure\")]\n\n Uri::AdlsGen2Object(x) => Err(UriError::ExpectedSLocalPathUri(format!(\"{}\", x))),\n\n #[cfg(feature = \"gcs\")]\n\n Uri::GCSObject(x) => Err(UriError::ExpectedSLocalPathUri(format!(\"{}\", x))),\n\n }\n\n }\n\n\n\n /// Return URI path component as String\n\n #[inline]\n\n pub fn path(&self) -> String {\n\n match self {\n", "file_path": "rust/src/storage/mod.rs", "rank": 87, "score": 6.9944226403278575 }, { "content": " continuation_token: ContinuationToken,\n\n bucket: String,\n\n key: String,\n\n }\n\n let ctx = ListContext {\n\n obj_iter: Vec::new().into_iter(),\n\n continuation_token: ContinuationToken::Value(None),\n\n bucket: uri.bucket.to_string(),\n\n key: uri.key.to_string(),\n\n client: self.client.clone(),\n\n };\n\n\n\n async fn next_meta(\n\n mut ctx: ListContext,\n\n ) -> Option<(Result<ObjectMeta, StorageError>, ListContext)> {\n\n match ctx.obj_iter.next() {\n\n Some(obj) => Some((ObjectMeta::try_from(obj), ctx)),\n\n None => match &ctx.continuation_token {\n\n ContinuationToken::End => None,\n\n ContinuationToken::Value(v) => {\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 88, "score": 6.9881072523126555 }, { "content": " let src_uri = parse_uri(src)?.into_gcs_object()?;\n\n let dst_uri = parse_uri(dst)?.into_gcs_object()?;\n\n match self.rename(src_uri, dst_uri).await {\n\n Err(GCSClientError::PreconditionFailed) => {\n\n return Err(StorageError::AlreadyExists(dst.to_string()))\n\n }\n\n res => Ok(res?),\n\n }\n\n }\n\n\n\n /// Deletes object by `path`.\n\n async fn delete_obj(&self, path: &str) -> Result<(), StorageError> {\n\n let uri = parse_uri(path)?.into_gcs_object()?;\n\n 
Ok(self.delete(uri).await?)\n\n }\n\n}\n", "file_path": "rust/src/storage/gcs/mod.rs", "rank": 89, "score": 6.985518699997241 }, { "content": " map(SchemaTypeMap),\n\n}\n\n\n\n/// Represents the schema of the delta table.\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\npub struct Schema {\n\n r#type: String,\n\n fields: Vec<SchemaField>,\n\n}\n\n\n\nimpl Schema {\n\n /// Returns the list of fields that make up the schema definition of the table.\n\n pub fn get_fields(&self) -> &Vec<SchemaField> {\n\n &self.fields\n\n }\n\n\n\n /// Create a new Schema using a vector of SchemaFields\n\n pub fn new(r#type: String, fields: Vec<SchemaField>) -> Self {\n\n Self { r#type, fields }\n\n }\n\n}\n", "file_path": "rust/src/schema.rs", "rank": 90, "score": 6.958737537480109 }, { "content": " .await?;\n\n\n\n Ok(ObjectMeta {\n\n path: path.to_string(),\n\n modified: parse_head_obj_last_modified_time(&result.last_modified)?,\n\n })\n\n }\n\n\n\n async fn get_obj(&self, path: &str) -> Result<Vec<u8>, StorageError> {\n\n debug!(\"fetching s3 object: {}...\", path);\n\n\n\n let uri = parse_uri(path)?.into_s3object()?;\n\n let get_req = GetObjectRequest {\n\n bucket: uri.bucket.to_string(),\n\n key: uri.key.to_string(),\n\n ..Default::default()\n\n };\n\n\n\n let result = self.client.get_object(get_req).await?;\n\n\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 91, "score": 6.947144329331342 }, { "content": "#[cfg(feature = \"s3\")]\n\n#[allow(dead_code)]\n\nmod s3_common;\n\n\n\n#[cfg(feature = \"s3\")]\n\nmod dynamodb {\n\n use deltalake::storage::s3::dynamodb_lock::*;\n\n use maplit::hashmap;\n\n use rusoto_dynamodb::*;\n\n use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};\n\n\n\n const TABLE: &str = \"test_table\";\n\n\n\n async fn create_dynamo_lock(key: &str, owner: &str) -> DynamoDbLockClient {\n\n let opts = Options {\n\n partition_key_value: key.to_string(),\n\n table_name: TABLE.to_string(),\n\n owner_name: owner.to_string(),\n\n lease_duration: 3,\n\n refresh_period: Duration::from_millis(500),\n", "file_path": "rust/tests/dynamodb_lock_test.rs", "rank": 92, "score": 6.790895484636476 }, { "content": "}\n\n\n\nimpl std::fmt::Debug for S3StorageBackend {\n\n fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n write!(fmt, \"S3StorageBackend\")\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl StorageBackend for S3StorageBackend {\n\n async fn head_obj(&self, path: &str) -> Result<ObjectMeta, StorageError> {\n\n let uri = parse_uri(path)?.into_s3object()?;\n\n\n\n let result = self\n\n .client\n\n .head_object(HeadObjectRequest {\n\n bucket: uri.bucket.to_string(),\n\n key: uri.key.to_string(),\n\n ..Default::default()\n\n })\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 93, "score": 6.726251504806461 }, { "content": " .execute()\n\n .await\n\n .map_err(to_storage_err)?;\n\n let modified = properties\n\n .blob\n\n .last_modified\n\n .expect(\"Last-Modified should never be None for committed blobs\");\n\n Ok(ObjectMeta {\n\n path: path.to_string(),\n\n modified,\n\n })\n\n }\n\n\n\n async fn get_obj(&self, path: &str) -> Result<Vec<u8>, StorageError> {\n\n debug!(\"Loading {}\", path);\n\n let obj = parse_uri(path)?.into_adlsgen2_object()?;\n\n self.validate_container(&obj)?;\n\n\n\n Ok(self\n\n .container_client\n", "file_path": "rust/src/storage/azure.rs", "rank": 94, "score": 6.713299894692895 }, { "content": " #[cfg(any(feature = \"s3\", feature = \"s3-rustls\"))]\n\n Uri::S3Object(x) => 
Err(UriError::ExpectedAzureUri(x.to_string())),\n\n #[cfg(feature = \"gcs\")]\n\n Uri::GCSObject(x) => Err(UriError::ExpectedAzureUri(x.to_string())),\n\n Uri::LocalPath(x) => Err(UriError::ExpectedAzureUri(x.to_string())),\n\n }\n\n }\n\n\n\n /// Converts the URI to an GCSObject. Returns UriError if the URI is not valid for the\n\n /// Google Cloud Storage backend.\n\n #[cfg(feature = \"gcs\")]\n\n pub fn into_gcs_object(self) -> Result<gcs::GCSObject<'a>, UriError> {\n\n match self {\n\n Uri::GCSObject(x) => Ok(x),\n\n #[cfg(any(feature = \"s3\", feature = \"s3-rustls\"))]\n\n Uri::S3Object(x) => Err(UriError::ExpectedGCSUri(x.to_string())),\n\n #[cfg(feature = \"azure\")]\n\n Uri::AdlsGen2Object(x) => Err(UriError::ExpectedGCSUri(x.to_string())),\n\n Uri::LocalPath(x) => Err(UriError::ExpectedGCSUri(x.to_string())),\n\n }\n", "file_path": "rust/src/storage/mod.rs", "rank": 95, "score": 6.701860099591821 }, { "content": "//! Crate to help rust projects discover GNU libc version at build time. Expected to be used in\n\n//! `build.rs`. Note that this crate is only expected to work under `cfg(target_env = \"gnu\")`, so\n\n//! please guard the usage under relevant configuration predicates.\n\n//!\n\n//! # Example\n\n//!\n\n//! ```\n\n//! let ver = glibc_version::get_version().unwrap();\n\n//! if ver.major >= 2 && ver.minor >= 28 {\n\n//! println!(\"cargo:rustc-cfg=glibc_renameat2\");\n\n//! }\n\n//! ```\n\n\n\npub struct Version {\n\n pub major: usize,\n\n pub minor: usize,\n\n}\n\n\n\n#[cfg(all(target_os = \"linux\", target_env = \"gnu\"))]\n\nmod imp {\n", "file_path": "glibc_version/src/lib.rs", "rank": 96, "score": 6.65902505557113 }, { "content": " }\n\n}\n\n\n\n/// Operation performed when creating a new log entry with one or more actions.\n\n/// This is a key element of the `CommitInfo` action.\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub enum DeltaOperation {\n\n /// Represents a Delta `Write` operation.\n\n /// Write operations will typically only include `Add` actions.\n\n Write {\n\n /// The save mode used during the write.\n\n mode: SaveMode,\n\n /// The columns the write is partitioned by.\n\n partitionBy: Option<Vec<String>>,\n\n /// The predicate used during the write.\n\n predicate: Option<String>,\n\n },\n\n /// Represents a Delta `StreamingUpdate` operation.\n\n StreamingUpdate {\n\n /// The output mode the streaming writer is using.\n", "file_path": "rust/src/action.rs", "rank": 97, "score": 6.612324915354996 }, { "content": " .container_client\n\n .list_blobs()\n\n .prefix(obj.path)\n\n .stream()\n\n .map_ok(move |response| {\n\n futures::stream::iter(response.incomplete_vector.vector.into_iter().map(\n\n move |blob| {\n\n let object = AdlsGen2Object {\n\n account_name: &self.account,\n\n file_system: &blob.container_name,\n\n path: &blob.name,\n\n };\n\n Ok(ObjectMeta {\n\n path: object.to_string(),\n\n modified: blob\n\n .last_modified\n\n .expect(\"Last-Modified should never be None for committed blobs\"),\n\n })\n\n },\n\n ))\n", "file_path": "rust/src/storage/azure.rs", "rank": 98, "score": 6.581172339409687 }, { "content": "\n\n async fn delete_obj(&self, path: &str) -> Result<(), StorageError> {\n\n debug!(\"delete s3 object: {}...\", path);\n\n\n\n let uri = parse_uri(path)?.into_s3object()?;\n\n let delete_req = DeleteObjectRequest {\n\n bucket: uri.bucket.to_string(),\n\n key: uri.key.to_string(),\n\n ..Default::default()\n\n };\n\n\n\n self.client.delete_object(delete_req).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n async fn delete_objs(&self, paths: &[String]) 
-> Result<(), StorageError> {\n\n debug!(\"delete s3 objects: {:?}...\", paths);\n\n if paths.is_empty() {\n\n return Ok(());\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 99, "score": 6.538316674958546 } ]
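The storage-backend snippets above (azure.rs, s3/mod.rs, storage/mod.rs) share one pattern: a Uri enum whose into_* conversions hand back the wrapped value for the matching backend and a typed UriError otherwise. A minimal standalone sketch of that dispatch idea — the two-variant enum and error type here are simplified stand-ins, not the crate's actual definitions:

// Simplified stand-ins for illustration only; the real Uri enum in
// rust/src/storage/mod.rs has one variant per enabled backend feature.
#[derive(Debug)]
enum Uri {
    LocalPath(String),
    S3Object(String),
}

#[derive(Debug)]
enum UriError {
    // Carries the offending URI so the caller can report it.
    ExpectedLocalPath(String),
}

impl Uri {
    // Succeeds only for the local-path variant, mirroring the into_localpath
    // helper excerpted above.
    fn into_localpath(self) -> Result<String, UriError> {
        match self {
            Uri::LocalPath(p) => Ok(p),
            Uri::S3Object(u) => Err(UriError::ExpectedLocalPath(u)),
        }
    }
}

fn main() {
    assert!(Uri::LocalPath("/tmp/table".into()).into_localpath().is_ok());
    assert!(Uri::S3Object("s3://bucket/key".into()).into_localpath().is_err());
}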
Rust
src/query.rs
m42e/zsh-histdb-skim
385c039f4338963e4cada1ab9c3035e1d969a552
use crate::location::Location;
use crate::environment::*;

pub fn build_query_string(theloc: &Location, grouped: bool) -> String {
    let mut query = String::from("select history.id as id, commands.argv as cmd,");
    if !grouped {
        query.push_str(" start_time")
    } else {
        query.push_str(" max(start_time)")
    }
    query.push_str(" as start, exit_status, duration,");
    if !grouped {
        query.push_str(" 1")
    } else {
        query.push_str(" count()")
    }
    query.push_str(" as count, history.session as session, places.host as host, places.dir as dir");
    query.push_str(" from history");
    query.push_str(" left join commands on history.command_id = commands.id");
    query.push_str(" left join places on history.place_id = places.id");

    match theloc {
        Location::Session | Location::Directory | Location::Machine => {
            query.push_str(" where");
        }
        _ => {}
    };

    match theloc {
        Location::Session => {
            query.push_str(&format!(" session == {} and", &get_current_session_id()));
        }
        Location::Directory => {
            query.push_str(&format!(" places.dir like '{}' and", &get_current_dir()));
        }
        _ => {}
    };

    match theloc {
        Location::Session | Location::Directory | Location::Machine => {
            query.push_str(&format!(" places.host == '{}'", &get_current_host()));
        }
        _ => {}
    };

    if grouped {
        query.push_str(" group by history.command_id, history.place_id");
    }

    query.push_str(" order by start desc");
    return query;
}

#[cfg(test)]
mod query {
    use super::*;
    use regex::Regex;

    #[test]
    fn has_select_fields() {
        for l in vec![
            Location::Session,
            Location::Directory,
            Location::Machine,
            Location::Everywhere,
        ] {
            let query = build_query_string(&l, true);
            assert!(query.contains("history.id as id"));
            assert!(query.contains("exit_status"));
            assert!(query.contains("start"));
            assert!(query.contains("duration"));
            assert!(query.contains("count"));
            assert!(query.contains("history.session as session"));
            assert!(query.contains("places.dir"));
        }
    }

    #[test]
    fn contains_host() {
        let re_host = Regex::new(r"host == '.*'").unwrap();
        for l in vec![Location::Session, Location::Directory, Location::Machine] {
            let query = build_query_string(&l, true);
            assert!(re_host.is_match(&query));
        }
        let query = build_query_string(&Location::Everywhere, true);
        assert!(!re_host.is_match(&query));
    }

    #[test]
    fn contains_grouping() {
        let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap();
        for l in vec![
            Location::Session,
            Location::Directory,
            Location::Machine,
            Location::Everywhere,
        ] {
            let query = build_query_string(&l, true);
            assert!(re_group.is_match(&query));
        }
    }

    #[test]
    fn contains_no_grouping_if_disabled() {
        let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap();
        let re_only_group = Regex::new(r"group").unwrap();
        for l in vec![
            Location::Session,
            Location::Directory,
            Location::Machine,
            Location::Everywhere,
        ] {
            let query = build_query_string(&l, false);
            assert!(!re_only_group.is_match(&query));
            assert!(!re_group.is_match(&query));
        }
    }

    #[test]
    fn for_session() {
        let query = build_query_string(&Location::Session, true);
        let re_session = Regex::new(r"session == (\d*) and").unwrap();
        let re_host = Regex::new(r"host == '.*'").unwrap();
        let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap();
        assert!(re_session.is_match(&query));
        assert!(re_host.is_match(&query));
        assert!(re_group.is_match(&query));
    }

    #[test]
    fn for_directory() {
        let query = build_query_string(&Location::Directory, false);
        let re_directory = Regex::new(r"places.dir like '.*' and").unwrap();
        let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap();
        assert!(re_directory.is_match(&query));
        assert!(!re_group.is_match(&query));
    }

    #[test]
    fn for_machine() {
        let query = build_query_string(&Location::Machine, true);
        let re_session = Regex::new(r"session == (\d*) and").unwrap();
        let re_place = Regex::new(r"dir like '.*' and").unwrap();
        let re_host = Regex::new(r"host == '.*'").unwrap();
        let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap();
        assert!(!re_session.is_match(&query));
        assert!(!re_place.is_match(&query));
        assert!(re_host.is_match(&query));
        assert!(re_group.is_match(&query));
    }

    #[test]
    fn for_everywhere() {
        let query = build_query_string(&Location::Everywhere, true);
        let re_session = Regex::new(r"session == (\d*) and").unwrap();
        let re_place = Regex::new(r"dir like '.*' and").unwrap();
        let re_host = Regex::new(r"host == '.*'").unwrap();
        let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap();
        assert!(!re_session.is_match(&query));
        assert!(!re_place.is_match(&query));
        assert!(!re_host.is_match(&query));
        assert!(re_group.is_match(&query));
    }
}
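To make the concatenation above concrete, this is the query that build_query_string(&Location::Session, true) produces, traced push_str by push_str. The session id (42) and host ('myhost') are placeholders for whatever get_current_session_id() and get_current_host() return at runtime:

fn main() {
    // Traced from the push_str calls in build_query_string for
    // Location::Session with grouping enabled; 42 and 'myhost' are
    // placeholders for the environment-derived session id and host.
    let expected = concat!(
        "select history.id as id, commands.argv as cmd, max(start_time) as start,",
        " exit_status, duration, count() as count, history.session as session,",
        " places.host as host, places.dir as dir",
        " from history",
        " left join commands on history.command_id = commands.id",
        " left join places on history.place_id = places.id",
        " where session == 42 and places.host == 'myhost'",
        " group by history.command_id, history.place_id",
        " order by start desc"
    );
    println!("{}", expected);
}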
use crate::location::Location; use crate::environment::*; pub fn build_query_string(theloc: &Location, grouped: bool) -> String { let mut query = String::from("select history.id as id, commands.argv as cmd,"); if !grouped { query.push_str(" start_time") } else { query.push_str(" max(start_time)") } query.push_str(" as start, exit_status, duration,"); if !grouped { query.push_str(" 1") } else { query.push_str(" count()") } query.push_str(" as count, history.session as session, places.host as host, places.dir as dir"); query.push_str(" from history"); query.push_str(" left join commands on history.command_id = commands.id"); query.push_str(" left join places on history.place_id = places.id"); match theloc { Location::Session | Location::Directory | Location::Machine => { query.push_str(" where"); } _ => {} }; match theloc { Location::Session => { query.push_str(&format!(" session == {} and", &get_current_session_id())); } Location::Directory => { query.push_str(&format!(" places.dir like '{}' and", &get_current_dir())); } _ => {} }; match theloc { Location::Session | Location::Directory | Location::Machine => { query.push_str(&format!(" places.host == '{}'", &get_current_host())); } _ => {} }; if grouped { query.push_str(" group by history.command_id, history.place_id"); } query.push_str(" order by start desc"); return query; } #[cfg(test)] mod query { use super::*; use regex::Regex; #[test] fn has_select_fields() { for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, true); assert!(query.contains("history.id as id")); assert!(query.contains("exit_status")); assert!(query.contains("start")); assert!(query.contains("duration")); assert!(query.contains("count")); assert!(query.contains("history.session as session")); assert!(query.contains("places.dir")); } } #[test]
#[test] fn contains_grouping() { let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, true); assert!(re_group.is_match(&query)); } } #[test] fn contains_no_grouping_if_disabled() { let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); let re_only_group = Regex::new(r"group").unwrap(); for l in vec![ Location::Session, Location::Directory, Location::Machine, Location::Everywhere, ] { let query = build_query_string(&l, false); assert!(!re_only_group.is_match(&query)); assert!(!re_group.is_match(&query)); } } #[test] fn for_session() { let query = build_query_string(&Location::Session, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(re_session.is_match(&query)); assert!(re_host.is_match(&query)); assert!(re_group.is_match(&query)); } #[test] fn for_directory() { let query = build_query_string(&Location::Directory, false); let re_directory = Regex::new(r"places.dir like '.*' and").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(re_directory.is_match(&query)); assert!(!re_group.is_match(&query)); } #[test] fn for_machine() { let query = build_query_string(&Location::Machine, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_place = Regex::new(r"dir like '.*' and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(!re_session.is_match(&query)); assert!(!re_place.is_match(&query)); assert!(re_host.is_match(&query)); assert!(re_group.is_match(&query)); } #[test] fn for_everywhere() { let query = build_query_string(&Location::Everywhere, true); let re_session = Regex::new(r"session == (\d*) and").unwrap(); let re_place = Regex::new(r"dir like '.*' and").unwrap(); let re_host = Regex::new(r"host == '.*'").unwrap(); let re_group = Regex::new(r"group by history.command_id, history.place_id").unwrap(); assert!(!re_session.is_match(&query)); assert!(!re_place.is_match(&query)); assert!(!re_host.is_match(&query)); assert!(re_group.is_match(&query)); } }
fn contains_host() {
    let re_host = Regex::new(r"host == '.*'").unwrap();
    for l in vec![Location::Session, Location::Directory, Location::Machine] {
        let query = build_query_string(&l, true);
        assert!(re_host.is_match(&query));
    }
    let query = build_query_string(&Location::Everywhere, true);
    assert!(!re_host.is_match(&query));
}
function_block-full_function
[ { "content": "/// Get the histdb session from the environment\n\npub fn get_current_session_id() -> String {\n\n let key = \"HISTDB_SESSION\";\n\n let session_id = env::var(key).unwrap_or(String::from(\"\"));\n\n return session_id.to_string();\n\n}\n\n\n", "file_path": "src/environment.rs", "rank": 1, "score": 121227.44348141989 }, { "content": "/// Get the current histdb host from the environment\n\npub fn get_current_host() -> String {\n\n let mut host = env::var(\"HISTDB_HOST\").unwrap_or(String::from(\"\"));\n\n if host.starts_with(\"'\") && host.ends_with(\"'\") {\n\n host = host[1..host.len() - 1].to_string()\n\n }\n\n return host.to_string();\n\n}\n\n\n", "file_path": "src/environment.rs", "rank": 2, "score": 108937.23517828941 }, { "content": "/// Get the current working directory\n\npub fn get_current_dir() -> String {\n\n let current_dir = env::current_dir().unwrap();\n\n let cdir_string = current_dir.to_str().unwrap();\n\n return cdir_string.to_string();\n\n}\n\n\n", "file_path": "src/environment.rs", "rank": 3, "score": 108935.05951927154 }, { "content": "pub fn generate_title(location: &Location) -> String {\n\n let extra_info = |theloc: &Location| -> String {\n\n return match theloc {\n\n Location::Session => get_current_session_id(),\n\n Location::Directory => get_current_dir(),\n\n Location::Machine => get_current_host(),\n\n _ => String::from(\"\"),\n\n };\n\n }(&location);\n\n\n\n let location_map = enum_map! {\n\n Location::Session => \"Session location history\",\n\n Location::Directory => \"Directory location history\",\n\n Location::Machine => \"Machine location history\",\n\n Location::Everywhere => \"Everywhere\",\n\n };\n\n\n\n let header_map = enum_map! {\n\n Location::Session =>\n\n\" ┏━━━━━━━━━━━┱─────────────┬────────┬──────────────┐\n", "file_path": "src/title.rs", "rank": 4, "score": 91664.1762107357 }, { "content": "pub fn get_nosort_option() -> bool {\n\n let nosort = env::var(\"HISTDB_NOSORT\").unwrap_or(String::from(\"false\"));\n\n if nosort.to_lowercase() == \"true\" || nosort == \"1\" {\n\n return true\n\n }\n\n return false\n\n}\n", "file_path": "src/environment.rs", "rank": 5, "score": 80843.14722780912 }, { "content": "/// Get the histdb file from the environment\n\npub fn get_histdb_database() -> String {\n\n let key = \"HISTDB_FILE\";\n\n let db_file = env::var(key).unwrap_or(String::from(\"\"));\n\n return db_file.to_string();\n\n}\n\n\n", "file_path": "src/environment.rs", "rank": 6, "score": 78874.96744268425 }, { "content": "/// Get the default (which is non us! 
or the us date format)\n\n/// - [ ] Read from locale to determine default\n\npub fn get_date_format() -> String {\n\n let key = \"HISTDB_FZF_FORCE_DATE_FORMAT\";\n\n let forced_dateformat = env::var(key).unwrap_or(\"non-us\".to_string()).to_lowercase();\n\n\n\n if forced_dateformat == \"us\" {\n\n return \"%m/%d/%Y\".to_string();\n\n } else {\n\n return \"%d/%m/%Y\".to_string();\n\n }\n\n}\n\n\n", "file_path": "src/environment.rs", "rank": 7, "score": 78874.96744268425 }, { "content": "fn show_history(thequery: String) -> Result<String, String> {\n\n let mut location = get_starting_location();\n\n let mut grouped = true;\n\n let mut query = thequery;\n\n loop {\n\n let title = generate_title(&location);\n\n\n\n let options = SkimOptionsBuilder::default()\n\n .height(Some(\"100%\"))\n\n .multi(false)\n\n .reverse(true)\n\n .prompt(Some(\"history >>\"))\n\n .query(Some(&query))\n\n .bind(vec![\n\n \"f1:abort\",\n\n \"f2:abort\",\n\n \"f3:abort\",\n\n \"f4:abort\",\n\n \"f5:abort\",\n\n \"ctrl-r:abort\",\n", "file_path": "src/main.rs", "rank": 8, "score": 73932.2222969933 }, { "content": "fn read_entries(location: &Location, grouped: bool, tx_item: SkimItemSender) {\n\n let conn_res =\n\n Connection::open_with_flags(get_histdb_database(), OpenFlags::SQLITE_OPEN_READ_ONLY);\n\n if conn_res.is_err() {\n\n let _ = tx_item.send(Arc::new(\"Cannot open database\"));\n\n drop(tx_item);\n\n return;\n\n }\n\n let conn = conn_res.unwrap();\n\n let s = build_query_string(&location, grouped);\n\n\n\n let stmt_result = conn.prepare(&s);\n\n if stmt_result.is_err() {\n\n let _ = tx_item.send(Arc::new(format!(\"Cannot get result from database {}\", stmt_result.err().unwrap())));\n\n drop(tx_item);\n\n return;\n\n }\n\n let mut stmt = stmt_result.unwrap();\n\n\n\n let cats = stmt.query_map([], |row| {\n", "file_path": "src/main.rs", "rank": 9, "score": 69790.10044555849 }, { "content": "fn get_starting_location() -> Location {\n\n let mut location = Location::Session;\n\n if get_current_session_id() == \"\" {\n\n location = Location::Directory;\n\n }\n\n location\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 64656.20420012602 }, { "content": "fn get_epoch_start_of_day() -> u64 {\n\n let now = SystemTime::now();\n\n let now_secs = now\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .unwrap_or_default()\n\n .as_secs();\n\n\n\n let seconds_since_midnight = now_secs % (24 * 3600);\n\n now_secs - seconds_since_midnight\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct History {\n\n pub id: i64,\n\n pub cmd: String,\n\n pub start: u64,\n\n pub exit_status: Option<i64>,\n\n pub duration: Option<i64>,\n\n pub count: i64,\n\n pub session: i64,\n", "file_path": "src/history.rs", "rank": 11, "score": 56015.821172790784 }, { "content": "fn process_result(\n\n selected_items: &Option<SkimOutput>,\n\n loc: &mut Location,\n\n grouped: &mut bool,\n\n) -> SelectionResult {\n\n if selected_items.is_some() {\n\n let sel = selected_items.as_ref().unwrap();\n\n match sel.final_key {\n\n Key::ESC | Key::Ctrl('c') | Key::Ctrl('d') | Key::Ctrl('z') => {\n\n return SelectionResult {\n\n selected_cmd: None,\n\n abort: true,\n\n };\n\n }\n\n Key::Enter => {\n\n return SelectionResult {\n\n selected_cmd: Some(format!(\n\n \"{}\",\n\n ((*sel.selected_items[0]).as_any().downcast_ref::<History>())\n\n .unwrap()\n", "file_path": "src/main.rs", "rank": 12, "score": 29098.527054228405 }, { "content": "fn main() -> Result<()> {\n\n let _conn =\n\n Connection::open_with_flags(get_histdb_database(), 
OpenFlags::SQLITE_OPEN_READ_ONLY);\n\n\n\n let args: Vec<String> = env::args().collect();\n\n let query = |args: Vec<String>| -> String {\n\n if args.len() > 1 {\n\n return args[1].to_string();\n\n }\n\n return \"\".to_string();\n\n }(args);\n\n\n\n if query == \"--version\" {\n\n println!(\"v0.7.6\");\n\n std::process::exit(1);\n\n }\n\n\n\n let result = show_history(query);\n\n if result.is_ok() {\n\n println!(\"{}\", result.ok().unwrap());\n\n } else {\n\n eprintln!(\"{}\", result.err().unwrap());\n\n std::process::exit(1);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 13, "score": 27678.662204854674 }, { "content": "use enum_map::Enum;\n\n\n\n#[derive(PartialEq, Enum, Copy, Clone)]\n\npub enum Location {\n\n Session,\n\n Directory,\n\n Machine,\n\n Everywhere,\n\n}\n", "file_path": "src/location.rs", "rank": 14, "score": 20287.329429083165 }, { "content": " pub host: String,\n\n pub dir: String,\n\n pub searchrange: [(usize, usize); 1],\n\n}\n\n\n\nimpl History {\n\n pub const FORMAT_DATE_LENGTH: usize = 10;\n\n pub const COMMAND_START: usize = (History::FORMAT_DATE_LENGTH + 1);\n\n\n\n pub fn command(&self) -> &String {\n\n return &self.cmd;\n\n }\n\n}\n\n\n\nimpl History {\n\n fn format_date(&self, full: bool) -> String {\n\n let starttime = NaiveDateTime::from_timestamp(self.start as i64, 0);\n\n if full {\n\n let mut dateinfo = String::from(\"\");\n\n dateinfo.push_str(&get_date_format());\n", "file_path": "src/history.rs", "rank": 15, "score": 19956.302508043285 }, { "content": " let information = format!(\"{:10} {}\", self.format_date(false), self.cmd);\n\n Cow::Owned(information)\n\n }\n\n\n\n fn preview(&self, _context: PreviewContext) -> ItemPreview {\n\n let mut information = String::from(format!(\"\\x1b[1mDetails for {}\\x1b[0m\\n\\n\", self.id));\n\n\n\n let mut tformat = |name: &str, value: &str| {\n\n information.push_str(&format!(\"\\x1b[1m{:20}\\x1b[0m{}\\n\", name, value));\n\n };\n\n\n\n tformat(\"Runtime\", &History::format_or_none(self.duration));\n\n tformat(\"Host\", &self.host);\n\n tformat(\"Executed\", &self.count.to_string());\n\n tformat(\"Directory\", &self.dir);\n\n tformat(\"Exit Status\", &History::format_or_none(self.exit_status));\n\n tformat(\"Session\", &self.session.to_string());\n\n tformat(\"Start Time\", &self.format_date(false));\n\n information.push_str(&format!(\n\n \"\\x1b[1mCommand\\x1b[0m\\n\\n{}\\n\",\n", "file_path": "src/history.rs", "rank": 16, "score": 19953.53151416383 }, { "content": " dateinfo.push_str(\" %H:%M\");\n\n return format!(\"{}\", starttime.format(&dateinfo));\n\n } else if self.start > get_epoch_start_of_day() {\n\n return format!(\"{}\", starttime.format(\"%H:%M\"));\n\n } else {\n\n return format!(\"{}\", starttime.format(&get_date_format()));\n\n }\n\n }\n\n\n\n fn format_or_none(x: Option<i64>) -> String {\n\n if x.is_some() {\n\n format!(\"{}\", x.unwrap())\n\n } else {\n\n \"\\x1b[37;1m<NONE>\\x1b[0m\".to_string()\n\n }\n\n }\n\n}\n\n\n\nimpl SkimItem for History {\n\n fn text(&self) -> Cow<str> {\n", "file_path": "src/history.rs", "rank": 17, "score": 19946.131486159502 }, { "content": " &fill(&self.cmd, _context.width)\n\n ));\n\n ItemPreview::AnsiText(information)\n\n }\n\n\n\n fn get_matching_ranges(&self) -> Option<&[(usize, usize)]> {\n\n Some(&self.searchrange)\n\n }\n\n}\n", "file_path": "src/history.rs", "rank": 18, "score": 19945.254003465972 }, { "content": "extern crate skim;\n\nuse crate::environment::*;\n\nuse chrono::NaiveDateTime;\n\nuse skim::prelude::*;\n\nuse 
std::time::SystemTime;\n\nuse textwrap::fill;\n\n\n", "file_path": "src/history.rs", "rank": 19, "score": 19944.491050260956 }, { "content": " let cmd: String = row.get(\"cmd\")?;\n\n let commandend = cmd.len() as usize;\n\n Ok(History {\n\n id: row.get(\"id\")?,\n\n cmd: cmd,\n\n start: row.get(\"start\")?,\n\n exit_status: row.get(\"exit_status\")?,\n\n duration: row.get(\"duration\")?,\n\n count: row.get(\"count\")?,\n\n session: row.get(\"session\")?,\n\n host: row.get(\"host\")?,\n\n dir: row.get(\"dir\")?,\n\n searchrange: [(\n\n History::COMMAND_START,\n\n commandend + (History::COMMAND_START),\n\n )],\n\n })\n\n });\n\n for person in cats.unwrap() {\n\n if person.is_ok() {\n\n let x = person.unwrap();\n\n let _ = tx_item.send(Arc::new(x));\n\n }\n\n }\n\n drop(tx_item);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 28, "score": 15.567472424847768 }, { "content": "extern crate skim;\n\nmod environment;\n\nmod history;\n\nmod location;\n\nmod query;\n\nmod title;\n\n\n\nuse crate::environment::*;\n\nuse crate::history::History;\n\nuse crate::location::Location;\n\nuse crate::query::build_query_string;\n\nuse crate::title::generate_title;\n\n\n\nuse rusqlite::{Connection, OpenFlags, Result};\n\nuse skim::prelude::*;\n\nuse std::env;\n\nuse std::thread;\n\n\n", "file_path": "src/main.rs", "rank": 29, "score": 11.483238599123418 }, { "content": " ┃F1: Session┃F2: Directory│F3: Host│F4: Everywhere│ F5: Toggle group\n\n━┛ ┗━━━━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━\",\n\n Location::Directory =>\n\n\" ┌───────────┲━━━━━━━━━━━━━┱────────┬──────────────┐\n\n │F1: Session┃F2: Directory┃F3: Host│F4: Everywhere│ F5: Toggle group\n\n━┷━━━━━━━━━━━┛ ┗━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━\",\n\n\n\n Location::Machine =>\n\n\" ┌───────────┬─────────────┲━━━━━━━━┱──────────────┐\n\n │F1: Session│F2: Directory┃F3: Host┃F4: Everywhere│ F5: Toggle group\n\n━┷━━━━━━━━━━━┷━━━━━━━━━━━━━┛ ┗━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━\",\n\n\n\n Location::Everywhere =>\n\n\" ┌───────────┬─────────────┬────────┲━━━━━━━━━━━━━━┓\n\n │F1: Session│F2: Directory│F3: Host┃F4: Everywhere┃ F5: Toggle group\n\n━┷━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━┛ ┗━━━━━━━━━━━━━━━━━\",\n\n };\n\n\n\n let title = format!(\n\n \"{} {}\\n{}\\n\",\n\n &location_map[location.clone()],\n\n &extra_info,\n\n &header_map[location.clone()],\n\n );\n\n return title.to_string();\n\n}\n", "file_path": "src/title.rs", "rank": 30, "score": 8.561552397339815 }, { "content": " Key::Ctrl('r') => {\n\n *loc = match *loc {\n\n Location::Session => Location::Directory,\n\n Location::Directory => Location::Machine,\n\n Location::Machine => Location::Everywhere,\n\n Location::Everywhere => Location::Session,\n\n };\n\n }\n\n _ => (),\n\n };\n\n return SelectionResult {\n\n selected_cmd: None,\n\n abort: false,\n\n };\n\n } else {\n\n return SelectionResult {\n\n selected_cmd: None,\n\n abort: true,\n\n };\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 31, "score": 8.530278206099897 }, { "content": " \"ctrl-u:half-page-up\",\n\n \"ctrl-d:half-page-down\",\n\n ])\n\n .header(Some(&title))\n\n .preview(Some(\"\")) // preview should be specified to enable preview window\n\n .nosort(get_nosort_option())\n\n .build()\n\n .unwrap();\n\n\n\n let (tx_item, rx_item): (SkimItemSender, SkimItemReceiver) = unbounded();\n\n\n\n let handle = thread::spawn(move || {\n\n read_entries(&location, grouped, tx_item);\n\n });\n\n\n\n let selected_items = Skim::run_with(&options, Some(rx_item));\n\n handle.join().unwrap();\n\n\n\n let selection_result = 
process_result(&selected_items, &mut location, &mut grouped);\n\n if selection_result.abort {\n\n return Err(\"Aborted\".to_string());\n\n }\n\n if selection_result.selected_cmd.is_some() {\n\n return Ok(selection_result.selected_cmd.unwrap());\n\n }\n\n query = selected_items.unwrap().query;\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 32, "score": 7.398502932446776 }, { "content": " .command()\n\n )),\n\n abort: false,\n\n };\n\n }\n\n Key::F(1) => {\n\n *loc = Location::Session;\n\n }\n\n Key::F(2) => {\n\n *loc = Location::Directory;\n\n }\n\n Key::F(3) => {\n\n *loc = Location::Machine;\n\n }\n\n Key::F(4) => {\n\n *loc = Location::Everywhere;\n\n }\n\n Key::F(5) => {\n\n *grouped = !*grouped;\n\n }\n", "file_path": "src/main.rs", "rank": 33, "score": 6.941640062665799 }, { "content": "use crate::location::Location;\n\nuse enum_map::enum_map;\n\nuse crate::environment::*;\n\n\n", "file_path": "src/title.rs", "rank": 34, "score": 4.964962381547803 }, { "content": "# zsh-histdb-skim\n\n\n\nThis is a reimplementation of https://github.com/m42e/zsh-histdb-fzf in rust and using skim as a library.\n\n\n\n## Why\n\n\n\n[zsh-histdb-fzf](https://github.com/m42e/zsh-histdb-fzf) works, but it is a bit quirky. It has for sure some flaws, regarding responsiveness and communication with processes.\n\n[skim](https://github.com/lotabout/skim) offers a fzf like behavior and is available as library in rust. It lacks some highlighting in the header, and has a bit different order/matching algorithm.\n\n\n\nThis should result in better performance, responsiveness and a more stable behavior.\n\n\n\n## Why rust?\n\n\n\n[skim](https://github.com/lotabout/skim) is available in rust. I have never tried rust with a purpose before. I wanted to give it a try and learn something new.\n\n\n\n## How it works\n\n\n\nWell, it accesses the [zsh histdb](https://github.com/larkery/zsh-histdb). It lets you search on different levels.\n\n\n\n## What do you have to do?\n\n\n\nInstall the plugin, e.g. using [zplug](https://github.com/zplug/zplug).\n\n\n\n```\n\n zplug 'm42e/zsh-histdb-skim', from:github, at:main\n\n```\n\n\n\nIt downloads the binary (if available) automatically. You can do manually by calling `histdb-skim-download`. It will be saved in `${XDG_DATA_HOME}/zsh-histdb-skim`, alternatively `${HOME}/.local/share/zsh-histdb-skim`. You can specify the directory manually by setting `HISTDB_SKIM_PATH`.\n\n\n\nThe download will happen if the executable is not there or the version is outdated (starting from v0.7.0). These checks happen when sourcing the script.\n\n\n\n\n\nThe plugin calls `bindkey` but some other plugins may overwrite. In this case you would have to do it yourself:\n\n\n\n```\n\nbindkey '^R' histdb-skim-widget\n\n```\n\n\n\n\n\n## Additional information\n\n\n\nBy default the binary is downloaded\n\n\n\n\n\n## Building\n\n\n\n```\n\ncargo build --release\n\nmkdir -p bin\n\nmv target/release/zsh-histdb-skim bin\n\n```\n\n\n\n# TODO\n\n- improve rust code\n\n\n\n# Apologies 😉\n\n\n\nWhile I stole the idea from myself, this is my first rust project ever. So I would be glad for tips and improvement PRs.\n", "file_path": "README.md", "rank": 35, "score": 2.496890071110867 }, { "content": "use std::env;\n\n\n\n/// Get the default (which is non us! or the us date format)\n\n/// - [ ] Read from locale to determine default\n", "file_path": "src/environment.rs", "rank": 36, "score": 1.7970653669459054 } ]
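The process_result snippet above cycles the active view with Ctrl-R in a fixed Session → Directory → Machine → Everywhere order. A self-contained sketch of that cycle, using a simplified copy of the Location enum from src/location.rs:

// Simplified copy of Location (the real one also derives Enum for enum_map).
#[derive(Debug, PartialEq, Clone, Copy)]
enum Location {
    Session,
    Directory,
    Machine,
    Everywhere,
}

// Mirrors the Ctrl-R arm of process_result: step to the next view.
fn next(loc: Location) -> Location {
    match loc {
        Location::Session => Location::Directory,
        Location::Directory => Location::Machine,
        Location::Machine => Location::Everywhere,
        Location::Everywhere => Location::Session,
    }
}

fn main() {
    let mut loc = Location::Session;
    for _ in 0..4 {
        loc = next(loc);
    }
    // Four presses of Ctrl-R bring the view back to where it started.
    assert_eq!(loc, Location::Session);
}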
Rust
src/routes/mod.rs
koto-bank/zeph
bfb154678e2881a9584e4ea19d90c4dc29e9455b
use {DB,CONFIG}; use iron::prelude::*; use iron::status; use iron::mime::{Mime, TopLevel, SubLevel, Attr, Value}; use urlencoded::UrlEncodedQuery; use serde_json::to_value; pub mod image; pub mod user; pub mod admin; pub use image::*; pub use user::*; pub use admin::*; pub fn index_n_search(_req: &mut Request) -> IronResult<Response> { let page = html! { meta charset="utf-8" / link rel="stylesheet" href="/assets/css/milligram.min.css" / link rel="stylesheet" href="/assets/css/main.css" / link rel="icon" type="image/jpeg" href="/assets/favicon.jpg" / title "Zeph" script src="/assets/js/main.js" {} div style="width:100%;" { div.tags-search { a href="/" title="Boop!" { img#nano-logo src="/assets/logo.jpg" h3 style="display: inline; vertical-align: 50%" "Zeph" } form#tag-search-form action="/search" { input#tag-search-field placeholder="Search" name="q" type="text" / } div#tags {} a href="/about" style="opacity: 0.5;" "About Zeph & Help" } div#images {} button#upload-button onclick="showUploadOrLogin()" "Login" div#login-or-upload-form / } }; Ok(Response::with((status::Ok, page))) } pub fn more(req: &mut Request) -> IronResult<Response> { let mut response = Response::new(); let q = match req.get_ref::<UrlEncodedQuery>() { Ok(hashmap) => hashmap, Err(_) => return Ok(Response::with((status::BadRequest, "No parameters"))) }; let offset = query!(q,"offset").unwrap_or(&"0".to_string()).parse::<usize>().unwrap(); let images = match query!(q,"q") { Some(x) => DB.lock().unwrap().by_tags(25, offset, &x.to_lowercase().split_whitespace().map(String::from).collect::<Vec<_>>()).unwrap(), None => DB.lock().unwrap().get_images(25, offset).unwrap() }; response .set_mut(Mime(TopLevel::Application, SubLevel::Json, vec![(Attr::Charset, Value::Utf8)])) .set_mut(to_value(&images).to_string()) .set_mut(status::Ok); Ok(response) } pub fn about(_: &mut Request) -> IronResult<Response> { let page = html! { meta charset="utf-8" / link rel="stylesheet" href="/assets/css/milligram.min.css" / link rel="stylesheet" href="/assets/css/main.css" / link rel="icon" type="image/jpeg" href="/assets/favicon.jpg" / title "Zeph - About" div style="width:100%;" { div.tags-search { a href="/" title="Boop!" 
{ img#nano-logo src="/assets/logo.jpg" / h3 style="display: inline; vertical-align: 50%" "Zeph" } form#tag-search-form action="/search" { input#tag-search-field placeholder="Search" name="q" type="text" / } } } div style="margin-left: 15%;" { {"Zeph is an open-source booru/imageboard written in " a href="https://www.rust-lang.org/" "Rust" } br / { "You can get source code to build Zeph yourself from " a href="https://github.com/koto-bank/zeph" "Github" } br / @if let Some(addr) = CONFIG.get("contact-email") { { "Contact e-mail adress: " a href={"mailto:" ( addr.as_str().unwrap()) } ( addr.as_str().unwrap() ) } } br h3 "Search options" table style="width: 50%;" { tr { th "Example" th "Meaning" } tr { td code "1girl" td "Search for a girl on her own" } tr { td code "1girl -fur" td "Search for a non-fluffy girl (exclude 'fur' tag)" } tr { td code "rating:s,q" td "Search for a safe and questionable images" } tr { td { code "*girls" "or" code "2girl*" } td "Search for anything that ends with 'girls' (or starts with '2girl')" } tr { td code "from:konachan" td "Search for images synchronized from konachan (full list in source code & easily extendable)" } tr { td code "uploader:random_dude" td "Images uploaded by random_dude, note that 'sync' are synchronized images" } tr { td code "sort:asc:score" td "Sort images by score from worst to best (ascending); desc is for descening" } tr { td code "1girl | 2girls" td "Search for images of girl on her own OR 2 girls" } tr { td code "1girl format:jpg,gif" td "Search for GIF and JPEG images" } } } }; Ok(Response::with((status::Ok, page))) }
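The /more handler above pages results by reading an offset query parameter (defaulting to 0) and returning the next 25 images as JSON. A plain-vector sketch of that offset/limit slicing, standing in for the by_tags/get_images database calls:

// Stand-in for the database call: take `limit` items starting at `offset`,
// clamped so an out-of-range offset yields an empty page in this sketch.
fn page(items: &[u32], offset: usize, limit: usize) -> &[u32] {
    let start = offset.min(items.len());
    let end = (start + limit).min(items.len());
    &items[start..end]
}

fn main() {
    let all: Vec<u32> = (0..60).collect();
    assert_eq!(page(&all, 0, 25).len(), 25);  // first page is full
    assert_eq!(page(&all, 50, 25).len(), 10); // last page is partial
}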
use {DB,CONFIG}; use iron::prelude::*; use iron::status; use iron::mime::{Mime, TopLevel, SubLevel, Attr, Value}; use urlencoded::UrlEncodedQuery; use serde_json::to_value; pub mod image; pub mod user; pub mod admin; pub use image::*; pub use user::*; pub use admin::*; pub fn index_n_search(_req: &mut Request) -> IronResult<Response> { let page = html! { meta charset="utf-8" / link rel="stylesheet" href="/assets/css/milligram.min.css" / link rel="stylesheet" href="/assets/css/main.css" / link rel="icon" type="image/jpeg" href="/assets/favicon.jpg" / title "Zeph" script src="/assets/js/main.js" {} div style="width:100%;" { div.tags-search { a href="/" title="Boop!" { img#nano-logo src="/assets/logo.jpg" h3 style="display: inline; vertical-align: 50%" "Zeph" } form#tag-search-form action="/search" { input#tag-search-field placeholder="Search" name="q" type="text" / } div#tags {} a href="/about" style="opacity: 0.5;" "About Zeph & Help" } div#images {} button#upload-button onclick="showUploadOrLogin()" "Login" div#login-or-upload-form / } }; Ok(Response::with((status::Ok, page))) } pub fn more(req: &mut Request) -> IronResult<Response> { let mut response = Response::new(); let q = match req.get_ref::<UrlEncodedQuery>() { Ok(hashmap) => hashmap, Err(_) => return Ok(Response::with((status::BadRequest, "No parameters"))) }; let offset = query!(q,"offset").unwrap_or(&"0".to_string()).parse::<usize>().unwrap(); let images = match query!(q,"q") { Some(x) => DB.lock().unwrap().by_tags(25, offset, &x.to_lowercase().split_whitespace().map(String::from).collect::<Vec<_>>()).unwrap(), None => DB.lock().unwrap().get_images(25, offset).unwrap() }; response .set_mut(Mime(TopLevel::Application, SubLevel::Json, vec![(Attr::Charset, Value::Utf8)])) .set_mut(to_value(&images).to_string()) .set_mut(status::Ok); Ok(response) } pub fn about(_: &mut Request) -> IronResult<Response> { let page = html! { meta charset="utf-8" / link rel="stylesheet" href="/assets/css/milligram.min.css" / link rel="stylesheet" href="/assets/css/main.css" / link rel="icon" type="image/jpeg" href="/assets/favicon.jpg" / title "Zeph - About" div style="width:100%;" { div.tags-search { a href="/" title="Boop!" 
{ img#nano-logo src="/assets/logo.jpg" / h3 style="display: inline; vertical-align: 50%" "Zeph" } form#tag-search-form action="/search" { input#tag-search-field placeholder="Search" name="q" type="text" / } } } div style="margin-left: 15%;" { {"Zeph is an open-source booru/imageboard written in " a href="https://www.rust-lang.org/" "Rust" } br / { "You can get source code to build Zeph yourself from " a href="https://github.com/koto-bank/zeph" "Github" } br / @if let Some(addr) = CONFIG.get("contact-email") { { "Contact e-mail adress: " a href={"mailto:" ( addr.as_str().unwrap()) } ( addr.as_str().unwrap() ) } } br h3 "Search options" table style="width: 50%;" { tr { th "Example" th "Meaning" } tr { td code "1girl" td "Search for a girl on her own" } tr { td code "1girl -fur" td "Search for a non-fluffy girl (exclude 'fur' tag)" } tr { td code "rating:s,q" td "Search for a safe and questionable images" } tr { td { code "*girls" "or" code "2girl*" } td "Search for anything that ends with 'girls' (or starts with '2girl')" } tr { td code "from:konachan" td "Search for images synchronized from konachan (full list in source code & easily extendable)" } tr { td code "uploader:random_dude" td "Images uploaded by random_dude, note that 'sync' are synchronized images" } tr { td code "sort:asc:score" td "Sort images by score from worst to best (ascending); desc is for descening" } tr { td code "1girl | 2girls" td "Search for images of girl on her own OR 2 girls" }
tr { td code "1girl format:jpg,gif" td "Search for GIF and JPEG images" } } } }; Ok(Response::with((status::Ok, page))) }
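The help table above documents the tag syntax: a leading '-' excludes a tag, a '*' at either end is a wildcard, and colon-prefixed forms such as rating:, sort:, from: or format: select other fields. A simplified sketch of just the prefix/suffix classification; the project's actual parser also handles the colon-prefixed forms and the '|' OR operator:

#[derive(Debug, PartialEq)]
enum SimpleTag {
    Include(String),    // plain tag, e.g. "1girl"
    Exclude(String),    // "-fur"
    EndsWith(String),   // "*girls"
    StartsWith(String), // "2girl*"
}

fn classify(tag: &str) -> SimpleTag {
    if let Some(rest) = tag.strip_prefix('-') {
        SimpleTag::Exclude(rest.to_string())
    } else if let Some(rest) = tag.strip_prefix('*') {
        SimpleTag::EndsWith(rest.to_string())
    } else if let Some(rest) = tag.strip_suffix('*') {
        SimpleTag::StartsWith(rest.to_string())
    } else {
        SimpleTag::Include(tag.to_string())
    }
}

fn main() {
    assert_eq!(classify("-fur"), SimpleTag::Exclude("fur".into()));
    assert_eq!(classify("*girls"), SimpleTag::EndsWith("girls".into()));
    assert_eq!(classify("2girl*"), SimpleTag::StartsWith("2girl".into()));
    assert_eq!(classify("1girl"), SimpleTag::Include("1girl".into()));
}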
function_block-function_prefix_line
[ { "content": "pub fn login(req: &mut Request) -> IronResult<Response> {\n\n let mut response = Response::new();\n\n\n\n let body = match req.get::<UrlEncodedBody>() {\n\n Ok(data) => data,\n\n Err(_) => return Ok(Response::with(status::BadRequest))\n\n };\n\n\n\n if let (Some(login), Some(pass)) = (body.get(\"login\"),body.get(\"password\")) {\n\n match DB.lock().unwrap().check_user(&login[0], &pass[0]).unwrap() {\n\n Some(x) if x => {\n\n req.session().set(Login(login[0].clone()))?;\n\n response\n\n .set_mut(Redirect(\"/\".to_string()))\n\n .set_mut(status::Found);\n\n Ok(response)\n\n },\n\n Some(_) => Ok(Response::with((status::BadRequest,\"Incorrect login/pass\"))),\n\n None => Ok(Response::with((status::Ok,\"No such user\")))\n\n }\n\n } else {\n\n Ok(Response::with((status::BadRequest,\"No login/pass\")))\n\n }\n\n}\n\n\n", "file_path": "src/routes/user.rs", "rank": 0, "score": 186279.3743863413 }, { "content": "pub fn admin(req: &mut Request) -> IronResult<Response> {\n\n if let (Some(curr_username), Some(admin_username)) = (req.session().get::<Login>()?,config!(? \"admin-username\")) {\n\n if curr_username.0.to_lowercase() == admin_username.to_lowercase() {\n\n let page = html!{\n\n script src=\"/assets/js/admin.js\" {}\n\n\n\n div#log-block style=\"width:40%; height:50%; overflow-y: auto; border: 1px solid black;\" {\n\n @for l in LOG.lock().unwrap().iter() {\n\n (l)\n\n }\n\n }\n\n br /\n\n form#command-form onsubmit=\"sendCommand(this); return false;\" {\n\n input name=\"comm\" nameplaceholder=\"Command\" type=\"text\" /\n\n input#send-button value=\"Send\" type=\"submit\" /\n\n }\n\n };\n\n Ok(Response::with((status::Ok,page)))\n\n } else {\n\n Ok(Response::with((status::Forbidden,\"Not an admin\")))\n\n }\n\n } else {\n\n Ok(Response::with((status::Forbidden,\"Not logged in\"))) // .. or admin account is not set\n\n }\n\n}\n\n\n", "file_path": "src/routes/admin.rs", "rank": 1, "score": 175053.48611638226 }, { "content": "/// Upload an image w/ multipart/form-data\n\npub fn upload_image(req: &mut Request) -> IronResult<Response> {\n\n let username = match req.session().get::<Login>()? {\n\n Some(u) => u.0,\n\n None => return Ok(Response::with((status::Forbidden,\"Not logged in\")))\n\n };\n\n let mut multipart = match Multipart::from_request(req) {\n\n Ok(m) => m,\n\n Err(e) => return Ok(Response::with((status::BadRequest, format!(\"Not a multipart request? {:#?}\", e))))\n\n };\n\n\n\n match multipart.save_all() {\n\n SaveResult::Full(entries) | SaveResult::Partial(entries, _) => {\n\n let savedfile = match entries.files.get(\"image\") {\n\n Some(s) => s,\n\n None => return Ok(Response::with((status::BadRequest,\"Can't load file\")))\n\n };\n\n let filename = match savedfile.filename {\n\n Some(ref f) => f,\n\n None => return Ok(Response::with((status::BadRequest,\"No filename\"))) // Is this even possible?\n\n };\n", "file_path": "src/routes/image.rs", "rank": 2, "score": 173388.83523163974 }, { "content": "pub fn get_log(req: &mut Request) -> IronResult<Response> {\n\n if let (Some(curr_username), Some(admin_username)) = (req.session().get::<Login>()?,config!(? 
\"admin-username\")) {\n\n if curr_username.0.to_lowercase() == admin_username.to_lowercase() {\n\n let mut response = Response::new();\n\n response\n\n .set_mut(Mime(TopLevel::Application, SubLevel::Json,\n\n vec![(Attr::Charset, Value::Utf8)]))\n\n .set_mut(to_value(&*LOG.lock().unwrap()).to_string())\n\n .set_mut(status::Ok);\n\n\n\n Ok(response)\n\n } else {\n\n Ok(Response::with((status::Forbidden,\"Not an admin\")))\n\n }\n\n } else {\n\n Ok(Response::with((status::Forbidden,\"Not logged in\"))) // .. or admin account is not set\n\n }\n\n}\n", "file_path": "src/routes/admin.rs", "rank": 3, "score": 165686.8099313011 }, { "content": "pub fn user_status(req: &mut Request) -> IronResult<Response> {\n\n #[derive(Serialize)]\n\n struct UserStatus {\n\n logined: bool,\n\n name: Option<String>\n\n }\n\n\n\n let (logined,name) = match req.session().get::<Login>()? {\n\n Some(user) => (true, Some(user.0)),\n\n None => (false, None)\n\n };\n\n\n\n let mut response = Response::new();\n\n\n\n response\n\n .set_mut(Mime(TopLevel::Application, SubLevel::Json,\n\n vec![(Attr::Charset, Value::Utf8)]))\n\n .set_mut(to_value(&UserStatus{logined: logined,name: name}).to_string())\n\n .set_mut(status::Ok);\n\n Ok(response)\n\n}\n\n\n", "file_path": "src/routes/user.rs", "rank": 6, "score": 155069.45421580062 }, { "content": "pub fn admin_command(req: &mut Request) -> IronResult<Response> {\n\n if let (Some(curr_username), Some(admin_username)) = (req.session().get::<Login>()?,config!(? \"admin-username\")) {\n\n if curr_username.0.to_lowercase() == admin_username.to_lowercase() {\n\n let body = match req.get::<UrlEncodedBody>() {\n\n Ok(data) => data,\n\n Err(_) => return Ok(Response::with(status::BadRequest))\n\n };\n\n\n\n if let Some(comm) = body.get(\"command\") {\n\n exec_command(&comm[0]);\n\n }\n\n\n\n Ok(Response::with(status::Ok))\n\n } else {\n\n Ok(Response::with((status::Forbidden,\"Not an admin\")))\n\n }\n\n } else {\n\n Ok(Response::with((status::Forbidden,\"Not logged in\"))) // .. or admin account is not set\n\n }\n\n}\n\n\n", "file_path": "src/routes/admin.rs", "rank": 7, "score": 155016.55471841845 }, { "content": "// Vota an image\n\npub fn vote_image(req: &mut Request) -> IronResult<Response> {\n\n let q = match req.get::<UrlEncodedQuery>() {\n\n Ok(hashmap) => hashmap,\n\n Err(_) => return Ok(Response::with((status::BadRequest, \"No parameters\")))\n\n };\n\n\n\n Ok(if let (Some(id), Some(vote)) = (query!(q,\"id\"),query!(q,\"vote\")) {\n\n if let Some(name) = req.session().get::<Login>()? 
{\n\n let name = name.0;\n\n if let (Ok(vote),Ok(id)) = (vote.parse::<bool>(),id.parse::<i32>()) {\n\n match DB.lock().unwrap().vote_image(&name, id, vote).unwrap() {\n\n Ok(newv) => Response::with((status::Ok,newv.to_string())),\n\n Err(VoteImageError::Already) => Response::with((status::Ok,\"Already voted that\")),\n\n Err(VoteImageError::NoImage) => Response::with((status::Ok,\"No such image\"))\n\n }\n\n } else {\n\n Response::with((status::BadRequest,\"Invalid data\"))\n\n }\n\n } else {\n\n Response::with((status::Forbidden,\"Not logged in\"))\n\n }\n\n } else {\n\n Response::with((status::BadRequest,\"No data\"))\n\n })\n\n}\n\n\n", "file_path": "src/routes/image.rs", "rank": 8, "score": 154693.457200752 }, { "content": "pub fn adduser(req: &mut Request) -> IronResult<Response> {\n\n\n\n let body = match req.get::<UrlEncodedBody>() {\n\n Ok(data) => data,\n\n Err(_) => return Ok(Response::with(status::BadRequest))\n\n };\n\n\n\n Ok(if let (Some(login), Some(pass), Some(confirm_pass)) = (body.get(\"login\"), body.get(\"password\"),body.get(\"confirm_password\")) {\n\n let (login,pass,confirm_pass) = (&login[0], &pass[0], &confirm_pass[0]);\n\n if pass == confirm_pass {\n\n if !pass.trim().is_empty() && !login.trim().is_empty() {\n\n match DB.lock().unwrap().add_user(login,pass) {\n\n Ok(res) => {\n\n if res {\n\n let mut response = Response::new();\n\n req.session().set(Login(login.clone()))?;\n\n response\n\n .set_mut(Redirect(\"/\".to_string()))\n\n .set_mut(status::Found);\n\n response\n", "file_path": "src/routes/user.rs", "rank": 10, "score": 150273.4217686008 }, { "content": "/// Find similiar images by tags\n\npub fn similiar(req: &mut Request) -> IronResult<Response> {\n\n let mut response = Response::new();\n\n\n\n let q = match req.get_ref::<UrlEncodedQuery>() {\n\n Ok(hashmap) => hashmap,\n\n Err(_) => return Ok(Response::with((status::BadRequest, \"No parameters\")))\n\n };\n\n\n\n let offset = query!(q,\"offset\").unwrap_or(&\"0\".to_string()).parse::<usize>().unwrap();\n\n let id = query!(q,\"id\").unwrap().parse::<i32>().unwrap();\n\n let images = DB.lock().unwrap().similiar(id, 25, offset).unwrap();\n\n\n\n response\n\n .set_mut(Mime(TopLevel::Application, SubLevel::Json,\n\n vec![(Attr::Charset, Value::Utf8)]))\n\n .set_mut(to_value(&images).to_string())\n\n .set_mut(status::Ok);\n\n Ok(response)\n\n}\n", "file_path": "src/routes/image.rs", "rank": 11, "score": 150005.06025022722 }, { "content": "/// Show an image\n\npub fn show(req: &mut Request) -> IronResult<Response> {\n\n let id = req.extensions.get::<Router>().and_then(|x| x.find(\"id\")).and_then(|x| x.parse::<i32>().ok()).unwrap();\n\n let image = match DB.lock().unwrap().get_image(id).unwrap() {\n\n Some(x) => x,\n\n None => return Ok(Response::with(status::NotFound))\n\n };\n\n\n\n let page = html!{\n\n meta charset=\"utf-8\" /\n\n link rel=\"stylesheet\" href=\"/assets/css/milligram.min.css\" /\n\n link rel=\"stylesheet\" href=\"/assets/css/main.css\" /\n\n link rel=\"icon\" type=\"image/jpeg\" href=\"/assets/favicon.jpg\" /\n\n script src=\"/assets/js/show.js\" {}\n\n title { \"Zeph - \" (image.tags.join(\" \")) }\n\n meta property=\"og:title\" content=\"Zeph\" /\n\n meta property=\"og:description\" content=(image.tags.join(\" \")) /\n\n meta property=\"og:url\" content={ \"https://zeph.kotobank.ch/show/\" (image.id) } /\n\n meta property=\"og:image\" content={\"https://zeph.kotobank.ch/images/preview/\" (image.name)} /\n\n\n\n div style=\"width:100%;\" {\n", "file_path": "src/routes/image.rs", "rank": 
12, "score": 150002.38900494226 }, { "content": "/// Remove an image\n\npub fn delete(req: &mut Request) -> IronResult<Response> {\n\n let mut response = Response::new();\n\n\n\n let id = req.extensions.get::<Router>().and_then(|x| x.find(\"id\")).and_then(|x| x.parse::<i32>().ok()).unwrap();\n\n let image = match DB.lock().unwrap().get_image(id).unwrap() {\n\n Some(image) => image,\n\n None => return Ok(Response::with(status::NotFound))\n\n };\n\n\n\n Ok(match req.session().get::<Login>()? {\n\n Some(ref username) if Some(&username.0) == image.uploader.as_ref() => {\n\n let name = DB.lock().unwrap().delete_image(id).unwrap();\n\n remove_file(format!(\"{}/{}\", config!(\"images-directory\"), name)).unwrap();\n\n remove_file(format!(\"{}/preview/{}\", config!(\"images-directory\"), name)).unwrap();\n\n response\n\n .set_mut(Redirect(\"/\".to_string()))\n\n .set_mut(status::Found);\n\n response\n\n },\n\n Some(_) => {\n\n Response::with((status::Forbidden,\"You are not an uploader of this picture\"))\n\n },\n\n None => Response::with((status::Forbidden,\"Not logged in\"))\n\n })\n\n}\n\n\n", "file_path": "src/routes/image.rs", "rank": 13, "score": 150002.38900494226 }, { "content": "pub fn main(rc: &Receiver<()>) {\n\n let client = Client::new();\n\n let mut url_string = \"https://konachan.com/post.json?limit=100\".to_string();\n\n let mut page = 1;\n\n\n\n loop {\n\n let res = match req_and_parse(&client, &url_string) {\n\n Ok(x) => x,\n\n Err(_) => {\n\n thread::sleep(Duration::new(3,0));\n\n continue\n\n }\n\n };\n\n\n\n let images = res.as_array().unwrap();\n\n if images.is_empty() { break }\n\n\n\n let images = images.iter().fold(Vec::new(), |mut acc, x| {\n\n let image = x.as_object().unwrap();\n\n let tags = image[\"tags\"].as_str().unwrap().split_whitespace().map(String::from).collect::<Vec<_>>();\n", "file_path": "src/sync/konachan.rs", "rank": 14, "score": 117920.61467180274 }, { "content": "/// Tag parsing\n\nfn parse_tag(tag: &str) -> Tag {\n\n let tag = tag.replace('\\'', \"''\").replace('\\\\', r\"\\\\\");\n\n let all = tag.split(':').collect::<Vec<_>>();\n\n match all.len() {\n\n 1 => {\n\n let tag = all[0];\n\n if tag.starts_with('-') {\n\n Tag::Exclude(tag[1..].to_string())\n\n } else if tag.starts_with('*') {\n\n Tag::AnyWith(AnyWith::After(tag[1..].to_string()))\n\n } else if tag.ends_with('*') {\n\n let mut n = tag.to_string();\n\n n.pop();\n\n Tag::AnyWith(AnyWith::Before(n))\n\n } else {\n\n Tag::Include(tag.to_string())\n\n }\n\n },\n\n 2 => {\n\n let kind = all[0];\n", "file_path": "src/db/mod.rs", "rank": 15, "score": 115692.02207728577 }, { "content": "/// Parse tags w/ '|' FIXME its kinda bad?\n\nfn parse_tags(tags: &[String]) -> Vec<Tag> {\n\n let mut result = Vec::new();\n\n\n\n for (index,t) in tags.iter().enumerate() {\n\n match t.as_str() {\n\n \"|\" => {\n\n if let Some(second) = tags.get(index+1) {\n\n if index != 0 {\n\n if let Some(first) = tags.get(index-1) {\n\n result.push(Tag::Either(first.to_string(),second.to_string()))\n\n }\n\n }\n\n }\n\n },\n\n\n\n _ => {\n\n let next = tags.get(index+1);\n\n if next.is_none() || next.map(|x| *x == \"|\") == Some(false) {\n\n if index != 0 {\n\n let prev = tags.get(index-1);\n", "file_path": "src/db/mod.rs", "rank": 16, "score": 104973.21157770915 }, { "content": "pub fn open_config() -> Table {\n\n let mut file = match File::open(\"Config.toml\") {\n\n Ok(x) => x,\n\n Err(_) => panic!(\"No config file\")\n\n };\n\n let mut s = String::new();\n\n file.read_to_string(&mut s).unwrap();\n\n 
Parser::new(&s).parse().unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn arr_eq_test() {\n\n assert!(arr_eq(&mut vec![\"first\".to_string(), \"second\".to_string()],&mut vec![\"second\".to_string(), \"first\".to_string()]));\n\n }\n\n\n\n #[test]\n\n fn arr_incl_test() {\n\n assert!(includes(&vec![\"a\",\"b\"], &vec![\"a\", \"b\", \"c\"]));\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 17, "score": 101706.83999490018 }, { "content": "pub fn main(rc: &Receiver<()>) {\n\n let client = Client::new();\n\n let mut url_string = \"https://derpibooru.org/search.json?q=score.gt:0&filter_id=56027\".to_string();\n\n let mut page = 1;\n\n\n\n loop {\n\n let res = match req_and_parse(&client, &url_string) {\n\n Ok(x) => x,\n\n Err(_) => {\n\n thread::sleep(Duration::new(3,0));\n\n continue\n\n }\n\n };\n\n\n\n let images = res.as_object().unwrap()[\"search\"].as_array().unwrap();\n\n if images.is_empty() { break }\n\n\n\n let images = images.iter().fold(Vec::new(), |mut acc, x| {\n\n let image = x.as_object().unwrap();\n\n let mut rating = String::new();\n", "file_path": "src/sync/derpy.rs", "rank": 18, "score": 95437.25565617163 }, { "content": "pub fn main(rc: &Receiver<()>) {\n\n let client = Client::new();\n\n let mut url_string = \"http://gelbooru.com/index.php?page=dapi&s=post&q=index&json=1\".to_string();\n\n let mut page = 1;\n\n\n\n loop {\n\n let res = match req_and_parse(&client, &url_string) {\n\n Ok(x) => x,\n\n Err(_) => {\n\n thread::sleep(Duration::new(3,0));\n\n continue\n\n }\n\n };\n\n\n\n let images = res.as_array().unwrap();\n\n if images.is_empty() { break }\n\n\n\n let images = images.iter().fold(Vec::new(), |mut acc, x| {\n\n let image = x.as_object().unwrap();\n\n let tags = image[\"tags\"].as_str().unwrap().split_whitespace().map(String::from).collect::<Vec<_>>();\n", "file_path": "src/sync/gelbooru.rs", "rank": 19, "score": 95437.25565617163 }, { "content": "pub fn main(rc: &Receiver<()>) {\n\n let client = Client::new();\n\n let mut url_string = \"http://danbooru.donmai.us/posts.json?limit=100\".to_string();\n\n let mut page = 1;\n\n\n\n loop {\n\n let res = match req_and_parse(&client, &url_string) {\n\n Ok(x) => x,\n\n Err(_) => {\n\n thread::sleep(Duration::new(3,0));\n\n continue\n\n }\n\n };\n\n\n\n let images = res.as_array().unwrap();\n\n if images.is_empty() { break }\n\n\n\n let images = images.iter().fold(Vec::new(), |mut acc, x| {\n\n let image = x.as_object().unwrap();\n\n let tags = image[\"tag_string\"].as_str().unwrap().split_whitespace().map(String::from).collect::<Vec<_>>();\n", "file_path": "src/sync/danbooru.rs", "rank": 20, "score": 95437.25565617163 }, { "content": "pub fn main(rc: &Receiver<()>) {\n\n let client = Client::new();\n\n let mut url_string = \"https://e621.net/post/index.json?limit=300\".to_string(); // limit is actually 320\n\n\n\n loop {\n\n let res = match req_and_parse(&client, &url_string) {\n\n Ok(x) => x,\n\n Err(_) => {\n\n thread::sleep(Duration::new(3,0));\n\n continue\n\n }\n\n };\n\n\n\n let images = res.as_array().unwrap();\n\n if images.is_empty() { break }\n\n\n\n let before_id = images[images.len()-1]\n\n .as_object()\n\n .map(|x| &x[\"id\"])\n\n .and_then(|x| x.as_u64())\n", "file_path": "src/sync/e621.rs", "rank": 21, "score": 95437.25565617163 }, { "content": "/// Do the prepare/download/add-to-db chores on an image list\n\n/// and stop if console sends `kill` signal\n\nfn process_downloads(client: &Client, images: &[Image], recv: &Receiver<()>) -> Result<(),()> {\n\n let 
images_c = DB.lock().unwrap().get_images(None,0).unwrap();\n\n\n\n let printed = if includes(&images.iter().map(|x| x.name.clone()).collect::<Vec<_>>(), &images_c.iter().map(|x| x.name.clone()).collect::<Vec<_>>()) {\n\n log(format!(\"ALREADY DONE {} ~ {}\", images.first().unwrap().name, images.last().unwrap().name));\n\n true\n\n } else { false };\n\n\n\n for im in images {\n\n match recv.try_recv() {\n\n Ok(_) | Err(TryRecvError::Disconnected) => {\n\n return Err(());\n\n }\n\n Err(TryRecvError::Empty) => {}\n\n }\n\n\n\n if !images_c.iter().any(|x| x.name == im.name ) {\n\n if let Err(er) = if im.got_from == \"konachan\" || im.got_from == \"danbooru\" {\n\n download(&Client::new(), im)\n\n } else {\n", "file_path": "src/sync/mod.rs", "rank": 22, "score": 90920.85191964179 }, { "content": "/// Are arrays equeal?\n\npub fn arr_eq<T: Ord + PartialEq>(first: &mut Vec<T>, second: &mut Vec<T>) -> bool {\n\n first.sort();\n\n second.sort();\n\n first == second\n\n}\n\n\n\n\n", "file_path": "src/utils.rs", "rank": 23, "score": 87596.85526605585 }, { "content": "/// Download image and add it to DB\n\nfn download(client: &Client, im: &Image) -> Result<(), HyperError> {\n\n let mut res = client.get(&im.url)\n\n .header(UserAgent(\"Zeph/1.0\".to_owned()))\n\n .send()?;\n\n\n\n let mut body = Vec::new();\n\n res.read_to_end(&mut body).unwrap();\n\n let imb = ImageBuilder::new(&im.name, &im.tags)\n\n .got_from(&im.got_from)\n\n .original_link(&im.post_url)\n\n .uploader(\"sync\")\n\n .rating(im.rating)\n\n .score(im.score)\n\n .finalize();\n\n DB.lock().unwrap().add_image(&imb).unwrap();\n\n\n\n save_image(Path::new(config!(\"images-directory\")), &im.name, &body);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sync/mod.rs", "rank": 24, "score": 86202.79876479061 }, { "content": "/// Get and parse JSON\n\nfn req_and_parse(client: &Client, url: &str) -> Result<Value, HyperError> {\n\n let mut res = match client.get(url)\n\n .header(UserAgent(\"Zeph/1.0\".to_owned()))\n\n .send() {\n\n Ok(x) => x,\n\n Err(x) => {\n\n log(format!(\"ERROR: {}\", x));\n\n return Err(x)\n\n }\n\n };\n\n\n\n let mut body = String::new();\n\n res.read_to_string(&mut body).unwrap();\n\n\n\n Ok(Value::from_str(&body).unwrap())\n\n}\n", "file_path": "src/sync/mod.rs", "rank": 25, "score": 81008.53392772586 }, { "content": "/// Save image & generate preview\n\npub fn save_image(dir: &Path, name: &str, file: &[u8]) {\n\n if read_dir(config!(\"images-directory\")).is_err() { create_dir(config!(\"images-directory\")).unwrap(); }\n\n if read_dir(format!(\"{}/preview\", config!(\"images-directory\"))).is_err() { create_dir(format!(\"{}/preview\", config!(\"images-directory\"))).unwrap(); }\n\n\n\n let prev = match image::load_from_memory(file) {\n\n Ok(x) => x.resize(500, 500, FilterType::Nearest),\n\n Err(x) => {\n\n log(x);\n\n return\n\n }\n\n };\n\n\n\n let mut f = File::create(dir.join(name)).unwrap();\n\n let mut prevf = File::create(dir.join(\"preview\").join(name)).unwrap();\n\n\n\n f.write(file).unwrap();\n\n prev.save(&mut prevf, image::JPEG).unwrap();\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 26, "score": 80315.11836652164 }, { "content": "#[derive(Debug,Clone)]\n\nenum Tag {\n\n /// Just a tag\n\n Include(String),\n\n /// Exlude tag\n\n Exclude(String),\n\n /// Rating, e.g. 
rating:s for safe\n\n Rating(Vec<String>),\n\n /// Partial tag\n\n AnyWith(AnyWith),\n\n /// from:derpibooru,konachan etc.\n\n From(Vec<String>),\n\n /// Uploader search\n\n Uploader(Vec<String>),\n\n /// Sorting\n\n OrderBy(OrderBy, AscDesc),\n\n // Either left or right with |\n\n Either(String,String),\n\n // File format\n\n Format(Vec<String>),\n\n}\n", "file_path": "src/db/mod.rs", "rank": 27, "score": 74909.12011407268 }, { "content": "pub fn exec_command(input: &str) {\n\n let mut senders = SENDERS.lock().unwrap();\n\n let mut id = ID.lock().unwrap();\n\n\n\n let input = input.trim();\n\n if input.starts_with(\"sync\") {\n\n if let Some(func) = input.split_whitespace().collect::<Vec<_>>().get(1) {\n\n let (sendr, recvr) = mpsc::channel::<()>();\n\n senders.insert(*id, sendr);\n\n match *func {\n\n \"derpy\" => { thread::spawn(move || sync::derpy::main(&recvr)); },\n\n \"e621\" => { thread::spawn(move || sync::e621::main(&recvr)); },\n\n \"dan\" => { thread::spawn(move || sync::danbooru::main(&recvr)); }\n\n \"kona\" => { thread::spawn(move || sync::konachan::main(&recvr)); }\n\n \"gel\" => { thread::spawn(move || sync::gelbooru::main(&recvr)); }\n\n _ => { log(\"Error: function not found\") }\n\n };\n\n log(format!(\"ID: {}\", *id));\n\n *id += 1;\n\n } else { log(\"Use sync <name>\") }\n", "file_path": "src/utils.rs", "rank": 28, "score": 74024.81150437026 }, { "content": "/// Log something\n\npub fn log<T: Display>(s: T) {\n\n LOG.lock().unwrap().push(format!(\"{}\", s));\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 29, "score": 70390.03076261788 }, { "content": "var TAGS_SET = false;\n", "file_path": "assets/js/main.js", "rank": 30, "score": 61547.652292500105 }, { "content": "var CURR_LOG = [];\n", "file_path": "assets/js/admin.js", "rank": 31, "score": 61465.39565943854 }, { "content": "/// Second includes first?\n\npub fn includes<T: PartialEq>(first: &[T], second: &[T]) -> bool {\n\n let r = first.len();\n\n let mut c = 0;\n\n for f in first {\n\n if second.iter().any(|x| x == f) {\n\n c += 1;\n\n }\n\n }\n\n\n\n r == c\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 32, "score": 58882.95534355342 }, { "content": " score: score as i32\n\n });\n\n acc\n\n });\n\n\n\n if process_downloads(&client, &images, rc).is_err() { break }\n\n\n\n page += 1;\n\n\n\n url_string = format!(\"https://konachan.com/post.json?page={}&limit=100\", page);\n\n }\n\n}\n", "file_path": "src/sync/konachan.rs", "rank": 33, "score": 52778.370598481866 }, { "content": " let rating = image[\"rating\"].as_str().unwrap().chars().nth(0).unwrap();\n\n\n\n let url = image[\"file_url\"].as_str().unwrap().to_string();\n\n\n\n let ext = url.clone();\n\n let ext = ext.split('.').collect::<Vec<_>>();\n\n let ext = ext.last().unwrap();\n\n\n\n let id = image[\"id\"].as_i64().unwrap();\n\n let score = image[\"score\"].as_i64().unwrap();\n\n\n\n let name = format!(\"konachan_{}.{}\", id, ext);\n\n\n\n acc.push(Image{\n\n name: name.to_string(),\n\n got_from: \"konachan\".to_string(),\n\n url: url,\n\n tags: tags,\n\n rating: rating,\n\n post_url: format!(\"http://konachan.com/post/show/{}\", id),\n", "file_path": "src/sync/konachan.rs", "rank": 34, "score": 52777.04331031977 }, { "content": "extern crate hyper;\n\n\n\nuse self::hyper::client::Client;\n\n\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse super::{Image,process_downloads,req_and_parse};\n\n\n\nuse std::sync::mpsc::Receiver;\n\n\n", "file_path": "src/sync/konachan.rs", "rank": 35, "score": 52775.557223711556 }, { "content": " got_from: 
String,\n\n url: String,\n\n rating: char,\n\n post_url: String,\n\n score: i32\n\n}\n\n\n\npub mod e621;\n\npub mod derpy;\n\npub mod danbooru;\n\npub mod konachan;\n\npub mod gelbooru;\n\n\n\n/// Do the prepare/download/add-to-db chores on an image list\n\n/// and stop if console sends `kill` signal\n", "file_path": "src/sync/mod.rs", "rank": 36, "score": 51494.48808706178 }, { "content": " download(client, im)\n\n } {\n\n log(format!(\"ERROR: {}; SKIP\", er));\n\n continue\n\n } else {\n\n log(format!(\"DONE {}\", im.name));\n\n }\n\n } else {\n\n let m_image = images_c.iter().find(|x| x.name == im.name ).unwrap();\n\n let mut m_tags = m_image.tags.clone();\n\n\n\n if !arr_eq(&mut m_tags, &mut im.tags.clone()) || im.score != m_image.score {\n\n let imb = ImageBuilder::new(&im.name, &im.tags)\n\n .got_from(&im.got_from)\n\n .original_link(&im.post_url)\n\n .uploader(\"sync\")\n\n .rating(im.rating)\n\n .score(im.score)\n\n .finalize();\n\n DB.lock().unwrap().add_image(&imb).unwrap();\n\n log(format!(\"UPDATE tags / score on {}\", im.name));\n\n } else if !printed {\n\n log(format!(\"ALREADY DONE {}\", im.name));\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sync/mod.rs", "rank": 37, "score": 51494.2193214632 }, { "content": "use hyper::client::Client;\n\nuse hyper::header::UserAgent;\n\nuse hyper::Error as HyperError;\n\n\n\nuse std::io::Read;\n\nuse std::path::Path;\n\nuse std::str::FromStr;\n\n\n\nuse {DB,CONFIG};\n\nuse db::ImageBuilder;\n\nuse utils::*;\n\n\n\nuse std::sync::mpsc::{Receiver,TryRecvError};\n\n\n\nuse serde_json::Value;\n\n\n\n#[derive(Debug)]\n\npub struct Image {\n\n name: String,\n\n tags: Vec<String>,\n", "file_path": "src/sync/mod.rs", "rank": 38, "score": 51493.45491192042 }, { "content": "var CURR_LOG = [];\n\n\n\nfunction httpGetAsync(theUrl, callback) {\n\n var xmlHttp = new XMLHttpRequest();\n\n xmlHttp.onreadystatechange = function() {\n\n if (xmlHttp.readyState == 4 && xmlHttp.status == 200) { callback(xmlHttp.responseText); }\n\n }\n\n xmlHttp.open(\"GET\", theUrl, true);\n\n xmlHttp.send(null);\n\n}\n\n\n\nfunction httpPostAsync(theUrl, params/*, callback*/) {\n\n var body = \"\";\n\n for (var p in params) {\n\n body += encodeURIComponent(p) + \"=\" + encodeURIComponent(params[p]) + \"&\"\n\n }\n\n\n\n var xmlHttp = new XMLHttpRequest();\n\n xmlHttp.open(\"POST\", theUrl, true);\n\n xmlHttp.send(body);\n\n} // FIXME: And here it says \"Element not found\". 
What element..?\n\n\n\nfunction sendCommand(frm) {\n\n httpPostAsync(\"/admin\", { command: frm.comm.value });\n\n frm.comm.value = \"\"\n\n}\n\n\n\nfunction getLog() {\n\n var bl = document.getElementById(\"log-block\");\n\n\n\n httpGetAsync(\"/log\", function(text) {\n\n var body = JSON.parse(text);\n\n if (CURR_LOG.length != body.length) {\n\n bl.innerHTML = \"\";\n\n body.forEach(function(l) {\n\n var s = document.createTextNode(l);\n\n var br = document.createElement(\"br\");\n\n bl.appendChild(s);\n\n bl.appendChild(br);\n\n bl.scrollTop = bl.scrollHeight;\n\n });\n\n CURR_LOG = body\n\n }\n\n });\n\n}\n\n\n\nwindow.setInterval(getLog, 2000);\n", "file_path": "assets/js/admin.js", "rank": 39, "score": 49864.3488201594 }, { "content": "/// Structure to get login from session\n\nstruct Login(String);\n\nimpl SessionValue for Login {\n\n fn get_key() -> &'static str { \"username\" }\n\n fn into_raw(self) -> String { self.0 }\n\n fn from_raw(value: String) -> Option<Self> {\n\n if value.is_empty() {\n\n None\n\n } else {\n\n Some(Login(value))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 40, "score": 48793.905342321974 }, { "content": "#[derive(Debug,Clone)]\n\nenum AscDesc {\n\n Asc,\n\n Desc\n\n}\n\n\n\n/// Sorting, e.g. `sort:asc/desc:id/score`\n", "file_path": "src/db/mod.rs", "rank": 41, "score": 48075.46209238643 }, { "content": "fn main() {\n\n let router = router!(index: get \"/\" => index_n_search,\n\n more: get \"/more\" => more,\n\n search: get \"/search\" => index_n_search,\n\n user_stat: get \"/user_status\" => user_status,\n\n vote: get \"/vote_image\" => vote_image,\n\n about: get \"/about\" => about,\n\n admin: get \"/admin\" => admin,\n\n get_log: get \"/log\" => get_log,\n\n\n\n show: get \"/show/:id\" => show,\n\n delete: get \"/delete/:id\" => delete,\n\n similiar: get \"/similiar\" => similiar,\n\n\n\n adm_comm: post \"/admin\" => admin_command,\n\n login: post \"/login\" => login,\n\n upload_im: post \"/upload_image\" => upload_image,\n\n adduser: post \"/adduser\" => adduser);\n\n\n\n let mut mount = Mount::new();\n\n mount.mount(\"/\", router)\n\n .mount(\"/assets\", Static::new(Path::new(\"assets\")))\n\n .mount(\"/images\", Static::new(Path::new(config!(\"images-directory\"))));\n\n\n\n let mut chain = Chain::new(mount);\n\n chain.around(SessionStorage::new(SignedCookieBackend::new(time::now().to_timespec().sec.to_string().bytes().collect::<Vec<_>>())));\n\n\n\n Iron::new(chain).http(\"127.0.0.1:3000\").unwrap();\n\n}\n", "file_path": "src/main.rs", "rank": 42, "score": 44025.68440507179 }, { "content": "var DONE_LOADING = false;\n", "file_path": "assets/js/main.js", "rank": 43, "score": 40807.89371295996 }, { "content": "var DONE_LOADING = false;\n", "file_path": "assets/js/show.js", "rank": 44, "score": 40807.89371295996 }, { "content": "var LOADING_IN_PROGRESS = false;\n", "file_path": "assets/js/show.js", "rank": 45, "score": 40807.89371295996 }, { "content": "var LOAD_AT_A_TIME = 25;\n", "file_path": "assets/js/show.js", "rank": 46, "score": 40807.89371295996 }, { "content": "var LOADING_IN_PROGRESS = false;\n", "file_path": "assets/js/main.js", "rank": 47, "score": 40807.89371295996 }, { "content": "var LOAD_AT_A_TIME = 25;\n", "file_path": "assets/js/main.js", "rank": 48, "score": 40807.89371295996 }, { "content": "[![Build Status](https://travis-ci.org/koto-bank/zeph.svg?branch=master)](https://travis-ci.org/koto-bank/zeph)\n\n# Zeph\n\n\n\nA \\*booru written in Rust with Iron and PostgreSQL (though there is also an abandoned SQLite 
module..)\n\n\n\nAlso, some Postgres modules are required: `citext`, `smlar` and `hstore`.\n\n\n\n# Features\n\n\n\n* Synchronization with some other booru's (e.g. Gelbooru, Konachan) in `src/db`\n\n * Syncing is controlled with console in `/admin`\n\n* Search with tags, partial tags, uploader, etc (more info on `/about` page)\n\n* Sort images by ascending/descending of score/id (e.g. `sort:asc:score`)\n\n* HTTP API (it is there, but not well documented) <!-- TODO: document it.. -->\n\n* (Kind of) configurable\n\n * There is some basic configuration to do in `Config.toml`,\n\n `images-directory` is where the pictures are stored, `postgres-login` and `postgres-password` are used to connect to the database.\n\n * `contact-email` is optional and, if set, will be shown on `/about` page\n\n * `admin-username` is account to access `/admin` panel\n\n* Users and registration, passwords are encrypted with scrypt. Users can vote for images (not for `sync`ed, though, because their score is based on the original score and updates when you `sync`)\n\n* Similiar images, based on tags.\n\n\n\n# Running\n\n\n\nCurrently in works only on nightly rust.\n\n\n\nJust run it with `cargo run --release` or build it and place the compiled binary into the crate root.\n\n\n\n# Screenshots\n\n\n\n(All images belong to their respective authors)\n\n\n\n![Search page](/screenshots/screenshot_main.png?raw=true)\n\n![Image page](/screenshots/screenshot_show.png?raw=true)\n\n\n\n# Contributing\n\n\n\nContributions are hightly appreciated! Open a PR if you have features to add\n", "file_path": "README.md", "rank": 49, "score": 35353.1985948392 }, { "content": "var LOAD_AT_A_TIME = 25;\n\nvar TAGS_SET = false;\n\nvar DONE_LOADING = false;\n\nvar LOADING_IN_PROGRESS = false;\n\n\n\nfunction httpGetAsync(theUrl, callback) {\n\n var xmlHttp = new XMLHttpRequest();\n\n xmlHttp.onreadystatechange = function() {\n\n if (xmlHttp.readyState == 4 && xmlHttp.status == 200) { callback(xmlHttp.responseText); }\n\n }\n\n xmlHttp.open(\"GET\", theUrl, true);\n\n xmlHttp.send(null);\n\n}\n\n\n\nfunction getUrlParameter(name) {\n\n name = name.replace(/[\\[]/, '\\\\[').replace(/[\\]]/, '\\\\]');\n\n var regex = new RegExp('[\\\\?&]' + name + '=([^&#]*)');\n\n var results = regex.exec(location.search);\n\n return results === null ? 
'' : decodeURIComponent(results[1].replace(/\\+/g, ' '));\n\n};\n\n\n\nfunction setAttrs(elem, attrs) {\n\n for (var a in attrs) {\n\n elem.setAttribute(a, attrs[a]);\n\n }\n\n}\n\n\n\nfunction checkVisible(elm) {\n\n var rect = elm.getBoundingClientRect();\n\n var viewHeight = Math.max(document.documentElement.clientHeight, window.innerHeight);\n\n return !(rect.bottom < 0 || rect.top - viewHeight >= 0);\n\n}\n\n\n\nfunction loadMore() {\n\n LOADING_IN_PROGRESS = true;\n\n var image_block = document.getElementById(\"images\");;\n\n var query = \"/more?offset=\"+image_block.children.length;\n\n\n\n var spinner = document.createElement(\"div\");\n\n spinner.className = \"spinner\";\n\n image_block.appendChild(spinner);\n\n\n\n if (window.location.pathname.startsWith(\"/search\")) {\n\n query = query + \"&q=\" + getUrlParameter(\"q\");\n\n }\n\n\n\n httpGetAsync(query, function(text){\n\n var body = JSON.parse(text);\n\n if (body.length < LOAD_AT_A_TIME) {\n\n DONE_LOADING = true;\n\n }\n\n\n\n image_block.removeChild(spinner);\n\n body.forEach(function(image) {\n\n var link = document.createElement(\"a\");\n\n link.href = \"/show/\"+image.id;\n\n link.target = \"_blank\";\n\n var im = document.createElement(\"div\");\n\n im.title = image.tags.join(\" \");\n\n im.className = \"thumbnail\";\n\n im.style.backgroundImage = \"url(\\\"/images/preview/\"+image.name+\"\\\")\";\n\n\n\n link.appendChild(im);\n\n image_block.appendChild(link);\n\n });\n\n\n\n if (!TAGS_SET) {\n\n var imgs = Array.from(document.getElementsByClassName(\"thumbnail\"));\n\n var tags_block = document.getElementById(\"tags\");\n\n var tags = new Set(imgs.reduce(function(arr, im) {\n\n arr.push(im.title.split(\" \")[0]);\n\n return arr;\n\n },[]));\n\n\n\n tags.forEach(function(tag) {\n\n var link = document.createElement(\"a\");\n\n link.textContent = tag;\n\n link.href = \"/search?q=\" + tag;\n\n tags_block.appendChild(link);\n\n tags_block.appendChild(document.createElement(\"br\"));\n\n });\n\n TAGS_SET = true\n\n }\n\n LOADING_IN_PROGRESS = false;\n\n });\n\n}\n\n\n\nfunction drawUploadOrLogin() {\n\n var main_form = document.getElementById(\"login-or-upload-form\");\n\n var upload_button = document.getElementById(\"upload-button\");\n\n\n\n httpGetAsync(\"/user_status\", function(text){\n\n var body = JSON.parse(text);\n\n\n\n if (body[\"logined\"] == false) {\n\n var form = document.createElement(\"form\");\n\n setAttrs(form, {\n\n action: \"/login\",\n\n method: \"POST\"\n\n });\n\n\n\n var login = document.createElement(\"input\");\n\n setAttrs(login,{\n\n type: \"text\",\n\n name: \"login\",\n\n placeholder: \"Login\"\n\n });\n\n var pass = document.createElement(\"input\");\n\n setAttrs(pass, {\n\n type: \"password\",\n\n name: \"password\",\n\n placeholder: \"Password\"\n\n });\n\n var confirm_pass = document.createElement(\"input\");\n\n setAttrs(confirm_pass, {\n\n type: \"password\",\n\n name: \"confirm_password\",\n\n placeholder: \"Confirm password\"\n\n });\n\n var confirm_pass_br = document.createElement(\"br\");\n\n\n\n var sbm = document.createElement(\"input\");\n\n setAttrs(sbm, {\n\n type: \"submit\",\n\n value: \"Login\"\n\n });\n\n\n\n var lor = document.createElement(\"button\");\n\n lor.textContent = \"Sign up\";\n\n lor.onclick = function() {\n\n if (lor.textContent == \"Sign up\") {\n\n lor.textContent = \"Sign in\";\n\n sbm.value = \"Register\";\n\n login.placeholder = \"New login\";\n\n pass.placeholder = \"New password\";\n\n\n\n form.action = \"/adduser\";\n\n\n\n 
form.insertBefore(confirm_pass, sbm);\n\n form.insertBefore(confirm_pass_br, sbm);\n\n } else {\n\n lor.textContent = \"Sign up\";\n\n sbm.value = \"Login\";\n\n login.placeholder = \"Login\";\n\n pass.placeholder = \"Password\";\n\n\n\n form.action = \"/login\";\n\n\n\n try {\n\n form.removeChild(confirm_pass);\n\n form.removeChild(confirm_pass_br);\n\n } catch(err) { }\n\n }\n\n }\n\n\n\n main_form.appendChild(lor);\n\n form.appendChild(login);\n\n form.appendChild(pass);\n\n\n\n\n\n\n\n form.appendChild(sbm);\n\n main_form.appendChild(form);\n\n } else {\n\n upload_button.textContent = \"Upload image (as \" + body[\"name\"] + \")\";\n\n var form = document.createElement(\"form\");\n\n setAttrs(form, {\n\n action: \"/upload_image\",\n\n method: \"POST\",\n\n enctype: \"multipart/form-data\"\n\n });\n\n var file = document.createElement(\"input\");\n\n setAttrs(file, {\n\n type: \"file\",\n\n name: \"image\",\n\n accept: \"image/*\"});\n\n var tags = document.createElement(\"input\");\n\n setAttrs(tags,{\n\n type: \"text\",\n\n name: \"tags\",\n\n placeholder: \"Space separated tags\"});\n\n var sbm = document.createElement(\"input\");\n\n setAttrs(sbm,{\n\n type: \"submit\",\n\n value: \"Upload\"});\n\n form.appendChild(file);\n\n form.appendChild(tags);\n\n form.appendChild(sbm);\n\n main_form.appendChild(form);\n\n }\n\n });\n\n}\n\n\n\nfunction showUploadOrLogin() {\n\n var form = document.getElementById(\"login-or-upload-form\");\n\n\n\n if (form.style.bottom != \"7%\") {\n\n form.style.bottom = \"7%\";\n\n } else {\n\n form.style.bottom = \"-100%\";\n\n }\n\n}\n\n\n\nwindow.onload = function() {\n\n loadMore();\n\n drawUploadOrLogin();\n\n\n\n document.getElementById(\"tag-search-field\").value = getUrlParameter(\"q\");\n\n}\n\n\n\nwindow.onscroll = function(ev) {\n\n if ((window.innerHeight + window.scrollY) >= document.body.offsetHeight) {\n\n if (!DONE_LOADING && !LOADING_IN_PROGRESS){\n\n loadMore();\n\n }\n\n }\n\n};\n", "file_path": "assets/js/main.js", "rank": 50, "score": 33105.766665744304 }, { "content": "var LOAD_AT_A_TIME = 25;\n\nvar DONE_LOADING = false;\n\nvar LOADING_IN_PROGRESS = false;\n\n\n\nfunction httpGetAsync(theUrl, callback) {\n\n var xmlHttp = new XMLHttpRequest();\n\n xmlHttp.onreadystatechange = function() {\n\n if (xmlHttp.readyState == 4 && xmlHttp.status == 200) { callback(xmlHttp.responseText); }\n\n }\n\n xmlHttp.open(\"GET\", theUrl, true);\n\n xmlHttp.send(null);\n\n}\n\n\n\nfunction load(){\n\n var reg = /show\\/(\\d+)/;\n\n var id = reg.exec(window.location.pathname)[1];\n\n\n\n var vote_up_a = document.createElement(\"a\");\n\n vote_up_a.href = \"#\";\n\n vote_up_a.style.display = \"inline-block\";\n\n var vote_down_a = document.createElement(\"a\");\n\n vote_down_a.href = \"#\";\n\n vote_down_a.style.display = \"inline-block\";\n\n\n\n httpGetAsync(\"/user_status\", function(text){\n\n\n\n var uploader = document.getElementById(\"uploader\").getAttribute(\"value\");\n\n var score = document.getElementById(\"score\");\n\n var image_info =document.getElementById(\"image-info\");\n\n\n\n var userstatus = JSON.parse(text);\n\n if (userstatus[\"logined\"] == true && userstatus[\"name\"] == uploader) {\n\n var l = document.createElement(\"a\");\n\n l.href = \"/delete/\" + id;\n\n l.textContent = \"Delete image\";\n\n\n\n image_info.insertBefore(l, image_info.firstChild);\n\n image_info.insertBefore(document.createElement(\"br\"), l.nextSibling);\n\n }\n\n\n\n if (userstatus[\"logined\"] == true && uploader !== \"sync\") {\n\n var plus_b = 
document.createElement(\"div\");\n\n plus_b.className = \"vote-up\";\n\n plus_b.onclick = function() { httpGetAsync(\"/vote_image?vote=true&id=\" + id, function(res){\n\n console.log(res);\n\n if (parseInt(res) !== NaN) {\n\n score.textContent = \"Score: \" + res;\n\n } else {\n\n score.textContent = res;\n\n }\n\n })};\n\n vote_up_a.appendChild(plus_b);\n\n\n\n var minus_b = document.createElement(\"div\");\n\n minus_b.className = \"vote-down\";\n\n minus_b.onclick = function() { httpGetAsync(\"/vote_image?vote=false&id=\" + id, function(res){\n\n if (parseInt(res) !== NaN) {\n\n score.textContent = \"Score: \" + res;\n\n } else {\n\n score.textContent = res;\n\n }\n\n })};\n\n vote_down_a.appendChild(minus_b);\n\n\n\n var vote_area = document.getElementById(\"vote-area\");\n\n\n\n vote_area.appendChild(vote_up_a);\n\n vote_area.appendChild(vote_down_a);\n\n }\n\n });\n\n}\n\n\n\nfunction loadSimiliar() {\n\n LOADING_IN_PROGRESS = true;\n\n var reg = /show\\/(\\d+)/;\n\n var id = reg.exec(window.location.pathname)[1];\n\n\n\n var similiar_block = document.getElementById(\"similiar\");\n\n var query = \"/similiar?id=\" + id + \"&offset=\"+similiar_block.children.length;\n\n\n\n var spinner = document.createElement(\"div\");\n\n spinner.className = \"spinner\";\n\n similiar_block.appendChild(spinner);\n\n\n\n httpGetAsync(query, function(text){\n\n var body = JSON.parse(text);\n\n if (body.length < LOAD_AT_A_TIME) {\n\n DONE_LOADING = true;\n\n }\n\n\n\n similiar_block.removeChild(spinner);\n\n body.forEach(function(image) {\n\n var link = document.createElement(\"a\");\n\n link.href = \"/show/\"+image.id;\n\n link.target = \"_blank\";\n\n var im = document.createElement(\"div\");\n\n im.title = image.tags.join(\" \");\n\n im.className = \"thumbnail\";\n\n im.style.backgroundImage = \"url(\\\"/images/preview/\"+image.name+\"\\\")\";\n\n\n\n link.appendChild(im);\n\n similiar_block.appendChild(link);\n\n });\n\n LOADING_IN_PROGRESS = false;\n\n });\n\n}\n\n\n\nwindow.onload = function() {\n\n load();\n\n}\n\n\n\ndocument.addEventListener(\"DOMContentLoaded\", function(event) {\n\n loadSimiliar();\n\n});\n\n\n\nwindow.onscroll = function(ev) {\n\n if ((window.innerHeight + window.scrollY) >= document.body.offsetHeight) {\n\n if (!DONE_LOADING && !LOADING_IN_PROGRESS){\n\n loadSimiliar();\n\n }\n\n }\n\n};\n", "file_path": "assets/js/show.js", "rank": 51, "score": 33105.766665744304 }, { "content": "//! Routes that help to work w/ users\n\n\n\nuse DB;\n\n\n\nuse iron::prelude::*;\n\nuse iron::status;\n\nuse iron::modifiers::RedirectRaw as Redirect;\n\nuse iron::mime::{Mime, TopLevel, SubLevel, Attr, Value};\n\n\n\nuse urlencoded::UrlEncodedBody;\n\nuse session::SessionRequestExt;\n\n\n\nuse serde_json::to_value;\n\n\n\nuse Login;\n\n\n", "file_path": "src/routes/user.rs", "rank": 52, "score": 26988.8703278526 }, { "content": " } else {\n\n Response::with((status::Ok,\"User already exists\"))\n\n }\n\n },\n\n Err(e) => Response::with((status::InternalServerError, format!(\"Internal server error: {}\", e)))\n\n }\n\n } else {\n\n Response::with((status::BadRequest,\"Empty login/pass\"))\n\n }\n\n } else {\n\n Response::with((status::Ok,\"Password and confirmation are not equeal\"))\n\n }\n\n } else {\n\n Response::with((status::BadRequest,\"No data\"))\n\n })\n\n}\n", "file_path": "src/routes/user.rs", "rank": 53, "score": 26980.829169617686 }, { "content": "//! 
All the admin panel stuff\n\n\n\nuse {LOG,CONFIG,exec_command};\n\n\n\nuse iron::prelude::*;\n\nuse iron::status;\n\nuse iron::mime::{Mime, TopLevel, SubLevel, Attr, Value};\n\n\n\nuse urlencoded::UrlEncodedBody;\n\nuse session::SessionRequestExt;\n\n\n\nuse serde_json::to_value;\n\n\n\nuse Login;\n\n\n", "file_path": "src/routes/admin.rs", "rank": 54, "score": 26933.870799576907 }, { "content": " @if let Some(uploader) = image.uploader {\n\n a#uploader href={ \"/search?q=uploader:\" (uploader) } value=(uploader) { \"uploader:\" (uploader) } br /\n\n }\n\n div#vote-area div#score value=(image.score) { \"Score: \" (image.score) } br /\n\n @for tag in image.tags {\n\n a href={ \"/search?q=\" (tag) } { (tag) } br /\n\n }\n\n }\n\n }\n\n a href=\"/about\" style=\"opacity: 0.5;\" \"About Zeph & Help\"\n\n }\n\n div style=\"margin-left: 15%;\" {\n\n a href={ \"/images/\" (image.name) } {\n\n img#image-block style=\"display: block; margin: 0 auto;\" src={ \"/images/\" (image.name) } /\n\n }\n\n h4 style=\"margin-top: 2%;\" { \"Similiar images\" } br /\n\n div#similiar {} // Similiar w/ JS\n\n }\n\n }\n\n };\n\n\n\n Ok(Response::with((status::Ok, page)))\n\n}\n\n\n", "file_path": "src/routes/image.rs", "rank": 55, "score": 26617.23643212807 }, { "content": " div.tags-search {\n\n a href=\"/\" title=\"Boop!\" {\n\n img#nano-logo src=\"/assets/logo.jpg\" /\n\n h3 style=\"display: inline; vertical-align: 50%\" \"Zeph\"\n\n }\n\n form#tag-search-form action=\"/search\" {\n\n input#tag-search-field placeholder=\"Search\" name=\"q\" type=\"text\" /\n\n }\n\n div#id { \"#\" (image.id) }\n\n div#tags {\n\n div#image-info {\n\n @if let Some(original_link) = image.original_link {\n\n a#original-link href=(original_link) \"Original page\" br /\n\n }\n\n @if let Some(rating) = image.rating {\n\n a#rating href={ \"/search?q=rating:\" (rating) } value=(rating) { \"rating:\" (rating) } br /\n\n }\n\n @if let Some(got_from) = image.got_from {\n\n a#got_from href={ \"/search?q=from:\" (got_from) } value=(got_from) { \"from:\" (got_from) } br /\n\n }\n", "file_path": "src/routes/image.rs", "rank": 56, "score": 26616.55240962959 }, { "content": " let tags = match entries.fields.get(\"tags\") {\n\n Some(t) => t.split_whitespace().map(String::from).collect::<Vec<_>>(),\n\n None => return Ok(Response::with((status::BadRequest,\"No tags found\")))\n\n };\n\n\n\n let mut body = Vec::new();\n\n let _ = File::open(&savedfile.path).unwrap().read_to_end(&mut body);\n\n let name = DB.lock().unwrap().add_with_tags_name(&tags, filename.split('.').collect::<Vec<_>>()[1], &username).unwrap();\n\n\n\n save_image(Path::new(config!(\"images-directory\")), &name, &body);\n\n\n\n let mut response = Response::new();\n\n response\n\n .set_mut(Redirect(\"/\".to_string()))\n\n .set_mut(status::Found);\n\n Ok(response)\n\n },\n\n\n\n SaveResult::Error(e) => Ok(Response::with((status::BadRequest,format!(\"Server could not handle multipart POST! {:?}\", e))))\n\n }\n\n}\n\n\n", "file_path": "src/routes/image.rs", "rank": 57, "score": 26603.45529811696 }, { "content": "//! 
Routes to work with individual image (and `/show` page)\n\n\n\nuse {DB,CONFIG,save_image,VoteImageError};\n\n\n\nuse std::fs::{File,remove_file};\n\nuse std::io::Read;\n\nuse std::path::Path;\n\n\n\nuse iron::prelude::*;\n\nuse iron::status;\n\nuse iron::modifiers::RedirectRaw as Redirect;\n\nuse iron::mime::{Mime, TopLevel, SubLevel, Attr, Value};\n\n\n\n\n\nuse urlencoded::UrlEncodedQuery;\n\nuse router::Router;\n\nuse session::SessionRequestExt;\n\n\n\nuse multipart::server::{Multipart, SaveResult};\n\n\n\nuse serde_json::to_value;\n\n\n\nuse Login;\n\n\n\n/// Show an image\n", "file_path": "src/routes/image.rs", "rank": 58, "score": 26602.711977462895 }, { "content": " let values = all[1].split(',').map(String::from).collect::<Vec<_>>();\n\n match kind {\n\n \"rating\" => Tag::Rating(values),\n\n \"from\" => Tag::From(values),\n\n \"uploader\" => Tag::Uploader(values),\n\n \"format\" => Tag::Format(values),\n\n _ => Tag::Include(tag.to_string()) // Probably shouldn't be anything there?\n\n }\n\n },\n\n 3 => { // Probably sort, but should check\n\n if all[0] == \"sort\" {\n\n let aod = match all[1] {\n\n \"asc\" => AscDesc::Asc, // - -> +\n\n \"desc\" | _ => AscDesc::Desc, // + -> -\n\n };\n\n let by = match all[2] {\n\n \"score\" => OrderBy::Score,\n\n \"id\" | _ => OrderBy::Id,\n\n };\n\n\n", "file_path": "src/db/mod.rs", "rank": 63, "score": 25760.970935956288 }, { "content": "}\n\n\n\nimpl ImageBuilder {\n\n pub fn new(name: &str, tags: &[String]) -> Self {\n\n ImageBuilder{\n\n name: name.to_string(),\n\n tags: tags.to_owned(),\n\n got_from: None,\n\n original_link: None,\n\n uploader: None,\n\n score: 0,\n\n rating: None\n\n }\n\n }\n\n\n\n pub fn got_from(mut self, got_from: &str) -> Self {\n\n self.got_from = Some(got_from.to_string());\n\n self\n\n }\n\n\n", "file_path": "src/db/mod.rs", "rank": 64, "score": 25758.013059032477 }, { "content": " pub fn original_link(mut self, original_link: &str) -> Self {\n\n self.original_link = Some(original_link.to_string());\n\n self\n\n }\n\n\n\n pub fn uploader(mut self, uploader: &str) -> Self {\n\n self.uploader = Some(uploader.to_string());\n\n self\n\n }\n\n\n\n pub fn score(mut self, score: i32) -> Self {\n\n self.score = score;\n\n self\n\n }\n\n\n\n pub fn rating(mut self, rating: char) -> Self {\n\n self.rating = Some(rating);\n\n self\n\n }\n\n\n\n pub fn finalize(self) -> Self { self }\n\n}\n", "file_path": "src/db/mod.rs", "rank": 65, "score": 25756.117587427594 }, { "content": "#[derive(Debug,Clone,Serialize)]\n\npub struct Image {\n\n pub id: i32,\n\n pub name: String,\n\n pub tags: Vec<String>,\n\n pub got_from: Option<String>,\n\n pub original_link: Option<String>,\n\n pub uploader: Option<String>,\n\n pub rating: Option<char>,\n\n pub score: i32\n\n}\n\n\n\n/// Image voting error\n\npub enum VoteImageError {\n\n /// Voting up/down twice\n\n Already,\n\n /// No such image\n\n NoImage\n\n}\n\n\n\n/// Partial tags, e.g. 
`Some*`\n\n#[derive(Debug,Clone)]\n", "file_path": "src/db/mod.rs", "rank": 66, "score": 25755.84578761608 }, { "content": " Tag::OrderBy(by, aod)\n\n } else {\n\n Tag::Include(tag.to_string())\n\n }\n\n },\n\n _ => { // Shouldn't happen\n\n Tag::Include(tag.to_string())\n\n }\n\n }\n\n}\n\n\n\n/// Strcture to ease image addition\n\npub struct ImageBuilder {\n\n name: String,\n\n tags: Vec<String>,\n\n got_from: Option<String>,\n\n original_link: Option<String>,\n\n uploader: Option<String>,\n\n score: i32,\n\n rating: Option<char>\n", "file_path": "src/db/mod.rs", "rank": 67, "score": 25755.59661877933 }, { "content": "\n\n let tags = image[\"tags\"].as_str().unwrap().split(',').map(|x| x.trim().replace(\" \", \"_\")).filter_map(|x| {\n\n if x.starts_with(\"artist:\") {\n\n Some(x.split(':').collect::<Vec<_>>()[1].to_string())\n\n } else if x == \"safe\" || x == \"semi-grimdark\" {\n\n rating = \"s\".to_string();\n\n None\n\n } else if x == \"explicit\" || x == \"grimdark\" || x == \"grotesque\" {\n\n rating = \"e\".to_string();\n\n None\n\n } else if x == \"questionable\" || x == \"suggestive\" {\n\n rating = \"q\".to_string();\n\n None\n\n } else {\n\n Some(x.to_string())\n\n }}).collect::<Vec<_>>();\n\n let rating = rating.chars().collect::<Vec<_>>()[0];\n\n let url = format!(\"https:{}\", image[\"image\"].as_str().unwrap());\n\n let id = image[\"id\"].as_str().unwrap().parse::<u64>().unwrap();\n\n\n", "file_path": "src/sync/derpy.rs", "rank": 68, "score": 25751.839389643883 }, { "content": " let ext = image[\"file_name\"].as_str().unwrap().split('.').collect::<Vec<_>>();\n\n let ext = ext.last().unwrap();\n\n let name = format!(\"derpibooru_{}.{}\", id, ext);\n\n let score = image[\"score\"].as_i64().unwrap();\n\n\n\n\n\n acc.push(Image{\n\n name: name,\n\n got_from: \"derpi\".to_string(),\n\n url: url,\n\n tags: tags,\n\n rating: rating,\n\n post_url: format!(\"https://derpibooru.org/{}\", id),\n\n score: score as i32\n\n });\n\n acc\n\n });\n\n\n\n if process_downloads(&client, &images, rc).is_err() { break }\n\n\n\n page += 1;\n\n\n\n url_string = format!(\"https://derpibooru.org/search.json?q=score.gt:0&filter_id=56027&page={}\", page);\n\n }\n\n}\n", "file_path": "src/sync/derpy.rs", "rank": 69, "score": 25751.00370934153 }, { "content": "\n\n/// `pub` is used to switch DBs, though only postgres works TODO: fix sqlite sometime\n\npub mod postgres;\n\nmod sqlite;\n\n\n\n#[cfg(feature = \"sqlite\")]\n\npub use self::sqlite::Db;\n\n\n\n#[cfg(feature = \"postgresql\")]\n\npub use self::postgres::Db;\n\n\n\n\n", "file_path": "src/db/mod.rs", "rank": 70, "score": 25749.530098932857 }, { "content": " .unwrap();\n\n\n\n let images = images.iter().fold(Vec::new(), |mut acc, x| {\n\n let image = x.as_object().unwrap();\n\n let tags = image[\"tags\"].as_str().unwrap().split_whitespace().map(String::from).collect::<Vec<_>>();\n\n let rating = image[\"rating\"].as_str().unwrap().chars().nth(0).unwrap();\n\n\n\n let ext = image[\"file_ext\"].as_str().unwrap();\n\n if ext != \"webm\" && ext != \"swf\" && ext != \"mp4\" {\n\n let url = image[\"file_url\"].as_str().unwrap().to_string();\n\n let id = image[\"id\"].as_i64().unwrap();\n\n let name = format!(\"e621_{}.{}\", id, ext);\n\n let score = image[\"score\"].as_i64().unwrap();\n\n\n\n acc.push(Image{\n\n name: name.to_string(),\n\n got_from: \"e621\".to_string(),\n\n url: url,\n\n tags: tags,\n\n rating: rating,\n", "file_path": "src/sync/e621.rs", "rank": 71, "score": 25748.517785803742 }, { "content": " let rating = 
image[\"rating\"].as_str().unwrap().chars().nth(0).unwrap();\n\n\n\n if let Some(ext) = image.get(\"file_ext\") {\n\n let ext = ext.as_str().unwrap();\n\n\n\n if ext != \"webm\" && ext != \"swf\" && ext != \"mp4\" {\n\n let url = format!(\"http://danbooru.donmai.us{}\", image[\"file_url\"].as_str().unwrap().to_string());\n\n let id = image[\"id\"].as_i64().unwrap();\n\n let name = format!(\"danbooru_{}.{}\", id, ext);\n\n let score = image[\"score\"].as_i64().unwrap();\n\n\n\n acc.push(Image{\n\n name: name.to_string(),\n\n got_from: \"danbooru\".to_string(),\n\n url: url,\n\n tags: tags,\n\n rating: rating,\n\n post_url: format!(\"http://danbooru.donmai.us/posts/{}\", id),\n\n score: score as i32\n\n });\n", "file_path": "src/sync/danbooru.rs", "rank": 72, "score": 25748.33752390371 }, { "content": " score: score as i32\n\n });\n\n acc\n\n } else {\n\n acc\n\n }\n\n });\n\n\n\n if process_downloads(&client, &images, rc).is_err() { break }\n\n\n\n page += 1;\n\n\n\n url_string = format!(\"http://gelbooru.com//index.php?page=dapi&s=post&q=index&json=1&pid={}\", page);\n\n }\n\n}\n", "file_path": "src/sync/gelbooru.rs", "rank": 73, "score": 25747.913332530832 }, { "content": "extern crate hyper;\n\n\n\nuse self::hyper::client::Client;\n\n\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse super::{Image,process_downloads,req_and_parse};\n\n\n\nuse std::sync::mpsc::Receiver;\n\n\n", "file_path": "src/sync/danbooru.rs", "rank": 74, "score": 25747.713239602504 }, { "content": "extern crate hyper;\n\n\n\nuse self::hyper::client::Client;\n\n\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse super::{Image,process_downloads,req_and_parse};\n\n\n\nuse std::sync::mpsc::Receiver;\n\n\n", "file_path": "src/sync/gelbooru.rs", "rank": 75, "score": 25747.713239602504 }, { "content": "extern crate hyper;\n\n\n\nuse self::hyper::client::Client;\n\n\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse super::{Image,process_downloads,req_and_parse};\n\n\n\nuse std::sync::mpsc::Receiver;\n\n\n", "file_path": "src/sync/e621.rs", "rank": 76, "score": 25747.713239602504 }, { "content": "extern crate hyper;\n\n\n\nuse self::hyper::client::Client;\n\n\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse super::{Image,process_downloads,req_and_parse};\n\n\n\nuse std::sync::mpsc::Receiver;\n\n\n", "file_path": "src/sync/derpy.rs", "rank": 77, "score": 25747.713239602504 }, { "content": " let rating = image[\"rating\"].as_str().unwrap().chars().nth(0).unwrap();\n\n\n\n let url = image[\"file_url\"].as_str().unwrap().to_string();\n\n \n\n let ext = image[\"image\"].as_str().unwrap();\n\n let ext = ext.split('.').collect::<Vec<_>>();\n\n let ext = ext.last().unwrap();\n\n\n\n if *ext != \"webm\" && *ext != \"swf\" && *ext != \"mp4\" {\n\n let id = image[\"id\"].as_i64().unwrap();\n\n let name = format!(\"gelbooru_{}.{}\", id, ext);\n\n let score = image[\"score\"].as_i64().unwrap();\n\n\n\n acc.push(Image{\n\n name: name.to_string(),\n\n got_from: \"gelbooru\".to_string(),\n\n url: url,\n\n tags: tags,\n\n rating: rating,\n\n post_url: format!(\"http://gelbooru.com/index.php?page=post&s=view&id={}\", id),\n", "file_path": "src/sync/gelbooru.rs", "rank": 78, "score": 25747.70557880841 }, { "content": " if prev.is_none() || prev.map(|x| *x == \"|\") == Some(false) {\n\n result.push(parse_tag(t))\n\n }\n\n } else {\n\n result.push(parse_tag(t))\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/db/mod.rs", "rank": 79, "score": 25745.88812865754 }, { "content": " }\n\n 
}\n\n acc\n\n });\n\n\n\n if process_downloads(&client, &images, rc).is_err() { break }\n\n\n\n page += 1;\n\n\n\n url_string = format!(\"http://danbooru.donmai.us/posts.json?page={}&limit=100\", page);\n\n }\n\n}\n", "file_path": "src/sync/danbooru.rs", "rank": 80, "score": 25745.56415490036 }, { "content": " post_url: format!(\"https://e621.net/post/show/{}\", id),\n\n score: score as i32\n\n });\n\n acc\n\n } else {\n\n acc\n\n }\n\n });\n\n\n\n if process_downloads(&client, &images, rc).is_err() { break }\n\n\n\n url_string = format!(\"https://e621.net/post/index.json?before_id={}&limit=300\", before_id);\n\n }\n\n}\n", "file_path": "src/sync/e621.rs", "rank": 81, "score": 25744.53534130787 }, { "content": "#[derive(Debug,Clone)]\n\nenum AnyWith {\n\n After(String),\n\n Before(String)\n\n}\n\n\n\n/// Ascending/descending sort\n", "file_path": "src/db/mod.rs", "rank": 82, "score": 25740.46873738858 }, { "content": "#[derive(Debug,Clone)]\n\nenum OrderBy {\n\n Id,\n\n Score\n\n}\n\n\n", "file_path": "src/db/mod.rs", "rank": 83, "score": 24502.325772533815 }, { "content": " /// Join tags for tags that can be separated with comma, e.g. rating or uploader\n\n fn join_tags(kind: &str, values: &[String]) -> String {\n\n values.iter().map(|s| format!(\"{} = '{}'\", kind, s)).collect::<Vec<_>>().join(\" OR \")\n\n }\n\n\n\n fn extract_image(row: Row) -> Image {\n\n Image{\n\n id: row.get(\"id\"),\n\n name: row.get(\"name\"),\n\n tags: row.get(\"tags\"),\n\n got_from: row.get::<_, Option<String>>(\"got_from\"),\n\n original_link: row.get::<_, Option<String>>(\"original_link\"),\n\n rating: row.get::<_,Option<String>>(\"rating\").map(|x| x.to_string().chars().collect::<Vec<_>>()[0]),\n\n uploader: row.get::<_,Option<String>>(\"uploader\"),\n\n score: row.get::<_,i32>(\"score\")\n\n }\n\n }\n\n}\n", "file_path": "src/db/postgres.rs", "rank": 84, "score": 18.241603558343957 }, { "content": " }\n\n\n\n /// Checks if pass & login match.\n\n /// Option is used to indicate that user does (not) exist\n\n pub fn check_user(&self, login: &str, pass: &str) -> SQLResult<Option<bool>> {\n\n let pass_hash = self.0.query(\"SELECT * FROM USERS WHERE name = $1\", &[&login])?;\n\n if pass_hash.is_empty() {\n\n Ok(None)\n\n } else {\n\n let pass_hash = pass_hash.get(0).get::<_, String>(\"pass\");\n\n Ok(Some(scrypt_check(pass, &pass_hash).unwrap()))\n\n }\n\n }\n\n\n\n // true - `+`, false - `-`, returns count of votes\n\n pub fn vote_image(&self, login: &str, image_id: i32 ,vote: bool) -> SQLResult<Result<i32, VoteImageError>> {\n\n let tr = self.0.transaction()?;\n\n let votechar = if vote { \"+\" } else { \"-\" }.to_string();\n\n let previous = tr.query(\"SELECT votes -> $2 AS vote FROM users WHERE name = $1\", &[&login, &image_id.to_string()])?;\n\n\n", "file_path": "src/db/postgres.rs", "rank": 85, "score": 17.860416099272417 }, { "content": " $2,\n\n $3,\n\n $4,\n\n $5,\n\n $6,\n\n $7) ON CONFLICT (name) DO UPDATE SET tags = $2, score = $7\",\n\n &[&image.name,&image.tags,&image.got_from, &image.original_link,&image.rating.map(|x| x.to_string()), &image.uploader, &image.score]).unwrap();\n\n Ok(())\n\n }\n\n\n\n pub fn get_image(&self, id: i32) -> SQLResult<Option<Image>> {\n\n let rows = self.0.query(\"SELECT * FROM images WHERE id = $1\", &[&id])?;\n\n Ok(if !rows.is_empty() {\n\n Some(Db::extract_image(rows.get(0)))\n\n } else {\n\n None\n\n })\n\n }\n\n\n\n pub fn get_images<T: Into<Option<i32>>>(&self, take: T, skip: usize) -> SQLResult<Vec<Image>>{\n", "file_path": "src/db/postgres.rs", "rank": 86, 
"score": 17.519067440660663 }, { "content": " let take = match take.into() {\n\n Some(x) => x.to_string(),\n\n None => \"ALL\".to_string()\n\n };\n\n\n\n Ok(self.0.query(&format!(\"SELECT * FROM images ORDER BY id DESC LIMIT {limit} OFFSET {offset}\",\n\n limit = take,\n\n offset = skip as i32),&[])?\n\n .iter().fold(Vec::new(), |mut acc, row| {\n\n acc.push(Db::extract_image(row));\n\n acc\n\n }))\n\n }\n\n\n\n /// Search images by tags\n\n pub fn by_tags<T: Into<Option<i32>>>(&self, take: T, skip: usize, tags: &[String]) -> SQLResult<Vec<Image>> {\n\n let tags = parse_tags(tags);\n\n let order = tags.iter().filter_map(|x| {\n\n match *x {\n\n Tag::OrderBy(ref by, ref ascdesc) => {\n", "file_path": "src/db/postgres.rs", "rank": 87, "score": 17.389980424445866 }, { "content": " let lastnum = self.0.query(\"SELECT id FROM images ORDER BY id DESC LIMIT 1\", &[])?.get(0).get::<_, i32>(\"id\");\n\n\n\n let name = format!(\"{id}_{tags}.{ext}\",\n\n id = lastnum + 1,\n\n tags = tags.join(\"_\").replace(\"'\",\"''\"),\n\n ext = ext);\n\n self.add_image(&ImageBuilder::new(&name, tags).uploader(uploader).finalize())?;\n\n Ok(name)\n\n }\n\n\n\n /// Add image\n\n pub fn add_image(&self, image: &ImageBuilder) -> SQLResult<()> {\n\n self.0.execute(\"INSERT into images (name,\n\n tags,\n\n got_from,\n\n original_link,\n\n rating,\n\n uploader,\n\n score)\n\n VALUES ($1,\n", "file_path": "src/db/postgres.rs", "rank": 88, "score": 17.130248666523645 }, { "content": " let newcount = if !previous.is_empty() && previous.get(0).get::<_,Option<String>>(\"vote\") == Some(votechar.to_owned()) {\n\n tr.set_rollback();\n\n Err(VoteImageError::Already)\n\n } else {\n\n let res = if vote {\n\n tr.query(\"UPDATE images SET score = score + 1 WHERE id = $1 RETURNING score\", &[&image_id])?\n\n } else {\n\n tr.query(\"UPDATE images SET score = score - 1 WHERE id = $1 RETURNING score\", &[&image_id])?\n\n };\n\n if !res.is_empty() {\n\n tr.set_commit();\n\n Ok(res.get(0).get::<_,i32>(\"score\"))\n\n } else {\n\n tr.set_rollback();\n\n Err(VoteImageError::NoImage)\n\n }\n\n };\n\n\n\n tr.execute(\"UPDATE users SET votes = hstore($2,$3) WHERE name = $1\", &[&login, &image_id.to_string(), &votechar])?;\n\n\n", "file_path": "src/db/postgres.rs", "rank": 89, "score": 16.99567754300907 }, { "content": " values.push_str(\")\");\n\n\n\n let q = format!(\"{} {}\", fields, values);\n\n self.0.execute(&q, &[]).unwrap();\n\n Ok(())\n\n }\n\n\n\n pub fn get_image(&self, id: i32) -> SQLResult<Image> {\n\n self.0.query_row(\"SELECT * FROM images WHERE id = ?\", &[&id], Db::extract_all)\n\n }\n\n\n\n pub fn get_images<T: Into<Option<i32>>>(&self, take: T, skip: usize) -> SQLResult<Vec<Image>>{\n\n let take = match take.into() {\n\n Some(x) => x,\n\n None => -1\n\n };\n\n\n\n let mut st = self.0.prepare(&format!(\"SELECT * FROM images ORDER BY id DESC LIMIT {} OFFSET {}\", take, skip))?;\n\n let st = st.query_map(&[], Db::extract_all_ref)?.map(|x| x.unwrap());\n\n Ok(st.collect::<Vec<_>>())\n", "file_path": "src/db/sqlite.rs", "rank": 90, "score": 16.218450748761732 }, { "content": " None => -1\n\n };\n\n\n\n let mut st = self.0.prepare(&format!(\"SELECT * FROM images WHERE {} ORDER BY id DESC LIMIT {} OFFSET {}\", q, take, skip))?;\n\n let st = st.query_map(&[], Db::extract_all_ref)?.map(|x| x.unwrap());\n\n Ok(st.collect::<Vec<_>>())\n\n }\n\n\n\n fn extract_all(row: Row) -> Image {\n\n let id = row.get(0);\n\n let name = row.get(1);\n\n let mut tags = row.get::<i32,String>(2).split(',').map(String::from).collect::<Vec<_>>();\n\n let 
l = tags.len()-2;\n\n tags.remove(0); tags.remove(l);\n\n\n\n let got_from = row.get::<i32, Option<String>>(3).unwrap_or(\" \".to_string());\n\n let original_link = row.get::<i32,Option<String>>(4).unwrap_or(\" \".to_string());\n\n let rating = row.get::<i32,Option<String>>(5).unwrap_or(\" \".to_string()).chars().nth(0).unwrap_or(' ');\n\n\n\n Image{\n", "file_path": "src/db/sqlite.rs", "rank": 91, "score": 15.79799809561657 }, { "content": " Ok(newcount)\n\n }\n\n\n\n /// Find similiar images\n\n pub fn similiar<T: Into<Option<i32>>>(&self, id:i32, take: T, skip: usize) -> SQLResult<Vec<Image>> {\n\n let take = match take.into() {\n\n Some(x) => x.to_string(),\n\n None => \"ALL\".to_string()\n\n };\n\n\n\n Ok(self.0.query(&format!(\"SELECT * FROM images\n\n WHERE id != $1\n\n ORDER BY smlar(tags, (SELECT tags FROM images WHERE id = $1)) DESC\n\n LIMIT {limit} OFFSET {offset}\", limit = take, offset = skip as i32),&[&id])?\n\n .iter().fold(Vec::new(), |mut acc, row| {\n\n acc.push(Db::extract_image(row));\n\n acc\n\n }))\n\n }\n\n\n", "file_path": "src/db/postgres.rs", "rank": 92, "score": 14.966670628861664 }, { "content": "\n\n pub fn add_image<'a, T1: Into<Option<&'a str>>,\n\n T2: Into<Option<&'a str>>,\n\n C: Into<Option<char>>>(&self, name: &str, tags: &[String], got_from: T1, original_link: T2, rating: C) -> SQLResult<()> {\n\n let mut fields = \"INSERT INTO images (name, tags\".to_string();\n\n let mut values = format!(\"VALUES('{}', '{}'\", name, format!(\",{},\",tags.join(\",\").replace(\"'\",\"''\")));\n\n if let Some(x) = got_from.into() {\n\n fields.push_str(\", got_from\");\n\n values.push_str(&format!(\", '{}'\", x));\n\n }\n\n if let Some(x) = original_link.into() {\n\n fields.push_str(\", original_link\");\n\n values.push_str(&format!(\", '{}'\", x));\n\n }\n\n if let Some(x) = rating.into() {\n\n fields.push_str(\", rating\");\n\n values.push_str(&format!(\", '{}'\", x));\n\n }\n\n\n\n fields.push_str(\")\");\n", "file_path": "src/db/sqlite.rs", "rank": 93, "score": 14.516025213554087 }, { "content": " acc\n\n }))\n\n }\n\n\n\n pub fn delete_image(&self, id: i32) -> SQLResult<String> {\n\n let name = self.0.query(\"SELECT * FROM images WHERE id = $1\", &[&id])?.get(0).get::<_,String>(\"name\");\n\n self.0.execute(\"DELETE FROM images WHERE id = $1\", &[&id])?;\n\n Ok(name)\n\n }\n\n\n\n // true - all's OK, false - user already exists\n\n pub fn add_user(&self, login: &str, pass: &str) -> SQLResult<bool> {\n\n if self.0.query(\"SELECT * FROM users WHERE name = $1\", &[&login])?.is_empty() && login.to_lowercase() != \"sync\" {\n\n let pass = scrypt_simple(pass, &SCRYPT_PARAMS).unwrap();\n\n\n\n self.0.execute(\"INSERT INTO users (name,pass) VALUES ($1,$2)\", &[&login, &pass])?;\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n", "file_path": "src/db/postgres.rs", "rank": 94, "score": 13.875456839521098 }, { "content": " tags TEXT[] NOT NULL,\n\n uploader TEXT,\n\n score INT NOT NULL DEFAULT 0,\n\n\n\n got_from TEXT,\n\n original_link TEXT,\n\n rating CHAR\n\n );\n\n\n\n CREATE TABLE IF NOT EXISTS users(\n\n id SERIAL PRIMARY KEY,\n\n name CITEXT UNIQUE NOT NULL,\n\n pass TEXT NOT NULL,\n\n votes HSTORE\n\n );\").unwrap();\n\n Db(conn)\n\n }\n\n\n\n /// Add image, generate name from tags & id\n\n pub fn add_with_tags_name(&self, tags: &[String], ext: &str, uploader: &str) -> SQLResult<String> {\n", "file_path": "src/db/postgres.rs", "rank": 95, "score": 13.39799620184716 }, { "content": " conn.execute(\"CREATE TABLE IF NOT EXISTS images(\n\n id INTEGER PRIMARY KEY 
AUTOINCREMENT,\n\n name TEXT NOT NULL UNIQUE,\n\n tags TEXT NOT NULL,\n\n\n\n got_from TEXT,\n\n original_link TEXT,\n\n rating CHAR);\",&[]).unwrap();\n\n Db(conn)\n\n }\n\n\n\n pub fn add_with_tags_name(&self, tags: &[String], ext: &str) -> SQLResult<String> {\n\n let lastnum = self.0.query_row(\"SELECT id FROM images ORDER BY id DESC LIMIT 1\", &[], |row| {\n\n row.get::<i32,i32>(0)\n\n }).unwrap();\n\n\n\n let name = format!(\"{}_{}.{}\", lastnum + 1 , tags.join(\"_\").replace(\"'\",\"''\"),ext);\n\n self.add_image(&name, tags, None, None, None)?;\n\n Ok(name)\n\n }\n", "file_path": "src/db/sqlite.rs", "rank": 96, "score": 13.335183467166694 }, { "content": " id: id,\n\n name: name,\n\n tags: tags,\n\n got_from: got_from,\n\n original_link: original_link,\n\n rating: rating\n\n }\n\n }\n\n\n\n fn extract_all_ref(row: &Row) -> Image {\n\n let id = row.get(0);\n\n let name = row.get(1);\n\n let mut tags = row.get::<i32,String>(2).split(',').map(String::from).collect::<Vec<_>>();\n\n let l = tags.len()-2;\n\n tags.remove(0); tags.remove(l);\n\n\n\n let got_from = row.get::<i32, Option<String>>(3).unwrap_or(\" \".to_string());\n\n let original_link = row.get::<i32,Option<String>>(4).unwrap_or(\" \".to_string());\n\n let rating = row.get::<i32,Option<String>>(5).unwrap_or(\" \".to_string()).chars().nth(0).unwrap_or(' ');\n\n\n", "file_path": "src/db/sqlite.rs", "rank": 97, "score": 13.0215683999653 }, { "content": " Tag::Uploader(ref u) => Db::join_tags(\"uploader\", u),\n\n Tag::OrderBy(_,_) => String::new(), // <- This one\n\n Tag::Either(ref f, ref s) => format!(\"(tags @> ARRAY['{}']) OR (tags @> ARRAY['{}'])\", f, s),\n\n Tag::Format(ref f) => f.iter().map(|s| format!(\"name ILIKE '%.{}'\", s)).collect::<Vec<_>>().join(\" OR \")\n\n }\n\n }).filter(|x| !x.is_empty()).collect::<Vec<_>>().join(\" AND \");\n\n let q = if !q.is_empty() { format!(\"WHERE {}\", q) } else { String::new() };\n\n\n\n let take = match take.into() {\n\n Some(x) => x.to_string(),\n\n None => \"ALL\".to_string()\n\n };\n\n\n\n Ok(self.0.query(&format!(\"SELECT * FROM images {query} ORDER BY {order} LIMIT {limit} OFFSET {offset}\",\n\n query = q,\n\n order = order,\n\n limit = take,\n\n offset = skip),&[])?\n\n .iter().fold(Vec::new(), |mut acc, row| {\n\n acc.push(Db::extract_image(row));\n", "file_path": "src/db/postgres.rs", "rank": 98, "score": 12.159454375456518 }, { "content": "\n\nuse db::{Db,VoteImageError};\n\nuse utils::{save_image,open_config,exec_command};\n\nuse routes::*;\n\n\n\nlazy_static! {\n\n pub static ref DB : Mutex<Db> = Mutex::new(Db::new());\n\n pub static ref CONFIG : Table = open_config();\n\n /// Used in utils\n\n pub static ref LOG : Mutex<Vec<String>> = Mutex::new(Vec::new());\n\n}\n\n\n\n/// Structure to get login from session\n", "file_path": "src/main.rs", "rank": 99, "score": 11.901432909978997 } ]
Rust
src/opus/imported_encode.rs
binast/range-encoding-rs
7dd51d3522a936b50b0ee02a351af5783d65e1e5
#![allow(dead_code, mutable_transmutes, non_camel_case_types, non_snake_case, non_upper_case_globals, unused_mut)] use std; use std::io::Write; pub type int32_t = std::os::raw::c_int; pub type uint32_t = std::os::raw::c_uint; pub type __uint16_t = std::os::raw::c_ushort; pub type __uint32_t = std::os::raw::c_uint; pub type __int64_t = std::os::raw::c_longlong; pub type __uint64_t = std::os::raw::c_ulonglong; pub type __darwin_size_t = std::os::raw::c_ulong; pub type __darwin_off_t = __int64_t; pub type opus_int32 = int32_t; pub type opus_uint32 = uint32_t; pub type size_t = __darwin_size_t; pub struct ec_enc<W> where W: std::io::Write, { pub out: W, pub end_window: ec_window, pub nend_bits: std::os::raw::c_int, pub nbits_total: std::os::raw::c_int, pub offs: opus_uint32, pub rng: opus_uint32, pub val: opus_uint32, pub ext: opus_uint32, pub rem: std::os::raw::c_int, pub end_buffer: Vec<u8>, } #[derive(Copy, Clone)] #[repr(C)] pub union unnamed { __f: std::os::raw::c_double, __u: std::os::raw::c_ulonglong, } #[derive(Copy, Clone)] #[repr(C)] pub struct unnamed_0 { pub __m: std::os::raw::c_ulonglong, pub __sexp: std::os::raw::c_ushort, } pub type ec_window = opus_uint32; pub type fpos_t = __darwin_off_t; fn celt_udiv(mut n: opus_uint32, mut d: opus_uint32) -> opus_uint32 { return n.wrapping_div(d); } pub unsafe fn ec_encode<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: std::os::raw::c_uint, mut _fh: std::os::raw::c_uint, mut _ft: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = celt_udiv((*_this).rng, _ft); if _fl > 0i32 as std::os::raw::c_uint { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add( (*_this) .rng .wrapping_sub(r.wrapping_mul(_ft.wrapping_sub(_fl))), ) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul(_fh.wrapping_sub(_fl)) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul(_ft.wrapping_sub(_fh))) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } unsafe fn ec_enc_normalize<W: Write>(mut _this: *mut ec_enc<W>) -> Result<(), std::io::Error> { while (*_this).rng <= 1u32 << 32i32 - 1i32 >> 8i32 { ec_enc_carry_out( _this, ((*_this).val >> 32i32 - 8i32 - 1i32) as std::os::raw::c_int, )?; (*_this).val = (*_this).val << 8i32 & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint); (*_this).rng <<= 8i32; (*_this).nbits_total += 8i32 } Ok(()) } unsafe fn ec_enc_carry_out<W: Write>( mut _this: *mut ec_enc<W>, mut _c: std::os::raw::c_int, ) -> Result<(), std::io::Error> { if _c as std::os::raw::c_uint != (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint) { let mut carry = _c >> 8i32; if (*_this).rem >= 0i32 { ec_write_byte(_this, ((*_this).rem + carry) as std::os::raw::c_uint)?; } if (*_this).ext > 0i32 as std::os::raw::c_uint { let mut sym = (1u32 << 8i32) .wrapping_sub(1i32 as std::os::raw::c_uint) .wrapping_add(carry as std::os::raw::c_uint) & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint); loop { ec_write_byte(_this, sym)?; (*_this).ext = (*_this).ext.wrapping_sub(1); if !((*_this).ext > 0i32 as std::os::raw::c_uint) { break; } } } (*_this).rem = (_c as std::os::raw::c_uint & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint)) as std::os::raw::c_int } else { (*_this).ext = (*_this).ext.wrapping_add(1) }; Ok(()) } unsafe fn ec_write_byte<W: Write>( mut _this: *mut ec_enc<W>, mut _value: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { (*_this).out.write_all(&[_value as u8])?; Ok(()) } pub unsafe fn ec_encode_bin<W: Write>( mut _this: *mut 
ec_enc<W>, mut _fl: std::os::raw::c_uint, mut _fh: std::os::raw::c_uint, mut _bits: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng >> _bits; if _fl > 0i32 as std::os::raw::c_uint { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add( (*_this) .rng .wrapping_sub(r.wrapping_mul((1u32 << _bits).wrapping_sub(_fl))), ) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul(_fh.wrapping_sub(_fl)) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul((1u32 << _bits).wrapping_sub(_fh))) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_bit_logp<W: Write>( mut _this: *mut ec_enc<W>, mut _val: std::os::raw::c_int, mut _logp: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng; let mut l = (*_this).val; let mut s = r >> _logp; r = (r as std::os::raw::c_uint).wrapping_sub(s) as opus_uint32 as opus_uint32; if 0 != _val { (*_this).val = l.wrapping_add(r) } (*_this).rng = if 0 != _val { s } else { r }; ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_icdf<W: Write>( mut _this: *mut ec_enc<W>, mut _s: std::os::raw::c_int, mut _icdf: *const std::os::raw::c_uchar, mut _ftb: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng >> _ftb; if _s > 0i32 { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add((*_this).rng.wrapping_sub( r.wrapping_mul(*_icdf.offset((_s - 1i32) as isize) as std::os::raw::c_uint), )) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul( (*_icdf.offset((_s - 1i32) as isize) as std::os::raw::c_int - *_icdf.offset(_s as isize) as std::os::raw::c_int) as std::os::raw::c_uint, ) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul(*_icdf.offset(_s as isize) as std::os::raw::c_uint)) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_uint<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: opus_uint32, mut _ft: opus_uint32, ) -> Result<(), std::io::Error> { assert!(_ft > 1); _ft = _ft.wrapping_sub(1); let mut ftb = ::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32 - _ft.leading_zeros() as i32; if ftb > 8i32 { ftb -= 8i32; let ft = (_ft >> ftb).wrapping_add(1i32 as std::os::raw::c_uint); let fl = _fl >> ftb; ec_encode(_this, fl, fl.wrapping_add(1i32 as std::os::raw::c_uint), ft)?; ec_enc_bits( _this, _fl & ((1i32 as opus_uint32) << ftb).wrapping_sub(1u32), ftb as std::os::raw::c_uint, )?; } else { ec_encode( _this, _fl, _fl.wrapping_add(1i32 as std::os::raw::c_uint), _ft.wrapping_add(1i32 as std::os::raw::c_uint), )?; } return Ok(()); } pub unsafe fn ec_enc_bits<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: opus_uint32, mut _bits: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut window = (*_this).end_window; let mut used = (*_this).nend_bits; assert!(_bits > 0); if (used as std::os::raw::c_uint).wrapping_add(_bits) > (::std::mem::size_of::<ec_window>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32) as std::os::raw::c_uint { loop { ec_write_byte_at_end( _this, window & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint), )?; window >>= 8i32; used -= 8i32; if !(used >= 8i32) { break; } } } window |= _fl << used; used = (used as std::os::raw::c_uint).wrapping_add(_bits) as std::os::raw::c_int as std::os::raw::c_int; (*_this).end_window = window; (*_this).nend_bits = used; (*_this).nbits_total = ((*_this).nbits_total as 
std::os::raw::c_uint).wrapping_add(_bits) as std::os::raw::c_int as std::os::raw::c_int; return Ok(()); } unsafe fn ec_write_byte_at_end<W: Write>( mut _this: *mut ec_enc<W>, mut _value: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { (*_this).end_buffer.push(_value as u8); Ok(()) } pub unsafe fn ec_enc_done<W: Write>(mut _this: *mut ec_enc<W>) -> Result<(), std::io::Error> { let mut l = 32i32 - (::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32 - (*_this).rng.leading_zeros() as i32); let mut msk = (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint) >> l; let mut end = (*_this).val.wrapping_add(msk) & !msk; if end | msk >= (*_this).val.wrapping_add((*_this).rng) { l += 1; msk >>= 1i32; end = (*_this).val.wrapping_add(msk) & !msk; }; while l > 0i32 { ec_enc_carry_out(_this, (end >> 32i32 - 8i32 - 1i32) as std::os::raw::c_int)?; end = end << 8i32 & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint); l -= 8i32; } if (*_this).rem >= 0i32 || (*_this).ext > 0i32 as std::os::raw::c_uint { ec_enc_carry_out(_this, 0i32)?; }; let mut window = (*_this).end_window; let mut used = (*_this).nend_bits; while used >= 8i32 { ec_write_byte_at_end( _this, window & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint), )?; window >>= 8i32; used -= 8i32; } if used > 0i32 { *(*_this).end_buffer.last_mut().unwrap() |= window as u8; }; for byte in (*_this).end_buffer.iter().rev() { ec_write_byte(_this, *byte as u32)?; } Ok(()) }
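For orientation, the file above exposes the raw range-encoder entry points (ec_enc, ec_encode, ec_enc_done). A minimal sketch of driving them directly might look like the following. This is hedged: encode_one_symbol is a hypothetical helper name, the sketch assumes it lives in the same module as the definitions above, and the initial field values mirror the crate's Writer::new wrapper (reproduced later in this row's context items).

// Hypothetical helper (not in the source): encode one symbol whose cumulative
// range is [fl, fh) out of a total frequency ft, then flush the encoder.
// Requires 0 <= fl < fh <= ft and ft > 0.
fn encode_one_symbol<W: std::io::Write>(
    out: W,
    fl: std::os::raw::c_uint,
    fh: std::os::raw::c_uint,
    ft: std::os::raw::c_uint,
) -> Result<W, std::io::Error> {
    let mut enc = ec_enc {
        out,
        end_window: 0,
        nend_bits: 0,
        nbits_total: 33,
        offs: 0,
        rng: std::u32::MAX / 2 + 1, // initial range, as in the crate's Writer::new
        rem: -1,
        val: 0,
        ext: 0,
        end_buffer: vec![],
    };
    unsafe {
        ec_encode(&mut enc, fl, fh, ft)?; // narrow the range to the symbol's slice [fl, fh) of ft
        ec_enc_done(&mut enc)?;           // flush carries, pending 0xFF bytes and end-window bits
    }
    Ok(enc.out)
}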
#![allow(dead_code, mutable_transmutes, non_camel_case_types, non_snake_case, non_upper_case_globals, unused_mut)] use std; use std::io::Write; pub type int32_t = std::os::raw::c_int; pub type uint32_t = std::os::raw::c_uint; pub type __uint16_t = std::os::raw::c_ushort; pub type __uint32_t = std::os::raw::c_uint; pub type __int64_t = std::os::raw::c_longlong; pub type __uint64_t = std::os::raw::c_ulonglong; pub type __darwin_size_t = std::os::raw::c_ulong; pub type __darwin_off_t = __int64_t; pub type opus_int32 = int32_t; pub type opus_uint32 = uint32_t; pub type size_t = __darwin_size_t; pub struct ec_enc<W> where W: std::io::Write, { pub out: W, pub end_window: ec_window, pub nend_bits: std::os::raw::c_int, pub nbits_total: std::os::raw::c_int, pub offs: opus_uint32, pub rng: opus_uint32, pub val: opus_uint32, pub ext: opus_uint32, pub rem: std::os::raw::c_int, pub end_buffer: Vec<u8>, } #[derive(Copy, Clone)] #[repr(C)] pub union unnamed { __f: std::os::raw::c_double, __u: std::os::raw::c_ulonglong, } #[derive(Copy, Clone)] #[repr(C)] pub struct unnamed_0 { pub __m: std::os::raw::c_ulonglong, pub __sexp: std::os::raw::c_ushort, } pub type ec_window = opus_uint32; pub type fpos_t = __darwin_off_t; fn celt_udiv(mut n: opus_uint32, mut d: opus_uint32) -> opus_uint32 { return n.wrapping_div(d); } pub unsafe fn ec_encode<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: std::os::raw::c_uint, mut _fh: std::os::raw::c_uint, mut _ft: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = celt_udiv((*_this).rng, _ft); if _fl > 0i32 as std::os::raw::c_uint { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add( (*_this) .rng .wrapping_sub(r.wrapping_mul(_ft.wrapping_sub(_fl))), ) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul(_fh.wrapping_sub(_fl)) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul(_ft.wrapping_sub(_fh))) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } unsafe fn ec_enc_normalize<W: Write>(mut _this: *mut ec_enc<W>) -> Result<(), std::io::Error> { while (*_this).rng <= 1u32 << 32i32 - 1i32 >> 8i32 { ec_enc_carry_out( _this, ((*_this).val >> 32i32 - 8i32 - 1i32) as std::os::raw::c_int, )?; (*_this).val = (*_this).val << 8i32 & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint); (*_this).rng <<= 8i32; (*_this).nbits_total += 8i32 } Ok(()) } unsafe fn ec_enc_carry_out<W: Write>( mut _this: *mut ec_enc<W>, mut _c: std::os::raw::c_int, ) -> Result<(), std::io::Error> { if _c as std::os::raw::c_uint != (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint) { let mut carry = _c >> 8i32; if (*_this).rem >= 0i32 { ec_write_byte(_this, ((*_this).rem + carry) as std::os::raw::c_uint)?; } if (*_this).ext > 0i32 as std::os::raw::c_uint { let mut sym = (1u32 << 8i32)
this: *mut ec_enc<W>, mut _value: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { (*_this).out.write_all(&[_value as u8])?; Ok(()) } pub unsafe fn ec_encode_bin<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: std::os::raw::c_uint, mut _fh: std::os::raw::c_uint, mut _bits: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng >> _bits; if _fl > 0i32 as std::os::raw::c_uint { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add( (*_this) .rng .wrapping_sub(r.wrapping_mul((1u32 << _bits).wrapping_sub(_fl))), ) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul(_fh.wrapping_sub(_fl)) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul((1u32 << _bits).wrapping_sub(_fh))) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_bit_logp<W: Write>( mut _this: *mut ec_enc<W>, mut _val: std::os::raw::c_int, mut _logp: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng; let mut l = (*_this).val; let mut s = r >> _logp; r = (r as std::os::raw::c_uint).wrapping_sub(s) as opus_uint32 as opus_uint32; if 0 != _val { (*_this).val = l.wrapping_add(r) } (*_this).rng = if 0 != _val { s } else { r }; ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_icdf<W: Write>( mut _this: *mut ec_enc<W>, mut _s: std::os::raw::c_int, mut _icdf: *const std::os::raw::c_uchar, mut _ftb: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut r = (*_this).rng >> _ftb; if _s > 0i32 { (*_this).val = ((*_this).val as std::os::raw::c_uint).wrapping_add((*_this).rng.wrapping_sub( r.wrapping_mul(*_icdf.offset((_s - 1i32) as isize) as std::os::raw::c_uint), )) as opus_uint32 as opus_uint32; (*_this).rng = r.wrapping_mul( (*_icdf.offset((_s - 1i32) as isize) as std::os::raw::c_int - *_icdf.offset(_s as isize) as std::os::raw::c_int) as std::os::raw::c_uint, ) } else { (*_this).rng = ((*_this).rng as std::os::raw::c_uint) .wrapping_sub(r.wrapping_mul(*_icdf.offset(_s as isize) as std::os::raw::c_uint)) as opus_uint32 as opus_uint32 } ec_enc_normalize(_this)?; Ok(()) } pub unsafe fn ec_enc_uint<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: opus_uint32, mut _ft: opus_uint32, ) -> Result<(), std::io::Error> { assert!(_ft > 1); _ft = _ft.wrapping_sub(1); let mut ftb = ::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32 - _ft.leading_zeros() as i32; if ftb > 8i32 { ftb -= 8i32; let ft = (_ft >> ftb).wrapping_add(1i32 as std::os::raw::c_uint); let fl = _fl >> ftb; ec_encode(_this, fl, fl.wrapping_add(1i32 as std::os::raw::c_uint), ft)?; ec_enc_bits( _this, _fl & ((1i32 as opus_uint32) << ftb).wrapping_sub(1u32), ftb as std::os::raw::c_uint, )?; } else { ec_encode( _this, _fl, _fl.wrapping_add(1i32 as std::os::raw::c_uint), _ft.wrapping_add(1i32 as std::os::raw::c_uint), )?; } return Ok(()); } pub unsafe fn ec_enc_bits<W: Write>( mut _this: *mut ec_enc<W>, mut _fl: opus_uint32, mut _bits: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { let mut window = (*_this).end_window; let mut used = (*_this).nend_bits; assert!(_bits > 0); if (used as std::os::raw::c_uint).wrapping_add(_bits) > (::std::mem::size_of::<ec_window>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32) as std::os::raw::c_uint { loop { ec_write_byte_at_end( _this, window & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint), )?; window >>= 8i32; used -= 8i32; if !(used >= 8i32) { break; } } } window |= _fl << used; used = (used as 
std::os::raw::c_uint).wrapping_add(_bits) as std::os::raw::c_int as std::os::raw::c_int; (*_this).end_window = window; (*_this).nend_bits = used; (*_this).nbits_total = ((*_this).nbits_total as std::os::raw::c_uint).wrapping_add(_bits) as std::os::raw::c_int as std::os::raw::c_int; return Ok(()); } unsafe fn ec_write_byte_at_end<W: Write>( mut _this: *mut ec_enc<W>, mut _value: std::os::raw::c_uint, ) -> Result<(), std::io::Error> { (*_this).end_buffer.push(_value as u8); Ok(()) } pub unsafe fn ec_enc_done<W: Write>(mut _this: *mut ec_enc<W>) -> Result<(), std::io::Error> { let mut l = 32i32 - (::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as std::os::raw::c_int * 8i32 - (*_this).rng.leading_zeros() as i32); let mut msk = (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint) >> l; let mut end = (*_this).val.wrapping_add(msk) & !msk; if end | msk >= (*_this).val.wrapping_add((*_this).rng) { l += 1; msk >>= 1i32; end = (*_this).val.wrapping_add(msk) & !msk; }; while l > 0i32 { ec_enc_carry_out(_this, (end >> 32i32 - 8i32 - 1i32) as std::os::raw::c_int)?; end = end << 8i32 & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint); l -= 8i32; } if (*_this).rem >= 0i32 || (*_this).ext > 0i32 as std::os::raw::c_uint { ec_enc_carry_out(_this, 0i32)?; }; let mut window = (*_this).end_window; let mut used = (*_this).nend_bits; while used >= 8i32 { ec_write_byte_at_end( _this, window & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint), )?; window >>= 8i32; used -= 8i32; } if used > 0i32 { *(*_this).end_buffer.last_mut().unwrap() |= window as u8; }; for byte in (*_this).end_buffer.iter().rev() { ec_write_byte(_this, *byte as u32)?; } Ok(()) }
.wrapping_sub(1i32 as std::os::raw::c_uint) .wrapping_add(carry as std::os::raw::c_uint) & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint); loop { ec_write_byte(_this, sym)?; (*_this).ext = (*_this).ext.wrapping_sub(1); if !((*_this).ext > 0i32 as std::os::raw::c_uint) { break; } } } (*_this).rem = (_c as std::os::raw::c_uint & (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint)) as std::os::raw::c_int } else { (*_this).ext = (*_this).ext.wrapping_add(1) }; Ok(()) } unsafe fn ec_write_byte<W: Write>( mut _
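A concrete trace of the carry-out branch above (a hedged reading, not taken from the source): suppose rem is 0xF3 and three 0xFF bytes are pending (ext = 3) when ec_enc_carry_out receives _c = 0x1A2. The carry is _c >> 8 = 1, so 0xF4 is written, the three pending bytes are emitted as (0xFF + 1) & 0xFF = 0x00, and rem becomes 0x1A2 & 0xFF = 0xA2. Bytes equal to 0xFF are only counted in ext instead of being written immediately, precisely so that a later carry can still propagate through them.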
random
[ { "content": "#[test]\n\nfn widths() {\n\n let widths = [1, 30, 5, 20];\n\n let probabilities = CumulativeDistributionFrequency::new(widths.iter().cloned().collect());\n\n\n\n let widths2: Vec<_> = probabilities.widths().collect();\n\n assert_eq!(widths2, widths)\n\n}\n\n\n", "file_path": "tests/simple.rs", "rank": 1, "score": 29883.5719973344 }, { "content": "fn bits_roundtrip() {\n\n eprintln!(\"Initializing sample\");\n\n let mut buf = vec![];\n\n let mut small_rng = rand::rngs::SmallRng::from_entropy();\n\n let mut total_bits = 0;\n\n for _ in 0..small_rng.gen_range(20, 40) {\n\n let byte : u16 = small_rng.gen_range(0, std::u16::MAX);\n\n total_bits += small_rng.gen_range(1, 16);\n\n buf.push((byte, total_bits));\n\n }\n\n\n\n\n\n eprintln!(\"Writing...\");\n\n let mut writer = opus::Writer::with_capacity(128);\n\n for (byte, bits) in &buf {\n\n eprintln!(\"Writing {} / {}\", byte, bits);\n\n writer.bits(*byte, *bits)\n\n .expect(\"Could not write bits\");\n\n }\n\n let data = writer.done();\n", "file_path": "tests/simple.rs", "rank": 2, "score": 28639.518375637068 }, { "content": "#[test]\n\nfn probabilities_roundtrip() {\n\n let symbols = ['g', 'a', 't', 'c'];\n\n let mut probabilities = CumulativeDistributionFrequency::new(vec![1, 30, 5, 20]);\n\n\n\n let mut test_with_sample = |sample: &str| {\n\n eprintln!(\"Writing...\");\n\n\n\n let mut writer = opus::Writer::new(vec![]);\n\n for c in sample.chars() {\n\n let index = symbols.iter().cloned().position(|x| x == c).unwrap();\n\n writer\n\n .symbol(index, &mut probabilities)\n\n .expect(\"Could not write symbol\");\n\n }\n\n let encoded = writer.done().expect(\"Could not finalize writer\");\n\n eprintln!(\"Wrote {} bytes to {} bytes\", sample.len(), encoded.len());\n\n\n\n eprintln!(\"Reading...\");\n\n let mut reader =\n\n opus::Reader::new(Cursor::new(encoded)).expect(\"Could not initialize reader\");\n", "file_path": "tests/simple.rs", "rank": 3, "score": 28639.518375637068 }, { "content": "/// Fixed point arithmetics.\n\npub trait Fixed {\n\n fn floor_log2(&self, n: usize) -> usize;\n\n fn ceil_log2(&self, n: usize) -> usize;\n\n fn align_power_of_two(&self, n: usize) -> usize;\n\n fn align_power_of_two_and_shift(&self, n: usize) -> usize;\n\n}\n\n\n\nimpl Fixed for usize {\n\n #[inline]\n\n fn floor_log2(&self, n: usize) -> usize {\n\n self & !((1 << n) - 1)\n\n }\n\n #[inline]\n\n fn ceil_log2(&self, n: usize) -> usize {\n\n (self + (1 << n) - 1).floor_log2(n)\n\n }\n\n #[inline]\n\n fn align_power_of_two(&self, n: usize) -> usize {\n\n self.ceil_log2(n)\n\n }\n\n #[inline]\n\n fn align_power_of_two_and_shift(&self, n: usize) -> usize {\n\n (self + (1 << n) - 1) >> n\n\n }\n\n}\n\nuse num_traits::PrimInt;\n\nuse std::mem::size_of;\n\n\n", "file_path": "src/util.rs", "rank": 4, "score": 26982.881682692892 }, { "content": "/// Fast `log` for primitive integers.\n\npub trait ILog : PrimInt {\n\n fn ilog(self) -> Self {\n\n Self::from(size_of::<Self>() * 8 - self.leading_zeros() as usize).unwrap()\n\n }\n\n}\n\n\n\nimpl<T> ILog for T where T: PrimInt {}\n", "file_path": "src/util.rs", "rank": 5, "score": 23454.621887057354 }, { "content": " mut _this: *mut ec_dec<R>,\n\n) -> Result<(), std::io::Error> {\n\n while (*_this).rng <= 1u32 << 32i32 - 1i32 >> 8i32 {\n\n (*_this).nbits_total += 8i32;\n\n (*_this).rng <<= 8i32;\n\n let mut sym = (*_this).rem;\n\n (*_this).rem = ec_read_byte(_this)? 
as i32;\n\n sym = (sym << 8i32 | (*_this).rem) >> 8i32 - ((32i32 - 2i32) % 8i32 + 1i32);\n\n (*_this).val = ((*_this).val << 8i32).wrapping_add(\n\n (1u32 << 8i32).wrapping_sub(1i32 as std::os::raw::c_uint)\n\n & !sym as std::os::raw::c_uint,\n\n ) & (1u32 << 32i32 - 1i32).wrapping_sub(1i32 as std::os::raw::c_uint)\n\n }\n\n Ok(())\n\n}\n\nunsafe extern \"C\" fn ec_read_byte<R: Read>(\n\n mut _this: *mut ec_dec<R>,\n\n) -> Result<u8, std::io::Error> {\n\n let mut buf = [0];\n\n if let Err(err) = (*_this).inp.read_exact(&mut buf) {\n", "file_path": "src/opus/imported_decode.rs", "rank": 9, "score": 17.569482587480415 }, { "content": " mut _fh: std::os::raw::c_uint,\n\n mut _ft: std::os::raw::c_uint,\n\n) -> Result<(), std::io::Error> {\n\n let mut s = (*_this).ext.wrapping_mul(_ft.wrapping_sub(_fh));\n\n (*_this).val =\n\n ((*_this).val as std::os::raw::c_uint).wrapping_sub(s) as opus_uint32 as opus_uint32;\n\n (*_this).rng = if _fl > 0i32 as std::os::raw::c_uint {\n\n (*_this).ext.wrapping_mul(_fh.wrapping_sub(_fl))\n\n } else {\n\n (*_this).rng.wrapping_sub(s)\n\n };\n\n ec_dec_normalize(_this)?;\n\n Ok(())\n\n}\n\n\n\npub unsafe extern \"C\" fn ec_dec_bit_logp<R: Read>(\n\n mut _this: *mut ec_dec<R>,\n\n mut _logp: std::os::raw::c_uint,\n\n) -> Result<i32, std::io::Error> {\n\n let mut r = (*_this).rng;\n", "file_path": "src/opus/imported_decode.rs", "rank": 10, "score": 16.39823837439346 }, { "content": " let mut d = (*_this).val;\n\n let mut s = r >> _logp;\n\n let mut ret = (d < s) as std::os::raw::c_int;\n\n if 0 == ret {\n\n (*_this).val = d.wrapping_sub(s)\n\n }\n\n (*_this).rng = if 0 != ret { s } else { r.wrapping_sub(s) };\n\n ec_dec_normalize(_this)?;\n\n Ok(ret)\n\n}\n\n\n\npub unsafe extern \"C\" fn ec_dec_icdf<R: Read>(\n\n mut _this: *mut ec_dec<R>,\n\n mut _icdf: *const std::os::raw::c_uchar,\n\n mut _ftb: std::os::raw::c_uint,\n\n) -> Result<i32, std::io::Error> {\n\n let mut t;\n\n let mut s = (*_this).rng;\n\n let mut d = (*_this).val;\n\n let mut r = s >> _ftb;\n", "file_path": "src/opus/imported_decode.rs", "rank": 13, "score": 15.761502638623286 }, { "content": "}\n\n\n\npub unsafe extern \"C\" fn ec_dec_init<R: Read>(\n\n mut _this: *mut ec_dec<R>,\n\n) -> Result<(), std::io::Error> {\n\n (*_this).end_window = 0i32 as ec_window;\n\n (*_this).nend_bits = 0i32;\n\n (*_this).nbits_total = 32i32 + 1i32 - (32i32 - ((32i32 - 2i32) % 8i32 + 1i32)) / 8i32 * 8i32;\n\n (*_this).rng = 1u32 << (32i32 - 2i32) % 8i32 + 1i32;\n\n (*_this).rem = ec_read_byte(_this)? 
as i32;\n\n (*_this).val = (*_this)\n\n .rng\n\n .wrapping_sub(1i32 as std::os::raw::c_uint)\n\n .wrapping_sub(\n\n ((*_this).rem >> 8i32 - ((32i32 - 2i32) % 8i32 + 1i32)) as std::os::raw::c_uint,\n\n );\n\n ec_dec_normalize(_this)?;\n\n Ok(())\n\n}\n\nunsafe extern \"C\" fn ec_dec_normalize<R: Read>(\n", "file_path": "src/opus/imported_decode.rs", "rank": 14, "score": 14.954359747108569 }, { "content": "pub type ec_window = opus_uint32;\n\n\n\npub struct ec_dec<R>\n\nwhere\n\n R: std::io::Read,\n\n{\n\n pub inp: R,\n\n pub end_window: ec_window,\n\n pub nend_bits: std::os::raw::c_int,\n\n pub nbits_total: std::os::raw::c_int,\n\n pub rng: opus_uint32,\n\n pub val: opus_uint32,\n\n pub ext: opus_uint32,\n\n pub rem: std::os::raw::c_int,\n\n}\n\nunsafe extern \"C\" fn celt_udiv(mut n: opus_uint32, mut d: opus_uint32) -> opus_uint32 {\n\n return n.wrapping_div(d);\n\n}\n\nunsafe extern \"C\" fn celt_sudiv(mut n: opus_int32, mut d: opus_int32) -> opus_int32 {\n\n return n / d;\n", "file_path": "src/opus/imported_decode.rs", "rank": 15, "score": 14.900158132334253 }, { "content": " rng: std::u32::MAX / 2 + 1,\n\n rem: -1,\n\n val: 0,\n\n ext: 0,\n\n end_buffer: vec![],\n\n }\n\n }\n\n }\n\n\n\n /// Encode the next symbol in line.\n\n pub fn symbol(&mut self, index: usize, icdf: &CumulativeDistributionFrequency) -> Result<(), std::io::Error> {\n\n let width = icdf.width();\n\n let segment = icdf.at_index(index).ok_or_else(|| {\n\n std::io::Error::new(std::io::ErrorKind::InvalidInput, \"Invalid symbol\")\n\n })?;\n\n unsafe {\n\n imported_encode::ec_encode(&mut self.state, segment.low, segment.next, width)?;\n\n };\n\n Ok(())\n\n }\n", "file_path": "src/opus/encode.rs", "rank": 16, "score": 14.815476597060835 }, { "content": " as std::os::raw::c_uint,\n\n ));\n\n}\n\n\n\npub unsafe extern \"C\" fn ec_decode_bin<R: Read>(\n\n mut _this: *mut ec_dec<R>,\n\n mut _bits: std::os::raw::c_uint,\n\n) -> std::os::raw::c_uint {\n\n (*_this).ext = (*_this).rng >> _bits;\n\n let mut s = (*_this).val.wrapping_div((*_this).ext);\n\n return (1u32 << _bits).wrapping_sub(s.wrapping_add(1u32).wrapping_add(\n\n (1u32 << _bits).wrapping_sub(s.wrapping_add(1u32))\n\n & -((1u32 << _bits < s.wrapping_add(1u32)) as std::os::raw::c_int)\n\n as std::os::raw::c_uint,\n\n ));\n\n}\n\n\n\npub unsafe extern \"C\" fn ec_dec_update<R: Read>(\n\n mut _this: *mut ec_dec<R>,\n\n mut _fl: std::os::raw::c_uint,\n", "file_path": "src/opus/imported_decode.rs", "rank": 18, "score": 13.721184636202526 }, { "content": " if let std::io::ErrorKind::UnexpectedEof = err.kind() {\n\n // Reading past the end returns 0.\n\n Ok(0)\n\n } else {\n\n Err(err)\n\n }\n\n } else {\n\n Ok(buf[0])\n\n }\n\n}\n\n\n\npub unsafe extern \"C\" fn ec_decode<R: Read>(\n\n mut _this: *mut ec_dec<R>,\n\n mut _ft: std::os::raw::c_uint,\n\n) -> std::os::raw::c_uint {\n\n (*_this).ext = celt_udiv((*_this).rng, _ft);\n\n let mut s = (*_this).val.wrapping_div((*_this).ext);\n\n return _ft.wrapping_sub(s.wrapping_add(1i32 as std::os::raw::c_uint).wrapping_add(\n\n _ft.wrapping_sub(s.wrapping_add(1i32 as std::os::raw::c_uint))\n\n & -((_ft < s.wrapping_add(1i32 as std::os::raw::c_uint)) as std::os::raw::c_int)\n", "file_path": "src/opus/imported_decode.rs", "rank": 19, "score": 13.642914602102628 }, { "content": "use CumulativeDistributionFrequency;\n\n\n\nuse opus::imported_decode;\n\n\n\nuse std;\n\n\n\npub struct Reader<R>\n\nwhere\n\n R: std::io::Read,\n\n{\n\n state: imported_decode::ec_dec<R>,\n\n}\n\n\n\nimpl<R> Reader<R>\n\nwhere\n\n R: std::io::Read,\n\n{\n\n 
/*\n\n pub fn from_boxed_slice(mut source: Box<[u8]>) -> Self {\n\n let state = unsafe {\n", "file_path": "src/opus/decode.rs", "rank": 22, "score": 12.384191703648359 }, { "content": "\n\n/*\n\n// FIXME: I actually don't understand `bits()` well enough\n\n// to publish it. /// Encode a sequence of raw bits, without any frequency information.\n\n pub fn bits(&mut self, bits: u16, size: usize) -> Result<(), std::io::Error> {\n\n unsafe {\n\n imported_encode::ec_enc_bits(&mut self.state,\n\n bits as u32,\n\n size as u32);\n\n self.check_status()?;\n\n }\n\n Ok(())\n\n }\n\n*/\n\n\n\n /// Flush and return the underlying stream.\n\n pub fn done(mut self) -> Result<W, std::io::Error> {\n\n unsafe {\n\n imported_encode::ec_enc_done(&mut self.state)?;\n\n };\n\n Ok(self.state.out)\n\n }\n\n}\n", "file_path": "src/opus/encode.rs", "rank": 23, "score": 12.300235021130906 }, { "content": " let mut ret = -1i32;\n\n loop {\n\n t = s;\n\n ret += 1;\n\n s = r.wrapping_mul(*_icdf.offset(ret as isize) as std::os::raw::c_uint);\n\n if !(d < s) {\n\n break;\n\n }\n\n }\n\n (*_this).val = d.wrapping_sub(s);\n\n (*_this).rng = t.wrapping_sub(s);\n\n ec_dec_normalize(_this)?;\n\n Ok(ret)\n\n}\n\n\n\n/*\n\n// ec_dec_unit and ec_dec_bits need the data to be packetized,\n\n// which is not something we do at least for the time being.\n\n\n\npub unsafe extern \"C\" fn ec_dec_uint<R: Read>(mut _this: *mut ec_dec<R>,\n", "file_path": "src/opus/imported_decode.rs", "rank": 24, "score": 12.083512288214513 }, { "content": " let mut state : imported_decode::ec_dec = std::mem::uninitialized();\n\n imported_decode::ec_dec_init(&mut state, source.as_mut_ptr(), source.len() as u32);\n\n state\n\n };\n\n Reader {\n\n source,\n\n state\n\n }\n\n }\n\n */\n\n pub fn new(input: R) -> Result<Self, std::io::Error> {\n\n let mut state = imported_decode::ec_dec {\n\n inp: input,\n\n // The rest will be initialized by `ec_dec_init`.\n\n end_window: 0,\n\n nend_bits: 0,\n\n nbits_total: 0,\n\n rng: 0,\n\n rem: 0,\n\n val: 0,\n", "file_path": "src/opus/decode.rs", "rank": 25, "score": 11.877548179779145 }, { "content": "\n\nuse ::{ CumulativeDistributionFrequency };\n\n\n\nuse opus::imported_encode;\n\n\n\nuse std;\n\n\n\npub struct Writer<W> where W: std::io::Write {\n\n state: imported_encode::ec_enc<W>,\n\n}\n\n\n\nimpl<W> Writer<W> where W: std::io::Write {\n\n pub fn new(out: W) -> Self {\n\n Self {\n\n state: imported_encode::ec_enc {\n\n out,\n\n end_window: 0,\n\n nend_bits: 0,\n\n nbits_total: 33,\n\n offs: 0,\n", "file_path": "src/opus/encode.rs", "rank": 26, "score": 11.596021317062766 }, { "content": " ext: 0,\n\n };\n\n unsafe {\n\n imported_decode::ec_dec_init(&mut state)?;\n\n }\n\n Ok(Reader { state })\n\n }\n\n\n\n /// Decode the next symbol in line.\n\n pub fn symbol(\n\n &mut self,\n\n icdf: &CumulativeDistributionFrequency,\n\n ) -> Result<u32, std::io::Error> {\n\n let index = unsafe {\n\n let frequency = imported_decode::ec_decode(&mut self.state, icdf.width());\n\n let indexed = icdf.find(frequency).ok_or_else(|| {\n\n std::io::Error::new(std::io::ErrorKind::InvalidInput, \"Invalid probability\")\n\n })?;\n\n imported_decode::ec_dec_update(\n\n &mut self.state,\n", "file_path": "src/opus/decode.rs", "rank": 29, "score": 11.22757778361495 }, { "content": " indexed.segment.low,\n\n indexed.segment.next,\n\n icdf.width(),\n\n )?;\n\n indexed.index\n\n };\n\n Ok(index as u32)\n\n }\n\n\n\n /*\n\n // FIXME: I actually don't understand `bits()` well enough\n\n // to publish it. 
/// Encode a sequence of raw bits, without any frequency information.\n\n pub fn bits(&mut self, size: usize) -> Result<u16, std::io::Error> {\n\n let result = unsafe {\n\n let result = imported_decode::ec_dec_bits(&mut self.state,\n\n size as u32);\n\n self.check_status()?;\n\n result as u16\n\n };\n\n Ok(result)\n\n }\n\n */\n\n\n\n pub fn done(self) {\n\n // FIXME: Nothing to do?\n\n }\n\n}\n", "file_path": "src/opus/decode.rs", "rank": 31, "score": 10.21556585947459 }, { "content": " }\n\n } else {\n\n _ft = _ft.wrapping_add(1);\n\n s = ec_decode(_this, _ft);\n\n ec_dec_update(_this, s, s.wrapping_add(1i32 as std::os::raw::c_uint),\n\n _ft);\n\n Ok(s)\n\n }\n\n}\n\n\n\npub unsafe extern \"C\" fn ec_dec_bits<R: Read>(mut _this: *mut ec_dec<R>,\n\n mut _bits: std::os::raw::c_uint) -> opus_uint32 {\n\n let mut window: ec_window = 0;\n\n let mut available: std::os::raw::c_int = 0;\n\n let mut ret: opus_uint32 = 0;\n\n window = (*_this).end_window;\n\n available = (*_this).nend_bits;\n\n if (available as std::os::raw::c_uint) < _bits {\n\n loop {\n\n window |=\n", "file_path": "src/opus/imported_decode.rs", "rank": 33, "score": 9.551046090203561 }, { "content": "#![allow(dead_code,\n\n mutable_transmutes,\n\n non_camel_case_types,\n\n non_snake_case,\n\n non_upper_case_globals,\n\n unused_mut)]\n\n\n\nuse std;\n\nuse std::io::Read;\n\n\n\npub type size_t = std::os::raw::c_ulong;\n\npub type int32_t = std::os::raw::c_int;\n\npub type uint32_t = std::os::raw::c_uint;\n\npub type __uint16_t = std::os::raw::c_ushort;\n\npub type __uint32_t = std::os::raw::c_uint;\n\npub type __int64_t = std::os::raw::c_longlong;\n\npub type __uint64_t = std::os::raw::c_ulonglong;\n\npub type __darwin_off_t = __int64_t;\n\npub type opus_int32 = int32_t;\n\npub type opus_uint32 = uint32_t;\n", "file_path": "src/opus/imported_decode.rs", "rank": 36, "score": 8.882238397644187 }, { "content": "}\n\nunsafe extern \"C\" fn ec_read_byte_from_end<R: Read>(mut _this: *mut ec_dec<R>)\n\n -> std::os::raw::c_int {\n\n return if (*_this).end_offs < (*_this).storage {\n\n (*_this).end_offs = (*_this).end_offs.wrapping_add(1);\n\n *(*_this).buf.offset((*_this).storage.wrapping_sub((*_this).end_offs)\n\n as isize) as std::os::raw::c_int\n\n } else { 0i32 };\n\n}\n\n\n\n*/\n", "file_path": "src/opus/imported_decode.rs", "rank": 38, "score": 7.584033362402277 }, { "content": " return Ordering::Less;\n\n }\n\n Ordering::Equal\n\n })\n\n .ok()?;\n\n Some(IndexedSegment {\n\n index,\n\n segment: self.segments[index].clone(),\n\n })\n\n }\n\n\n\n /// Find a value from its index\n\n pub fn at_index<'a>(&'a self, index: usize) -> Option<&'a Segment> {\n\n if index >= self.segments.len() {\n\n return None;\n\n }\n\n Some(&self.segments[index])\n\n }\n\n\n\n /// Return the number of values in this CDF\n\n pub fn len(&self) -> usize {\n\n self.segments.len()\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 39, "score": 7.385424101537463 }, { "content": "/// Implementation of the Opus range encoder.\n\npub mod opus {\n\n /// A c2rust-ified version of the Opus range decoder.\n\n mod imported_decode;\n\n mod imported_encode;\n\n\n\n mod decode;\n\n mod encode;\n\n\n\n pub use self::decode::Reader;\n\n pub use self::encode::Writer;\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Segment {\n\n /// First value part of the segment.\n\n low: u32,\n\n\n\n /// First value >= low **not** part of the segment.\n\n next: u32,\n", "file_path": "src/lib.rs", "rank": 40, "score": 6.169290042390167 }, { "content": " mut _ft: opus_uint32) -> Result<u32, ()> 
{\n\n let mut ft: std::os::raw::c_uint = 0;\n\n let mut s: std::os::raw::c_uint = 0;\n\n let mut ftb: std::os::raw::c_int = 0;\n\n assert!(_ft > 1);\n\n _ft = _ft.wrapping_sub(1);\n\n ftb =\n\n ::std::mem::size_of::<std::os::raw::c_uint>() as std::os::raw::c_ulong as\n\n std::os::raw::c_int * 8i32 - _ft.leading_zeros() as i32;\n\n if ftb > 8i32 {\n\n let mut t: opus_uint32 = 0;\n\n ftb -= 8i32;\n\n ft = (_ft >> ftb).wrapping_add(1i32 as std::os::raw::c_uint);\n\n s = ec_decode(_this, ft);\n\n ec_dec_update(_this, s, s.wrapping_add(1i32 as std::os::raw::c_uint), ft);\n\n t = s << ftb | ec_dec_bits(_this, ftb as std::os::raw::c_uint);\n\n if t <= _ft {\n\n Ok(t)\n\n } else {\n\n Err(())\n", "file_path": "src/opus/imported_decode.rs", "rank": 41, "score": 6.167319876316417 }, { "content": " width: u32,\n\n}\n\nimpl CumulativeDistributionFrequency {\n\n pub fn new(probabilities: Vec<u32>) -> Self {\n\n let mut segments = Vec::with_capacity(probabilities.len());\n\n let mut start = 0;\n\n for probability in probabilities {\n\n let next = start + probability;\n\n segments.push(Segment::new(start, next));\n\n start = next;\n\n }\n\n Self {\n\n segments: segments.into_boxed_slice(),\n\n width: start,\n\n }\n\n }\n\n\n\n /// Return the total frequency of symbols in this distribution.\n\n pub fn width(&self) -> u32 {\n\n self.width\n", "file_path": "src/lib.rs", "rank": 42, "score": 5.187716276140248 }, { "content": " }\n\n\n\n /// Iterate through the widths of the symbols.\n\n pub fn widths<'a>(&'a self) -> impl Iterator<Item = u32> + 'a {\n\n self.segments.iter().map(Segment::width)\n\n }\n\n\n\n /// Find a value from its frequency.\n\n pub fn find(&self, probability: u32) -> Option<IndexedSegment> {\n\n if probability >= self.width {\n\n return None;\n\n }\n\n let index = self\n\n .segments\n\n .binary_search_by(|segment| {\n\n use std::cmp::Ordering;\n\n if segment.low > probability {\n\n return Ordering::Greater;\n\n }\n\n if segment.next <= probability {\n", "file_path": "src/lib.rs", "rank": 43, "score": 4.710350881242873 }, { "content": "extern crate rand;\n\nextern crate range_encoding;\n\n\n\nuse rand::*;\n\nuse range_encoding::*;\n\nuse std::io::Cursor;\n\n\n\n#[test]\n", "file_path": "tests/simple.rs", "rank": 44, "score": 4.583741835175204 }, { "content": " for reference in sample.chars() {\n\n let index = reader\n\n .symbol(&mut probabilities)\n\n .expect(\"Could not find symbol\");\n\n assert_eq!(symbols[index as usize], reference);\n\n }\n\n\n\n eprintln!(\"Reading succeeded\");\n\n };\n\n\n\n test_with_sample(\"gattaca\");\n\n\n\n let mut small_rng = rand::rngs::SmallRng::from_entropy();\n\n let larger_sample: String = (0..32)\n\n .map(|_| {\n\n let index = small_rng.gen_range(0, symbols.len());\n\n symbols[index]\n\n })\n\n .collect();\n\n test_with_sample(&larger_sample);\n\n}\n\n\n\n/*\n\n#[test]\n\n// FIXME: I actually don't understand `bits()` well enough\n\n// to publish it.\n\n\n", "file_path": "tests/simple.rs", "rank": 45, "score": 3.9448163864054173 }, { "content": " eprintln!(\"Wrote {} bytes to {}.\", buf.len(), data.len());\n\n\n\n\n\n eprintln!(\"Reading...\");\n\n let mut reader = opus::Reader::from_boxed_slice(data.into_boxed_slice());\n\n for i in 0 .. 
buf.len() {\n\n let (expected, bits) = buf[i].clone();\n\n let extracted = reader.bits(bits)\n\n .expect(\"Could not read bits\");\n\n assert_eq!(expected, extracted);\n\n }\n\n eprintln!(\"Read complete\")\n\n}\n\n*/\n", "file_path": "tests/simple.rs", "rank": 46, "score": 3.1573350982043933 }, { "content": "}\n\nimpl Segment {\n\n pub fn new(low: u32, next: u32) -> Segment {\n\n Segment { low, next }\n\n }\n\n pub fn width(&self) -> u32 {\n\n self.next - self.low\n\n }\n\n}\n\n\n\npub struct IndexedSegment {\n\n pub segment: Segment,\n\n pub index: usize,\n\n}\n\n\n\npub struct CumulativeDistributionFrequency {\n\n /// Ordered, contiguous list of segments, starting at 0.\n\n segments: Box<[Segment]>,\n\n\n\n /// The width, which is exactly `segments.last.width`.\n", "file_path": "src/lib.rs", "rank": 47, "score": 2.5674550106834113 }, { "content": " (ec_read_byte_from_end(_this) as ec_window) << available;\n\n available += 8i32;\n\n if !(available <=\n\n ::std::mem::size_of::<ec_window>() as std::os::raw::c_ulong as\n\n std::os::raw::c_int * 8i32 - 8i32) {\n\n break ;\n\n }\n\n }\n\n }\n\n ret = window & ((1i32 as opus_uint32) << _bits).wrapping_sub(1u32);\n\n window >>= _bits;\n\n available =\n\n (available as std::os::raw::c_uint).wrapping_sub(_bits) as std::os::raw::c_int as\n\n std::os::raw::c_int;\n\n (*_this).end_window = window;\n\n (*_this).nend_bits = available;\n\n (*_this).nbits_total =\n\n ((*_this).nbits_total as std::os::raw::c_uint).wrapping_add(_bits) as\n\n std::os::raw::c_int as std::os::raw::c_int;\n\n return ret;\n", "file_path": "src/opus/imported_decode.rs", "rank": 48, "score": 2.159979401768314 }, { "content": "[![Build Status](https://travis-ci.org/binast/range-encoding-rs.svg?branch=master)](https://travis-ci.org/binast/range-encoding-rs)\n\n\n\nAn implementation of range encoding for Rust.\n\n\n\nThis implementation is extracted using c2rust from a private API of the [Opus codec](https://github.com/xiph/opus/blob/master/celt/), so it should be fairly robust.\n", "file_path": "README.md", "rank": 49, "score": 1.6467132827005284 } ]
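This row's context items include the crate's safe wrappers (opus::Writer, opus::Reader, CumulativeDistributionFrequency) and its tests. Based on those, an encode/decode roundtrip might look like the sketch below. Hedged: roundtrip is a hypothetical function name, the symbol indices and frequencies are arbitrary, and the crate name range_encoding is taken from tests/simple.rs.

use range_encoding::{opus, CumulativeDistributionFrequency};
use std::io::Cursor;

fn roundtrip() -> Result<(), std::io::Error> {
    // Four symbols with frequencies 1, 30, 5 and 20; symbol *indices* are what get encoded.
    let mut cdf = CumulativeDistributionFrequency::new(vec![1, 30, 5, 20]);
    let symbols: Vec<usize> = vec![1, 0, 3, 1, 2];

    // Encode into an in-memory buffer.
    let mut writer = opus::Writer::new(vec![]);
    for &index in &symbols {
        writer.symbol(index, &mut cdf)?;
    }
    let encoded = writer.done()?;

    // Decode and check that the same indices come back.
    let mut reader = opus::Reader::new(Cursor::new(encoded))?;
    for &expected in &symbols {
        assert_eq!(expected as u32, reader.symbol(&mut cdf)?);
    }
    Ok(())
}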
Rust
src/database/tcn_dao.rs
Co-Epi/app-backend-rust
b332afe7e92caf2703334c00dabf72fb4f279869
use crate::{ byte_vec_to_16_byte_array, errors::{ServicesError}, expect_log, reports_interval, tcn_recording::observed_tcn_processor::ObservedTcn, }; use log::*; use reports_interval::UnixTime; use rusqlite::{params, Row, NO_PARAMS, types::Value}; use std::{ sync::Arc, rc::Rc, }; use tcn::TemporaryContactNumber; use super::database::Database; pub trait TcnDao: Send + Sync { fn all(&self) -> Result<Vec<ObservedTcn>, ServicesError>; fn find_tcns( &self, with: Vec<TemporaryContactNumber>, ) -> Result<Vec<ObservedTcn>, ServicesError>; fn overwrite(&self, observed_tcns: Vec<ObservedTcn>) -> Result<(), ServicesError>; } pub struct TcnDaoImpl { db: Arc<Database>, } impl TcnDaoImpl { fn create_table_if_not_exists(db: &Arc<Database>) { let res = db.execute_sql( "create table if not exists tcn( tcn text not null, contact_start integer not null, contact_end integer not null, min_distance real not null, avg_distance real not null, total_count integer not null )", params![], ); expect_log!(res, "Couldn't create tcn table"); } fn to_tcn(row: &Row) -> ObservedTcn { let tcn: Result<String, _> = row.get(0); let tcn_value = expect_log!(tcn, "Invalid row: no TCN"); let tcn = Self::db_tcn_str_to_tcn(tcn_value); let contact_start_res = row.get(1); let contact_start: i64 = expect_log!(contact_start_res, "Invalid row: no contact start"); let contact_end_res = row.get(2); let contact_end: i64 = expect_log!(contact_end_res, "Invalid row: no contact end"); let min_distance_res = row.get(3); let min_distance: f64 = expect_log!(min_distance_res, "Invalid row: no min distance"); let avg_distance_res = row.get(4); let avg_distance: f64 = expect_log!(avg_distance_res, "Invalid row: no avg distance"); let total_count_res = row.get(5); let total_count: i64 = expect_log!(total_count_res, "Invalid row: no total count"); ObservedTcn { tcn, contact_start: UnixTime { value: contact_start as u64, }, contact_end: UnixTime { value: contact_end as u64, }, min_distance: min_distance as f32, avg_distance: avg_distance as f32, total_count: total_count as usize, } } fn db_tcn_str_to_tcn(str: String) -> TemporaryContactNumber { let tcn_value_bytes_vec_res = hex::decode(str); let tcn_value_bytes_vec = expect_log!(tcn_value_bytes_vec_res, "Invalid stored TCN format"); let tcn_value_bytes = byte_vec_to_16_byte_array(tcn_value_bytes_vec); TemporaryContactNumber(tcn_value_bytes) } pub fn new(db: Arc<Database>) -> TcnDaoImpl { Self::create_table_if_not_exists(&db); TcnDaoImpl { db } } } impl TcnDao for TcnDaoImpl { fn all(&self) -> Result<Vec<ObservedTcn>, ServicesError> { self.db .query( "select tcn, contact_start, contact_end, min_distance, avg_distance, total_count from tcn", NO_PARAMS, |row| Self::to_tcn(row), ) .map_err(ServicesError::from) } fn find_tcns( &self, with: Vec<TemporaryContactNumber>, ) -> Result<Vec<ObservedTcn>, ServicesError> { let tcn_strs: Vec<Value> = with.into_iter().map(|tcn| Value::Text(hex::encode(tcn.0)) ) .collect(); self.db .query( "select tcn, contact_start, contact_end, min_distance, avg_distance, total_count from tcn where tcn in rarray(?);", params![Rc::new(tcn_strs)], |row| Self::to_tcn(row), ) .map_err(ServicesError::from) } fn overwrite(&self, observed_tcns: Vec<ObservedTcn>) -> Result<(), ServicesError> { debug!("Overwriting db exposures with same TCNs, with: {:?}", observed_tcns); let tcn_strs: Vec<Value> = observed_tcns.clone().into_iter().map(|tcn| Value::Text(hex::encode(tcn.tcn.0)) ) .collect(); self.db.transaction(|t| { let delete_res = t.execute("delete from tcn where tcn in rarray(?);", 
params![Rc::new(tcn_strs)]); if delete_res.is_err() { return Err(ServicesError::General("Delete TCNs failed".to_owned())) } for tcn in observed_tcns { let tcn_str = hex::encode(tcn.tcn.0); let insert_res = t.execute("insert into tcn(tcn, contact_start, contact_end, min_distance, avg_distance, total_count) values(?1, ?2, ?3, ?4, ?5, ?6)", params![ tcn_str, tcn.contact_start.value as i64, tcn.contact_end.value as i64, tcn.min_distance as f64, tcn.avg_distance as f64, tcn.total_count as i64 ]); if insert_res.is_err() { return Err(ServicesError::General("Insert TCN failed".to_owned())) } } Ok(()) }) } } #[cfg(test)] mod tests { use super::*; use rusqlite::Connection; use crate::{tcn_recording::tcn_batches_manager::TcnBatchesManager, reports_update::exposure::ExposureGrouper}; #[test] fn saves_and_loads_observed_tcn() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = TcnDaoImpl::new(database); let observed_tcn = ObservedTcn { tcn: TemporaryContactNumber([ 24, 229, 125, 245, 98, 86, 219, 221, 172, 25, 232, 150, 206, 66, 164, 173, ]), contact_start: UnixTime { value: 1590528300 }, contact_end: UnixTime { value: 1590528301 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![observed_tcn.clone()]); assert!(save_res.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(loaded_tcns.len(), 1); assert_eq!(loaded_tcns[0], observed_tcn); } #[test] fn saves_and_loads_multiple_tcns() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = TcnDaoImpl::new(database); let observed_tcn_1 = ObservedTcn { tcn: TemporaryContactNumber([ 24, 229, 125, 245, 98, 86, 219, 221, 172, 25, 232, 150, 206, 66, 164, 173, ]), contact_start: UnixTime { value: 1590528300 }, contact_end: UnixTime { value: 1590528301 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let observed_tcn_2 = ObservedTcn { tcn: TemporaryContactNumber([ 43, 229, 125, 245, 98, 86, 100, 1, 172, 25, 0, 150, 123, 66, 34, 12, ]), contact_start: UnixTime { value: 1590518190 }, contact_end: UnixTime { value: 1590518191 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let observed_tcn_3 = ObservedTcn { tcn: TemporaryContactNumber([ 11, 246, 125, 123, 102, 86, 100, 1, 34, 25, 21, 150, 99, 66, 34, 0, ]), contact_start: UnixTime { value: 2230522104 }, contact_end: UnixTime { value: 2230522105 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let save_res_1 = tcn_dao.overwrite(vec![observed_tcn_1.clone()]); let save_res_2 = tcn_dao.overwrite(vec![observed_tcn_2.clone()]); let save_res_3 = tcn_dao.overwrite(vec![observed_tcn_3.clone()]); assert!(save_res_1.is_ok()); assert!(save_res_2.is_ok()); assert!(save_res_3.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(loaded_tcns.len(), 3); assert_eq!(loaded_tcns[0], observed_tcn_1); assert_eq!(loaded_tcns[1], observed_tcn_2); assert_eq!(loaded_tcns[2], observed_tcn_3); } #[test] fn test_finds_tcn() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = Arc::new(TcnDaoImpl::new(database)); let stored_tcn1 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 1000 }, contact_end: UnixTime { value: 6000 }, min_distance: 0.4, avg_distance: 
0.4, total_count: 1, }; let stored_tcn2 = ObservedTcn { tcn: TemporaryContactNumber([1; 16]), contact_start: UnixTime { value: 2000 }, contact_end: UnixTime { value: 3000 }, min_distance: 1.8, avg_distance: 1.8, total_count: 1, }; let stored_tcn3 = ObservedTcn { tcn: TemporaryContactNumber([2; 16]), contact_start: UnixTime { value: 1600 }, contact_end: UnixTime { value: 2600 }, min_distance: 2.3, avg_distance: 2.3, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![ stored_tcn1.clone(), stored_tcn2.clone(), stored_tcn3.clone(), ]); assert!(save_res.is_ok()); let res = tcn_dao.find_tcns(vec![ TemporaryContactNumber([0; 16]), TemporaryContactNumber([2; 16]), ]); assert!(res.is_ok()); let mut tcns = res.unwrap(); tcns.sort_by_key(|tcn| tcn.contact_start.value); assert_eq!(2, tcns.len()); assert_eq!(stored_tcn1, tcns[0]); assert_eq!(stored_tcn3, tcns[1]); } #[test] fn test_multiple_exposures_updated_correctly() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = Arc::new(TcnDaoImpl::new(database)); let batches_manager = TcnBatchesManager::new(tcn_dao.clone(), ExposureGrouper { threshold: 1000 }); let stored_tcn1 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 1000 }, contact_end: UnixTime { value: 3000 }, min_distance: 0.4, avg_distance: 0.4, total_count: 1, }; let stored_tcn2 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 5000 }, contact_end: UnixTime { value: 7000 }, min_distance: 2.0, avg_distance: 2.0, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![stored_tcn1.clone(), stored_tcn2.clone()]); assert!(save_res.is_ok()); let tcn = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 7500 }, contact_end: UnixTime { value: 9000 }, min_distance: 1.0, avg_distance: 1.0, total_count: 1, }; batches_manager.push(tcn.clone()); let flush_res = batches_manager.flush(); assert!(flush_res.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let mut loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(2, loaded_tcns.len()); loaded_tcns.sort_by_key(|tcn| tcn.contact_start.value); assert_eq!( loaded_tcns[0], ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 1000 }, contact_end: UnixTime { value: 3000 }, min_distance: 0.4, avg_distance: 0.4, total_count: 1 } ); assert_eq!( loaded_tcns[1], ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 5000 }, contact_end: UnixTime { value: 9000 }, min_distance: 1.0, avg_distance: 1.5, total_count: 2 } ); } }
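The file's own test module doubles as usage documentation. Condensed, and written as if it sat inside that #[cfg(test)] module (tcn_dao_example is a hypothetical name), typical use might look like this hedged sketch:

use super::*;
use rusqlite::Connection;

// Open an in-memory database, store one observed TCN, then load everything back,
// mirroring the saves_and_loads_observed_tcn test above.
fn tcn_dao_example() {
    let database = Arc::new(Database::new(
        Connection::open_in_memory().expect("Couldn't create database!"),
    ));
    let tcn_dao = TcnDaoImpl::new(database);

    let observed = ObservedTcn {
        tcn: TemporaryContactNumber([0; 16]),
        contact_start: UnixTime { value: 1590528300 },
        contact_end: UnixTime { value: 1590528301 },
        min_distance: 0.0,
        avg_distance: 0.0,
        total_count: 1,
    };

    // `overwrite` deletes any stored rows with the same TCN before inserting.
    tcn_dao.overwrite(vec![observed.clone()]).expect("overwrite failed");

    // `all` returns every stored exposure.
    let stored = tcn_dao.all().expect("load failed");
    assert_eq!(vec![observed], stored);
}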
use crate::{ byte_vec_to_16_byte_array, errors::{ServicesError}, expect_log, reports_interval, tcn_recording::observed_tcn_processor::ObservedTcn, }; use log::*; use reports_interval::UnixTime; use rusqlite::{params, Row, NO_PARAMS, types::Value}; use std::{ sync::Arc, rc::Rc, }; use tcn::TemporaryContactNumber; use super::database::Database; pub trait TcnDao: Send + Sync { fn all(&self) -> Result<Vec<ObservedTcn>, ServicesError>; fn find_tcns( &self, with: Vec<TemporaryContactNumber>, ) -> Result<Vec<ObservedTcn>, ServicesError>; fn overwrite(&self, observed_tcns: Vec<ObservedTcn>) -> Result<(), ServicesError>; } pub struct TcnDaoImpl { db: Arc<Database>, } impl TcnDaoImpl { fn create_table_if_not_exists(db: &Arc<Database>) { let res = db.execute_sql( "create table if not exists tcn( tcn text not null, contact_start integer not null, contact_end integer not null, min_distance real not null, avg_distance real not null, total_count integer not null )", params![], ); expect_log!(res, "Couldn't create tcn table"); } fn to_tcn(row: &Row) -> ObservedTcn { let tcn: Result<String, _> = row.get(0); let tcn_value = expect_log!(tcn, "Invalid row: no TCN"); let tcn = Self::db_tcn_str_to_tcn(tcn_value); let contact_start_res = row.get(1); let contact_start: i64 = expect_log!(contact_start_res, "Invalid row: no contact start"); let contact_end_res = row.get(2); let contact_end: i64 = expect_log!(contact_end_res, "Invalid row: no contact end"); let min_distance_res = row.get(3); let min_distance: f64 = expect_log!(min_distance_res, "Invalid row: no min distance"); let avg_distance_res = row.get(4); let avg_distance: f64 = expect_log!(avg_distance_res, "Invalid row: no avg distance"); let total_count_res = row.get(5); let total_count: i64 = expect_log!(total_count_res, "Invalid row: no total count"); ObservedTcn { tcn, contact_start: UnixTime { value: contact_start as u64, }, contact_end: UnixTime { value: contact_end as u64, }, min_distance: min_distance as f32, avg_distance: avg_distance as f32, total_count: total_count as usize, } } fn db_tcn_str_to_tcn(str: String) -> TemporaryContactNumber { let tcn_value_bytes_vec_res = hex::decode(str); let tcn_value_bytes_vec = expect_log!(tcn_value_bytes_vec_res, "Invalid stored TCN format"); let tcn_value_bytes = byte_vec_to_16_byte_array(tcn_value_bytes_vec); TemporaryContactNumber(tcn_value_bytes) } pub fn new(db: Arc<Database>) -> TcnDaoImpl { Self::create_table_if_not_exists(&db); TcnDaoImpl { db } } } impl TcnDao for TcnDaoImpl { fn all(&self) -> Result<Vec<ObservedTcn>, ServicesError> { self.db .query( "select tcn, contact_start, contact_end, min_distance, avg_distance, total_count from tcn", NO_PARAMS, |row| Self::to_tcn(row), ) .map_err(ServicesError::from) } fn find_tcns( &self, with: Vec<TemporaryContactNumber>, ) -> Result<Vec<ObservedTcn>, ServicesError> { let tcn_strs: Vec<Value> = with.into_iter().map(|tcn| Value::Text(hex::encode(tcn.0)) ) .collect(); self.db .query( "select tcn, contact_start, contact_end, min_distance, avg_distance, total_count from tcn where tcn in rarray(?);", params![Rc::new(tcn_strs)], |row| Self::to_tcn(row), ) .map_err(ServicesError::from) } fn overwrite(&self, observed_tcns: Vec<ObservedTcn>) -> Result<(), ServicesError> { debug!("Overwriting db exposures with same TCNs, with: {:?}", observed_tcns); let tcn_strs: Vec<Value> = observed_tcns.clone().into_iter().map(|tcn| Value::Text(hex::encode(tcn.tcn.0)) ) .collect(); self.db.transaction(|t| { let delete_res = t.execute("delete from tcn where tcn in rarray(?);", 
params![Rc::new(tcn_strs)]); if delete_res.is_err() { return Err(ServicesError::General("Delete TCNs failed".to_owned())) } for tcn in observed_tcns { let tcn_str = hex::encode(tcn.tcn.0); let insert_res = t.execute("insert into tcn(tcn, contact_start, contact_end, min_distance, avg_distance, total_count) values(?1, ?2, ?3, ?4, ?5, ?6)", params![ tcn_str, tcn.contact_start.value as i64, tcn.contact_end.value as i64, tcn.min_distance as f64, tcn.avg_distance as f64, tcn.total_count as i64 ]); if insert_res.is_err() { return Err(ServicesError::General("Insert TCN failed".to_owned())) } } Ok(()) }) } } #[cfg(test)] mod tests { use super::*; use rusqlite::Connection; use crate::{tcn_recording::tcn_batches_manager::TcnBatchesManager, reports_update::exposure::ExposureGrouper}; #[test] fn saves_and_loads_observed_tcn() { let database = Arc::new(Databas
stance: 2.0, avg_distance: 2.0, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![stored_tcn1.clone(), stored_tcn2.clone()]); assert!(save_res.is_ok()); let tcn = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 7500 }, contact_end: UnixTime { value: 9000 }, min_distance: 1.0, avg_distance: 1.0, total_count: 1, }; batches_manager.push(tcn.clone()); let flush_res = batches_manager.flush(); assert!(flush_res.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let mut loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(2, loaded_tcns.len()); loaded_tcns.sort_by_key(|tcn| tcn.contact_start.value); assert_eq!( loaded_tcns[0], ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 1000 }, contact_end: UnixTime { value: 3000 }, min_distance: 0.4, avg_distance: 0.4, total_count: 1 } ); assert_eq!( loaded_tcns[1], ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 5000 }, contact_end: UnixTime { value: 9000 }, min_distance: 1.0, avg_distance: 1.5, total_count: 2 } ); } }
e::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = TcnDaoImpl::new(database); let observed_tcn = ObservedTcn { tcn: TemporaryContactNumber([ 24, 229, 125, 245, 98, 86, 219, 221, 172, 25, 232, 150, 206, 66, 164, 173, ]), contact_start: UnixTime { value: 1590528300 }, contact_end: UnixTime { value: 1590528301 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![observed_tcn.clone()]); assert!(save_res.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(loaded_tcns.len(), 1); assert_eq!(loaded_tcns[0], observed_tcn); } #[test] fn saves_and_loads_multiple_tcns() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = TcnDaoImpl::new(database); let observed_tcn_1 = ObservedTcn { tcn: TemporaryContactNumber([ 24, 229, 125, 245, 98, 86, 219, 221, 172, 25, 232, 150, 206, 66, 164, 173, ]), contact_start: UnixTime { value: 1590528300 }, contact_end: UnixTime { value: 1590528301 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let observed_tcn_2 = ObservedTcn { tcn: TemporaryContactNumber([ 43, 229, 125, 245, 98, 86, 100, 1, 172, 25, 0, 150, 123, 66, 34, 12, ]), contact_start: UnixTime { value: 1590518190 }, contact_end: UnixTime { value: 1590518191 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let observed_tcn_3 = ObservedTcn { tcn: TemporaryContactNumber([ 11, 246, 125, 123, 102, 86, 100, 1, 34, 25, 21, 150, 99, 66, 34, 0, ]), contact_start: UnixTime { value: 2230522104 }, contact_end: UnixTime { value: 2230522105 }, min_distance: 0.0, avg_distance: 0.0, total_count: 1, }; let save_res_1 = tcn_dao.overwrite(vec![observed_tcn_1.clone()]); let save_res_2 = tcn_dao.overwrite(vec![observed_tcn_2.clone()]); let save_res_3 = tcn_dao.overwrite(vec![observed_tcn_3.clone()]); assert!(save_res_1.is_ok()); assert!(save_res_2.is_ok()); assert!(save_res_3.is_ok()); let loaded_tcns_res = tcn_dao.all(); assert!(loaded_tcns_res.is_ok()); let loaded_tcns = loaded_tcns_res.unwrap(); assert_eq!(loaded_tcns.len(), 3); assert_eq!(loaded_tcns[0], observed_tcn_1); assert_eq!(loaded_tcns[1], observed_tcn_2); assert_eq!(loaded_tcns[2], observed_tcn_3); } #[test] fn test_finds_tcn() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = Arc::new(TcnDaoImpl::new(database)); let stored_tcn1 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 1000 }, contact_end: UnixTime { value: 6000 }, min_distance: 0.4, avg_distance: 0.4, total_count: 1, }; let stored_tcn2 = ObservedTcn { tcn: TemporaryContactNumber([1; 16]), contact_start: UnixTime { value: 2000 }, contact_end: UnixTime { value: 3000 }, min_distance: 1.8, avg_distance: 1.8, total_count: 1, }; let stored_tcn3 = ObservedTcn { tcn: TemporaryContactNumber([2; 16]), contact_start: UnixTime { value: 1600 }, contact_end: UnixTime { value: 2600 }, min_distance: 2.3, avg_distance: 2.3, total_count: 1, }; let save_res = tcn_dao.overwrite(vec![ stored_tcn1.clone(), stored_tcn2.clone(), stored_tcn3.clone(), ]); assert!(save_res.is_ok()); let res = tcn_dao.find_tcns(vec![ TemporaryContactNumber([0; 16]), TemporaryContactNumber([2; 16]), ]); assert!(res.is_ok()); let mut tcns = res.unwrap(); tcns.sort_by_key(|tcn| tcn.contact_start.value); assert_eq!(2, tcns.len()); assert_eq!(stored_tcn1, tcns[0]); assert_eq!(stored_tcn3, 
tcns[1]); } #[test] fn test_multiple_exposures_updated_correctly() { let database = Arc::new(Database::new( Connection::open_in_memory().expect("Couldn't create database!"), )); let tcn_dao = Arc::new(TcnDaoImpl::new(database)); let batches_manager = TcnBatchesManager::new(tcn_dao.clone(), ExposureGrouper { threshold: 1000 }); let stored_tcn1 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 1000 }, contact_end: UnixTime { value: 3000 }, min_distance: 0.4, avg_distance: 0.4, total_count: 1, }; let stored_tcn2 = ObservedTcn { tcn: TemporaryContactNumber([0; 16]), contact_start: UnixTime { value: 5000 }, contact_end: UnixTime { value: 7000 }, min_di
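As a worked reading of the test_multiple_exposures_updated_correctly test shown in this row: two exposures for the same TCN are stored, covering [1000, 3000] at distance 0.4 and [5000, 7000] at distance 2.0, and a new observation covering [7500, 9000] at distance 1.0 is pushed through TcnBatchesManager with ExposureGrouper { threshold: 1000 }. The gap 7500 - 7000 = 500 is within the threshold, so the second record is extended to [5000, 9000] with min_distance 1.0, total_count 2 and avg_distance 1.5, which is consistent with a count-weighted average of 2.0 and 1.0. The first record stays untouched because its gap to the next exposure, 5000 - 3000 = 2000, exceeds the threshold.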
random
[ { "content": "fn record_tcn(env: &JNIEnv, tcn: JString, distance: jfloat) -> Result<(), ServicesError> {\n\n let tcn_java_str = env.get_string(tcn)?;\n\n let tcn_str = tcn_java_str.to_str()?;\n\n\n\n dependencies()\n\n .observed_tcn_processor\n\n .save(tcn_str, distance as f32)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 1, "score": 204511.88016271018 }, { "content": "pub fn bootstrap(db_path: &str) -> Result<(), ServicesError> {\n\n info!(\"Bootstrapping with db path: {:?}\", db_path);\n\n\n\n let sqlite_path = format!(\"{}/db.sqlite\", db_path);\n\n debug!(\"Sqlite path: {:?}\", sqlite_path);\n\n\n\n let connection_res = Connection::open(sqlite_path);\n\n let connection = expect_log!(connection_res, \"Couldn't create database!\");\n\n let database = Arc::new(Database::new(connection));\n\n\n\n let migration_handler = Migration::new(database.clone());\n\n migration_handler.run_db_migrations(1);\n\n\n\n if let Err(_) = DEPENDENCIES.set(create_dependencies(database)) {\n\n return Err(ServicesError::General(\n\n \"Couldn't initialize dependencies\".to_owned(),\n\n ));\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/dependencies.rs", "rank": 2, "score": 201393.068410941 }, { "content": "pub trait TcnKeys {\n\n fn create_report(&self, report: Vec<u8>) -> Result<SignedReport, Error>;\n\n fn generate_tcn(&self) -> TemporaryContactNumber;\n\n}\n\n\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 3, "score": 165458.9014356296 }, { "content": "fn create_test_alert(id: &str, report_time: u64) -> Alert {\n\n let symptoms = PublicSymptoms {\n\n report_time: UnixTime { value: report_time },\n\n earliest_symptom_time: UserInput::Some(UnixTime { value: 1590356601 }),\n\n fever_severity: FeverSeverity::Mild,\n\n cough_severity: CoughSeverity::Dry,\n\n breathlessness: true,\n\n muscle_aches: true,\n\n loss_smell_or_taste: false,\n\n diarrhea: false,\n\n runny_nose: true,\n\n other: false,\n\n no_symptoms: true,\n\n };\n\n\n\n Alert {\n\n id: id.to_owned(),\n\n report_id: \"224\".to_owned(),\n\n symptoms,\n\n contact_start: 1592567315,\n\n contact_end: 1592567335,\n\n min_distance: 1.2,\n\n avg_distance: 2.1,\n\n is_read: false,\n\n }\n\n}\n", "file_path": "src/android/jni_domain_tests.rs", "rank": 4, "score": 160951.34246790936 }, { "content": "pub trait TcnApi {\n\n fn get_reports(\n\n &self,\n\n interval_number: u64,\n\n interval_length: u64,\n\n ) -> Result<Vec<String>, NetworkingError>;\n\n fn post_report(&self, report: String) -> Result<(), NetworkingError>;\n\n}\n\n\n\npub struct TcnApiMock {}\n\n\n\nimpl TcnApi for TcnApiMock {\n\n fn get_reports(\n\n &self,\n\n _interval_number: u64,\n\n _interval_length: u64,\n\n ) -> Result<Vec<String>, NetworkingError> {\n\n Err(NetworkingError {\n\n http_status: 500,\n\n message: \"Not impl\".to_string(),\n", "file_path": "src/networking.rs", "rank": 5, "score": 160786.96221548846 }, { "content": "pub trait TcnMatcher {\n\n fn match_reports(\n\n &self,\n\n tcns: Vec<ObservedTcn>,\n\n reports: Vec<SignedReport>,\n\n ) -> Result<Vec<MatchedReport>, ServicesError>;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct MatchedReport {\n\n pub report: SignedReport,\n\n pub tcns: Vec<ObservedTcn>,\n\n}\n\n\n\npub struct TcnMatcherRayon {}\n\n\n\nimpl TcnMatcher for TcnMatcherRayon {\n\n fn match_reports(\n\n &self,\n\n tcns: Vec<ObservedTcn>,\n", "file_path": "src/reports_update/tcn_matcher.rs", "rank": 6, "score": 160680.23817195234 }, { "content": "pub trait ObservedTcnProcessor {\n\n fn save(&self, tcn_str: &str, distance: f32) -> 
Result<(), ServicesError>;\n\n}\n\n\n\npub struct ObservedTcnProcessorImpl<T>\n\nwhere\n\n T: 'static + TcnDao,\n\n{\n\n tcn_batches_manager: Arc<TcnBatchesManager<T>>,\n\n _timer_data: TimerData,\n\n}\n\n\n", "file_path": "src/tcn_recording/observed_tcn_processor.rs", "rank": 7, "score": 160280.09749657504 }, { "content": "pub trait TckBytesWrapperExt {\n\n fn with_bytes(bytes: Vec<u8>) -> TckBytesWrapper {\n\n let mut array = [0; TCK_SIZE_IN_BYTES];\n\n let bytes = &bytes[..array.len()]; // panics if not enough data\n\n array.copy_from_slice(bytes);\n\n TckBytesWrapper { tck_bytes: array }\n\n }\n\n}\n\n\n\nimpl TckBytesWrapperExt for TckBytesWrapper {}\n\n\n\npub struct TcnKeysImpl<T>\n\nwhere\n\n T: Preferences,\n\n{\n\n pub preferences: Arc<T>,\n\n}\n\n\n\nimpl<T> TcnKeys for TcnKeysImpl<T>\n\nwhere\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 8, "score": 155230.9723511789 }, { "content": "pub trait ReportAuthorizationKeyExt {\n\n fn with_bytes(bytes: [u8; 32]) -> ReportAuthorizationKey {\n\n let res = ReportAuthorizationKey::read(Cursor::new(&bytes));\n\n expect_log!(res, \"Couldn't read RAK bytes\")\n\n }\n\n}\n\n\n\nimpl ReportAuthorizationKeyExt for ReportAuthorizationKey {}\n\n\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 9, "score": 155230.9723511789 }, { "content": "fn to_result_str<T: Serialize>(result: Result<T, ServicesError>) -> CFStringRef {\n\n let lib_result = match result {\n\n Ok(success) => LibResult {\n\n status: 200,\n\n data: Some(success),\n\n error_message: None,\n\n },\n\n // TODO better error identification, using HTTP status for everything is weird.\n\n Err(e) => LibResult {\n\n status: 500,\n\n data: None,\n\n error_message: Some(e.to_string()),\n\n },\n\n };\n\n\n\n let lib_result_string =\n\n serde_json::to_string(&lib_result).unwrap_or_else(|_| fallback_error_result_str::<T>());\n\n\n\n let cf_string = CFString::new(&lib_result_string);\n\n let cf_string_ref = cf_string.as_concrete_TypeRef();\n\n\n\n ::std::mem::forget(cf_string);\n\n\n\n return cf_string_ref;\n\n}\n\n\n", "file_path": "src/ios/ios_interface.rs", "rank": 10, "score": 155036.4670644587 }, { "content": "pub trait LogCallback {\n\n fn call(&self, log_message: CoreLogMessage);\n\n}\n\n\n\nimpl LogCallback for unsafe extern \"C\" fn(CoreLogMessage) {\n\n fn call(&self, log_message: CoreLogMessage) {\n\n unsafe {\n\n self(log_message);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ios/ios_interface.rs", "rank": 11, "score": 154784.31811346937 }, { "content": "pub trait Callback {\n\n fn call(&self, my_int: i32, my_bool: bool, my_str: CFStringRef);\n\n}\n\n\n\nimpl Callback for unsafe extern \"C\" fn(i32, bool, CFStringRef) {\n\n fn call(&self, a_number: i32, a_boolean: bool, my_str: CFStringRef) {\n\n unsafe {\n\n self(a_number, a_boolean, my_str);\n\n }\n\n }\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn call_callback(callback: unsafe extern \"C\" fn(i32, bool, CFStringRef)) -> i32 {\n\n let cf_string = CFString::new(&\"hi!\".to_owned());\n\n let cf_string_ref = cf_string.as_concrete_TypeRef();\n\n\n\n callback.call(123, false, cf_string_ref);\n\n 1\n\n}\n", "file_path": "src/ios/ffi_for_sanity_tests.rs", "rank": 12, "score": 154269.67487107188 }, { "content": "//https://github.com/rust-lang/log/blob/efcc39c5217edae4f481b73357ca2f868bfe0a2c/test_max_level_features/main.rs#L10\n\nfn set_boxed_logger(logger: Box<dyn Log>) -> Result<(), log::SetLoggerError> {\n\n log::set_logger(Box::leak(logger))\n\n}\n\n\n\n//Convenience fn\n", "file_path": "src/simple_logger.rs", "rank": 13, 
"score": 151953.47342102084 }, { "content": "pub fn jni_obj_result(\n\n status: i32,\n\n message: Option<&str>,\n\n obj: JObject,\n\n outer_class: &str,\n\n inner_class: &str,\n\n env: &JNIEnv,\n\n) -> jobject {\n\n let cls_res = env.find_class(outer_class);\n\n\n\n let status_j_value = JValue::from(status);\n\n\n\n let msg = message.unwrap_or(\"\");\n\n\n\n let msg_j_string_res = env.new_string(msg);\n\n // If we can't create a result to send to JNI, we only can crash\n\n let msg_j_string = expect_log!(msg_j_string_res, \"Couldn't create JNI msg string\");\n\n let msg_j_value = JValue::from(msg_j_string);\n\n\n\n // If we can't create a result to send to JNI, we only can crash\n", "file_path": "src/android/android_interface.rs", "rank": 14, "score": 150318.21880057067 }, { "content": "pub fn alert_to_jobject(alert: Alert, env: &JNIEnv) -> Result<jobject, ServicesError> {\n\n let jni_public_symptoms_class = env.find_class(\"org/coepi/core/jni/JniPublicSymptoms\")?;\n\n\n\n let report_time_j_value = JValue::from(alert.symptoms.report_time.value as i64);\n\n\n\n let earliest_time = match &alert.symptoms.earliest_symptom_time {\n\n UserInput::Some(time) => time.value as i64,\n\n UserInput::None => -1,\n\n };\n\n let earliest_time_j_value = JValue::from(earliest_time);\n\n\n\n let fever_severity = match &alert.symptoms.fever_severity {\n\n FeverSeverity::None => 0,\n\n FeverSeverity::Mild => 1,\n\n FeverSeverity::Serious => 2,\n\n };\n\n let fever_severity_j_value = JValue::from(fever_severity);\n\n\n\n let cough_severity = match &alert.symptoms.cough_severity {\n\n CoughSeverity::None => 0,\n", "file_path": "src/android/android_interface.rs", "rank": 15, "score": 143301.12118317513 }, { "content": "fn delete_alert(env: &JNIEnv, id: JString) -> Result<(), ServicesError> {\n\n let id_java_str = env.get_string(id)?;\n\n let id_str = id_java_str.to_str()?;\n\n\n\n dependencies().alert_dao.delete(id_str.to_owned())\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 16, "score": 135751.30318487904 }, { "content": "fn fallback_error_result_str<T: Serialize>() -> String {\n\n serde_json::to_string(&LibResult::<T> {\n\n status: 500,\n\n data: None,\n\n error_message: Some(\"Couldn't serialize result\".to_owned()),\n\n })\n\n // unwrap: safe, since we are using a hardcoded value\n\n .unwrap()\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn set_symptom_ids(c_ids: *const c_char) -> CFStringRef {\n\n debug!(\"Setting symptom ids: {:?}\", c_ids);\n\n let ids_str = cstring_to_str(&c_ids);\n\n let result = ids_str.and_then(|ids_str| {\n\n dependencies()\n\n .symptom_inputs_processor\n\n .set_symptom_ids(ids_str)\n\n });\n\n return to_result_str(result);\n", "file_path": "src/ios/ios_interface.rs", "rank": 17, "score": 135312.52542924366 }, { "content": "fn set_breathlessness_cause(env: &JNIEnv, cause: JString) -> Result<(), ServicesError> {\n\n let java_str = env.get_string(cause)?;\n\n let str = java_str.to_str()?;\n\n\n\n dependencies()\n\n .symptom_inputs_processor\n\n .set_breathlessness_cause(str)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 18, "score": 133738.78698978655 }, { "content": "fn set_symptom_ids(env: &JNIEnv, ids: JString) -> Result<(), ServicesError> {\n\n let java_str = env.get_string(ids)?;\n\n let ids_str = java_str.to_str()?;\n\n\n\n debug!(\"Setting symptom ids: {:?}\", ids_str);\n\n\n\n dependencies()\n\n .symptom_inputs_processor\n\n .set_symptom_ids(ids_str)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 19, 
"score": 133738.78698978655 }, { "content": "struct MyCallbackImpl {\n\n // The callback passed from Android is a local reference: only valid during the method call.\n\n // To store it, we need to put it in a global reference.\n\n // See https://developer.android.com/training/articles/perf-jni#local-and-global-references\n\n callback: GlobalRef,\n\n\n\n // We need JNIEnv to call the callback.\n\n // JNIEnv is valid only in the same thread, so we have to store the vm instead, and use it to get\n\n // a JNIEnv for the current thread.\n\n // See https://developer.android.com/training/articles/perf-jni#javavm-and-jnienvb\n\n java_vm: JavaVM,\n\n}\n\n\n\nimpl MyCallback for MyCallbackImpl {\n\n fn call(&self, par: String) {\n\n let env = self.java_vm.attach_current_thread().unwrap();\n\n\n\n let str_res = env.new_string(par);\n\n let str = expect_log!(str_res, \"Couldn't create java string!\");\n\n let str_j_value = JValue::from(JObject::from(str));\n", "file_path": "src/android/ffi_for_sanity_tests.rs", "rank": 20, "score": 132267.19065309453 }, { "content": "fn set_cough_type(env: &JNIEnv, cough_type: JString) -> Result<(), ServicesError> {\n\n let java_str = env.get_string(cough_type)?;\n\n let cough_type_str = java_str.to_str()?;\n\n\n\n debug!(\"Setting cough type: {:?}\", cough_type_str);\n\n\n\n dependencies()\n\n .symptom_inputs_processor\n\n .set_cough_type(cough_type_str)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 21, "score": 131822.87206460797 }, { "content": "fn set_cough_status(env: &JNIEnv, cough_status: JString) -> Result<(), ServicesError> {\n\n let java_str = env.get_string(cough_status)?;\n\n let str = java_str.to_str()?;\n\n\n\n dependencies()\n\n .symptom_inputs_processor\n\n .set_cough_status(str)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 22, "score": 131822.87206460797 }, { "content": "fn set_fever_taken_temperature_spot(env: &JNIEnv, spot: JString) -> Result<(), ServicesError> {\n\n let java_str = env.get_string(spot)?;\n\n let str = java_str.to_str()?;\n\n\n\n debug!(\"Setting temperature spot cause: {:?}\", str);\n\n dependencies()\n\n .symptom_inputs_processor\n\n .set_fever_taken_temperature_spot(str)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 23, "score": 129996.76609873169 }, { "content": "struct LogCallbackWrapperImpl {\n\n // The callback passed from Android is a local reference: only valid during the method call.\n\n // To store it, we need to put it in a global reference.\n\n // See https://developer.android.com/training/articles/perf-jni#local-and-global-references\n\n callback: GlobalRef,\n\n\n\n // We need JNIEnv to call the callback.\n\n // JNIEnv is valid only in the same thread, so we have to store the vm instead, and use it to get\n\n // a JNIEnv for the current thread.\n\n // See https://developer.android.com/training/articles/perf-jni#javavm-and-jnienvb\n\n java_vm: JavaVM,\n\n}\n\n\n\nimpl LogCallbackWrapper for LogCallbackWrapperImpl {\n\n fn call(&self, level: CoreLogLevel, text: String) {\n\n match self.java_vm.attach_current_thread() {\n\n Ok(env) => self.call(level, text, &env),\n\n // The Android LogCat will not show this, but for consistency or testing with non-Android JNI.\n\n // Note that if we panic, LogCat will also not show a message, or location.\n\n // TODO consider writing to file. 
Otherwise it's impossible to notice this.\n", "file_path": "src/android/android_interface.rs", "rank": 24, "score": 129623.68863716183 }, { "content": "fn extract<T>(bits: &Vec<bool>, mapper: &dyn BitMapper<T>, start: usize) -> ExtractResult<T> {\n\n let end = mapper.bit_count() + start;\n\n let sub_bits_res = bits[start..end].try_into();\n\n let sub_bits: Vec<bool> = expect_log!(sub_bits_res, \"Couldn't convert bits into vector\");\n\n\n\n ExtractResult {\n\n value: mapper.from_bits(BitVector { bits: sub_bits }),\n\n count: mapper.bit_count(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::reporting::public_symptoms::{CoughSeverity, FeverSeverity};\n\n use crate::reporting::symptom_inputs::UserInput;\n\n use crate::reports_interval::UnixTime;\n\n\n\n #[test]\n\n fn maps_nothing_set() {\n", "file_path": "src/reporting/memo.rs", "rank": 25, "score": 127892.62562209 }, { "content": "pub trait Preferences {\n\n fn last_completed_reports_interval(&self) -> Option<ReportsInterval>;\n\n fn set_last_completed_reports_interval(&self, value: ReportsInterval);\n\n\n\n // TODO encrypted\n\n fn authorization_key(&self) -> Option<[u8; 32]>;\n\n fn set_autorization_key(&self, value: [u8; 32]);\n\n\n\n fn tck(&self) -> Option<TckBytesWrapper>;\n\n fn set_tck(&self, value: TckBytesWrapper);\n\n}\n\n\n\npub struct PreferencesImpl {\n\n pub dao: PreferencesDao,\n\n}\n\n\n\nimpl Preferences for PreferencesImpl {\n\n fn last_completed_reports_interval(&self) -> Option<ReportsInterval> {\n\n let str = self.dao.load(\"last_completed_reports_interval\");\n\n str.map(|str| {\n", "file_path": "src/database/preferences.rs", "rank": 26, "score": 125000.00809845488 }, { "content": "fn update_alert_is_read(env: &JNIEnv, id: JString, is_read: jint) -> Result<(), ServicesError> {\n\n let id_java_str = env.get_string(id)?;\n\n let id_str = id_java_str.to_str()?;\n\n\n\n dependencies()\n\n .alert_dao\n\n .update_is_read(id_str.to_owned(), is_read == 1)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 27, "score": 123962.91523989437 }, { "content": "#[cfg(test)]\n\npub fn setup() {\n\n setup_logger(LevelFilter::Trace, false);\n\n}\n\n\n\n//Logs everything\n\npub struct SimpleLogger {}\n\n//Logs CoEpi specific messages only\n\npub struct CoEpiLogger {}\n\n\n\n#[cfg(not(test))]\n\nmacro_rules! 
log_prod {\n\n ($sel: ident, $record: ident) => {{\n\n if $sel.enabled($record.metadata()) {\n\n let arg_string = format!(\"{}\", $record.args());\n\n let lvl = match $record.level() {\n\n Level::Debug => CoreLogLevel::Debug,\n\n Level::Error => CoreLogLevel::Error,\n\n Level::Info => CoreLogLevel::Info,\n\n Level::Warn => CoreLogLevel::Warn,\n\n Level::Trace => CoreLogLevel::Trace,\n", "file_path": "src/simple_logger.rs", "rank": 28, "score": 123865.2442812827 }, { "content": "pub trait ByteArrayMappable {\n\n fn as_bytes(&self) -> [u8; 8];\n\n}\n\n\n\nimpl ByteArrayMappable for u64 {\n\n // Returns u64 as little endian byte array\n\n fn as_bytes(&self) -> [u8; 8] {\n\n (0..8).fold([0; 8], |mut acc, index| {\n\n let value: u8 = ((self >> (index * 8)) & 0xFF) as u8;\n\n acc[index] = value;\n\n acc\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/extensions.rs", "rank": 29, "score": 122621.00877331523 }, { "content": "pub trait MemoMapper {\n\n fn to_memo(&self, report: PublicSymptoms) -> Memo;\n\n fn to_report(&self, memo: Memo) -> PublicSymptoms;\n\n}\n\n\n\npub struct MemoMapperImpl {}\n\n\n\nimpl MemoMapperImpl {\n\n const VERSION_MAPPER: VersionMapper = VersionMapper {};\n\n const TIME_MAPPER: TimeMapper = TimeMapper {};\n\n const TIME_USER_INPUT_MAPPER: TimeUserInputMapper = TimeUserInputMapper {};\n\n const COUGH_SEVERITY_MAPPER: CoughSeverityMapper = CoughSeverityMapper {};\n\n const FEVER_SEVERITY_MAPPER: FeverSeverityMapper = FeverSeverityMapper {};\n\n const BOOLEAN_MAPPER: BoolMapper = BoolMapper {};\n\n}\n\n\n\nimpl MemoMapper for MemoMapperImpl {\n\n fn to_memo(&self, report: PublicSymptoms) -> Memo {\n\n let memo_version: u16 = 1;\n\n\n", "file_path": "src/reporting/memo.rs", "rank": 30, "score": 122621.00877331523 }, { "content": "pub trait SymptomInputsSubmitter<T: MemoMapper, U: TcnKeys, V: TcnApi> {\n\n fn submit_inputs(&self, inputs: SymptomInputs) -> Result<(), ServicesError>;\n\n}\n\n\n\npub struct SymptomInputsSubmitterImpl<'a, T: MemoMapper, U: TcnKeys, V: TcnApi> {\n\n pub memo_mapper: &'a T,\n\n pub tcn_keys: Arc<U>,\n\n pub api: &'a V,\n\n}\n\n\n\nimpl<'a, T: MemoMapper, U: TcnKeys, V: TcnApi> SymptomInputsSubmitter<T, U, V>\n\n for SymptomInputsSubmitterImpl<'a, T, U, V>\n\n{\n\n fn submit_inputs(&self, inputs: SymptomInputs) -> Result<(), ServicesError> {\n\n if let Some(report) = PublicSymptoms::with_inputs(inputs, UnixTime::now()) {\n\n self.send_report(report)\n\n } else {\n\n debug!(\"Nothing to send.\");\n\n Ok(())\n\n }\n", "file_path": "src/reporting/symptom_inputs.rs", "rank": 31, "score": 121404.064136903 }, { "content": "// To insert easily side effects in flows anywhere (from Kotlin)\n\npub trait Also: Sized {\n\n fn also<T>(self, f: T) -> Self\n\n where\n\n T: FnOnce(&Self) -> (),\n\n {\n\n f(&self);\n\n self\n\n }\n\n}\n\n\n\nimpl<T> Also for T {}\n", "file_path": "src/extensions.rs", "rank": 32, "score": 120615.09065685504 }, { "content": "pub trait AlertDao {\n\n fn all(&self) -> Result<Vec<Alert>, ServicesError>;\n\n fn save(&self, alerts: Vec<Alert>) -> Result<(), ServicesError>;\n\n fn delete(&self, id: String) -> Result<(), ServicesError>;\n\n fn update_is_read(&self, id: String, is_read: bool) -> Result<(), ServicesError>;\n\n}\n\n\n\npub struct AlertDaoImpl {\n\n db: Arc<Database>,\n\n}\n\n\n\nimpl AlertDaoImpl {\n\n pub fn new(db: Arc<Database>) -> AlertDaoImpl {\n\n Self::create_table_if_not_exists(&db);\n\n AlertDaoImpl { db }\n\n }\n\n\n\n fn create_table_if_not_exists(db: &Arc<Database>) {\n\n // TODO use blob for tcn? 
https://docs.rs/rusqlite/0.23.1/rusqlite/blob/index.html\n\n // TODO ideally FFI should send byte arrays too\n", "file_path": "src/database/alert_dao.rs", "rank": 33, "score": 120392.26304517311 }, { "content": "pub trait BitVectorMappable {\n\n fn to_bits(&self) -> BitVector;\n\n}\n\n\n\nimpl BitVectorMappable for u64 {\n\n fn to_bits(&self) -> BitVector {\n\n let bits: Vec<bool> = (0..64)\n\n .map(|index| {\n\n let value: Self = (self >> index) & 0x01;\n\n value == 1\n\n })\n\n .collect();\n\n BitVector { bits }\n\n }\n\n}\n\n\n\nimpl BitVectorMappable for u16 {\n\n fn to_bits(&self) -> BitVector {\n\n let bits: Vec<bool> = (0..16)\n\n .map(|index| {\n", "file_path": "src/reporting/mappers.rs", "rank": 34, "score": 120392.26304517311 }, { "content": "pub trait SymptomInputsProcessor {\n\n fn set_symptom_ids(&self, ids: &str) -> Result<(), ServicesError>;\n\n fn set_cough_type(&self, cough_type: &str) -> Result<(), ServicesError>;\n\n fn set_cough_days(&self, is_set: bool, days: u32) -> Result<(), ServicesError>;\n\n fn set_cough_status(&self, status: &str) -> Result<(), ServicesError>;\n\n fn set_breathlessness_cause(&self, cause: &str) -> Result<(), ServicesError>;\n\n fn set_fever_days(&self, is_set: bool, days: u32) -> Result<(), ServicesError>;\n\n fn set_fever_taken_temperature_today(\n\n &self,\n\n is_set: bool,\n\n taken: bool,\n\n ) -> Result<(), ServicesError>;\n\n fn set_fever_taken_temperature_spot(&self, spot: &str) -> Result<(), ServicesError>;\n\n fn set_fever_highest_temperature_taken(\n\n &self,\n\n is_set: bool,\n\n temperature: f32,\n\n ) -> Result<(), ServicesError>;\n\n fn set_earliest_symptom_started_days_ago(\n\n &self,\n", "file_path": "src/reporting/symptom_inputs_manager.rs", "rank": 35, "score": 116331.97630437833 }, { "content": "pub trait SignedReportExt {\n\n fn with_str(str: &str) -> Option<SignedReport> {\n\n base64::decode(str)\n\n .also(|res| {\n\n if let Err(error) = res {\n\n error!(\"Error: {} decoding (base64) report: {:?}\", error, res)\n\n }\n\n })\n\n .map_err(Error::from)\n\n .and_then(|bytes| SignedReport::read(bytes.as_slice()).map_err(Error::from))\n\n .map_err(|err| {\n\n error!(\"Error decoding or generating report: {}\", err);\n\n err\n\n })\n\n .ok()\n\n }\n\n}\n\nimpl SignedReportExt for SignedReport {}\n\n\n\npub struct ReportsUpdater<\n", "file_path": "src/reports_update/reports_updater.rs", "rank": 36, "score": 116331.97630437833 }, { "content": "pub trait SymptomInputsManager {\n\n fn select_symptom_ids(&self, ids: HashSet<SymptomId>);\n\n fn set_cough_type(&self, input: UserInput<CoughType>);\n\n fn set_cough_days(&self, input: UserInput<Days>);\n\n fn set_cough_status(&self, status: UserInput<CoughStatus>);\n\n fn set_breathlessness_cause(&self, cause: UserInput<BreathlessnessCause>);\n\n fn set_fever_days(&self, days: UserInput<Days>);\n\n fn set_fever_taken_temperature_today(&self, taken: UserInput<bool>);\n\n fn set_fever_taken_temperature_spot(&self, spot: UserInput<TemperatureSpot>);\n\n fn set_fever_highest_temperature_taken(&self, temp: UserInput<FarenheitTemperature>);\n\n fn set_earliest_symptom_started_days_ago(&self, days: UserInput<Days>);\n\n\n\n fn submit(&self) -> Result<(), ServicesError>;\n\n fn clear(&self);\n\n}\n\n\n\npub struct SymptomInputsManagerImpl<T>\n\nwhere\n\n // TODO no concrete types here?\n\n T: SymptomInputsSubmitter<MemoMapperImpl, TcnKeysImpl<PreferencesImpl>, TcnApiImpl>,\n", "file_path": "src/reporting/symptom_inputs_manager.rs", "rank": 37, "score": 116331.97630437833 }, { "content": "pub trait 
BitMapper<T> {\n\n fn bit_count(&self) -> usize;\n\n\n\n fn to_bits(&self, value: T) -> BitVector {\n\n let bits = self.to_bits_unchecked(value);\n\n if bits.len() != self.bit_count() {\n\n panic!(\n\n \"Incorrect bit count: {}. Required: {}\",\n\n bits.len(),\n\n self.bit_count()\n\n )\n\n } else {\n\n bits\n\n }\n\n }\n\n\n\n fn from_bits(&self, bit_vector: BitVector) -> T {\n\n if bit_vector.len() != self.bit_count() {\n\n panic!(\n\n \"Incorrect bit count: {}. Required: {}\",\n", "file_path": "src/reporting/mappers.rs", "rank": 38, "score": 116007.34560357327 }, { "content": "// TODO move to utils file or similar. Consider returning Result instead of panicking.\n\npub fn byte_vec_to_16_byte_array(bytes: Vec<u8>) -> [u8; 16] {\n\n let mut array = [0; 16];\n\n let bytes = &bytes[..array.len()]; // panics if not enough data\n\n array.copy_from_slice(bytes);\n\n array\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 39, "score": 114218.42348669164 }, { "content": "pub fn dependencies() -> &'static Dependencies<\n\n 'static,\n\n PreferencesImpl,\n\n TcnDaoImpl,\n\n TcnMatcherRayon,\n\n TcnApiImpl,\n\n SymptomInputsProcessorImpl<\n\n SymptomInputsManagerImpl<\n\n SymptomInputsSubmitterImpl<\n\n 'static,\n\n MemoMapperImpl,\n\n TcnKeysImpl<PreferencesImpl>,\n\n TcnApiImpl,\n\n >,\n\n >,\n\n >,\n\n ObservedTcnProcessorImpl<TcnDaoImpl>,\n\n MemoMapperImpl,\n\n TcnKeysImpl<PreferencesImpl>,\n\n AlertDaoImpl,\n", "file_path": "src/dependencies.rs", "rank": 40, "score": 113545.5961062374 }, { "content": "trait ResultExt<T, ServicesError> {\n\n fn to_void_jni(self, env: &JNIEnv) -> jobject;\n\n}\n\nimpl<T> ResultExt<T, ServicesError> for Result<T, ServicesError> {\n\n fn to_void_jni(self, env: &JNIEnv) -> jobject {\n\n match self {\n\n Ok(_) => jni_void_result(1, None, &env),\n\n Err(error) => {\n\n let jni_error = error.to_jni_error();\n\n jni_void_result(jni_error.status, Some(jni_error.message.as_ref()), &env)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 41, "score": 112803.79983783627 }, { "content": "fn fetch_new_reports(env: &JNIEnv) -> Result<jobjectArray, ServicesError> {\n\n info!(\"Updating reports\");\n\n let result = dependencies().reports_updater.update_and_fetch_alerts()?;\n\n info!(\"New reports: {:?}\", result);\n\n\n\n alerts_to_jobject_array(result, &env)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 42, "score": 111981.38574991756 }, { "content": "#[derive(Debug)]\n\nstruct MyStruct {\n\n my_int: i32,\n\n my_str: String,\n\n my_u8: u8,\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn pass_struct(par: *const FFIParameterStruct) -> i32 {\n\n let my_str = cstring_to_str(&(*par).my_str).unwrap();\n\n\n\n let my_struct = MyStruct {\n\n my_int: (*par).my_int,\n\n my_str: my_str.to_owned(),\n\n my_u8: (*par).my_nested.my_u8,\n\n };\n\n\n\n info!(\"Received struct from iOS: {:?}\", my_struct);\n\n\n\n 1\n\n}\n", "file_path": "src/ios/ffi_for_sanity_tests.rs", "rank": 43, "score": 109851.93640472225 }, { "content": "pub fn jni_void_result(status: i32, message: Option<&str>, env: &JNIEnv) -> jobject {\n\n let cls_res = env.find_class(\"org/coepi/core/jni/JniVoidResult\");\n\n\n\n let status_j_value = JValue::from(status);\n\n\n\n let msg = message.unwrap_or(\"\");\n\n let msg_j_string_res = env.new_string(msg);\n\n\n\n // If we can't create a result to send to JNI, we only can crash\n\n let msg_j_string = expect_log!(msg_j_string_res, \"Couldn't create JNI msg string\");\n\n\n\n let msg_j_value = 
JValue::from(msg_j_string);\n\n\n\n // If we can't create a result to send to JNI, we only can crash\n\n let cls = expect_log!(cls_res, \"Couldn't create JNI result class\");\n\n\n\n let obj = env.new_object(\n\n cls,\n\n \"(ILjava/lang/String;)V\",\n\n &[status_j_value, msg_j_value],\n\n );\n\n\n\n let res = obj;\n\n // If we can't create a result to send to JNI, we only can crash\n\n expect_log!(res, \"Couldn't create JNI result object\").into_inner()\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 44, "score": 107219.86217458593 }, { "content": "// Convenience to map non-success HTTP status to errors\n\ntrait AsResult {\n\n fn as_result(self) -> Result<Response, NetworkingError>;\n\n}\n\n\n\nimpl AsResult for Response {\n\n fn as_result(self) -> Result<Response, NetworkingError> {\n\n let status = self.status();\n\n if status.is_success() {\n\n Ok(self)\n\n } else {\n\n Err(NetworkingError {\n\n http_status: status.as_u16(),\n\n message: format!(\"{:?}\", self.text()),\n\n })\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/networking.rs", "rank": 45, "score": 107111.48558802076 }, { "content": "struct TimerData {\n\n _timer: Arc<Mutex<Timer>>,\n\n _guard: Guard,\n\n}\n\n\n\nimpl<T> ObservedTcnProcessorImpl<T>\n\nwhere\n\n T: 'static + TcnDao,\n\n{\n\n pub fn new(tcn_batches_manager: TcnBatchesManager<T>) -> ObservedTcnProcessorImpl<T> {\n\n let tcn_batches_manager = Arc::new(tcn_batches_manager);\n\n let instance = ObservedTcnProcessorImpl {\n\n tcn_batches_manager: tcn_batches_manager.clone(),\n\n _timer_data: Self::schedule_process_batches(tcn_batches_manager),\n\n };\n\n instance\n\n }\n\n\n\n fn schedule_process_batches(tcn_batches_manager: Arc<TcnBatchesManager<T>>) -> TimerData {\n\n let timer = Arc::new(Mutex::new(Timer::new()));\n", "file_path": "src/tcn_recording/observed_tcn_processor.rs", "rank": 46, "score": 105315.08059061461 }, { "content": "fn create_dependencies(\n\n database: Arc<Database>,\n\n // required_db_version: i32,\n\n) -> Dependencies<\n\n 'static,\n\n PreferencesImpl,\n\n TcnDaoImpl,\n\n TcnMatcherRayon,\n\n TcnApiImpl,\n\n SymptomInputsProcessorImpl<\n\n SymptomInputsManagerImpl<\n\n SymptomInputsSubmitterImpl<\n\n 'static,\n\n MemoMapperImpl,\n\n TcnKeysImpl<PreferencesImpl>,\n\n TcnApiImpl,\n\n >,\n\n >,\n\n >,\n\n ObservedTcnProcessorImpl<TcnDaoImpl>,\n", "file_path": "src/dependencies.rs", "rank": 47, "score": 103440.44986907372 }, { "content": "fn init_log(env: &JNIEnv, level_j_string: JString, coepi_only: jboolean, callback: jobject) -> i32 {\n\n match (env.get_java_vm(), env.new_global_ref(callback)) {\n\n (Ok(java_vm), Ok(callback_global_ref)) => {\n\n let callback_wrapper = LogCallbackWrapperImpl {\n\n java_vm,\n\n callback: callback_global_ref,\n\n };\n\n register_callback_internal(Box::new(callback_wrapper));\n\n\n\n let level_java_str = env.get_string(level_j_string).unwrap();\n\n let level_str = level_java_str.to_str().unwrap();\n\n let filter_level_res = LevelFilter::from_str(&level_str);\n\n let filter_level = expect_log!(filter_level_res, \"Incorrect log level selected!\");\n\n let _ = simple_logger::setup_logger(filter_level, coepi_only != 0);\n\n log::max_level() as i32\n\n }\n\n\n\n // Note: These println will not show on Android, as LogCat doesn't show stdout / stderr.\n\n // panic will also not show anything useful, so there doesn't seem to be a point in crashing here.\n\n (Ok(_), Err(e)) => {\n", "file_path": "src/android/android_interface.rs", "rank": 48, "score": 101473.65675777507 }, { 
"content": "trait MyCallback {\n\n fn call(&self, par: String);\n\n}\n\n\n", "file_path": "src/android/ffi_for_sanity_tests.rs", "rank": 49, "score": 99135.89168117449 }, { "content": "#[test]\n\nfn verify_test_macros() {\n\n setup_logger(LevelFilter::Debug, false);\n\n println!(\"Resulting level : {}\", log::max_level());\n\n println!(\"STATIC_MAX_LEVEL : {}\", log::STATIC_MAX_LEVEL);\n\n trace!(\"trace\");\n\n debug!(\"debug\");\n\n info!(\"info\");\n\n warn!(\"warn\");\n\n error!(\"error\");\n\n}\n", "file_path": "src/simple_logger.rs", "rank": 50, "score": 97913.07022449179 }, { "content": "//Boxed logger setup\n\npub fn setup_logger(level: LevelFilter, coepi_only: bool) {\n\n INIT.call_once(|| {\n\n println!(\"RUST : Logger level : {}\", level);\n\n if coepi_only {\n\n println!(\"RUST : CoEpi logs only\",);\n\n set_boxed_logger(Box::new(CoEpiLogger {}))\n\n .map(|()| log::set_max_level(level))\n\n .expect(\"Logger initialization failed!\");\n\n } else {\n\n set_boxed_logger(Box::new(SimpleLogger {}))\n\n .map(|()| log::set_max_level(level))\n\n .expect(\"Logger initialization failed!\");\n\n }\n\n })\n\n}\n", "file_path": "src/simple_logger.rs", "rank": 51, "score": 97873.06195867808 }, { "content": "struct ExtractResult<T> {\n\n value: T,\n\n count: usize,\n\n}\n\n\n\nimpl<T> ExtractResult<T> {\n\n // Convenience to parse memo with less boilerplate\n\n fn value<F: FnOnce(usize) -> ()>(self, f: F) -> T {\n\n f(self.count);\n\n self.value\n\n }\n\n}\n\n\n", "file_path": "src/reporting/memo.rs", "rank": 52, "score": 97682.70943178362 }, { "content": "trait LogCallbackWrapper {\n\n fn call(&self, level: CoreLogLevel, text: String);\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 53, "score": 97430.36957686741 }, { "content": "fn to_alerts_result_jobject(\n\n status: i32,\n\n message: Option<&str>,\n\n alerts: jobjectArray,\n\n env: &JNIEnv,\n\n) -> jobject {\n\n jni_obj_result(\n\n status,\n\n message,\n\n JObject::from(alerts),\n\n \"org/coepi/core/jni/JniAlertsArrayResult\",\n\n \"[Lorg/coepi/core/jni/JniAlert;\",\n\n &env,\n\n )\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 54, "score": 96002.43939145081 }, { "content": "pub fn byte_vec_to_24_byte_array(bytes: Vec<u8>) -> [u8; 24] {\n\n let mut array = [0; 24];\n\n let bytes = &bytes[..array.len()]; // panics if not enough data\n\n array.copy_from_slice(bytes);\n\n array\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 55, "score": 95518.78055113074 }, { "content": "pub fn byte_vec_to_8_byte_array(bytes: Vec<u8>) -> [u8; 8] {\n\n let mut array = [0; 8];\n\n let bytes = &bytes[..array.len()]; // panics if not enough data\n\n array.copy_from_slice(bytes);\n\n array\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 56, "score": 95518.78055113074 }, { "content": "pub fn byte_vec_to_32_byte_array(bytes: Vec<u8>) -> [u8; 32] {\n\n let mut array = [0; 32];\n\n let bytes = &bytes[..array.len()]; // panics if not enough data\n\n array.copy_from_slice(bytes);\n\n array\n\n}\n\n\n\n// TODO (deleting of TCNs not critical for now)\n\n// pub fn delete_cens_between(start: i64, end: i64) -> Res<()> {\n\n// let db = DB.get().ok_or(DB_UNINIT)?;\n\n// let mut tx = db.begin()?;\n\n\n\n// let tsv = tx\n\n// .range::<i64, u128, _>(CENS_BY_TS, start..end)?\n\n// .map(|(ts, _)| ts)\n\n// .collect::<Vec<_>>();\n\n\n\n// for ts in tsv {\n\n// tx.remove::<i64, u128>(CENS_BY_TS, ts, None)?;\n\n// }\n", "file_path": "src/lib.rs", "rank": 57, "score": 95518.78055113074 }, { "content": "#[derive(Serialize)]\n\nstruct 
LibResult<T> {\n\n status: u16,\n\n data: Option<T>,\n\n error_message: Option<String>,\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn setup_logger(level: CoreLogLevel, coepi_only: bool) -> i32 {\n\n let level_string = level.to_string();\n\n let filter_level = LevelFilter::from_str(&level_string).expect(\"Incorrect log level selected!\");\n\n let _ = simple_logger::setup_logger(filter_level, coepi_only);\n\n log::max_level() as i32\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn bootstrap_core(\n\n db_path: *const c_char,\n\n level: CoreLogLevel,\n\n coepi_only: bool,\n\n) -> CFStringRef {\n", "file_path": "src/ios/ios_interface.rs", "rank": 58, "score": 95447.18478557907 }, { "content": "fn register_log_callback_internal(callback: Box<dyn LogCallback>) {\n\n // Make callback implement Send (marker for thread safe, basically) https://doc.rust-lang.org/std/marker/trait.Send.html\n\n let log_callback = unsafe {\n\n std::mem::transmute::<Box<dyn LogCallback>, Box<dyn LogCallback + Send>>(callback)\n\n };\n\n\n\n // Create channel\n\n let (tx, rx): (\n\n Sender<CoreLogMessageThreadSafe>,\n\n Receiver<CoreLogMessageThreadSafe>,\n\n ) = mpsc::channel();\n\n\n\n // Save the sender in a static variable, which will be used to push elements to the callback\n\n unsafe {\n\n SENDER = Some(tx);\n\n }\n\n\n\n // Thread waits for elements pushed to SENDER and calls the callback\n\n thread::spawn(move || {\n\n for log_entry in rx.iter() {\n", "file_path": "src/ios/ios_interface.rs", "rank": 59, "score": 87909.88070619132 }, { "content": "fn to_db_int(b: bool) -> i8 {\n\n if b {\n\n 1\n\n } else {\n\n 0\n\n }\n\n}\n\n\n\n// fn to_db_user_input(input: UserInput<T>) {\n\n\n\n// }\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use rusqlite::Connection;\n\n\n\n #[test]\n\n fn test_saves_and_loads_alert() {\n\n let database = Arc::new(Database::new(\n", "file_path": "src/database/alert_dao.rs", "rank": 60, "score": 87686.65707534141 }, { "content": "fn to_bool(db_int: i8) -> bool {\n\n if db_int == 1 {\n\n true\n\n } else if db_int == 0 {\n\n false\n\n } else {\n\n error!(\"Invalid db_int: {}\", db_int);\n\n panic!()\n\n }\n\n}\n\n\n", "file_path": "src/database/alert_dao.rs", "rank": 61, "score": 85715.36276231862 }, { "content": "fn register_callback_internal(callback: Box<dyn Callback>) {\n\n // Make callback implement Send (marker for thread safe, basically) https://doc.rust-lang.org/std/marker/trait.Send.html\n\n let my_callback =\n\n unsafe { std::mem::transmute::<Box<dyn Callback>, Box<dyn Callback + Send>>(callback) };\n\n\n\n // Create channel\n\n let (tx, rx): (Sender<String>, Receiver<String>) = mpsc::channel();\n\n\n\n // Save the sender in a static variable, which will be used to push elements to the callback\n\n unsafe {\n\n SENDER = Some(tx);\n\n }\n\n\n\n // Thread waits for elements pushed to SENDER and calls the callback\n\n thread::spawn(move || {\n\n for str in rx.iter() {\n\n let cf_string = CFString::new(&str.to_owned());\n\n let cf_string_ref = cf_string.as_concrete_TypeRef();\n\n // For convenience, pass around only the string and hardcode the other 2 parameters.\n\n my_callback.call(1, true, cf_string_ref)\n\n }\n\n });\n\n}\n", "file_path": "src/ios/ffi_for_sanity_tests.rs", "rank": 62, "score": 78418.29792457267 }, { "content": "fn register_callback_internal(callback: Box<dyn MyCallback>) {\n\n // Make callback implement Send (marker for thread safe, basically) https://doc.rust-lang.org/std/marker/trait.Send.html\n\n let my_callback =\n\n unsafe { 
std::mem::transmute::<Box<dyn MyCallback>, Box<dyn MyCallback + Send>>(callback) };\n\n\n\n // Create channel\n\n let (tx, rx): (Sender<String>, Receiver<String>) = mpsc::channel();\n\n\n\n // Save the sender in a static variable, which will be used to push elements to the callback\n\n unsafe {\n\n SENDER = Some(tx);\n\n }\n\n\n\n // Thread waits for elements pushed to SENDER and calls the callback\n\n thread::spawn(move || {\n\n for string in rx.iter() {\n\n my_callback.call(format!(\"{} world!\", string))\n\n }\n\n });\n\n}\n", "file_path": "src/android/ffi_for_sanity_tests.rs", "rank": 63, "score": 78418.29792457267 }, { "content": "fun JniVoidResult.statusDescription(): String =\n\n statusDescription(status, message)\n\n\n\nprivate fun statusDescription(status: Int, message: String): String =\n\n \"Status: $status Message: $message\"\n", "file_path": "android/core/core/src/main/java/org/coepi/core/jni/JniApi.kt", "rank": 64, "score": 78039.89003518168 }, { "content": "fn register_callback_internal(callback: Box<dyn LogCallbackWrapper>) {\n\n // Make callback implement Send (marker for thread safe, basically) https://doc.rust-lang.org/std/marker/trait.Send.html\n\n let log_callback = unsafe {\n\n std::mem::transmute::<Box<dyn LogCallbackWrapper>, Box<dyn LogCallbackWrapper + Send>>(\n\n callback,\n\n )\n\n };\n\n\n\n // Create channel\n\n let (tx, rx): (\n\n Sender<CoreLogMessageThreadSafe>,\n\n Receiver<CoreLogMessageThreadSafe>,\n\n ) = mpsc::channel();\n\n\n\n // Save the sender in a static variable, which will be used to push elements to the callback\n\n unsafe {\n\n SENDER = Some(tx);\n\n }\n\n\n\n // Thread waits for elements pushed to SENDER and calls the callback\n\n thread::spawn(move || {\n\n for log_entry in rx.iter() {\n\n log_callback.call(log_entry.level, log_entry.text.into());\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 65, "score": 77294.26171050865 }, { "content": "fn to_cough_severity(cough: &Cough, selected_has_cough: bool) -> CoughSeverity {\n\n match &cough.cough_type {\n\n UserInput::None => {\n\n if selected_has_cough {\n\n CoughSeverity::Existing\n\n } else {\n\n CoughSeverity::None\n\n }\n\n }\n\n UserInput::Some(cough_type) => match cough_type {\n\n CoughType::Wet => CoughSeverity::Wet,\n\n CoughType::Dry => CoughSeverity::Dry,\n\n },\n\n }\n\n}\n", "file_path": "src/reporting/public_symptoms.rs", "rank": 66, "score": 76028.57610315764 }, { "content": "pub mod observed_tcn_processor;\n\npub mod tcn_batches_manager;\n", "file_path": "src/tcn_recording/mod.rs", "rank": 67, "score": 75052.40407693187 }, { "content": "pub mod tcn_keys;\n", "file_path": "src/tcn_ext/mod.rs", "rank": 68, "score": 75051.07714736204 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Element {}\n\n\n\n#[derive(Debug, Serialize, PartialEq, Clone)]\n\npub struct Alert {\n\n pub id: String,\n\n pub report_id: String,\n\n\n\n pub symptoms: PublicSymptoms,\n\n\n\n // Note: for now these fields \"raw\", as this struct is used for FFI.\n\n // if it's needed to manipulate Alert in Rust, a separate type should be created.\n\n pub contact_start: u64,\n\n pub contact_end: u64,\n\n \n\n pub min_distance: f32, // Meters\n\n pub avg_distance: f32, // Meters\n\n\n\n pub is_read: bool,\n\n}\n\n\n", "file_path": "src/reports_update/reports_updater.rs", "rank": 69, "score": 65107.76784723415 }, { "content": "struct JniError {\n\n status: i32,\n\n message: String,\n\n}\n", "file_path": "src/android/android_interface.rs", "rank": 70, "score": 65107.76784723415 }, { 
"content": " CFStringRef text;\n", "file_path": "src/ios/c_headers/coepicore.h", "rank": 71, "score": 64930.82599724665 }, { "content": "trait JniErrorMappable {\n\n fn to_jni_error(&self) -> JniError;\n\n}\n\n\n\nimpl JniErrorMappable for ServicesError {\n\n fn to_jni_error(&self) -> JniError {\n\n match self {\n\n ServicesError::Networking(networking_error) => JniError {\n\n status: 2,\n\n message: format!(\"{:?}\", networking_error),\n\n },\n\n ServicesError::Error(error) => JniError {\n\n status: 3,\n\n message: format!(\"{:?}\", error),\n\n },\n\n ServicesError::FFIParameters(msg) => JniError {\n\n status: 4,\n\n message: msg.to_owned(),\n\n },\n\n ServicesError::General(msg) => JniError {\n", "file_path": "src/android/android_interface.rs", "rank": 72, "score": 63038.314508571166 }, { "content": "#[derive(Debug, Clone)]\n\nstruct SignedReportsChunk {\n\n reports: Vec<SignedReport>,\n\n interval: ReportsInterval,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // Utility to see quickly all TCNs (hex) for a report\n\n #[test]\n\n #[ignore]\n\n fn print_tcns_for_report() {\n\n let report_str = \"rOFMgzy3y36MJns34Xj7EZu5Dti9XMhYGRpa/DVznep6q4hMtMYm9sYMg9+sRSHAj0Ff2rHTPXskuzJH0+pZMQEAAgAAFAEAnazaXgAAAAD//////////wMAMFLrKLNOvwUJQSNta9rlzTyjFdpfq25Kv34c6y+ZOoSzRewzNAWsd56Yzm8LUw9cpHB8yyzDUMJ9YTKhD8dADA==\";\n\n let report = SignedReport::with_str(report_str).unwrap();\n\n info!(\"{:?}\", report);\n\n for tcn in report.verify().unwrap().temporary_contact_numbers() {\n\n info!(\"{}\", hex::encode(tcn.0));\n\n }\n\n }\n", "file_path": "src/reports_update/reports_updater.rs", "rank": 73, "score": 62934.814443224604 }, { "content": "#[derive(Debug, Clone)]\n\nstruct MatchedReportsChunk {\n\n reports: Vec<SignedReport>,\n\n matched: Vec<MatchedReport>,\n\n interval: ReportsInterval,\n\n}\n\n\n", "file_path": "src/reports_update/reports_updater.rs", "rank": 74, "score": 62934.814443224604 }, { "content": "fn bootstrap_core(\n\n env: &JNIEnv,\n\n db_path_j_string: JString,\n\n log_level_j_string: JString,\n\n log_coepi_only: jboolean,\n\n log_callback: jobject,\n\n) -> Result<(), ServicesError> {\n\n init_log(&env, log_level_j_string, log_coepi_only, log_callback);\n\n\n\n let db_path_java_str = env.get_string(db_path_j_string)?;\n\n let db_path_str = db_path_java_str.to_str()?;\n\n\n\n let db_result = bootstrap(db_path_str)?;\n\n info!(\"Bootstrapping result: {:?}\", db_result);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 75, "score": 62926.61326617775 }, { "content": "class TcnGeneratorImpl(private val api: JniApi) : TcnGenerator {\n\n override fun generateTcn(): Tcn =\n\n Tcn(api.generateTcn().hexToByteArray())\n\n}\n", "file_path": "android/core/core/src/main/java/org/coepi/core/services/TcnGenerator.kt", "rank": 76, "score": 62406.48189791377 }, { "content": "fn alerts_to_jobject_array(\n\n alerts: Vec<Alert>,\n\n env: &JNIEnv,\n\n) -> Result<jobjectArray, ServicesError> {\n\n let alerts_j_objects_res: Result<Vec<jobject>, ServicesError> = alerts\n\n .into_iter()\n\n .map(|alert| alert_to_jobject(alert, &env))\n\n .collect();\n\n\n\n let alerts_j_objects: Vec<jobject> = alerts_j_objects_res?;\n\n\n\n let placeholder_alert_j_object = alert_to_jobject(placeholder_alert(), &env)?;\n\n\n\n let alerts_array = env.new_object_array(\n\n alerts_j_objects.len() as i32,\n\n \"org/coepi/core/jni/JniAlert\",\n\n placeholder_alert_j_object,\n\n )?;\n\n\n\n for (index, alert_j_object) in alerts_j_objects.into_iter().enumerate() {\n\n 
env.set_object_array_element(alerts_array, index as i32, alert_j_object)?;\n\n }\n\n\n\n Ok(alerts_array)\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 77, "score": 61817.37989860006 }, { "content": "fun JniVoidResult.asResult(): Result<Unit, Throwable> = when (status) {\n\n 1 -> Success(Unit)\n\n else -> Failure(Throwable(statusDescription()))\n\n}\n\n\n", "file_path": "android/core/core/src/main/java/org/coepi/core/jni/JniApi.kt", "rank": 78, "score": 60354.83043038128 }, { "content": "// To prefill the JNI array (TODO can this be skipped?)\n\nfn placeholder_alert() -> Alert {\n\n let symptoms = PublicSymptoms {\n\n report_time: UnixTime { value: 0 },\n\n earliest_symptom_time: UserInput::Some(UnixTime { value: 0 }),\n\n fever_severity: FeverSeverity::None,\n\n cough_severity: CoughSeverity::None,\n\n breathlessness: false,\n\n muscle_aches: false,\n\n loss_smell_or_taste: false,\n\n diarrhea: false,\n\n runny_nose: false,\n\n other: false,\n\n no_symptoms: false,\n\n };\n\n\n\n Alert {\n\n id: \"0\".to_owned(),\n\n report_id: \"0\".to_owned(),\n\n symptoms,\n\n contact_start: 0,\n\n contact_end: 0,\n\n min_distance: 0.0,\n\n avg_distance: 0.0,\n\n is_read: false,\n\n }\n\n}\n\n\n", "file_path": "src/android/android_interface.rs", "rank": 79, "score": 59259.54060255935 }, { "content": "fun ByteArray.toHex(): String {\n\n val hexChars = CharArray(size * 2)\n\n for (j in indices) {\n\n val v: Int = this[j].toInt() and 0xFF\n\n hexChars[j * 2] = HEX_ARRAY[v ushr 4]\n\n hexChars[j * 2 + 1] = HEX_ARRAY[v and 0x0F]\n\n }\n\n return String(hexChars)\n\n}\n", "file_path": "android/core/core/src/main/java/org/coepi/core/extensions/ByteArrayExtensions.kt", "rank": 80, "score": 53279.476048450226 }, { "content": "fn to_fever_severity(fever: &Fever) -> FeverSeverity {\n\n match &fever.highest_temperature {\n\n UserInput::None => FeverSeverity::None,\n\n UserInput::Some(temp) => match temp.value {\n\n t if t > 100.6 => FeverSeverity::Serious,\n\n t if t > 98.6 => FeverSeverity::Mild,\n\n _ => FeverSeverity::None,\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/reporting/public_symptoms.rs", "rank": 81, "score": 52392.68926723601 }, { "content": "// For testing / debugging\n\nfn signed_report_to_bytes(signed_report: SignedReport) -> Vec<u8> {\n\n let mut buf = Vec::new();\n\n signed_report\n\n .write(Cursor::new(&mut buf))\n\n .expect(\"Couldn't write signed report bytes\");\n\n buf\n\n}\n", "file_path": "src/lib.rs", "rank": 82, "score": 49988.51476405587 }, { "content": "fn signed_report_to_bytes(signed_report: SignedReport) -> Vec<u8> {\n\n let mut buf = Vec::new();\n\n let res = signed_report.write(Cursor::new(&mut buf));\n\n expect_log!(res, \"Couldn't write signed report bytes\");\n\n buf\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::errors;\n\n use crate::errors::ServicesError;\n\n use crate::errors::ServicesError::Error;\n\n use crate::reporting::memo::MemoMapperImpl;\n\n use crate::simple_logger;\n\n use crate::{\n\n database::preferences::PreferencesTckMock,\n\n networking::TcnApiMock,\n\n tcn_ext::tcn_keys::{ReportAuthorizationKeyExt, TcnKeysImpl},\n\n };\n", "file_path": "src/reporting/symptom_inputs.rs", "rank": 83, "score": 48380.617995197375 }, { "content": " let res = TemporaryContactKey::read(Cursor::new(&tck));\n\n expect_log!(res, \"Couldn't read TCK bytes\")\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::database::preferences::PreferencesTckMock;\n\n\n\n #[test]\n\n fn test_rak() {\n\n let new_key = 
ReportAuthorizationKey::new(rand::thread_rng());\n\n let bytes = TcnKeysImpl::<PreferencesTckMock>::rak_to_bytes(new_key);\n\n debug!(\"{:?}\", bytes);\n\n }\n\n\n\n #[test]\n\n fn test_load_rak() {\n\n let bytes = [\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 84, "score": 45104.622475949116 }, { "content": "use crate::{\n\n database::preferences::{Preferences, TckBytesWrapper, TCK_SIZE_IN_BYTES},\n\n expect_log,\n\n};\n\nuse log::*;\n\nuse std::{io::Cursor, sync::Arc};\n\nuse tcn::{\n\n Error, MemoType, ReportAuthorizationKey, SignedReport, TemporaryContactKey,\n\n TemporaryContactNumber,\n\n};\n\n\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 85, "score": 45103.64317653671 }, { "content": " T: Preferences,\n\n{\n\n fn create_report(&self, report: Vec<u8>) -> Result<SignedReport, Error> {\n\n let end_index = self.tck().index();\n\n let periods = 14 * 24 * (60 / 15);\n\n let mut start_index = 1;\n\n if end_index > periods {\n\n start_index = (end_index - periods) as u16\n\n }\n\n debug!(\"start_index={}, end_index={}\", start_index, end_index);\n\n\n\n self.rak()\n\n .create_report(MemoType::CoEpiV1, report, start_index, end_index)\n\n }\n\n\n\n fn generate_tcn(&self) -> TemporaryContactNumber {\n\n let tck = self.tck();\n\n let tcn = tck.temporary_contact_number();\n\n let new_tck = tck.ratchet();\n\n\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 86, "score": 45096.362100112216 }, { "content": " 42, 118, 64, 131, 236, 36, 122, 23, 13, 108, 73, 171, 102, 145, 66, 91, 157, 105, 195,\n\n 126, 139, 162, 15, 31, 0, 22, 31, 230, 242, 241, 225, 85,\n\n ];\n\n let key = ReportAuthorizationKey::with_bytes(bytes);\n\n let tck = key.initial_temporary_contact_key();\n\n TcnKeysImpl::<PreferencesTckMock>::tck_to_bytes(tck);\n\n }\n\n\n\n #[test]\n\n fn test_load_tck() {\n\n let rak_bytes = [\n\n 42, 118, 64, 131, 236, 36, 122, 23, 13, 108, 73, 171, 102, 145, 66, 91, 157, 105, 195,\n\n 126, 139, 162, 15, 31, 0, 22, 31, 230, 242, 241, 225, 85,\n\n ];\n\n let rak = ReportAuthorizationKey::with_bytes(rak_bytes);\n\n let _tck_1 = rak.initial_temporary_contact_key();\n\n\n\n let tck_inner_bytes = [\n\n 34, 166, 47, 23, 224, 52, 240, 95, 140, 186, 95, 243, 26, 13, 174, 128, 224, 229, 158,\n\n 248, 117, 7, 118, 110, 108, 57, 67, 206, 129, 22, 84, 13,\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 87, "score": 45092.2909002084 }, { "content": " expect_log!(res, \"Couldn't write RAK bytes\");\n\n Self::byte_vec_to_32_byte_array(buf)\n\n }\n\n\n\n fn byte_vec_to_32_byte_array(bytes: Vec<u8>) -> [u8; 32] {\n\n let mut array = [0; 32];\n\n let bytes = &bytes[..array.len()]; // panics if not enough data\n\n array.copy_from_slice(bytes);\n\n array\n\n }\n\n\n\n pub fn tck_to_bytes(tck: TemporaryContactKey) -> TckBytesWrapper {\n\n let mut buf = Vec::new();\n\n let res = tck.write(Cursor::new(&mut buf));\n\n expect_log!(res, \"Couldn't write TCK bytes\");\n\n // Self::byte_vec_to_tck_byte_wrapper(buf)\n\n TckBytesWrapper::with_bytes(buf)\n\n }\n\n\n\n fn bytes_to_tck(tck: TckBytesWrapper) -> TemporaryContactKey {\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 88, "score": 45090.469316151575 }, { "content": " if let Some(new_tck) = new_tck {\n\n self.set_tck(new_tck);\n\n }\n\n\n\n debug!(\"Generated tcn: {:?}\", tcn);\n\n // TODO: if None, rotate RAK\n\n tcn\n\n }\n\n}\n\n\n\nimpl<T> TcnKeysImpl<T>\n\nwhere\n\n T: Preferences,\n\n{\n\n fn rak(&self) -> ReportAuthorizationKey {\n\n self.preferences\n\n .authorization_key()\n\n .map(|rak_bytes| 
ReportAuthorizationKey::with_bytes(rak_bytes)) //Self::bytes_to_rak(rak_bytes))\n\n .unwrap_or_else(|| {\n\n let new_key = ReportAuthorizationKey::new(rand::thread_rng());\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 89, "score": 45089.62813924826 }, { "content": " ];\n\n debug!(\"count = {}\", tck_inner_bytes.len());\n\n\n\n let version_bytes: [u8; 2] = [1, 0];\n\n\n\n let version_vec = version_bytes.to_vec();\n\n let rak_vec = rak_bytes.to_vec();\n\n let tck_inner_vec = tck_inner_bytes.to_vec();\n\n\n\n let complete_tck_vec = [&version_vec[..], &rak_vec[..], &tck_inner_vec[..]].concat();\n\n\n\n let tck_bytes_wrapped = TckBytesWrapper::with_bytes(complete_tck_vec);\n\n let tck = TcnKeysImpl::<PreferencesTckMock>::bytes_to_tck(tck_bytes_wrapped);\n\n\n\n debug!(\"{:?}\", tck);\n\n }\n\n\n\n #[test]\n\n fn test_generate_tcns() {\n\n let rak_bytes = [\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 90, "score": 45089.44258333608 }, { "content": " self.preferences\n\n .set_autorization_key(Self::rak_to_bytes(new_key));\n\n new_key\n\n })\n\n }\n\n\n\n fn tck(&self) -> TemporaryContactKey {\n\n self.preferences\n\n .tck()\n\n .map(|tck_bytes| Self::bytes_to_tck(tck_bytes))\n\n .unwrap_or_else(|| self.rak().initial_temporary_contact_key())\n\n }\n\n\n\n fn set_tck(&self, tck: TemporaryContactKey) {\n\n self.preferences.set_tck(Self::tck_to_bytes(tck));\n\n }\n\n\n\n fn rak_to_bytes(rak: ReportAuthorizationKey) -> [u8; 32] {\n\n let mut buf = Vec::new();\n\n let res = rak.write(Cursor::new(&mut buf));\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 91, "score": 45087.33281465889 }, { "content": " 42, 118, 64, 131, 236, 36, 122, 23, 13, 108, 73, 171, 102, 145, 66, 91, 157, 105, 195,\n\n 126, 139, 162, 15, 31, 0, 22, 31, 230, 242, 241, 225, 85,\n\n ];\n\n\n\n let rak = ReportAuthorizationKey::with_bytes(rak_bytes);\n\n let mut tck = rak.initial_temporary_contact_key(); // tck <- tck_1\n\n let mut tcns = Vec::new();\n\n\n\n for _ in 0..100 {\n\n tcns.push(tck.temporary_contact_number());\n\n tck = tck.ratchet().unwrap();\n\n }\n\n\n\n info!(\"Number of generated TCNS: {}\", tcns.len());\n\n assert_eq!(100, tcns.len(), \"Expected 100 TCNs\");\n\n }\n\n}\n", "file_path": "src/tcn_ext/tcn_keys.rs", "rank": 92, "score": 45083.22271523252 }, { "content": "use super::tcn_batches_manager::TcnBatchesManager;\n\nuse crate::{\n\n byte_vec_to_16_byte_array, database::tcn_dao::TcnDao, errors::ServicesError, expect_log,\n\n reports_interval,\n\n};\n\nuse log::*;\n\nuse reports_interval::UnixTime;\n\nuse std::sync::{Arc, Mutex};\n\nuse tcn::TemporaryContactNumber;\n\nuse timer::{Guard, Timer};\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct ObservedTcn {\n\n pub tcn: TemporaryContactNumber,\n\n pub contact_start: UnixTime,\n\n pub contact_end: UnixTime,\n\n pub min_distance: f32,\n\n pub avg_distance: f32,\n\n pub total_count: usize, // Needed to calculate correctly average of averages (= average of single values)\n\n}\n\n\n", "file_path": "src/tcn_recording/observed_tcn_processor.rs", "rank": 93, "score": 44191.74073068034 }, { "content": "\n\n // Returns a merged TCN, if the TCNs are contiguous, None otherwise.\n\n // Assumes: tcn contact_start after db_tcn contact_start\n\n fn merge_tcns(\n\n exposure_grouper: &ExposureGrouper,\n\n db_tcn: ObservedTcn,\n\n tcn: ObservedTcn,\n\n ) -> Option<ObservedTcn> {\n\n if exposure_grouper.is_contiguous(&db_tcn, &tcn) {\n\n // Put db TCN and new TCN in an exposure as convenience to re-calculate measurements.\n\n let mut exposure = 
Exposure::create(db_tcn);\n\n exposure.push(tcn.clone());\n\n let measurements = exposure.measurements();\n\n Some(ObservedTcn {\n\n tcn: tcn.tcn,\n\n contact_start: measurements.contact_start,\n\n contact_end: measurements.contact_end,\n\n min_distance: measurements.min_distance,\n\n avg_distance: measurements.avg_distance,\n\n total_count: measurements.total_count,\n", "file_path": "src/tcn_recording/tcn_batches_manager.rs", "rank": 94, "score": 44188.76013415257 }, { "content": " #[test]\n\n fn test_flush_updates_correctly_existing_entry() {\n\n let database = Arc::new(Database::new(\n\n Connection::open_in_memory().expect(\"Couldn't create database!\"),\n\n ));\n\n let tcn_dao = Arc::new(TcnDaoImpl::new(database));\n\n\n\n let batches_manager =\n\n TcnBatchesManager::new(tcn_dao.clone(), ExposureGrouper { threshold: 1000 });\n\n\n\n let stored_tcn = ObservedTcn {\n\n tcn: TemporaryContactNumber([0; 16]),\n\n contact_start: UnixTime { value: 1600 },\n\n contact_end: UnixTime { value: 2600 },\n\n min_distance: 2.3,\n\n avg_distance: 1.25, // (2.3 + 0.7 + 1 + 1) / 4\n\n total_count: 4,\n\n };\n\n let save_res = tcn_dao.overwrite(vec![stored_tcn]);\n\n assert!(save_res.is_ok());\n", "file_path": "src/tcn_recording/tcn_batches_manager.rs", "rank": 95, "score": 44186.522038352035 }, { "content": " tcn: TemporaryContactNumber([1; 16]),\n\n contact_start: UnixTime { value: 1600 },\n\n contact_end: UnixTime { value: 2600 },\n\n min_distance: 2.3,\n\n avg_distance: 2.3,\n\n total_count: 1,\n\n };\n\n let save_res = tcn_dao.overwrite(vec![stored_tcn1.clone(), stored_tcn2.clone()]);\n\n assert!(save_res.is_ok());\n\n\n\n let tcn = ObservedTcn {\n\n tcn: TemporaryContactNumber([0; 16]),\n\n contact_start: UnixTime { value: 3000 },\n\n contact_end: UnixTime { value: 7000 },\n\n min_distance: 1.12,\n\n avg_distance: 1.12,\n\n total_count: 1,\n\n };\n\n batches_manager.push(tcn.clone());\n\n\n", "file_path": "src/tcn_recording/tcn_batches_manager.rs", "rank": 96, "score": 44185.37255375053 }, { "content": " let batches_manager =\n\n TcnBatchesManager::new(tcn_dao.clone(), ExposureGrouper { threshold: 1000 });\n\n\n\n let stored_tcn = ObservedTcn {\n\n tcn: TemporaryContactNumber([1; 16]),\n\n contact_start: UnixTime { value: 1600 },\n\n contact_end: UnixTime { value: 2600 },\n\n min_distance: 2.3,\n\n avg_distance: 2.3,\n\n total_count: 1,\n\n };\n\n let save_res = tcn_dao.overwrite(vec![stored_tcn]);\n\n assert!(save_res.is_ok());\n\n\n\n let tcn = ObservedTcn {\n\n tcn: TemporaryContactNumber([0; 16]),\n\n contact_start: UnixTime { value: 3000 },\n\n contact_end: UnixTime { value: 5000 },\n\n min_distance: 1.12,\n\n avg_distance: 1.12,\n", "file_path": "src/tcn_recording/tcn_batches_manager.rs", "rank": 97, "score": 44185.08323888488 }, { "content": " #[test]\n\n fn test_flush_updates_correctly_2_stored_1_updated() {\n\n let database = Arc::new(Database::new(\n\n Connection::open_in_memory().expect(\"Couldn't create database!\"),\n\n ));\n\n let tcn_dao = Arc::new(TcnDaoImpl::new(database));\n\n\n\n let batches_manager =\n\n TcnBatchesManager::new(tcn_dao.clone(), ExposureGrouper { threshold: 1000 });\n\n\n\n let stored_tcn1 = ObservedTcn {\n\n tcn: TemporaryContactNumber([0; 16]),\n\n contact_start: UnixTime { value: 1000 },\n\n contact_end: UnixTime { value: 6000 },\n\n min_distance: 0.4,\n\n avg_distance: 0.4,\n\n total_count: 1,\n\n };\n\n\n\n let stored_tcn2 = ObservedTcn {\n", "file_path": "src/tcn_recording/tcn_batches_manager.rs", "rank": 98, "score": 44184.42815185723 }, { "content": " 
assert_eq!(\n\n loaded_tcns[0],\n\n ObservedTcn {\n\n tcn: TemporaryContactNumber([0; 16]),\n\n contact_start: UnixTime { value: 1600 },\n\n contact_end: UnixTime { value: 5000 },\n\n min_distance: 1.12,\n\n avg_distance: 1.14285714, // (2.3 + 0.7 + 1 + 1 + 1.12 + 0.88 + 1) / (4 + 3)\n\n total_count: 7,\n\n }\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_flush_does_not_affect_different_stored_tcn() {\n\n let database = Arc::new(Database::new(\n\n Connection::open_in_memory().expect(\"Couldn't create database!\"),\n\n ));\n\n let tcn_dao = Arc::new(TcnDaoImpl::new(database));\n\n\n", "file_path": "src/tcn_recording/tcn_batches_manager.rs", "rank": 99, "score": 44183.72872029282 } ]
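Among the retrieved snippets above, the src/extensions.rs entries define a Kotlin-style `Also` trait for slipping side effects into expression chains. A minimal, self-contained sketch of how it might be used (the trait body is copied, reflowed, from that snippet; the `main` usage below is an illustrative invention, not code from the repository):

// Copied (reflowed) from the src/extensions.rs snippet above: a Kotlin-style
// `also` that runs a side effect and returns the receiver unchanged.
pub trait Also: Sized {
    fn also<T>(self, f: T) -> Self
    where
        T: FnOnce(&Self) -> (),
    {
        f(&self);
        self
    }
}

impl<T> Also for T {}

fn main() {
    // Log an intermediate value without breaking the expression chain.
    let evens: Vec<u32> = (1..=10u32)
        .filter(|n| n % 2 == 0)
        .collect::<Vec<_>>()
        .also(|v| println!("kept {} items", v.len()));
    assert_eq!(evens, vec![2, 4, 6, 8, 10]);
}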
Rust
src/bits/mask.rs
feb29/cwt
5e757ca672887b584c75871bc54875295759f825
use std::{ borrow::Cow, cmp::Ordering::{self, Equal, Greater, Less}, iter::{empty, Peekable}, }; pub trait Mask<'a>: Sized { type Block: 'a + ?Sized + ToOwned; type Steps: Iterator<Item = (usize, Cow<'a, Self::Block>)>; fn into_steps(self) -> Self::Steps; fn and<Rhs: Mask<'a>>(self, that: Rhs) -> And<'a, Self, Rhs> { And::new(self, that) } fn or<Rhs: Mask<'a>>(self, that: Rhs) -> Or<'a, Self, Rhs> { Or::new(self, that) } fn and_not<Rhs: Mask<'a>>(self, that: Rhs) -> AndNot<'a, Self, Rhs> { AndNot::new(self, that) } fn xor<Rhs: Mask<'a>>(self, that: Rhs) -> Xor<'a, Self, Rhs> { Xor::new(self, that) } } impl<'a, I, T> Mask<'a> for I where T: 'a + ?Sized + ToOwned, I: IntoIterator<Item = (usize, Cow<'a, T>)>, { type Block = T; type Steps = I::IntoIter; fn into_steps(self) -> Self::Steps { self.into_iter() } } macro_rules! defops { ( $( $name:ident ),* ) => ($( #[must_use = "do nothing unless consumed"] pub struct $name<'a, L: Mask<'a>, R: Mask<'a>> { lhs: Peekable<L::Steps>, rhs: Peekable<R::Steps>, } impl<'a, L: Mask<'a>, R: Mask<'a>> $name<'a, L, R> { pub(crate) fn new(lhs: L, rhs: R) -> Self { $name { lhs: lhs.into_steps().peekable(), rhs: rhs.into_steps().peekable(), } } } )*); } defops!(And, AndNot, Or, Xor); impl<'a, L, R> Iterator for And<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: Intersection<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; loop { let compared = lhs .peek() .and_then(|(x, _)| rhs.peek().map(|(y, _)| x.cmp(y))); match compared { Some(Less) => { lhs.next(); } Some(Equal) => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().intersection(&rhs); break Some((i, lhs)); } Some(Greater) => { rhs.next(); } None => break None, } } } } impl<'a, L, R> Iterator for Or<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: Union<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; match cmp_index(lhs.peek(), rhs.peek(), Greater, Less) { Less => lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().union(rhs.as_ref()); Some((i, lhs)) } Greater => rhs.next(), } } } impl<'a, L, R> Iterator for AndNot<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: Difference<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; loop { match cmp_index(lhs.peek(), rhs.peek(), Less, Less) { Less => return lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().difference(rhs.as_ref()); return Some((i, lhs)); } Greater => { rhs.next(); } }; } } } impl<'a, L, R> Iterator for Xor<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: SymmetricDifference<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; match cmp_index(lhs.peek(), rhs.peek(), Greater, Less) { Less => lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = 
rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().symmetric_difference(rhs.as_ref()); Some((i, lhs)) } Greater => rhs.next(), } } } #[inline] pub fn and<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> And<'a, L, R> { And::new(lhs, rhs) } #[inline] pub fn or<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> Or<'a, L, R> { Or::new(lhs, rhs) } #[inline] pub fn and_not<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> AndNot<'a, L, R> { AndNot::new(lhs, rhs) } #[inline] pub fn xor<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> Xor<'a, L, R> { Xor::new(lhs, rhs) } pub trait Intersection<T: ?Sized> { fn intersection(&mut self, data: &T); } pub trait Union<T: ?Sized> { fn union(&mut self, data: &T); } pub trait Difference<T: ?Sized> { fn difference(&mut self, data: &T); } pub trait SymmetricDifference<T: ?Sized> { fn symmetric_difference(&mut self, data: &T); } fn cmp_index<T>( x: Option<&(usize, T)>, y: Option<&(usize, T)>, none_x: Ordering, none_y: Ordering, ) -> Ordering { match (x, y) { (None, _) => none_x, (_, None) => none_y, (Some((i, _)), Some((j, _))) => i.cmp(j), } } pub struct Fold<'a, T>(Box<dyn Iterator<Item = (usize, T)> + 'a>); impl<'a, T: ?Sized> Fold<'a, Cow<'a, T>> where T: 'a + ToOwned, { pub(crate) fn fold<A, B, F>(xs: impl IntoIterator<Item = A>, mut f: F) -> Fold<'a, Cow<'a, T>> where A: 'a + Mask<'a, Block = T>, B: 'a + Mask<'a, Block = T>, F: FnMut(Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A) -> B, { let mut xs = xs.into_iter(); if let Some(head) = xs.next() { let init = Box::new(head.into_steps()); Fold(xs.fold(init, |a, x| Box::new(f(a, x).into_steps()))) } else { Fold(Box::new(empty())) } } pub fn and<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, And<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, And::new) } pub fn or<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, Or<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, Or::new) } pub fn and_not<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, AndNot<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, AndNot::new) } pub fn xor<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, Xor<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, Xor::new) } } impl<'a, T: 'a + ?Sized + ToOwned> Iterator for Fold<'a, Cow<'a, T>> { type Item = (usize, Cow<'a, T>); #[inline] fn next(&mut self) -> Option<Self::Item> { self.0.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() } }
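The `And` iterator in the record above walks two sorted step streams, skipping whichever side has the smaller block index and intersecting the payloads when the indices coincide. Below is a minimal standalone sketch of that index-aligned merge, assuming plain `(usize, u64)` blocks sorted by index rather than the crate's `Cow<Block>` steps; `and_merge` and the sample values are invented for illustration and are not part of the original module.

```rust
use std::cmp::Ordering;

/// Intersects two block streams that are sorted by block index,
/// mirroring the skip / skip / intersect choices made in `And::next`.
fn and_merge(lhs: &[(usize, u64)], rhs: &[(usize, u64)]) -> Vec<(usize, u64)> {
    let mut a = lhs.iter().copied().peekable();
    let mut b = rhs.iter().copied().peekable();
    let mut out = Vec::new();
    loop {
        // Compare the next block indices first (like `compared` in `And::next`) ...
        let ord = match (a.peek(), b.peek()) {
            (Some(&(i, _)), Some(&(j, _))) => i.cmp(&j),
            _ => break, // either side exhausted: the intersection is finished
        };
        // ... then advance the lagging side, or intersect on a shared index.
        match ord {
            Ordering::Less => {
                a.next();
            }
            Ordering::Greater => {
                b.next();
            }
            Ordering::Equal => {
                let (i, x) = a.next().unwrap();
                let (_, y) = b.next().unwrap();
                out.push((i, x & y));
            }
        }
    }
    out
}

fn main() {
    let lhs = [(0usize, 0b1100u64), (3, 0b0011), (9, 0b1111)];
    let rhs = [(3usize, 0b0110u64), (9, 0b1000), (10, 0b0001)];
    assert_eq!(and_merge(&lhs, &rhs), vec![(3, 0b0010), (9, 0b1000)]);
    println!("{:?}", and_merge(&lhs, &rhs));
}
```

The `Or`, `AndNot`, and `Xor` iterators above differ only in what happens to a block present on a single side: `Or` and `Xor` keep it, while `AndNot` keeps it only when it comes from the left-hand stream.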
use std::{ borrow::Cow, cmp::Ordering::{self, Equal, Greater, Less}, iter::{empty, Peekable}, }; pub trait Mask<'a>: Sized { type Block: 'a + ?Sized + ToOwned; type Steps: Iterator<Item = (usize, Cow<'a, Self::Block>)>; fn into_steps(self) -> Self::Steps; fn and<Rhs: Mask<'a>>(self, that: Rhs) -> And<'a, Self, Rhs> { And::new(self, that) } fn or<Rhs: Mask<'a>>(self, that: Rhs) -> Or<'a, Self, Rhs> { Or::new(self, that) } fn and_not<Rhs: Mask<'a>>(self, that: Rhs) -> AndNot<'a, Self, Rhs> { AndNot::new(self, that) } fn xor<Rhs: Mask<'a>>(self, that: Rhs) -> Xor<'a, Self, Rhs> { Xor::new(self, that) } } impl<'a, I, T> Mask<'a> for I where T: 'a + ?Sized + ToOwned, I: IntoIterator<Item = (usize, Cow<'a, T>)>, { type Block = T; type Steps = I::IntoIter; fn into_steps(self) -> Self::Steps { self.into_iter() } } macro_rules! defops { ( $( $name:ident ),* ) => ($( #[must_use = "do nothing unless consumed"] pub struct $name<'a, L: Mask<'a>, R: Mask<'a>> { lhs: Peekable<L::Steps>, rhs: Peekable<R::Steps>, } impl<'a, L: Mask<'a>, R: Mask<'a>> $name<'a, L, R> { pub(crate) fn new(lhs: L, rhs: R) -> Self { $name { lhs: lhs.into_steps().peekable(), rhs: rhs.into_steps().peekable(), } } } )*); } defops!(And, AndNot, Or, Xor); impl<'a, L, R> Iterator for And<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: Intersection<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; loop { let compared = lhs .peek() .and_then(|(x, _)| rhs.peek().map(|(y, _)| x.cmp(y))); match compared { Some(Less) => { lhs.next(); } Some(Equal) => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().intersection(&rhs); break Some((i, lhs)); } Some(Greater) => { rhs.next(); } None => break None, } } } } impl<'a, L, R> Iterator for Or<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: Union<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; match cmp_index(lhs.peek(), rhs.peek(), Greater, Less) { Less => lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().union(rhs.as_ref()); Some((i, lhs)) } Greater => rhs.next(), } } } impl<'a, L, R> Iterator for AndNot<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: Difference<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; loop { match cmp_index(lhs.peek(), rhs.peek(), Less, Less) { Less => return lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().difference(rhs.as_ref()); return Some((i, lhs)); } Greater => { rhs.next(); } }; } } } impl<'a, L, R> Iterator for Xor<'a, L, R> where L: Mask<'a>, R: Mask<'a, Block = L::Block>, <L::Block as ToOwned>::Owned: SymmetricDifference<L::Block>, { type Item = (usize, Cow<'a, L::Block>); fn next(&mut self) -> Option<Self::Item> { let lhs = &mut self.lhs; let rhs = &mut self.rhs; match cmp_index(lhs.peek(), rhs.peek(), Greater, Less) { Less => lhs.next(), Equal => { let (i, mut lhs) = lhs.next().expect("unreachable"); let (j, rhs) = 
rhs.next().expect("unreachable"); debug_assert_eq!(i, j); lhs.to_mut().symmetric_difference(rhs.as_ref()); Some((i, lhs)) } Greater => rhs.next(), } } } #[inline] pub fn and<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> And<'a, L, R> { And::new(lhs, rhs) } #[inline] pub fn or<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> Or<'a, L, R> { Or::new(lhs, rhs) } #[inline] pub fn and_not<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> AndNot<'a, L, R> { AndNot::new(lhs, rhs) } #[inline] pub fn xor<'a, L: Mask<'a>, R: Mask<'a>>(lhs: L, rhs: R) -> Xor<'a, L, R> { Xor::new(lhs, rhs) } pub trait Intersection<T: ?Sized> { fn intersection(&mut self, data: &T); } pub trait Union<T: ?Sized> { fn union(&mut self, data: &T); } pub trait Difference<T: ?Sized> { fn difference(&mut self, data: &T); } pub trait SymmetricDifference<T: ?Sized> { fn symmetric_difference(&mut self, data: &T); } fn cmp_index<T>( x: Option<&(usize, T)>, y: Option<&(usize, T)>, none_x: Ordering, none_y: Ordering, ) -> Ordering { match (x, y) { (None, _) => none_x, (_, None) => none_y, (Some((i, _)), Some((j, _))) => i.cmp(j), } } pub struct Fold<'a, T>(Box<dyn Iterator<Item = (usize, T)> + 'a>); impl<'a, T: ?Sized> Fold<'a, Cow<'a, T>> where T: 'a + ToOwned, {
pub fn and<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, And<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, And::new) } pub fn or<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, Or<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, Or::new) } pub fn and_not<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, AndNot<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, AndNot::new) } pub fn xor<A>(xs: impl IntoIterator<Item = A>) -> Self where A: Mask<'a, Block = T>, Xor<'a, Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A>: 'a + Mask<'a, Block = T>, { Self::fold(xs, Xor::new) } } impl<'a, T: 'a + ?Sized + ToOwned> Iterator for Fold<'a, Cow<'a, T>> { type Item = (usize, Cow<'a, T>); #[inline] fn next(&mut self) -> Option<Self::Item> { self.0.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() } }
pub(crate) fn fold<A, B, F>(xs: impl IntoIterator<Item = A>, mut f: F) -> Fold<'a, Cow<'a, T>> where A: 'a + Mask<'a, Block = T>, B: 'a + Mask<'a, Block = T>, F: FnMut(Box<dyn Iterator<Item = (usize, Cow<'a, T>)> + 'a>, A) -> B, { let mut xs = xs.into_iter(); if let Some(head) = xs.next() { let init = Box::new(head.into_steps()); Fold(xs.fold(init, |a, x| Box::new(f(a, x).into_steps()))) } else { Fold(Box::new(empty())) } }
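`Fold::or` in the record above combines an arbitrary number of masks by repeatedly wrapping the accumulated step iterator in another pairwise `Or`. Below is a hedged sketch of the same folding idea, again over plain `(usize, u64)` blocks; `or_merge`, the sample streams, and the use of `Iterator::reduce` are illustrative choices, not the crate's API.

```rust
use std::cmp::Ordering;

/// Unions two block streams sorted by block index, keeping blocks that
/// appear on only one side, as `Or::next` does via `cmp_index`.
fn or_merge(lhs: Vec<(usize, u64)>, rhs: Vec<(usize, u64)>) -> Vec<(usize, u64)> {
    let mut a = lhs.into_iter().peekable();
    let mut b = rhs.into_iter().peekable();
    let mut out = Vec::new();
    loop {
        // Treat an exhausted side as "greater", mirroring
        // `cmp_index(lhs.peek(), rhs.peek(), Greater, Less)` in `Or::next`.
        let ord = match (a.peek(), b.peek()) {
            (None, None) => break,
            (None, Some(_)) => Ordering::Greater,
            (Some(_), None) => Ordering::Less,
            (Some(&(i, _)), Some(&(j, _))) => i.cmp(&j),
        };
        match ord {
            Ordering::Less => out.push(a.next().unwrap()),
            Ordering::Greater => out.push(b.next().unwrap()),
            Ordering::Equal => {
                let (i, x) = a.next().unwrap();
                let (_, y) = b.next().unwrap();
                out.push((i, x | y)); // shared index: union the payloads
            }
        }
    }
    out
}

fn main() {
    let streams = vec![
        vec![(0usize, 0b0011u64), (2, 0b0100)],
        vec![(0, 0b1000)],
        vec![(1, 0b0001), (2, 0b0010)],
    ];
    // Left-fold the streams pairwise, as `Fold::or` does with its boxed steps.
    let folded = streams.into_iter().reduce(or_merge).unwrap_or_default();
    assert_eq!(folded, vec![(0, 0b1011), (1, 0b0001), (2, 0b0110)]);
    println!("{:?}", folded);
}
```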
function_block-full_function
[ { "content": "#[inline]\n\npub fn blocks<T: FixedBits>(n: usize) -> usize {\n\n blocks_by(n, T::SIZE)\n\n}\n\n\n\n/// Computes the minimum length of the sequence to store `n` bits.\n\n#[inline]\n\npub const fn blocks_by(n: usize, block_size: usize) -> usize {\n\n // If we want 17 bits, dividing by 32 will produce 0. So we add 1 to make sure we reserve enough.\n\n // But if we want exactly a multiple of `block_size`, this will actually allocate one too many.\n\n n / block_size + (n % block_size > 0) as usize\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 4, "score": 155771.80696376736 }, { "content": "/// Allocates an empty bitvector with the specified bit size.\n\npub fn sized<T: FixedBits>(n: usize) -> Vec<T> {\n\n sized_with(n, T::none)\n\n}\n\n\n\n/// Returns an empty `Vec` with the at least specified capacity in bits.\n", "file_path": "src/bits.rs", "rank": 5, "score": 139328.68695308286 }, { "content": "/// The mutable bit sequence.\n\npub trait BitsMut: Bits {\n\n /// Manipulates bit at `i`.\n\n #[inline]\n\n fn put(&mut self, i: usize, bit: bool) {\n\n if bit {\n\n self.put1(i)\n\n } else {\n\n self.put0(i)\n\n }\n\n }\n\n\n\n /// Enables the bit at `i`.\n\n fn put1(&mut self, i: usize);\n\n\n\n /// Disables the bit at `i`.\n\n fn put0(&mut self, i: usize);\n\n\n\n /// Flips the bit at `i`.\n\n #[inline]\n\n fn flip(&mut self, i: usize) {\n", "file_path": "src/ops.rs", "rank": 6, "score": 137895.26722455927 }, { "content": "fn prev_power_of_two(mut n: usize) -> usize {\n\n // if n == 0 {\n\n // return n;\n\n // };\n\n if n > 0 {\n\n while n & (n - 1) > 0 {\n\n n = n & (n - 1);\n\n }\n\n }\n\n n\n\n}\n\n\n\n#[cfg(test)]\n\nmod fenwick {\n\n use super::*;\n\n use quickcheck::quickcheck;\n\n\n\n type Fenwick<T> = FenwickTree<T>;\n\n\n\n quickcheck! {\n", "file_path": "src/fenwick.rs", "rank": 7, "score": 135825.35164875595 }, { "content": "/// `Rank` generalizes `Text::count`.\n\npub trait Rank<Idx = usize>: Text {\n\n /// Returns the number of occurrence of `code`.\n\n fn rank(&self, code: &Self::Code, i: Idx) -> usize;\n\n}\n\n\n", "file_path": "src/ops.rs", "rank": 12, "score": 127796.94960786801 }, { "content": "struct Regions<I: Iterator<Item = Region>> {\n\n finished: bool,\n\n max_size: usize,\n\n last_val: Option<Region>,\n\n regions: Peekable<I>,\n\n}\n\n\n\nimpl<I: Iterator<Item = Region>> Regions<I> {\n\n fn into_and(self) -> impl Iterator<Item = Range<usize>> {\n\n self.filter_map(|member| match member {\n\n Region::And(range) => Some(range),\n\n _ => None,\n\n })\n\n }\n\n\n\n fn into_or(self) -> impl Iterator<Item = Range<usize>> {\n\n self.filter_map(|member| match member {\n\n Region::Lhs(range) => Some(range),\n\n Region::Rhs(range) => Some(range),\n\n Region::And(range) => Some(range),\n", "file_path": "src/bits/roaring/runs.rs", "rank": 14, "score": 125826.73084836965 }, { "content": "/// `FixedBits` is a fixed size, mutable `Bits`.\n\npub trait FixedBits: Clone + Bits + BitsMut {\n\n /// A constant size in bits. 
This value should be always equal to `size()`.\n\n const SIZE: usize;\n\n\n\n /// Returns an empty instance.\n\n fn none() -> Self;\n\n}\n\n\n", "file_path": "src/ops.rs", "rank": 15, "score": 121174.87729018583 }, { "content": "#[inline]\n\nfn sampling_blocks(bits: usize) -> (usize, usize, usize) {\n\n let upper_blocks = bits::blocks_by(bits, UPPER_BLOCK);\n\n let super_blocks = bits::blocks_by(bits, SUPER_BLOCK);\n\n let (lower_blocks, remain) = divrem!(super_blocks, SUPERS);\n\n assert_eq!(upper_blocks, lower_blocks + (remain > 0) as usize);\n\n (upper_blocks, lower_blocks, remain)\n\n}\n\n\n\nimpl<T: FixedBits> Pop<T> {\n\n /// Returns an empty `BitVec`.\n\n ///\n\n /// ```\n\n /// # use compacts::ops::Bits;\n\n /// # type BitVec<T> = compacts::Pop<T>;\n\n /// let bv = BitVec::<u64>::new(1000);\n\n /// assert!(bv.len() >= 1000);\n\n /// ```\n\n pub fn new(len: usize) -> Self {\n\n Pop {\n\n samples: Samples::none(len),\n", "file_path": "src/bits/pop_vec.rs", "rank": 16, "score": 116798.51464170462 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\nstruct Data {\n\n depth: usize,\n\n rank0: usize,\n\n rank1: usize,\n\n}\n\n\n", "file_path": "src/text/wavelet_matrix/trace.rs", "rank": 17, "score": 116503.6023076234 }, { "content": "/// A trait for integral types.\n\npub trait Int:\n\n 'static\n\n + Copy\n\n + Default\n\n + Clone\n\n + Eq\n\n + Ord\n\n + Hash\n\n + Sum\n\n + fmt::Debug\n\n + fmt::Display\n\n + fmt::Binary\n\n + fmt::Octal\n\n + fmt::UpperHex\n\n + fmt::LowerHex\n\n + ops::Add<Output = Self>\n\n + ops::AddAssign\n\n + ops::Sub<Output = Self>\n\n + ops::SubAssign\n\n + ops::Mul<Output = Self>\n", "file_path": "src/num.rs", "rank": 18, "score": 115057.68760497039 }, { "content": "/// The immutable bit sequences.\n\n///\n\n/// ## Defaults\n\n///\n\n/// This trait has methods with circular defaults, so implementator need to redefine methods below.\n\n///\n\n/// ```text\n\n/// - Either count1 or count0\n\n/// - Either rank1 or rank0\n\n/// ```\n\npub trait Bits {\n\n /// The size of this bit sequence. The size is always equal to `count1() + count0()`.\n\n fn size(&self) -> usize;\n\n\n\n /// Reads bit at `i`.\n\n fn bit(&self, i: usize) -> bool;\n\n\n\n /// Reads `n` bits in `[i, i+n)`, and returns them as the lowest `n` bit of `T`.\n\n #[doc(hidden)]\n\n fn getn<T: Word>(&self, i: usize, n: usize) -> T {\n\n let mut word = T::NONE;\n\n for b in i..i + n {\n\n if self.bit(b) {\n\n word.put1(b - i);\n\n }\n\n }\n\n word\n\n }\n\n\n\n /// Returns true if all bits are enabled.\n", "file_path": "src/ops.rs", "rank": 19, "score": 115052.13878995189 }, { "content": "/// `Text` is a sequence of `Code`. Typically, `Code` is an unsigned integer.\n\npub trait Text {\n\n /// A content of this text.\n\n type Code;\n\n\n\n /// The size of this text.\n\n fn size(&self) -> usize;\n\n\n\n /// Counts the occurences of `e` in this text.\n\n fn count(&self, e: &Self::Code) -> usize;\n\n}\n\n\n", "file_path": "src/ops.rs", "rank": 20, "score": 115048.68556696721 }, { "content": "/// Unsigned int\n\npub trait Word:\n\n Int\n\n + FixedBits\n\n + TryFrom<u8>\n\n + TryFrom<u16>\n\n + TryFrom<u32>\n\n + TryFrom<u64>\n\n + TryFrom<u128>\n\n + TryFrom<usize>\n\n{\n\n}\n\n\n\nmacro_rules! 
implInt {\n\n ($( ( $Word:ty, $Sint:ty) ),*) => ($(\n\n impl Int for $Word {\n\n const _0: Self = 0;\n\n const _1: Self = 1;\n\n\n\n const NONE: Self = 0;\n\n const FULL: Self = !0;\n", "file_path": "src/num.rs", "rank": 21, "score": 115048.68556696721 }, { "content": "fn words<T: Word>(slice: &[T], chunk_bits: usize) -> impl Iterator<Item = Option<&[T]>> {\n\n assert!(chunk_bits % T::BITS == 0 && chunk_bits <= 65536);\n\n slice.chunks(chunk_bits / T::BITS).map(Some)\n\n}\n\n\n\nimpl<T: Word> From<Vec<T>> for BitArray<T> {\n\n fn from(data: Vec<T>) -> Self {\n\n let (ones, sum_samples, idx_samples) = {\n\n let slice = data.as_slice();\n\n samples(slice.size(), words(slice, SUPER_BLOCK))\n\n };\n\n\n\n debug_assert_eq!(ones, data.count1() as u64);\n\n BitArray {\n\n ones,\n\n data,\n\n sum_samples,\n\n idx_samples,\n\n }\n\n }\n", "file_path": "src/bits/bit_array.rs", "rank": 22, "score": 113011.97305837137 }, { "content": "/// A trait to seal private trait.\n\npub trait Sealed {}\n\n\n\nmacro_rules! impl_Sealed {\n\n ( $( [ $($tts:tt)+ ] for $Type:ty; )* ) => {\n\n $( impl<$($tts)+> Sealed for $Type {} )*\n\n };\n\n ( $( $Type:ty ),* ) => {\n\n $( impl Sealed for $Type {} )*\n\n };\n\n}\n\n\n\nimpl_Sealed!(u8, u16, u32, u64, u128, usize);\n\nimpl_Sealed!(i8, i16, i32, i64, i128, isize);\n\nimpl_Sealed!(RangeFull);\n\n\n\nimpl_Sealed!(\n\n [T: Sealed] for Range<T>;\n\n [T: Sealed] for RangeFrom<T>;\n\n [T: Sealed] for RangeTo<T>;\n\n [T: Sealed] for RangeInclusive<T>;\n\n [T: Sealed] for RangeToInclusive<T>;\n\n [T: Sealed] for (Bound<T>, Bound<T>);\n\n);\n", "file_path": "src/ops/private.rs", "rank": 23, "score": 112114.11447363657 }, { "content": "#[inline]\n\npub fn with_capacity<T: FixedBits>(n: usize) -> Vec<T> {\n\n Vec::with_capacity(blocks::<T>(n))\n\n}\n\n\n\npub(crate) fn sized_with<T, F>(bits: usize, mut f: F) -> Vec<T>\n\nwhere\n\n T: FixedBits,\n\n F: FnMut() -> T,\n\n{\n\n std::iter::from_fn(|| Some(f()))\n\n .take(blocks_by(bits, T::SIZE))\n\n .collect()\n\n}\n\n\n\npub(crate) fn to_exclusive<R: RangeBounds<usize>>(range: &R, max: usize) -> Option<(usize, usize)> {\n\n let start = match range.start_bound() {\n\n Bound::Included(&n) => n,\n\n Bound::Excluded(&n) => n + 1,\n\n Bound::Unbounded => 0,\n\n };\n", "file_path": "src/bits.rs", "rank": 24, "score": 111285.38327385727 }, { "content": "/// Selects `n`th item.\n\npub trait Select: Text {\n\n /// Selects `n`th code.\n\n fn select(&self, code: &Self::Code, n: usize) -> Option<usize>;\n\n}\n\n\n", "file_path": "src/ops.rs", "rank": 25, "score": 108024.64951609957 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Iter<'a, T: Words> {\n\n iter: Enumerate<slice::Iter<'a, Option<Box<T>>>>,\n\n}\n\n\n\nimpl<'a, T: Words> Iterator for Steps<'a, T> {\n\n type Item = (usize, Cow<'a, [T::Word]>);\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.0.next().map(|(i, s)| (i, Cow::Borrowed(s)))\n\n }\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.0.size_hint()\n\n }\n\n}\n\n\n\nimpl<'a, T: Words> Iterator for Iter<'a, T> {\n\n type Item = (usize, &'a [T::Word]);\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "src/bits/map.rs", "rank": 26, "score": 103515.4003036855 }, { "content": "/// `Code`\n\npub trait Code: Copy + Bits {\n\n /// DEPTH\n\n const DEPTH: usize;\n\n /// MIN\n\n const MIN: Self;\n\n /// MAX\n\n const MAX: Self;\n\n}\n\n\n", "file_path": "src/ops.rs", "rank": 27, "score": 101913.94164800094 }, { "content": "fn samples<'a, T, I>(size: usize, 
supers: I) -> (u64, SumSamples, IdxSamples)\n\nwhere\n\n T: Word,\n\n I: Iterator<Item = Option<&'a [T]>>,\n\n{\n\n use crate::bits::blocks_by;\n\n let mut l0s = vec![0; blocks_by(size, UPPER_BLOCK)];\n\n let mut l1l2s = vec![L1L2(0); blocks_by(size, SUPER_BLOCK)];\n\n\n\n let mut idxs = vec![Vec::new(); l0s.len()];\n\n let mut ones = 0i64; // max is 1<<63\n\n\n\n const ISIZE: i64 = SAMPLE_SIZE as i64;\n\n let mut cur = 0;\n\n let mut pre = 0;\n\n\n\n for (i, chunk) in supers.enumerate() {\n\n let basics = {\n\n let mut bbs = [0; NUM_BB];\n\n if let Some(slice) = chunk.as_ref() {\n", "file_path": "src/bits/bit_array.rs", "rank": 28, "score": 97756.20968636587 }, { "content": "/// `Words` is a fixed size array of word.\n\npub trait Words: 'static + Copy + FixedBits + Sealed {\n\n /// An unsigned int, the element of the array.\n\n type Word: Word;\n\n\n\n /// The length of the array.\n\n const LEN: usize;\n\n\n\n /// The size in slice of the array.\n\n #[doc(hidden)]\n\n const BITS: usize = <Self::Word as Int>::BITS * Self::LEN;\n\n\n\n /// Constructs an empty word array.\n\n #[inline]\n\n #[doc(hidden)]\n\n fn empty() -> Self {\n\n Self::splat(<Self::Word as Int>::NONE)\n\n }\n\n\n\n /// Constructs the word array from bit pattern.\n\n fn splat(word: Self::Word) -> Self; // [Self::Word; Self::LEN]\n", "file_path": "src/bits.rs", "rank": 29, "score": 89629.36163961128 }, { "content": "#[inline]\n\nfn cmp_opt<T: Ord>(x: Option<&T>, y: Option<&T>, a: Ordering, b: Ordering) -> Ordering {\n\n match (x, y) {\n\n (None, _) => a,\n\n (_, None) => b,\n\n (Some(x), Some(y)) => x.cmp(y),\n\n }\n\n}\n\n\n\nimpl Union<Self> for Loc1 {\n\n fn union(&mut self, that: &Self) {\n\n self.data = Cmp {\n\n a: self.data.iter().peekable(),\n\n b: that.data.iter().peekable(),\n\n }\n\n .cloned()\n\n .collect();\n\n\n\n struct Cmp<L: Iterator, R: Iterator> {\n\n a: Peekable<L>,\n\n b: Peekable<R>,\n", "file_path": "src/bits/roaring/locs.rs", "rank": 30, "score": 85148.18328502693 }, { "content": "fn trace_by<B, F>(index: (usize, usize), rows: Rows<'_, B>, router: F) -> Trace<'_, B, F> {\n\n let rows = rows.enumerate();\n\n Trace {\n\n index,\n\n rows,\n\n router,\n\n }\n\n}\n\n\n", "file_path": "src/text/wavelet_matrix/trace.rs", "rank": 31, "score": 78323.856853853 }, { "content": "fn regions<'a: 'r, 'b: 'r, 'r>(\n\n this: impl IntoIterator<Item = &'a Run> + 'a,\n\n that: impl IntoIterator<Item = &'b Run> + 'b,\n\n) -> Regions<impl Iterator<Item = Region> + 'r> {\n\n let max_size = Block::BITS;\n\n let finished = false;\n\n let last_val = None;\n\n let regions = inner_regions(this, that).peekable();\n\n Regions {\n\n finished,\n\n max_size,\n\n last_val,\n\n regions,\n\n }\n\n}\n\n\n", "file_path": "src/bits/roaring/runs.rs", "rank": 32, "score": 77657.84326414345 }, { "content": "fn merge<'a: 'r, 'b: 'r, 'r>(\n\n this: impl IntoIterator<Item = &'a Run> + 'a,\n\n that: impl IntoIterator<Item = &'b Run> + 'b,\n\n) -> impl Iterator<Item = Side> + 'r {\n\n use {Braket::*, Side::*};\n\n\n\n struct MergeBy<L, R, F>\n\n where\n\n L: Iterator,\n\n R: Iterator,\n\n F: Fn(&L::Item, &R::Item) -> Ordering,\n\n {\n\n lhs: Peekable<L>,\n\n rhs: Peekable<R>,\n\n fun: F,\n\n }\n\n\n\n impl<L, R, F> MergeBy<L, R, F>\n\n where\n\n L: Iterator,\n", "file_path": "src/bits/roaring/runs.rs", "rank": 33, "score": 77657.84326414345 }, { "content": "fn inner_regions<'a: 'r, 'b: 'r, 'r>(\n\n this: impl IntoIterator<Item = &'a Run> + 'a,\n\n that: impl IntoIterator<Item = &'b Run> + 'b,\n\n) -> impl Iterator<Item = Region> + 'r {\n\n // 
Tuples yields window\n\n struct Tuples<I: Iterator> {\n\n iter: I,\n\n last: Option<I::Item>,\n\n }\n\n impl<I: Iterator> Tuples<I> {\n\n fn tuples(mut iter: I) -> Self {\n\n let last = iter.next();\n\n Tuples { iter, last }\n\n }\n\n }\n\n impl<I> Iterator for Tuples<I>\n\n where\n\n I: Iterator,\n\n I::Item: Copy,\n\n {\n", "file_path": "src/bits/roaring/runs.rs", "rank": 34, "score": 76307.07634371464 }, { "content": "fn rrr_table<P: AsRef<Path>>(path: P, n: usize) -> io::Result<()> {\n\n use std::{env, fs::File, io::Write};\n\n\n\n fn gentab(size: usize) -> Vec<Vec<u128>> {\n\n let mut table = vec![vec![0u128; size]; size];\n\n for k in 0..size {\n\n table[k][k] = 1; // initialize diagonal\n\n table[0][k] = 0; // initialize first row\n\n table[k][0] = 1; // initialize first col\n\n }\n\n for i in 1..size {\n\n for j in 1..size {\n\n table[i][j] = table[i - 1][j - 1] + table[i - 1][j];\n\n }\n\n }\n\n table\n\n }\n\n\n\n let dir = env::var(\"OUT_DIR\").unwrap();\n\n let mut file = File::create(Path::new(&dir).join(path))?;\n\n writeln!(file, \"{:?}\", gentab(n))\n\n}\n", "file_path": "build.rs", "rank": 35, "score": 66248.92778730855 }, { "content": "// Helper trait to implement `select1` and `select0`\n\ntrait Broadword {\n\n fn broadword(&self, c: usize) -> Option<usize>;\n\n}\n\n\n\nimpl Broadword for u64 {\n\n fn broadword(&self, c: usize) -> Option<usize> {\n\n const X01: u64 = 0x0101_0101_0101_0101;\n\n const X02: u64 = 0x2020_2020_2020_2020;\n\n const X33: u64 = 0x3333_3333_3333_3333;\n\n const X22: u64 = 0x2222_2222_2222_2222;\n\n const X80: u64 = 0x2010_0804_0201_0080;\n\n const X81: u64 = 0x2010_0804_0201_0081;\n\n const X0F: u64 = 0x0f0f_0f0f_0f0f_0f0f;\n\n const X55: u64 = X22 + X33 + X22 + X33;\n\n const X8X: u64 = X81 + X80 + X80 + X80;\n\n\n\n #[inline]\n\n const fn le8(x: u64, y: u64) -> u64 {\n\n let x8 = X02 + X02 + X02 + X02;\n\n let xs = (y | x8) - (x & !x8);\n", "file_path": "src/num.rs", "rank": 36, "score": 61952.03913214358 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nstruct Samples {\n\n uppers: FenwickTree<u64>,\n\n // L1 and L2 are interleaved into one vector,\n\n // each L1 entries is followed by its L2 index entries.\n\n lowers: Vec<FenwickTree<L1L2>>,\n\n}\n\n\n\n// /// `BitArray<T>` is a freezed `BitVec` with the extra index for `select1`.\n\n// #[derive(Debug, Clone, PartialEq, Eq)]\n\n// pub struct PopArray<T> {\n\n// samples: CombinedSamples,\n\n// data: Vec<T>,\n\n// }\n\n\n\n// #[derive(Debug, Clone, PartialEq, Eq)]\n\n// struct CombinedSamples {\n\n// pops: Samples,\n\n// pos1: Vec<Vec<u32>>,\n\n// }\n\n\n\n/// Interleaves L1[i] and L2[i] into 64bit word.\n", "file_path": "src/bits/pop_vec.rs", "rank": 37, "score": 58871.10915019991 }, { "content": "#[derive(Debug, Clone, Default, PartialEq, Eq)]\n\nstruct Runs {\n\n data: Vec<Run>,\n\n}\n\n\n\n/// `Run` is an inclusive range between `[i, j]`.\n", "file_path": "src/bits/roaring/mod.rs", "rank": 38, "score": 58871.10915019991 }, { "content": "#[derive(Debug, Clone, Default, PartialEq, Eq)]\n\nstruct Loc1 {\n\n data: Vec<u16>,\n\n}\n\n\n\n// // /// 0-based sorted bit sequence.\n\n// // #[derive(Debug, Clone, Default, PartialEq, Eq)]\n\n// // pub(crate) struct Loc0 {\n\n// // locs: Vec<u16>,\n\n// // }\n\n\n\n/// A run length encoded bits.\n", "file_path": "src/bits/roaring/mod.rs", "rank": 39, "score": 58871.10915019991 }, { "content": "#[derive(Debug, Clone)]\n\nstruct UnionFind {\n\n cell: RefCell<Vec<usize>>,\n\n}\n\n\n\nimpl UnionFind {\n\n fn new(size: usize) -> Self {\n\n UnionFind 
{\n\n cell: RefCell::new(vec![0; size]),\n\n }\n\n }\n\n\n\n fn root(&self, i: usize) -> usize {\n\n assert_ne!(i, 0);\n\n let mut data = self.cell.borrow_mut();\n\n let mut root = i;\n\n while data[root] != 0 {\n\n root = data[root];\n\n }\n\n if i != root {\n\n data[i] = root;\n", "file_path": "src/union_find.rs", "rank": 40, "score": 58871.10915019991 }, { "content": "#[test]\n\nfn union_find() {\n\n let mut uf = UnionFind::new(100);\n\n\n\n uf.join(1, 9);\n\n uf.join(3, 9);\n\n uf.join(5, 9);\n\n uf.join(7, 9);\n\n\n\n uf.join(2, 10);\n\n uf.join(4, 10);\n\n uf.join(6, 4);\n\n uf.join(8, 2);\n\n\n\n assert!(uf.same(1, 9));\n\n assert!(uf.same(3, 9));\n\n assert!(uf.same(5, 9));\n\n assert!(uf.same(7, 9));\n\n assert!(uf.same(9, 9));\n\n\n\n assert!(uf.same(2, 4));\n\n assert!(uf.same(4, 6));\n\n assert!(uf.same(6, 8));\n\n assert!(uf.same(8, 10));\n\n}\n", "file_path": "src/union_find.rs", "rank": 41, "score": 58244.49227935805 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\nstruct SumSamples {\n\n // L0: cumulative absolute counts\n\n // L1: cumulative relative counts\n\n // L2: non-cumulative relative counts\n\n // L1 and L2 are interleaved into one vector,\n\n // each L1 entries is followed by its L2 index entries.\n\n l0s: Vec<u64>,\n\n l1l2s: Vec<L1L2>,\n\n}\n\n\n\n/// An interleaved value of L1[i] and L2[i] of `RankSamples`.\n", "file_path": "src/bits/bit_array.rs", "rank": 42, "score": 57517.678941152815 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\nstruct Node {\n\n depth: usize,\n\n index: (usize, usize),\n\n route: Route, // node direction from depth-1 to depth\n\n}\n\n\n", "file_path": "src/text/wavelet_matrix/trace.rs", "rank": 43, "score": 57517.678941152815 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\nstruct IdxSamples {\n\n idxs: Vec<Vec<u32>>,\n\n}\n\n\n\nconst UPPER_BLOCK: usize = 1 << 32;\n\nconst SUPER_BLOCK: usize = 2048;\n\nconst BASIC_BLOCK: usize = 512;\n\n\n\nconst SAMPLE_SIZE: usize = 8192;\n\n\n\nconst NUM_SB: usize = (1 << 32) / 2048; // 2097152\n\nconst NUM_BB: usize = 2048 / 512;\n\n\n", "file_path": "src/bits/bit_array.rs", "rank": 44, "score": 57517.678941152815 }, { "content": "#[test]\n\nfn rank_select() {\n\n for _ in 0..1000 {\n\n let rank1 = thread_rng().gen_range(0, V0.count1());\n\n assert!(V0.rank1(V0.select1(rank1).unwrap()) == rank1);\n\n let rank0 = thread_rng().gen_range(0, V0.count0());\n\n assert!(V0.rank0(V0.select0(rank0).unwrap()) == rank0);\n\n }\n\n}\n\n\n\nmod bytes {\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n lazy_static! 
{\n\n static ref BUF0: Vec<u8> = {\n\n let mut vec = Vec::with_capacity(1 << 16);\n\n V0.serialize_into(&mut vec).unwrap();\n\n vec\n\n };\n\n static ref BUF1: Vec<u8> = {\n", "file_path": "src/bits/roaring/tests.rs", "rank": 45, "score": 56893.72535892924 }, { "content": "#[cfg(test)]\n\n#[test]\n\n#[rustfmt::skip]\n\nfn test_regions() {\n\n assert_eq!(\n\n regions(\n\n &[Run(3u16, 5), Run(7, 10)],\n\n &[Run(2, 3)],\n\n )\n\n .collect::<Vec<Region>>(),\n\n vec![\n\n Region::Not(0..2),\n\n Region::Rhs(2..3),\n\n Region::And(3..4),\n\n Region::Lhs(4..6),\n\n Region::Not(6..7),\n\n Region::Lhs(7..11),\n\n Region::Not(11..65536),\n\n ]\n\n );\n\n\n\n assert_eq!(\n\n regions(\n", "file_path": "src/bits/roaring/runs.rs", "rank": 46, "score": 56893.72535892924 }, { "content": "fn main() -> io::Result<()> {\n\n rrr_table(\"table.rs\", 127)\n\n}\n\n\n", "file_path": "build.rs", "rank": 47, "score": 55216.56031867176 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nstruct Node<B> {\n\n size: usize,\n\n dict: B,\n\n}\n\n\n\n// /// `Buf<T>`\n\n// #[derive(Debug, Clone, PartialEq, Eq)]\n\n// struct Buf<T> {\n\n// size: usize, // actual size of bits; not `bits.len() * T::SIZE`\n\n// data: Vec<T>, // bit blocks\n\n// }\n\n\n\n// impl<B: FixedBits> Node<Vec<B>> {\n\n// fn with_capacity(min_cap: usize) -> Self {\n\n// Node {\n\n// size: 0,\n\n// dict: vec![B::none(); blocks(min_cap, B::SIZE)],\n\n// }\n\n// }\n\n\n", "file_path": "src/text/wavelet_tree.rs", "rank": 48, "score": 54986.0954493939 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nstruct L1L2(u64);\n\n\n\n/// (upper_blocks, lower_blocks, super_blocks)\n", "file_path": "src/bits/pop_vec.rs", "rank": 50, "score": 53736.92348180282 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\nstruct L1L2(u64);\n\n\n\n/// A sampling values of `select1`.\n", "file_path": "src/bits/bit_array.rs", "rank": 51, "score": 53736.92348180282 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\nstruct Run(u16, u16);\n\n\n\nimpl From<&'_ Loc1> for Page {\n\n fn from(pos1: &Loc1) -> Self {\n\n let mut bits = Page::none();\n\n for &i in &pos1.data {\n\n bits.put1(try_cast(i));\n\n }\n\n bits\n\n }\n\n}\n\n\n\n// impl From<&'_ Runs> for BitsRepr {\n\n// fn from(runs: &Runs) -> Self {\n\n// let mut bits = BitsRepr::empty();\n\n// for &Bounds(i, j) in runs {\n\n// for b in i..=j {\n\n// bits.put1(try_cast::<u16, usize>(b));\n\n// }\n\n// }\n", "file_path": "src/bits/roaring/mod.rs", "rank": 52, "score": 51669.46880789737 }, { "content": "type Page = Box<[u64; 1024]>;\n\n\n\n/// 1-based sorted bit sequence.\n", "file_path": "src/bits/roaring/mod.rs", "rank": 53, "score": 51011.48794889194 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Probe<T, U> {\n\n index: (usize, usize),\n\n depth: usize,\n\n value: T,\n\n _kind: PhantomData<U>,\n\n}\n\n\n\nimpl<'a, T: Text> Search<'a, T>\n\nwhere\n\n T::Code: Word,\n\n{\n\n /// Enumerates value that satisfy `min <= value < max` in ascending order.\n\n pub fn min(self) -> Min<'a, T> {\n\n Min(self.heap())\n\n }\n\n\n\n /// Enumerates value that satisfy `min <= value < max` in descending order.\n\n pub fn max(self) -> Max<'a, T> {\n\n Max(self.heap())\n\n }\n", "file_path": "src/text/wavelet_matrix/search.rs", "rank": 54, "score": 50512.95495146411 }, { "content": "fn by_value<B, T>(\n\n index: (usize, usize),\n\n rows: Rows<'_, B>,\n\n val: T,\n\n) -> Trace<'_, B, impl FnMut(Data) -> Route>\n\nwhere\n\n T: Code,\n\n{\n\n trace_by(index, rows, move |Data { depth, .. 
}| {\n\n Route::from_bit(val.bit(T::DEPTH - depth - 1))\n\n })\n\n}\n\n\n\nimpl<'a, T: Code, B> View<'a, WaveletMatrix<T, B>> {\n\n #[inline]\n\n fn trace(&self, val: T) -> Option<Trace<'a, B, impl FnMut(Data) -> Route>> {\n\n self.idx\n\n .as_ref()\n\n .map(|&idx| by_value(idx, self.seq.rows(), val))\n\n }\n", "file_path": "src/text/wavelet_matrix/trace.rs", "rank": 55, "score": 49996.42750761524 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Trace<'a, B, F> {\n\n index: (usize, usize),\n\n rows: Enumerate<Rows<'a, B>>,\n\n router: F, // invoke for each depth to decide which route to trace\n\n}\n\n\n", "file_path": "src/text/wavelet_matrix/trace.rs", "rank": 56, "score": 48792.70995413406 }, { "content": "type BitMap = compacts::BitMap<[u64; 1024]>;\n\n\n\nmacro_rules! generate {\n\n ($rng:expr, $len:expr, $tab:expr) => {{\n\n // let mut build = Vec::<u64>::with_capacity($len);\n\n let mut build = vec![0; $len];\n\n for i in 0..$len {\n\n build[i] = $tab[$rng.gen_range(0, $tab.len())];\n\n }\n\n build\n\n }};\n\n ($rng:expr, $len:expr) => {{\n\n // let mut build = Vec::<u64>::with_capacity($len);\n\n let mut build = vec![0; $len];\n\n for i in 0..$len {\n\n build[i] = $rng.gen_range(0, $len as u32);\n\n }\n\n build\n\n }};\n\n}\n", "file_path": "benches/wm.rs", "rank": 57, "score": 48325.08551130054 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Heap<'a, By, T: Text> {\n\n seq: &'a T,\n\n min: Option<T::Code>,\n\n max: Option<T::Code>,\n\n bin: BinaryHeap<Probe<T::Code, By>>,\n\n}\n\n\n", "file_path": "src/text/wavelet_matrix/search.rs", "rank": 58, "score": 47338.98429995562 }, { "content": " R: Iterator<Item = T>,\n\n F: Fn(&T, &T) -> Ordering,\n\n {\n\n type Item = T;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match (self.lhs.peek(), self.rhs.peek()) {\n\n (Some(lhs), Some(rhs)) => match (self.fun)(lhs, rhs) {\n\n Ordering::Less | Ordering::Equal => self.lhs.next(),\n\n Ordering::Greater => self.rhs.next(),\n\n },\n\n (_, None) => self.lhs.next(),\n\n (None, _) => self.rhs.next(),\n\n }\n\n }\n\n }\n\n\n\n let lhs = {\n\n let mut vec_lhs = Vec::new();\n\n for lhs in this {\n\n let (n, m) = {\n", "file_path": "src/bits/roaring/runs.rs", "rank": 62, "score": 38.37792132278049 }, { "content": " b: that.data.iter().peekable(),\n\n }\n\n .cloned()\n\n .collect();\n\n\n\n struct Cmp<L: Iterator, R: Iterator> {\n\n a: Peekable<L>,\n\n b: Peekable<R>,\n\n }\n\n impl<L, R, T: Ord + Debug> Iterator for Cmp<L, R>\n\n where\n\n L: Iterator<Item = T>,\n\n R: Iterator<Item = T>,\n\n {\n\n type Item = T;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n match Ord::cmp(self.a.peek()?, self.b.peek()?) 
{\n\n LT => {\n\n self.a.next();\n", "file_path": "src/bits/roaring/locs.rs", "rank": 63, "score": 38.01003357377566 }, { "content": " }\n\n}\n\n\n\nimpl Difference<Self> for Loc1 {\n\n fn difference(&mut self, that: &Self) {\n\n self.data = Cmp {\n\n a: self.data.iter().peekable(),\n\n b: that.data.iter().peekable(),\n\n }\n\n .cloned()\n\n .collect();\n\n\n\n struct Cmp<L: Iterator, R: Iterator> {\n\n a: Peekable<L>,\n\n b: Peekable<R>,\n\n }\n\n impl<L, R, T: Ord + Debug> Iterator for Cmp<L, R>\n\n where\n\n L: Iterator<Item = T>,\n\n R: Iterator<Item = T>,\n", "file_path": "src/bits/roaring/locs.rs", "rank": 68, "score": 32.313016462798075 }, { "content": "}\n\n\n\nimpl SymmetricDifference<Self> for Loc1 {\n\n fn symmetric_difference(&mut self, that: &Self) {\n\n self.data = Cmp {\n\n a: self.data.iter().peekable(),\n\n b: that.data.iter().peekable(),\n\n }\n\n .cloned()\n\n .collect();\n\n\n\n struct Cmp<L: Iterator, R: Iterator> {\n\n a: Peekable<L>,\n\n b: Peekable<R>,\n\n }\n\n impl<L, R, T: Ord + Debug> Iterator for Cmp<L, R>\n\n where\n\n L: Iterator<Item = T>,\n\n R: Iterator<Item = T>,\n\n {\n", "file_path": "src/bits/roaring/locs.rs", "rank": 69, "score": 31.965954581240403 }, { "content": " let mut m = n;\n\n let n = try_cast(n);\n\n while let Some(&peek) = self.0.peek() {\n\n if m + 1 == *peek {\n\n m = *self.0.next().unwrap();\n\n continue;\n\n } else {\n\n break;\n\n }\n\n }\n\n Some((n, try_cast(m)))\n\n })\n\n }\n\n }\n\n RunIter(self.data.iter().peekable())\n\n }\n\n}\n\n\n\nimpl FixedBits for Loc1 {\n\n const SIZE: usize = Block::BITS;\n", "file_path": "src/bits/roaring/locs.rs", "rank": 70, "score": 30.677537291318313 }, { "content": " #[inline]\n\n pub fn with_capacity(cap: usize) -> Self {\n\n Self{data:Vec::with_capacity(cap)}\n\n }\n\n }\n\n )*)\n\n}\n\nimpl_ops!(Loc1);\n\n\n\nimpl Loc1 {\n\n pub fn runs<'a>(&'a self) -> impl Iterator<Item = (usize, usize)> + 'a {\n\n // `Iterator::scan` should be better?\n\n struct RunIter<'b, I: Iterator<Item = &'b u16>>(Peekable<I>);\n\n impl<'b, I> Iterator for RunIter<'b, I>\n\n where\n\n I: Iterator<Item = &'b u16>,\n\n {\n\n type Item = (usize, usize);\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.0.next().and_then(|&n| {\n", "file_path": "src/bits/roaring/locs.rs", "rank": 72, "score": 30.283590610975907 }, { "content": " R: Iterator,\n\n F: Fn(&L::Item, &R::Item) -> Ordering,\n\n {\n\n fn merge_by<A, B, T>(lhs: A, rhs: B, fun: F) -> Self\n\n where\n\n A: IntoIterator<Item = T, IntoIter = L>,\n\n B: IntoIterator<Item = T, IntoIter = R>,\n\n L: Iterator<Item = T>,\n\n R: Iterator<Item = T>,\n\n F: Fn(&T, &T) -> Ordering,\n\n {\n\n let lhs = lhs.into_iter().peekable();\n\n let rhs = rhs.into_iter().peekable();\n\n MergeBy { lhs, rhs, fun }\n\n }\n\n }\n\n\n\n impl<L, R, F, T> Iterator for MergeBy<L, R, F>\n\n where\n\n L: Iterator<Item = T>,\n", "file_path": "src/bits/roaring/runs.rs", "rank": 73, "score": 30.164896741294015 }, { "content": "// mod tests;\n\n\n\nuse std::cmp::Ordering::{self, Equal as EQ, Greater as GT, Less as LT};\n\n\n\nuse crate::{num::try_cast, ops::*};\n\n\n\n// #[cfg(test)]\n\n// pub(crate) use {posn::Pos1, runs::Runs};\n\n\n\n// #[derive(Debug, Clone, PartialEq, Eq)]\n\n// pub struct BitMap<K> {\n\n// size: usize,\n\n// keys: Vec<K>,\n\n// data: Vec<Block>,\n\n// }\n\n\n\n#[derive(Debug, Default, Clone, PartialEq, Eq)]\n\npub struct Block(Repr);\n\n\n\nimpl Block {\n\n const BITS: usize = 65536;\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "src/bits/roaring/mod.rs", "rank": 75, "score": 
28.984451764882404 }, { "content": " }\n\n impl<L, R, T: Ord + Debug> Iterator for Cmp<L, R>\n\n where\n\n L: Iterator<Item = T>,\n\n R: Iterator<Item = T>,\n\n {\n\n type Item = T;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match cmp_opt(self.a.peek(), self.b.peek(), GT, LT) {\n\n LT => self.a.next(),\n\n EQ => {\n\n let a = self.a.next().unwrap();\n\n let b = self.b.next().unwrap();\n\n assert_eq!(a, b);\n\n Some(a)\n\n }\n\n GT => self.b.next(),\n\n }\n\n }\n\n }\n", "file_path": "src/bits/roaring/locs.rs", "rank": 76, "score": 27.77194390908801 }, { "content": "\n\n// // impl<'a> Iterator for Steps<'a, Block> {\n\n// // type Item = (usize, Cow<'a, Block>);\n\n// // fn next(&mut self) -> Option<Self::Item> {\n\n// // self.iter.find_map(|(index, b)| {\n\n// // if b.any() {\n\n// // Some((index, Cow::Borrowed(b)))\n\n// // } else {\n\n// // None\n\n// // }\n\n// // })\n\n// // }\n\n// // }\n\n\n\n// // #[derive(Debug, Clone, PartialEq, Eq)]\n\n// // pub struct Bytes<T> {\n\n// // header: Header,\n\n// // bytes: T,\n\n// // }\n\n\n", "file_path": "src/bits/roaring/mod.rs", "rank": 77, "score": 27.43071283271495 }, { "content": " /// let bv = compacts::BitVec::<u64>::none(100);\n\n /// assert!(bv.len() == 100 && bv.capacity() >= 100);\n\n /// ```\n\n pub fn none(len: usize) -> Self {\n\n Self::from_fn(len, B::none)\n\n }\n\n\n\n /// Allocates buf by multiples of `B::SIZE`, such that `BitVec` has at least `n` length and capacity.\n\n ///\n\n /// ```\n\n /// let bv = compacts::BitVec::<u64>::from_fn(1000, || !0);\n\n /// assert!(bv.len() == 1000 && bv.capacity() >= 1000);\n\n /// ```\n\n pub fn from_fn<F>(len: usize, mut f: F) -> Self\n\n where\n\n F: FnMut() -> B,\n\n {\n\n BitVec {\n\n buf: iter::from_fn(|| Some(f()))\n\n .take(blocks_by(len, B::SIZE))\n", "file_path": "src/bits/bit_vec.rs", "rank": 78, "score": 27.26107187099423 }, { "content": "use std::{\n\n iter::{FromIterator, Peekable},\n\n ops::{Range, RangeInclusive},\n\n slice,\n\n};\n\n\n\nuse crate::{\n\n bits::{Difference, Intersection, SymmetricDifference, Union},\n\n num::try_cast,\n\n ops::*,\n\n};\n\n\n\nuse super::{Block, Ordering, Run, Runs, EQ, GT, LT};\n\n\n\nimpl<'a> IntoIterator for &'a Runs {\n\n type Item = &'a Run;\n\n type IntoIter = slice::Iter<'a, Run>;\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.data.iter()\n\n }\n", "file_path": "src/bits/roaring/runs.rs", "rank": 81, "score": 26.480831114266067 }, { "content": " #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.rows.size_hint()\n\n }\n\n\n\n // #[inline]\n\n // fn nth(&mut self, i: usize) -> Option<Self::Code> {\n\n // self.matrix.nth(i).map(|t| {\n\n // let node = self.next_node(t);\n\n // self.index = node.index;\n\n // node\n\n // })\n\n // }\n\n}\n\n\n\nimpl<'a, B: Bits, F> ExactSizeIterator for Trace<'a, B, F>\n\nwhere\n\n F: FnMut(Data) -> Route,\n\n{\n\n #[inline]\n\n fn len(&self) -> usize {\n\n self.rows.len()\n\n }\n\n}\n\n\n\nimpl<'a, B: Bits, F> FusedIterator for Trace<'a, B, F> where F: FnMut(Data) -> Route {}\n", "file_path": "src/text/wavelet_matrix/trace.rs", "rank": 82, "score": 25.902233762636786 }, { "content": "use std::{\n\n cmp::Ordering,\n\n fmt::{self, Debug},\n\n iter::{once, repeat, repeat_with},\n\n ops::{Add, AddAssign, RangeBounds, Sub, SubAssign},\n\n};\n\n\n\nuse Ordering::{Equal as EQ, Greater as GT, Less as LT};\n\n\n\nuse crate::{bits, fenwick::FenwickTree, num, num::Int, ops::*};\n\n\n\nconst UPPER_BLOCK: usize = 1 << 32;\n\nconst SUPER_BLOCK: usize = 2048;\n\nconst BASIC_BLOCK: usize = 
512;\n\n\n\nconst SUPERS: usize = UPPER_BLOCK / SUPER_BLOCK; // 2097152\n\n\n\n// const BASICS: usize = SUPER_BLOCK / BASIC_BLOCK; // 4\n\n\n\n/// `BitVec<T>` is `Vec<T>` with the index for `rank1` and `rank0`.\n", "file_path": "src/bits/pop_vec.rs", "rank": 83, "score": 25.677181730440598 }, { "content": " // Region::Not(range) => Some(range),\n\n // _ => None,\n\n // })\n\n // }\n\n}\n\n\n\nimpl<I: Iterator<Item = Region>> Iterator for Regions<I> {\n\n type Item = Region;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.finished {\n\n return None;\n\n };\n\n\n\n loop {\n\n let peek = self.regions.peek();\n\n match (self.last_val.clone(), peek) {\n\n // `inner_region` may yields empty value\n\n (_, Some(region)) if region.is_empty() => {\n\n self.regions.next().unwrap();\n\n continue;\n", "file_path": "src/bits/roaring/runs.rs", "rank": 84, "score": 25.571582682715366 }, { "content": "// fn empty() -> Self {\n\n// Self::default()\n\n// }\n\n// }\n\n\n\nimpl Bits for Runs {\n\n #[inline]\n\n fn size(&self) -> usize {\n\n Block::BITS\n\n }\n\n\n\n #[inline]\n\n fn count1(&self) -> usize {\n\n self.data.iter().map(Run::len).sum()\n\n }\n\n\n\n #[inline]\n\n fn any(&self) -> bool {\n\n !self.data.is_empty()\n\n }\n", "file_path": "src/bits/roaring/runs.rs", "rank": 85, "score": 25.410462619918 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct BytesSteps<'bytes, 'b> {\n\n index: std::ops::Range<usize>,\n\n bytes: &'b Bytes<&'bytes [u8]>,\n\n}\n\n\n\nimpl<'bytes, 'b> BitMask for &'b Bytes<&'bytes [u8]> {\n\n type Index = u16;\n\n type Value = Cow<'b, Block>;\n\n type Steps = BytesSteps<'bytes, 'b>;\n\n fn into_steps(self) -> Self::Steps {\n\n let index = 0..self.blocks();\n\n let bytes = self;\n\n BytesSteps { index, bytes }\n\n }\n\n}\n\n\n\nimpl<'bytes, 'b> Iterator for BytesSteps<'bytes, 'b> {\n\n type Item = (u16, Cow<'b, Block>);\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.index.next().map(|i| self.bytes.step(i))\n\n }\n\n}\n", "file_path": "src/bits/roaring/map.rs", "rank": 86, "score": 25.282447925331738 }, { "content": "// // }\n\n\n\n// // impl<'a> mask::BitMask for &'a crate::BitsRepr<Block> {\n\n// // type Index = usize;\n\n// // type Value = Cow<'a, Block>;\n\n// // type Steps = Steps<'a, Block>;\n\n// // fn into_steps(self) -> Self::Steps {\n\n// // Steps {\n\n// // iter: self.iter().enumerate(),\n\n// // }\n\n// // }\n\n// // }\n\n\n\n// // impl<'a> Iterator for Steps<'a, Loc1> {\n\n// // type Item = (usize, Cow<'a, Loc1>);\n\n// // fn next(&mut self) -> Option<Self::Item> {\n\n// // self.iter.find_map(|(index, pos1)| {\n\n// // if !pos1.is_empty() {\n\n// // let value = Cow::Borrowed(pos1);\n\n// // Some((index, value))\n", "file_path": "src/bits/roaring/mod.rs", "rank": 87, "score": 25.078235470267433 }, { "content": " _ => None,\n\n })\n\n }\n\n\n\n fn into_and_not(self) -> impl Iterator<Item = Range<usize>> {\n\n self.filter_map(|member| match member {\n\n Region::Lhs(range) => Some(range),\n\n _ => None,\n\n })\n\n }\n\n\n\n fn into_xor(self) -> impl Iterator<Item = Range<usize>> {\n\n self.filter_map(|member| match member {\n\n Region::Lhs(range) | Region::Rhs(range) => Some(range),\n\n _ => None,\n\n })\n\n }\n\n\n\n // pub fn into_not(self) -> impl Iterator<Item = Range<usize>> + 'r {\n\n // self.filter_map(|member| match member {\n", "file_path": "src/bits/roaring/runs.rs", "rank": 88, "score": 24.724354474457343 }, { "content": "pub(crate) fn bwd_links(pos: usize) -> impl Iterator<Item = usize> {\n\n let mut next = pos;\n\n from_fn(move || {\n\n if 
next > 0 {\n\n let curr = next;\n\n next = next_bwd(next);\n\n Some(curr)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n\nimpl<T: Copy> FenwickTree<T> {\n\n // Assume that `ident <> ident == ident`.\n\n pub fn init(size: usize, ident: T) -> Self {\n\n FenwickTree {\n\n tree: vec![ident; size + 1],\n\n }\n\n }\n", "file_path": "src/fenwick.rs", "rank": 89, "score": 24.602868913179833 }, { "content": "\n\n /// The total size of the std::slice must be no larger than isize::MAX bytes in memory.\n\n pub unsafe fn from_raw_parts_mut<'a>(data: *mut T::Word, len: usize) -> &'a mut FixedBits<T> {\n\n assert_eq!(len, T::SIZE);\n\n FixedBits::make_mut(std::slice::from_raw_parts_mut(data, len))\n\n }\n\n\n\n #[inline]\n\n pub fn as_bits(&self) -> &FixedBits<T::Word> {\n\n FixedBits::make(&self.0)\n\n }\n\n #[inline]\n\n pub fn as_mut_bits(&mut self) -> &mut FixedBits<T::Word> {\n\n FixedBits::make_mut(&mut self.0)\n\n }\n\n}\n\n\n\nimpl<T: Words> FixedBits for Box<FixedBits<T>> {\n\n const SIZE: u64 = FixedBits::<T>::ARRAY_BITS;\n\n fn none() -> Self {\n", "file_path": "src/bits/fixed_bits.rs", "rank": 90, "score": 24.311840808448903 }, { "content": " }\n\n}\n\n\n\nimpl FixedBits for Repr {\n\n const SIZE: usize = Block::BITS;\n\n fn none() -> Self {\n\n Self::default()\n\n }\n\n}\n\n\n\nimpl Bits for Block {\n\n #[inline]\n\n fn size(&self) -> usize {\n\n Block::BITS\n\n }\n\n #[inline]\n\n fn count1(&self) -> usize {\n\n self.0.count1()\n\n }\n\n #[inline]\n", "file_path": "src/bits/roaring/repr.rs", "rank": 91, "score": 24.30693091873055 }, { "content": " for Block(repr) in &self.data {\n\n repr.write_to(&mut w)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn deserialize_from<R: io::Read>(mut r: R) -> io::Result<Self> {\n\n Header::read_from(&mut r).and_then(|desc| match desc {\n\n Header::Inline(map) => Ok(map),\n\n Header::Serial {\n\n runs, keys, pops, ..\n\n } => {\n\n let mut data = Vec::with_capacity(keys.len());\n\n for (i, pop) in pops.into_iter().enumerate() {\n\n let pop = pop as usize;\n\n let repr = if Bits::make(&runs).get(try_cast(i)) {\n\n Repr::read_runs_from(&mut r, pop)?\n\n } else if pop <= REPR_POS1_LEN {\n\n Repr::read_heap_from(&mut r, pop)?\n", "file_path": "src/bits/roaring/map.rs", "rank": 92, "score": 24.0418044235909 }, { "content": " #[inline]\n\n fn none() -> Self {\n\n Loc1 { data: Vec::new() }\n\n }\n\n}\n\n\n\nimpl Bits for Loc1 {\n\n #[inline]\n\n fn size(&self) -> usize {\n\n Self::SIZE\n\n }\n\n\n\n #[inline]\n\n fn count1(&self) -> usize {\n\n self.data.len()\n\n }\n\n\n\n #[inline]\n\n fn bit(&self, i: usize) -> bool {\n\n self.data.binary_search(&try_cast(i)).is_ok()\n", "file_path": "src/bits/roaring/locs.rs", "rank": 93, "score": 23.853070422148424 }, { "content": " /// ```\n\n #[inline]\n\n fn rank1<R: RangeBounds<usize>>(&self, range: R) -> usize {\n\n let rank = |p: usize| {\n\n if p == self.size() {\n\n self.count1()\n\n } else {\n\n let (q, r) = divrem!(p, T::BITS);\n\n self.tree.sum::<usize>(q) + self.bits.buf[q].rank1(..r)\n\n }\n\n };\n\n match super::to_exclusive(&range, self.size()).expect(\"out of bounds\") {\n\n (0, i) => rank(i),\n\n (i, j) => rank(j) - rank(i),\n\n }\n\n }\n\n\n\n /// ```\n\n /// use compacts::{BitMap, ops::{BitsMut, Bits}};\n\n /// let mut bv = BitMap::<[u64; 1024]>::none(66666);\n", "file_path": "src/bits/map.rs", "rank": 94, "score": 23.749688708417338 }, { "content": " self.iter\n\n .find_map(|(i, opt)| opt.as_ref().map(|s| (i, s.as_ref_words())))\n\n }\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n 
self.iter.size_hint()\n\n }\n\n}\n\n\n\nimpl<'a, T: Words> FromIterator<(usize, Cow<'a, [T::Word]>)> for BitMap<T> {\n\n fn from_iter<I>(iterable: I) -> Self\n\n where\n\n I: IntoIterator<Item = (usize, Cow<'a, [T::Word]>)>,\n\n {\n\n let mut len = 0;\n\n let mut buf = Vec::new();\n\n for (index, cow) in iterable {\n\n if index > buf.len() {\n\n buf.resize(index, None);\n\n }\n", "file_path": "src/bits/map.rs", "rank": 95, "score": 23.68841565708937 }, { "content": "}\n\n\n\nimpl<'a, K> Iterator for Keys<'a, K> {\n\n type Item = &'a K;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.iter.next()\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Values<'a> {\n\n type Item = &'a Block;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.iter.next()\n\n }\n\n}\n\n\n\nimpl<'a, K: Word> Iterator for Steps<'a, K> {\n\n type Item = (K, Cow<'a, Block>);\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.zipped.find_map(|(&index, block)| {\n", "file_path": "src/bits/roaring/map.rs", "rank": 96, "score": 23.525280468688386 }, { "content": "// // } else {\n\n// // None\n\n// // }\n\n// // })\n\n// // }\n\n// // }\n\n\n\n// // impl<'a> Iterator for Steps<'a, Runs> {\n\n// // type Item = (usize, Cow<'a, Runs>);\n\n// // fn next(&mut self) -> Option<Self::Item> {\n\n// // self.iter.find_map(|(index, runs)| {\n\n// // if !runs.is_empty() {\n\n// // let value = Cow::Borrowed(runs);\n\n// // Some((index, value))\n\n// // } else {\n\n// // None\n\n// // }\n\n// // })\n\n// // }\n\n// // }\n", "file_path": "src/bits/roaring/mod.rs", "rank": 97, "score": 23.37949846917927 }, { "content": "}\n\n\n\n#[inline]\n\npub(crate) fn next_bwd(pos: usize) -> usize {\n\n pos - (pos & (-(pos as isize) as usize))\n\n}\n\n\n\npub(crate) fn fwd_links(pos: usize, max: usize) -> impl Iterator<Item = usize> {\n\n let mut next = pos + 1;\n\n from_fn(move || {\n\n if next < max {\n\n let curr = next;\n\n next = next_fwd(next);\n\n Some(curr)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/fenwick.rs", "rank": 98, "score": 23.349960396881414 }, { "content": " type Item = &'a u16;\n\n type IntoIter = slice::Iter<'a, u16>;\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.data.iter()\n\n }\n\n }\n\n\n\n impl IntoIterator for $Loc {\n\n type Item = u16;\n\n type IntoIter = vec::IntoIter<u16>;\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.data.into_iter()\n\n }\n\n }\n\n\n\n impl $Loc {\n\n #[inline]\n\n pub fn new() -> Self {\n\n Self{data:Vec::new()}\n\n }\n", "file_path": "src/bits/roaring/locs.rs", "rank": 99, "score": 23.241199183863124 } ]
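Among the context items above, `blocks_by` is the ceiling division used to size a block vector for `n` bits. The small sketch below re-derives it and exercises the edge cases its comment mentions (17 bits against 32-bit blocks, and an exact multiple); the assertions are illustrative and are not taken from the crate's test suite.

```rust
/// Minimum number of fixed-size blocks needed to store `n` bits,
/// written the same way as the quoted `blocks_by` helper.
const fn blocks_by(n: usize, block_size: usize) -> usize {
    n / block_size + (n % block_size > 0) as usize
}

fn main() {
    assert_eq!(blocks_by(0, 64), 0);  // no bits, no blocks
    assert_eq!(blocks_by(64, 64), 1); // exact multiple: no extra block
    assert_eq!(blocks_by(65, 64), 2); // one spare bit needs one more block
    assert_eq!(blocks_by(17, 32), 1); // the comment's example: 17 bits fit in one 32-bit block
    println!("ok");
}
```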
Rust
src/module.rs
accup/cargo-expunch
58b24319545a2874a3a3fc0127f745c00ef3bd60
use std::collections::HashMap; use std::path::PathBuf; use syn::{self, UseTree}; pub fn concat_module_parts(prefix: &[String], suffix: &[String], crate_name: &str) -> Vec<String> { let mut full_parts = prefix.to_vec(); for part in suffix { match part { _ if part == "crate" || part == crate_name => { full_parts.clear(); full_parts.push(part.clone()); } _ if part == "super" => { full_parts.pop(); } _ if part == "self" => {} _ => { full_parts.push(part.clone()); } } } full_parts } #[derive(Debug, Clone)] pub enum ModuleItemAccessibility { Direct(ModuleItemPath), Indirect(ModuleItemPath), } #[derive(Debug, Clone)] pub enum ModuleItemPath { Dir(Vec<String>, PathBuf), File(Vec<String>, PathBuf), Insoluble(Vec<String>), } pub fn make_module_item_path( module_parts: &[String], package_name: &str, package_src_path: &PathBuf, crate_path: &PathBuf, self_path: &PathBuf, ) -> Result<ModuleItemPath, String> { let mut lib_file = None; let mut path_buf = PathBuf::new(); let resolved_parts = module_parts .iter() .filter_map(|module_part| match module_part { _ if module_part == "crate" => None, _ if module_part == "self" => None, _ => Some(String::from(module_part)), }) .collect(); for module_part in module_parts.iter() { lib_file = None; path_buf.push(match module_part { _ if module_part == "crate" => crate_path.clone(), _ if module_part == package_name => { lib_file = Some(package_src_path.join("lib.rs")); package_src_path.clone() } _ if module_part == "super" => self_path .parent() .ok_or_else(|| { format!( "Failed to get the parent directory of the {0} {1} より上の階層へ遡ろうとしました", self_path.to_str().unwrap_or("(undisplayable path)"), self_path.to_str().unwrap_or("(表示できないパス)"), ) })? .to_path_buf(), _ if module_part == "self" => { if path_buf.as_os_str().is_empty() { self_path.clone() } else { continue; } } _ => PathBuf::from(module_part), }); } let module_name_file = path_buf.with_extension("rs"); Ok( if let Some(lib_file) = lib_file.and_then(|file| if file.is_file() { Some(file) } else { None }) { ModuleItemPath::File(resolved_parts, lib_file) } else if module_name_file.is_file() { ModuleItemPath::File(resolved_parts, module_name_file) } else if path_buf.is_dir() { let mod_file = path_buf.join("mod.rs"); if mod_file.is_file() { ModuleItemPath::File(resolved_parts, mod_file) } else { ModuleItemPath::Dir(resolved_parts, path_buf) } } else { ModuleItemPath::Insoluble(resolved_parts) }, ) } pub fn collect_module_items( use_tree: &UseTree, package_name: &str, package_src_path: &PathBuf, crate_name: &str, crate_path: &PathBuf, self_path: &PathBuf, ) -> Result<Vec<ModuleItemAccessibility>, String> { let mut module_path_map = HashMap::new(); collect_module_items_impl( use_tree, &mut Vec::new(), package_name, package_src_path, crate_name, crate_path, self_path, &mut module_path_map, )?; Ok(module_path_map.values().cloned().collect()) } fn collect_module_items_impl( use_tree: &UseTree, module_parts: &mut Vec<String>, package_name: &str, package_src_path: &PathBuf, crate_name: &str, crate_path: &PathBuf, self_path: &PathBuf, module_path_map: &mut HashMap<Vec<String>, ModuleItemAccessibility>, ) -> Result<(), String> { match use_tree { UseTree::Path(use_path) => { let name = use_path.ident.to_string(); module_parts.push(name); module_path_map.entry(module_parts.clone()).or_insert( ModuleItemAccessibility::Indirect(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?), ); collect_module_items_impl( &use_path.tree, module_parts, package_name, package_src_path, crate_name, 
crate_path, self_path, module_path_map, )?; module_parts.pop(); } UseTree::Name(use_name) => { let name = use_name.ident.to_string(); module_parts.push(name); module_path_map .entry(module_parts.clone()) .or_insert(ModuleItemAccessibility::Direct(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?)); module_parts.pop(); } UseTree::Rename(use_rename) => { let name = use_rename.ident.to_string(); module_parts.push(name); module_path_map .entry(module_parts.clone()) .or_insert(ModuleItemAccessibility::Direct(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?)); module_parts.pop(); } UseTree::Group(use_group) => { for item in use_group.items.iter() { collect_module_items_impl( item, module_parts, package_name, package_src_path, crate_name, crate_path, self_path, module_path_map, )?; } } _ => (), }; Ok(()) }
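`concat_module_parts` in the `module.rs` record above resolves `crate`, `super`, and `self` segments while concatenating module paths. The sketch below copies the function verbatim and checks that behaviour; the crate and module names (`mylib`, `foo`, `bar`, ...) are made up purely for illustration.

```rust
// Copied verbatim from the record above.
pub fn concat_module_parts(prefix: &[String], suffix: &[String], crate_name: &str) -> Vec<String> {
    let mut full_parts = prefix.to_vec();
    for part in suffix {
        match part {
            _ if part == "crate" || part == crate_name => {
                full_parts.clear();
                full_parts.push(part.clone());
            }
            _ if part == "super" => {
                full_parts.pop();
            }
            _ if part == "self" => {}
            _ => {
                full_parts.push(part.clone());
            }
        }
    }
    full_parts
}

/// Helper for the examples: turn string literals into owned parts.
fn owned(parts: &[&str]) -> Vec<String> {
    parts.iter().map(|s| s.to_string()).collect()
}

fn main() {
    let prefix = owned(&["mylib", "foo", "bar"]);
    // `super` pops one segment, then `baz` is appended.
    assert_eq!(
        concat_module_parts(&prefix, &owned(&["super", "baz"]), "mylib"),
        owned(&["mylib", "foo", "baz"])
    );
    // `crate` (or the crate name itself) restarts the path from the crate root.
    assert_eq!(
        concat_module_parts(&prefix, &owned(&["crate", "util"]), "mylib"),
        owned(&["crate", "util"])
    );
    // `self` is a no-op.
    assert_eq!(
        concat_module_parts(&prefix, &owned(&["self", "baz"]), "mylib"),
        owned(&["mylib", "foo", "bar", "baz"])
    );
    println!("ok");
}
```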
use std::collections::HashMap; use std::path::PathBuf; use syn::{self, UseTree}; pub fn concat_module_parts(prefix: &[String], suffix: &[String], crate_name: &str) -> Vec<String> {
ne() } _ if module_part == "super" => self_path .parent() .ok_or_else(|| { format!( "Failed to get the parent directory of the {0} {1} より上の階層へ遡ろうとしました", self_path.to_str().unwrap_or("(undisplayable path)"), self_path.to_str().unwrap_or("(表示できないパス)"), ) })? .to_path_buf(), _ if module_part == "self" => { if path_buf.as_os_str().is_empty() { self_path.clone() } else { continue; } } _ => PathBuf::from(module_part), }); } let module_name_file = path_buf.with_extension("rs"); Ok( if let Some(lib_file) = lib_file.and_then(|file| if file.is_file() { Some(file) } else { None }) { ModuleItemPath::File(resolved_parts, lib_file) } else if module_name_file.is_file() { ModuleItemPath::File(resolved_parts, module_name_file) } else if path_buf.is_dir() { let mod_file = path_buf.join("mod.rs"); if mod_file.is_file() { ModuleItemPath::File(resolved_parts, mod_file) } else { ModuleItemPath::Dir(resolved_parts, path_buf) } } else { ModuleItemPath::Insoluble(resolved_parts) }, ) } pub fn collect_module_items( use_tree: &UseTree, package_name: &str, package_src_path: &PathBuf, crate_name: &str, crate_path: &PathBuf, self_path: &PathBuf, ) -> Result<Vec<ModuleItemAccessibility>, String> { let mut module_path_map = HashMap::new(); collect_module_items_impl( use_tree, &mut Vec::new(), package_name, package_src_path, crate_name, crate_path, self_path, &mut module_path_map, )?; Ok(module_path_map.values().cloned().collect()) } fn collect_module_items_impl( use_tree: &UseTree, module_parts: &mut Vec<String>, package_name: &str, package_src_path: &PathBuf, crate_name: &str, crate_path: &PathBuf, self_path: &PathBuf, module_path_map: &mut HashMap<Vec<String>, ModuleItemAccessibility>, ) -> Result<(), String> { match use_tree { UseTree::Path(use_path) => { let name = use_path.ident.to_string(); module_parts.push(name); module_path_map.entry(module_parts.clone()).or_insert( ModuleItemAccessibility::Indirect(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?), ); collect_module_items_impl( &use_path.tree, module_parts, package_name, package_src_path, crate_name, crate_path, self_path, module_path_map, )?; module_parts.pop(); } UseTree::Name(use_name) => { let name = use_name.ident.to_string(); module_parts.push(name); module_path_map .entry(module_parts.clone()) .or_insert(ModuleItemAccessibility::Direct(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?)); module_parts.pop(); } UseTree::Rename(use_rename) => { let name = use_rename.ident.to_string(); module_parts.push(name); module_path_map .entry(module_parts.clone()) .or_insert(ModuleItemAccessibility::Direct(make_module_item_path( module_parts, package_name, package_src_path, crate_path, self_path, )?)); module_parts.pop(); } UseTree::Group(use_group) => { for item in use_group.items.iter() { collect_module_items_impl( item, module_parts, package_name, package_src_path, crate_name, crate_path, self_path, module_path_map, )?; } } _ => (), }; Ok(()) }
let mut full_parts = prefix.to_vec(); for part in suffix { match part { _ if part == "crate" || part == crate_name => { full_parts.clear(); full_parts.push(part.clone()); } _ if part == "super" => { full_parts.pop(); } _ if part == "self" => {} _ => { full_parts.push(part.clone()); } } } full_parts } #[derive(Debug, Clone)] pub enum ModuleItemAccessibility { Direct(ModuleItemPath), Indirect(ModuleItemPath), } #[derive(Debug, Clone)] pub enum ModuleItemPath { Dir(Vec<String>, PathBuf), File(Vec<String>, PathBuf), Insoluble(Vec<String>), } pub fn make_module_item_path( module_parts: &[String], package_name: &str, package_src_path: &PathBuf, crate_path: &PathBuf, self_path: &PathBuf, ) -> Result<ModuleItemPath, String> { let mut lib_file = None; let mut path_buf = PathBuf::new(); let resolved_parts = module_parts .iter() .filter_map(|module_part| match module_part { _ if module_part == "crate" => None, _ if module_part == "self" => None, _ => Some(String::from(module_part)), }) .collect(); for module_part in module_parts.iter() { lib_file = None; path_buf.push(match module_part { _ if module_part == "crate" => crate_path.clone(), _ if module_part == package_name => { lib_file = Some(package_src_path.join("lib.rs")); package_src_path.clo
random
[ { "content": "/// Rustソースコードを解析して展開する\n\nfn expunch_file(source_code_path: &str) -> Result<(), String> {\n\n let source_code_path = PathBuf::from(source_code_path);\n\n let package_path = PathBuf::from(\".\");\n\n let metadata = MetadataCommand::new()\n\n .manifest_path(\"./Cargo.toml\")\n\n .current_dir(&package_path)\n\n .exec()\n\n .unwrap();\n\n let package = metadata.root_package().unwrap();\n\n\n\n let mut expuncher = Expuncher::new(&package.name, package_path.join(\"src\"));\n\n expuncher.analyze_source_file(&source_code_path)?;\n\n expuncher.dump()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 1, "score": 55998.66709608112 }, { "content": "fn help() {\n\n println!(\n\n \"expunch\n\nExpand `use`d modules or declared `mod`ule in a Rust source-code into that contents in the workspace library crate\n\nRustソースコード中のuse文とモジュール宣言をワークスペースのライブラリクレートに含まれるソースコードの内容に展開する\n\n\n\nUSAGE:\n\n cargo expunch <source_code_path>\n\n\n\n * Use this subcommand at the directory of your workspace\n\n このサブコマンドはワークスペースのディレクトリで使用する必要があります\n\n\n\nOPTIONS:\n\n h, --help Prints help information\n\n ヘルプを表示する\n\n\n\nARGS:\n\n source_code_path Path to a Rust source code\n\n Rustソースコードへのパス\n\n\"\n\n );\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 24450.832579550824 }, { "content": "fn main() {\n\n let mut args: Vec<String> = env::args().collect();\n\n\n\n // サブコマンドの場合は第2引数に\"expunch\"がくるので除外する\n\n if let Some(arg) = args.get(1) {\n\n if arg == \"expunch\" {\n\n args.remove(1);\n\n }\n\n }\n\n\n\n match args.len() {\n\n // コマンドライン引数が指定されていない\n\n 1 => {\n\n eprintln!(\n\n \"Specify the path to a Rust source-code in the option `source_code_path`\n\n引数 source_code_path にRustソースコードへのパスを指定してください\"\n\n )\n\n }\n\n // ヘルプ表示の指定\n\n 2 if &args[1] == \"-h\" || &args[1] == \"--help\" => {\n", "file_path": "src/main.rs", "rank": 5, "score": 24450.832579550824 }, { "content": " &full_parts,\n\n // ライブラリクレートの場合はクレートを変更する\n\n &String::from(if is_lib_crate {\n\n &self.package_name\n\n } else {\n\n crate_name\n\n }),\n\n )?;\n\n println!(\"}}\");\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn remove_top_module(&self, use_tree: &UseTree, crate_name: &str) -> Option<UseTree> {\n\n self.remove_top_module_impl(use_tree, crate_name, 0)\n\n }\n\n\n\n fn remove_top_module_impl(\n\n &self,\n", "file_path": "src/expuncher.rs", "rank": 7, "score": 6.195765859214217 }, { "content": " .items\n\n .iter()\n\n .map(|item| self.resolve_modules_impl(item, crate_name))\n\n .collect(),\n\n }),\n\n UseTree::Rename(_) => use_tree.clone(),\n\n UseTree::Glob(_) => use_tree.clone(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ModuleNode {\n\n pub path: Option<PathBuf>,\n\n pub visibility: Option<String>,\n\n pub replacement_spans: Vec<ReplacementSpan>,\n\n pub children: HashMap<String, ModuleNode>,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/expuncher.rs", "rank": 8, "score": 5.9032517890284355 }, { "content": "# cargo-expunch\n\n\n\nCargo subcommand to expand `use`d modules or declared `mod`ule in a Rust source-code into that contents in the workspace library crate.\n\n\n\nRustソースコード中のuse文とモジュール宣言をワークスペースのライブラリクレートに含まれるソースコードの内容に展開するCargoのサブコマンドです。\n\n\n\n## Installation\n\n```sh\n\ncargo install --git https://github.com/accup/cargo-expunch.git\n\n```\n\n\n\n## Usage\n\n```sh\n\ncargo expunch <source_code_path>\n\n```\n\n\n\n* Use this subcommand at the directory of your workspace\n\n\n\n このサブコマンドはワークスペースのディレクトリで使用する必要があります\n\n\n\n### Example\n\n#### File contents\n\n##### 
`Cargo.toml`\n\n```toml\n\n[package]\n\nname = \"example\"\n\n# ...\n\n```\n\n\n\n##### `src/main.rs`\n\n```rs\n\nuse example::{self, foo};\n\n\n\nfn main() {\n\n println!(\"Hello, world!\");\n\n}\n\n```\n\n\n\n##### `src/lib.rs`\n\n```rs\n\npub mod foo;\n\n\n\npub fn good_afternoon() {}\n\n```\n\n\n\n##### `src/foo/mod.rs`\n\n```rs\n\nmod bar;\n\n\n\npub fn good_evening() {}\n\n```\n\n\n\n##### `src/foo/bar.rs`\n\n```rs\n\npub fn good_morning() {}\n\n```\n\n\n\n#### Output\n\n\n\nUse of the `example` module is removed and the contents of the library crate are appended.\n\n\n\n`example`モジュールのuseが削除され、ライブラリクレートの内容が末尾に展開されます。\n\n\n\n##### Standard output of the command `cargo expunch ./src/main.rs`\n\n```rs\n\nuse example :: { foo } ;\n\n\n\nfn main() {\n\n println!(\"Hello, world!\");\n\n}\n\n\n\nmod example {\n\n\n\n\n\npub fn good_afternoon() {}\n\n\n\npub mod foo {\n\n\n\n\n\npub fn good_evening() {}\n\n\n\nmod bar {\n\npub fn good_morning() {}\n\n}\n\n}\n\n}\n\n```\n\n\n\n##### Standard output of the command `cargo expunch ./src/main.rs | rustfmt`\n\n```rs\n\nuse example::foo;\n\n\n\nfn main() {\n\n println!(\"Hello, world!\");\n\n}\n\n\n\nmod example {\n\n\n\n pub fn good_afternoon() {}\n\n\n\n pub mod foo {\n\n\n\n pub fn good_evening() {}\n\n\n\n mod bar {\n\n pub fn good_morning() {}\n\n }\n\n }\n\n}\n\n```\n", "file_path": "README.md", "rank": 9, "score": 5.897295212790756 }, { "content": " items: use_group\n\n .items\n\n .iter()\n\n .filter_map(|item| self.remove_top_module_impl(item, crate_name, depth))\n\n .collect(),\n\n })),\n\n UseTree::Rename(_) => Some(use_tree.clone()),\n\n UseTree::Glob(_) => Some(use_tree.clone()),\n\n }\n\n }\n\n\n\n pub fn resolve_modules(&self, use_tree: &UseTree, crate_name: &str) -> UseTree {\n\n self.resolve_modules_impl(use_tree, crate_name)\n\n }\n\n\n\n fn resolve_modules_impl(&self, use_tree: &UseTree, crate_name: &str) -> UseTree {\n\n match use_tree {\n\n UseTree::Path(use_path) => UseTree::Path(UsePath {\n\n ident: match use_path.ident {\n\n _ if use_path.ident.to_string() == \"crate\" => {\n", "file_path": "src/expuncher.rs", "rank": 10, "score": 5.661740363968866 }, { "content": " ///\n\n /// * `package_name` モジュールの解決に用いるパッケージの名前\n\n ///\n\n /// * `package_src_path` パッケージのsrcディレクトリへのパス\n\n ///\n\n /// * `crate_path` クレートのパス\n\n pub fn new(package_name: &str, package_src_path: PathBuf) -> Expuncher {\n\n Expuncher {\n\n collected_modules: ModuleNode::new(),\n\n package_name: String::from(package_name),\n\n package_src_path,\n\n }\n\n }\n\n\n\n /// ファイルの内容を基にすべての依存するモジュールを解析する\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `source_path` ソースコードへのパス\n\n pub fn analyze_source_file(&mut self, source_path: &PathBuf) -> Result<(), String> {\n", "file_path": "src/expuncher.rs", "rank": 11, "score": 4.523957080182516 }, { "content": "use crate::module::*;\n\nuse proc_macro2::LineColumn;\n\nuse quote::ToTokens;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{self, prelude::*, BufReader, Read};\n\nuse std::path::PathBuf;\n\nuse syn::{self, spanned::Spanned, Ident, Item, ItemUse, UseGroup, UseName, UsePath, UseTree};\n\n\n\n#[derive(Debug)]\n\npub struct Expuncher {\n\n collected_modules: ModuleNode,\n\n package_name: String,\n\n package_src_path: PathBuf,\n\n}\n\n\n\nimpl Expuncher {\n\n /// 新たなエクスパンチャを作成する\n\n ///\n\n /// # Arguments\n", "file_path": "src/expuncher.rs", "rank": 12, "score": 4.499955582313358 }, { "content": " }))\n\n }\n\n }\n\n }\n\n // 展開対象のトップレベルのクレートをuse文から削除する\n\n UseTree::Name(use_name) => {\n\n if depth 
== 0\n\n && (use_name.ident.to_string() == \"crate\"\n\n || use_name.ident.to_string() == self.package_name)\n\n {\n\n None\n\n } else if depth == 1 && use_name.ident.to_string() == \"self\" {\n\n None\n\n } else {\n\n Some(use_tree.clone())\n\n }\n\n }\n\n // 空のグループは許容されているのでそのままグループとして返す\n\n UseTree::Group(use_group) => Some(UseTree::Group(UseGroup {\n\n brace_token: use_group.brace_token,\n", "file_path": "src/expuncher.rs", "rank": 13, "score": 4.4411661845303465 }, { "content": "pub struct ReplacementSpan {\n\n pub start: LineColumn,\n\n pub end: LineColumn,\n\n pub replacement: String,\n\n}\n\n\n\nimpl ModuleNode {\n\n pub fn new() -> ModuleNode {\n\n ModuleNode {\n\n path: None,\n\n visibility: Some(String::from(\"pub\")),\n\n replacement_spans: Vec::new(),\n\n children: HashMap::new(),\n\n }\n\n }\n\n\n\n /// モジュールのノードを再帰的に追加して末尾要素にファイルのパスを登録する\n\n ///\n\n /// パスが既に登録されている場合は返戻値として`Some(source_path)`が返される\n\n pub fn update(\n", "file_path": "src/expuncher.rs", "rank": 14, "score": 4.281890739678765 }, { "content": " // モジュールの参照先がライブラリクレートか\n\n let is_lib_crate = &full_parts == &[self.package_name.clone()];\n\n\n\n // ファイルが解決されるモジュールのみを登録\n\n if let ModuleItemPath::File(_, path) = module_item_path {\n\n // トップレベルのソースコードの解析時でありライブラリクレートが直接useされている場合に限り\n\n // モジュールの可視性をuseの指定に合わせる\n\n let module_vis = if is_lib_crate && source_parts.is_empty() {\n\n let module_vis = item_use.vis.to_token_stream().to_string();\n\n if module_vis.is_empty() {\n\n None\n\n } else {\n\n Some(module_vis)\n\n }\n\n } else {\n\n Some(String::from(\"pub\"))\n\n };\n\n\n\n // ソースコードが依存するモジュールを登録\n\n if let None = self.collected_modules.update(\n", "file_path": "src/expuncher.rs", "rank": 15, "score": 4.263096249949562 }, { "content": " start: span.start(),\n\n end: span.end(),\n\n replacement: if let Some(use_tree) = use_tree {\n\n Item::Use(ItemUse {\n\n attrs: item_use.attrs.clone(),\n\n vis: item_use.vis.clone(),\n\n use_token: item_use.use_token,\n\n leading_colon: item_use.leading_colon,\n\n tree: use_tree,\n\n semi_token: item_use.semi_token,\n\n })\n\n .to_token_stream()\n\n .to_string()\n\n } else {\n\n String::new()\n\n },\n\n });\n\n }\n\n }\n\n // トップレベルのmod文を解析\n", "file_path": "src/expuncher.rs", "rank": 16, "score": 4.092591853281496 }, { "content": " }\n\n\n\n fn dump_module(\n\n &self,\n\n module: &ModuleNode,\n\n source_parts: &[String],\n\n crate_name: &str,\n\n ) -> Result<(), String> {\n\n if let Some(source_path) = &module.path {\n\n let file = File::open(source_path).or_else(|_| {\n\n Err(format!(\n\n \"File {0} not exists\n\n ファイル {1} が存在しません\",\n\n source_path.to_str().unwrap_or(\"(undisplayable path)\"),\n\n source_path.to_str().unwrap_or(\"(表示できないパス)\"),\n\n )\n\n .to_owned())\n\n })?;\n\n\n\n // 既に置換の配列はソート済みとする\n", "file_path": "src/expuncher.rs", "rank": 17, "score": 3.91592352561688 }, { "content": " .to_owned())\n\n })?;\n\n let mut content = String::new();\n\n file.read_to_string(&mut content).or_else(|_| {\n\n Err(format!(\n\n \"Failed to read the file {0}\n\nファイル {1} の読み取りに失敗しました\",\n\n source_path.to_str().unwrap_or(\"(undisplayable path)\"),\n\n source_path.to_str().unwrap_or(\"(表示できないパス)\"),\n\n ))\n\n })?;\n\n let ast = syn::parse_file(&content).or_else(|_| {\n\n Err(format!(\n\n \"Failed to parse the source-code {0}\n\nソースコード {1} の構文解析に失敗しました\",\n\n source_path.to_str().unwrap_or(\"(undisplayable path)\"),\n\n source_path.to_str().unwrap_or(\"(表示できないパス)\"),\n\n ))\n\n })?;\n\n\n", "file_path": "src/expuncher.rs", "rank": 19, "score": 3.7928316361196694 }, { "content": " 
use_tree: &UseTree,\n\n crate_name: &str,\n\n depth: usize,\n\n ) -> Option<UseTree> {\n\n match use_tree {\n\n UseTree::Path(use_path) => {\n\n if let Some(new_tree) =\n\n self.remove_top_module_impl(&use_path.tree, crate_name, depth + 1)\n\n {\n\n Some(UseTree::Path(UsePath {\n\n ident: use_path.ident.clone(),\n\n colon2_token: use_path.colon2_token,\n\n tree: Box::new(new_tree),\n\n }))\n\n } else {\n\n if depth == 0 {\n\n None\n\n } else {\n\n Some(UseTree::Name(UseName {\n\n ident: use_path.ident.clone(),\n", "file_path": "src/expuncher.rs", "rank": 20, "score": 3.745764392574543 }, { "content": " // selfパスの解決\n\n let self_path = match source_path.file_name() {\n\n Some(name) if name == \"mod.rs\" => source_path\n\n .parent()\n\n .ok_or_else(|| {\n\n format!(\n\n \"Failed to get the parent directory of the {0}\n\n{1} より上の階層へ遡ろうとしました\",\n\n source_path.to_str().unwrap_or(\"(undisplayable path)\"),\n\n source_path.to_str().unwrap_or(\"(表示できないパス)\"),\n\n )\n\n })?\n\n .to_path_buf(),\n\n _ => source_path.clone(),\n\n };\n\n\n\n for item in &ast.items {\n\n // トップレベルのuse文を解析\n\n if let Item::Use(item_use) = item {\n\n // use文から依存モジュールを取得\n", "file_path": "src/expuncher.rs", "rank": 21, "score": 3.717625360329942 }, { "content": " ) -> Result<(), String> {\n\n self.analyze_file_impl(source_path, source_parts, \"crate\", source_path)?;\n\n self.collected_modules.sort_replacement_spans();\n\n Ok(())\n\n }\n\n\n\n fn analyze_file_impl(\n\n &mut self,\n\n source_path: &PathBuf,\n\n source_parts: &[String],\n\n crate_name: &str,\n\n crate_path: &PathBuf,\n\n ) -> Result<(), String> {\n\n let mut file = File::open(source_path).or_else(|_| {\n\n Err(format!(\n\n \"File {0} not exists\n\nファイル {1} が存在しません\",\n\n source_path.to_str().unwrap_or(\"(undisplayable path)\"),\n\n source_path.to_str().unwrap_or(\"(表示できないパス)\"),\n\n )\n", "file_path": "src/expuncher.rs", "rank": 22, "score": 3.5712954719962173 }, { "content": " Ident::new(crate_name, use_path.ident.span())\n\n }\n\n _ => use_path.ident.clone(),\n\n },\n\n colon2_token: use_path.colon2_token,\n\n tree: Box::new(self.resolve_modules_impl(&use_path.tree, crate_name)),\n\n }),\n\n // 展開対象のトップレベルのクレートをuse文から削除する\n\n UseTree::Name(use_name) => {\n\n if use_name.ident.to_string() == \"crate\" {\n\n UseTree::Name(UseName {\n\n ident: Ident::new(crate_name, use_name.ident.span()),\n\n })\n\n } else {\n\n use_tree.clone()\n\n }\n\n }\n\n UseTree::Group(use_group) => UseTree::Group(UseGroup {\n\n brace_token: use_group.brace_token,\n\n items: use_group\n", "file_path": "src/expuncher.rs", "rank": 24, "score": 3.4325637145794765 }, { "content": " &mut self,\n\n module_parts: &[String],\n\n source_path: PathBuf,\n\n visibility: Option<&str>,\n\n ) -> Option<PathBuf> {\n\n if module_parts.is_empty() {\n\n match self.path {\n\n Some(_) => Some(source_path),\n\n None => {\n\n self.path = Some(source_path);\n\n self.visibility = if let Some(visibility) = visibility {\n\n Some(String::from(visibility))\n\n } else {\n\n None\n\n };\n\n None\n\n }\n\n }\n\n } else {\n\n let child = self\n", "file_path": "src/expuncher.rs", "rank": 25, "score": 3.162314927509188 }, { "content": "\n\n /// 変更可能な置換用のスパンの動的配列を取得する\n\n pub fn replacement_spans_mut(\n\n &mut self,\n\n module_parts: &[String],\n\n ) -> Option<&mut Vec<ReplacementSpan>> {\n\n if module_parts.is_empty() {\n\n Some(&mut self.replacement_spans)\n\n } else {\n\n if let Some(child) = self.children.get_mut(&module_parts[0]) {\n\n child.replacement_spans_mut(&module_parts[1..])\n\n } else {\n\n None\n\n }\n\n 
}\n\n }\n\n\n\n /// 置換用のスパンの配列を行数列数の早い順にソートする\n\n pub fn sort_replacement_spans(&mut self) {\n\n self.replacement_spans\n\n .sort_unstable_by_key(|span| span.start);\n\n }\n\n}\n", "file_path": "src/expuncher.rs", "rank": 27, "score": 2.876427145953925 }, { "content": " let source_parts = Vec::new();\n\n self.collected_modules\n\n .update(&source_parts, source_path.clone(), Some(\"pub\"));\n\n\n\n self.analyze_file_impl(source_path, &source_parts, \"crate\", source_path)?;\n\n self.collected_modules.sort_replacement_spans();\n\n Ok(())\n\n }\n\n\n\n /// ファイルの内容を基にすべての依存するモジュールを解析する\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `source_path` ソースコードへのパス\n\n ///\n\n /// * `parts_prefix` モジュール解決のためのモジュールパスの接頭辞\n\n pub fn analyze_file(\n\n &mut self,\n\n source_path: &PathBuf,\n\n source_parts: &[String],\n", "file_path": "src/expuncher.rs", "rank": 28, "score": 2.762416656803145 }, { "content": "pub mod expuncher;\n\npub mod module;\n", "file_path": "src/lib.rs", "rank": 29, "score": 2.7310405243564215 }, { "content": "use cargo_expunch::expuncher::Expuncher;\n\nuse cargo_metadata::MetadataCommand;\n\nuse std::env;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "src/main.rs", "rank": 31, "score": 2.6282517247470167 }, { "content": " .children\n\n .entry(module_parts[0].clone())\n\n .or_insert(ModuleNode::new());\n\n\n\n child.update(&module_parts[1..], source_path, visibility)\n\n }\n\n }\n\n\n\n /// 置換用のスパンの配列を取得する\n\n pub fn replacement_spans(&self, module_parts: &[String]) -> Option<&[ReplacementSpan]> {\n\n if module_parts.is_empty() {\n\n Some(&self.replacement_spans)\n\n } else {\n\n if let Some(child) = self.children.get(&module_parts[0]) {\n\n child.replacement_spans(&module_parts[1..])\n\n } else {\n\n None\n\n }\n\n }\n\n }\n", "file_path": "src/expuncher.rs", "rank": 32, "score": 2.4890455209697 }, { "content": " }\n\n }\n\n }\n\n\n\n // `crate`の解決\n\n let use_tree = self.resolve_modules(&item_use.tree, crate_name);\n\n\n\n let use_tree = if source_parts.is_empty() {\n\n // トップレベルのソースコードの解析時に限りトップレベルのモジュールのuseを削除する\n\n self.remove_top_module(&use_tree, crate_name)\n\n } else {\n\n Some(use_tree)\n\n };\n\n\n\n // use文の削除置換の追加\n\n if let Some(replacement_spans) =\n\n self.collected_modules.replacement_spans_mut(&source_parts)\n\n {\n\n let span = item.span();\n\n replacement_spans.push(ReplacementSpan {\n", "file_path": "src/expuncher.rs", "rank": 33, "score": 2.4495202714303876 }, { "content": " None\n\n } else {\n\n Some(&module_vis)\n\n },\n\n ) {\n\n // 新たに登録できた場合にのみ依存するモジュールのソースコードを解析\n\n // 注:mod宣言ではクレートは変更されない\n\n self.analyze_file_impl(path, &full_parts, crate_name, crate_path)?;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// 解析した内容に基づいてソースコードを標準出力に出力する\n\n pub fn dump(&self) -> Result<(), String> {\n\n self.dump_module(&self.collected_modules, &Vec::new(), \"crate\")\n", "file_path": "src/expuncher.rs", "rank": 34, "score": 2.4235146690159093 }, { "content": " let module_items = collect_module_items(\n\n &item_use.tree,\n\n &self.package_name,\n\n &self.package_src_path,\n\n crate_name,\n\n crate_path,\n\n &self_path,\n\n )?;\n\n\n\n for module_item in &module_items {\n\n // useの途中に現れるモジュールも含めて解決\n\n let (ModuleItemAccessibility::Indirect(module_item_path)\n\n | ModuleItemAccessibility::Direct(module_item_path)) = module_item;\n\n\n\n // モジュールのパス\n\n let (ModuleItemPath::File(parts, _)\n\n | ModuleItemPath::Dir(parts, _)\n\n | ModuleItemPath::Insoluble(parts)) = module_item_path;\n\n // モジュールパスの結合\n\n let full_parts = concat_module_parts(source_parts, parts, 
crate_name);\n", "file_path": "src/expuncher.rs", "rank": 37, "score": 1.3695769170636192 }, { "content": " if let ModuleItemPath::File(_, path) = &module_item_path {\n\n // mod文の削除置換\n\n if let Some(replacement_spans) =\n\n self.collected_modules.replacement_spans_mut(&source_parts)\n\n {\n\n let span = item.span();\n\n replacement_spans.push(ReplacementSpan {\n\n start: span.start(),\n\n end: span.end(),\n\n replacement: String::new(),\n\n });\n\n }\n\n\n\n let module_vis = item_mod.vis.to_token_stream().to_string();\n\n\n\n // ソースコードが依存するモジュールを登録\n\n if let None = self.collected_modules.update(\n\n &full_parts,\n\n path.clone(),\n\n if module_vis.is_empty() {\n", "file_path": "src/expuncher.rs", "rank": 38, "score": 1.3415995181614557 }, { "content": " &full_parts,\n\n path.clone(),\n\n if let Some(vis) = &module_vis {\n\n Some(vis)\n\n } else {\n\n None\n\n },\n\n ) {\n\n // 依存するモジュールのソースコードを解析\n\n self.analyze_file_impl(\n\n path,\n\n &full_parts,\n\n // ライブラリクレートの場合はクレートを変更する\n\n &String::from(if is_lib_crate {\n\n &self.package_name\n\n } else {\n\n crate_name\n\n }),\n\n if is_lib_crate { path } else { crate_path },\n\n )?;\n", "file_path": "src/expuncher.rs", "rank": 39, "score": 1.2997352211908275 }, { "content": "\n\n if line_number == replacement_span.end.line {\n\n // 置換終了行は置換終了列以降の文字列を出力\n\n let post_line: String =\n\n line.chars().skip(replacement_span.end.column).collect();\n\n print!(\"{}\", post_line);\n\n\n\n // 次の置換に遷移\n\n replacement_span_or_none = replacement_spans_iter.next();\n\n }\n\n } else {\n\n // 置換が存在しない場合はそのまま出力\n\n print!(\"{}\", line);\n\n }\n\n\n\n // 改行\n\n println!();\n\n }\n\n }\n\n }\n", "file_path": "src/expuncher.rs", "rank": 40, "score": 1.2415082334441994 }, { "content": "\n\n // 依存するソースコードを展開\n\n for (name, child) in &module.children {\n\n // モジュールパスの結合\n\n let full_parts = concat_module_parts(source_parts, &vec![name.clone()], crate_name);\n\n // モジュールの参照先がライブラリクレートか\n\n let is_lib_crate = &full_parts == &[self.package_name.clone()];\n\n\n\n println!();\n\n println!(\n\n \"{}mod {} {{\",\n\n if let Some(visibility) = &child.visibility {\n\n visibility.clone() + \" \"\n\n } else {\n\n String::from(\"\")\n\n },\n\n name\n\n );\n\n self.dump_module(\n\n child,\n", "file_path": "src/expuncher.rs", "rank": 41, "score": 1.1394183645688345 }, { "content": " else if let Item::Mod(item_mod) = item {\n\n // 宣言文の場合のみ処理\n\n if let None = item_mod.content {\n\n // モジュールパスの結合\n\n let full_parts = concat_module_parts(\n\n source_parts,\n\n &vec![item_mod.ident.to_string()],\n\n crate_name,\n\n );\n\n\n\n // mod文から依存モジュールを取得\n\n let module_item_path = make_module_item_path(\n\n &full_parts,\n\n &self.package_name,\n\n &self.package_src_path,\n\n crate_path,\n\n &self_path,\n\n )?;\n\n\n\n // ファイルが解決されるモジュールのみを登録\n", "file_path": "src/expuncher.rs", "rank": 42, "score": 0.978494190402996 }, { "content": " let mut replacement_spans_iter = module.replacement_spans.iter();\n\n let mut replacement_span_or_none = replacement_spans_iter.next();\n\n\n\n // 注:LineColumn::columnはUTF-8文字としてのカウントである\n\n for (line_number, line) in BufReader::new(file).lines().enumerate() {\n\n let line_number = line_number + 1; // 1-indexed\n\n\n\n if let io::Result::Ok(line) = line {\n\n if let Some(replacement_span) = replacement_span_or_none {\n\n if line_number < replacement_span.start.line {\n\n // 置換開始行以前はそのままの一行を出力\n\n print!(\"{}\", line);\n\n } else if line_number == replacement_span.start.line {\n\n // 置換開始行は置換開始列以前の文字列を出力\n\n let pre_line: String =\n\n 
line.chars().take(replacement_span.start.column).collect();\n\n print!(\"{}\", pre_line);\n\n // 置換文字列を出力\n\n print!(\"{}\", replacement_span.replacement);\n\n }\n", "file_path": "src/expuncher.rs", "rank": 43, "score": 0.723158476497987 } ]
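The `concat_module_parts` function shown in this record's prefix and middle resolves `crate`, `super`, and `self` segments against an accumulated module path. A minimal self-contained sketch of those rules follows; the function body mirrors the record's code, while the standalone `main` driver and the sample paths are illustrative assumptions only.

```rust
// Mirrors the resolution rules of `concat_module_parts` from the record above;
// the `main` driver and the sample inputs are illustrative assumptions.
fn concat_module_parts(prefix: &[String], suffix: &[String], crate_name: &str) -> Vec<String> {
    let mut full_parts = prefix.to_vec();
    for part in suffix {
        if part == "crate" || part == crate_name {
            // An absolute segment resets the accumulated path to the crate root.
            full_parts.clear();
            full_parts.push(part.clone());
        } else if part == "super" {
            // `super` climbs one module level.
            full_parts.pop();
        } else if part == "self" {
            // `self` leaves the path unchanged.
        } else {
            full_parts.push(part.clone());
        }
    }
    full_parts
}

fn main() {
    let prefix: Vec<String> = vec!["foo".to_string(), "bar".to_string()];
    let suffix: Vec<String> = vec!["super".to_string(), "baz".to_string()];
    // ["foo", "bar"] followed by `super::baz` resolves to ["foo", "baz"].
    assert_eq!(
        concat_module_parts(&prefix, &suffix, "example"),
        vec!["foo".to_string(), "baz".to_string()]
    );
}
```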
Rust
src/resource.rs
pduval/rustic_hal
ca1651aaf2651d21d5268ba9b7edc016f4c4801f
use std::collections::btree_map::Entry; use std::collections::*; use std::vec::*; use serde::de::Error; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use super::link::HalLink; use super::{HalError, HalResult}; use serde_json::{from_value, to_value, Map, Value as JsonValue}; #[derive(Clone, Debug)] pub struct OneOrMany<T> { force_many: bool, content: Vec<T>, } impl<T> OneOrMany<T> where T: Sized + Clone, { pub fn new() -> OneOrMany<T> { OneOrMany { content: Vec::new(), force_many: false, } } pub fn force_many(mut self) -> Self { self.force_many = true; self } pub fn len(&self) -> usize { self.content.len() } pub fn is_empty(&self) -> bool { self.content.is_empty() } pub fn single(&self) -> Option<&T> { if self.is_empty() { None } else { Some(&self.content[0]) } } pub fn many(&self) -> &Vec<T> { &self.content } pub fn push(&mut self, newval: &T) { self.content.push(newval.clone()); } pub fn with(mut self, newval: &T) -> Self { self.content.push(newval.clone()); self } } impl<T> Serialize for OneOrMany<T> where T: Serialize + Clone, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if self.is_empty() && !self.force_many { ().serialize(serializer) } else if self.len() == 1 && !self.force_many { self.single().serialize(serializer) } else { self.content.serialize(serializer) } } } impl<'de, T> Deserialize<'de> for OneOrMany<T> where for<'d> T: Deserialize<'d> + Clone, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let value: JsonValue = Deserialize::deserialize(deserializer)?; let v2 = value.clone(); match v2 { JsonValue::Object(_) => { let obj: T = match from_value(value) { Ok(v) => v, Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))), }; let mut res = OneOrMany::new(); res.push(&obj); Ok(res) } JsonValue::Array(_) => { let obj: Vec<T> = match from_value(value) { Ok(v) => from_value(v).unwrap(), Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))), }; let mut res = OneOrMany::new(); res.content = obj; Ok(res) } _ => { let obj: T = match from_value(value) { Ok(v) => v, Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))) }; let mut res = OneOrMany::new(); res.push(&obj); Ok(res) } } } } #[derive(Clone, Serialize, Deserialize)] pub struct HalResource { #[serde(rename = "_links", default, skip_serializing_if = "BTreeMap::is_empty")] links: BTreeMap<String, OneOrMany<HalLink>>, #[serde( rename = "_embedded", default, skip_serializing_if = "BTreeMap::is_empty" )] embedded: BTreeMap<String, OneOrMany<HalResource>>, #[serde( rename = "_curies", default, skip_serializing_if = "BTreeMap::is_empty" )] curies: BTreeMap<String, HalLink>, #[serde(flatten)] data: Option<JsonValue>, } impl HalResource { pub fn new<T>(payload: T) -> HalResource where T: Serialize, { let val = match to_value(payload) { Ok(val) => match val { JsonValue::Object(_) => Some(val), _ => None, }, _ => None, }; HalResource { links: BTreeMap::new(), embedded: BTreeMap::new(), curies: BTreeMap::new(), data: val, } } pub fn with_link<S, L>(mut self, name: S, link: L) -> Self where S: Into<String>, L: Into<HalLink>, { let lk_name = name.into(); match self.links.entry(lk_name.clone()) { Entry::Vacant(entry) => { let mut lk = OneOrMany::new(); let mut lk = match lk_name.as_ref() { "curies" => lk.force_many(), _ => lk, }; lk.push(&(link.into())); entry.insert(lk); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); content.push(&(link.into())); } } self } pub fn 
get_link(&self, name: &str) -> Option<&HalLink> { match self.links.get(name) { Some(link) => link.single(), None => None, } } pub fn get_self(&self) -> Option<&HalLink> { self.get_link("self") } pub fn get_links(&self, name: &str) -> Option<&Vec<HalLink>> { match self.links.get(name) { Some(link) => Some(link.many()), None => None, } } pub fn with_resource(mut self, name: &str, resource: HalResource) -> Self { match self.embedded.entry(name.to_string()) { Entry::Vacant(entry) => { let mut resources = OneOrMany::new(); resources.push(&resource); entry.insert(resources); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); content.push(&resource); } } self } pub fn with_resources(mut self, name: &str, resources: Vec<HalResource>) -> Self { match self.embedded.entry(name.to_string()) { Entry::Vacant(entry) => { let mut _resources = OneOrMany::new().force_many(); for resource in resources.iter() { _resources.push(resource) } entry.insert(_resources); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); for resource in resources.iter() { content.push(&resource); } } } self } pub fn with_curie(self, name: &str, href: &str) -> Self { self.with_link("curies", HalLink::new(href).templated(true).with_name(name)) } pub fn with_extra_data<V>(mut self, name: &str, value: V) -> Self where V: Serialize, { match self.data { Some(JsonValue::Object(ref mut m)) => { m.insert(name.to_string(), to_value(value).unwrap()); } _ => { let mut data = Map::<String, JsonValue>::new(); data.insert(name.to_string(), to_value(value).unwrap()); self.data = Some(JsonValue::Object(data)); } }; self } pub fn get_extra_data<V>(&self, name: &str) -> HalResult<V> where for<'de> V: Deserialize<'de>, { let data = match self.data { Some(JsonValue::Object(ref m)) => m, _ => return Err(HalError::Custom("Invalid payload".to_string())), }; match data.get(name) { Some(v) => from_value::<V>(v.clone()).or_else(|e| Err(HalError::Json(e))), None => Err(HalError::Custom(format!("Key {} missing in payload", name))), } } pub fn get_data<V>(&self) -> HalResult<V> where for<'de> V: Deserialize<'de>, { match self.data { Some(ref val) => from_value::<V>(val.clone()).or_else(|e| Err(HalError::Json(e))), None => Err(HalError::Custom("No value".to_owned())), } } } impl PartialEq for HalResource { fn eq(&self, other: &HalResource) -> bool { self.get_self() == other.get_self() } }
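The `OneOrMany<T>` wrapper in the file above is what gives `_links` and `_embedded` their HAL shape: an empty collection serializes as `null` (unless `force_many` is set, in which case it stays an array), a single element is written bare, and two or more elements are written as a JSON array. A small sketch of those shapes, assuming the `rustic_hal`, `serde`, and `serde_json` crates as dependencies; the `Wrapper` struct is illustrative, and the expected strings match the crate's own `one_or_many` tests included later in this record.

```rust
// Shapes produced by the OneOrMany serializer above; `Wrapper` is an
// illustrative struct, and the expected JSON mirrors the crate's tests.
use rustic_hal::resource::OneOrMany;
use serde::Serialize;
use serde_json::to_string;

#[derive(Serialize)]
struct Wrapper {
    oom: OneOrMany<String>,
}

fn main() {
    // A single element is serialized bare ...
    let one = Wrapper {
        oom: OneOrMany::new().with(&"test".to_owned()),
    };
    assert_eq!(to_string(&one).unwrap(), r#"{"oom":"test"}"#);

    // ... while two elements are serialized as an array.
    let many = Wrapper {
        oom: OneOrMany::new().with(&"test".to_owned()).with(&"test2".to_owned()),
    };
    assert_eq!(to_string(&many).unwrap(), r#"{"oom":["test","test2"]}"#);
}
```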
use std::collections::btree_map::Entry; use std::collections::*; use std::vec::*; use serde::de::Error; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use super::link::HalLink; use super::{HalError, HalResult}; use serde_json::{from_value, to_value, Map, Value as JsonValue}; #[derive(Clone, Debug)] pub struct OneOrMany<T> { force_many: bool, content: Vec<T>, } impl<T> OneOrMany<T> where T: Sized + Clone, { pub fn new() -> OneOrMany<T> { OneOrMany { content: Vec::new(), force_many: false, } } pub fn force_many(mut self) -> Self { self.force_many = true; self } pub fn len(&self) -> usize { self.content.len() } pub fn is_empty(&self) -> bool { self.content.is_empty() } pub fn single(&self) -> Option<&T> { if self.is_empty() { None } else { Some(&self.content[0]) } } pub fn many(&self) -> &Vec<T> { &self.content } pub fn push(&mut self, newval: &T) { self.content.push(newval.clone()); } pub fn with(mut self, newval: &T) -> Self { self.content.push(newval.clone()); self } } impl<T> Serialize for OneOrMany<T> where T: Serialize + Clone, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if self.is_empty() && !self.force_many { ().serialize(serializer) } else if self.len() == 1 && !self.force_many { self.single().serialize(serializer) } else { self.content.serialize(serializer) } } } impl<'de, T> Deserialize<'de> for OneOrMany<T> where for<'d> T: Deserialize<'d> + Clone, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let value: JsonValue = Deserialize::deserialize(deserializer)?; let v2 = value.clone(); match v2 { JsonValue::Object(_) => { let obj: T = match from_value(value) { Ok(v) => v, Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))), }; let mut res = OneOrMany::new(); res.push(&obj); Ok(res) } JsonValue::Array(_) => { let obj: Vec<T> = match from_value(value) { Ok(v) => from_value(v).unwrap(), Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))), }; let mut res = OneOrMany::new(); res.content = obj; Ok(res) } _ => { let obj: T = match from_value(value) { Ok(v) => v, Err(e) => return Err(D::Error::custom(format!("JSON Error: {:?}", e))) }; let mut res = OneOrMany::new(); res.push(&obj); Ok(res) } } } } #[derive(Clone, Serialize, Deserialize)] pub struct HalResource { #[serde(rename = "_links", default, skip_serializing_if = "BTreeMap::is_empty")] links: BTreeMap<String, OneOrMany<HalLink>>, #[serde( rename = "_embedded", default, skip_serializing_if = "BTreeMap::is_empty" )] embedded: BTreeMap<String, OneOrMany<HalResource>>, #[serde( rename = "_curies", default, skip_serializing_if = "BTreeMap::is_empty" )] curies: BTreeMap<String, HalLink>, #[serde(flatten)] data: Option<JsonValue>, } impl HalResourc
h(&resource); } } } self } pub fn with_curie(self, name: &str, href: &str) -> Self { self.with_link("curies", HalLink::new(href).templated(true).with_name(name)) } pub fn with_extra_data<V>(mut self, name: &str, value: V) -> Self where V: Serialize, { match self.data { Some(JsonValue::Object(ref mut m)) => { m.insert(name.to_string(), to_value(value).unwrap()); } _ => { let mut data = Map::<String, JsonValue>::new(); data.insert(name.to_string(), to_value(value).unwrap()); self.data = Some(JsonValue::Object(data)); } }; self } pub fn get_extra_data<V>(&self, name: &str) -> HalResult<V> where for<'de> V: Deserialize<'de>, { let data = match self.data { Some(JsonValue::Object(ref m)) => m, _ => return Err(HalError::Custom("Invalid payload".to_string())), }; match data.get(name) { Some(v) => from_value::<V>(v.clone()).or_else(|e| Err(HalError::Json(e))), None => Err(HalError::Custom(format!("Key {} missing in payload", name))), } } pub fn get_data<V>(&self) -> HalResult<V> where for<'de> V: Deserialize<'de>, { match self.data { Some(ref val) => from_value::<V>(val.clone()).or_else(|e| Err(HalError::Json(e))), None => Err(HalError::Custom("No value".to_owned())), } } } impl PartialEq for HalResource { fn eq(&self, other: &HalResource) -> bool { self.get_self() == other.get_self() } }
e { pub fn new<T>(payload: T) -> HalResource where T: Serialize, { let val = match to_value(payload) { Ok(val) => match val { JsonValue::Object(_) => Some(val), _ => None, }, _ => None, }; HalResource { links: BTreeMap::new(), embedded: BTreeMap::new(), curies: BTreeMap::new(), data: val, } } pub fn with_link<S, L>(mut self, name: S, link: L) -> Self where S: Into<String>, L: Into<HalLink>, { let lk_name = name.into(); match self.links.entry(lk_name.clone()) { Entry::Vacant(entry) => { let mut lk = OneOrMany::new(); let mut lk = match lk_name.as_ref() { "curies" => lk.force_many(), _ => lk, }; lk.push(&(link.into())); entry.insert(lk); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); content.push(&(link.into())); } } self } pub fn get_link(&self, name: &str) -> Option<&HalLink> { match self.links.get(name) { Some(link) => link.single(), None => None, } } pub fn get_self(&self) -> Option<&HalLink> { self.get_link("self") } pub fn get_links(&self, name: &str) -> Option<&Vec<HalLink>> { match self.links.get(name) { Some(link) => Some(link.many()), None => None, } } pub fn with_resource(mut self, name: &str, resource: HalResource) -> Self { match self.embedded.entry(name.to_string()) { Entry::Vacant(entry) => { let mut resources = OneOrMany::new(); resources.push(&resource); entry.insert(resources); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); content.push(&resource); } } self } pub fn with_resources(mut self, name: &str, resources: Vec<HalResource>) -> Self { match self.embedded.entry(name.to_string()) { Entry::Vacant(entry) => { let mut _resources = OneOrMany::new().force_many(); for resource in resources.iter() { _resources.push(resource) } entry.insert(_resources); } Entry::Occupied(mut entry) => { let mut content = entry.get_mut(); for resource in resources.iter() { content.pus
random
[ { "content": "fn is_not(b: &bool) -> bool {\n\n !*b\n\n}\n\nmacro_rules! chainable_string {\n\n ($x: ident, $y: ident) => {\n\n pub fn $y(mut self, $x: &str) -> Self {\n\n self.$x = Some($x.to_string());\n\n self\n\n }\n\n\n\n pub fn $x(&self) -> Option<String> {\n\n self.$x.clone()\n\n }\n\n }\n\n}\n\n\n\nimpl HalLink {\n\n pub fn new<S>(href: S) -> HalLink\n\n where\n\n S: Into<String>,\n", "file_path": "src/link.rs", "rank": 0, "score": 95988.17405961143 }, { "content": "#[test]\n\nfn ensure_full_link_gets_deserialized() {\n\n let link: HalLink = from_str(\n\n r#\"\n\n{\n\n \"href\": \"https://www.google.com\",\n\n \"name\": \"google\",\n\n \"templated\": false,\n\n \"hreflang\": \"en-US\",\n\n \"deprecation\": \"https://www.google.com/deprecation\",\n\n \"title\": \"Google Search\"\n\n}\"#,\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(link.href, \"https://www.google.com\");\n\n assert_eq!(link.name, Some(\"google\".to_string()));\n\n assert_eq!(link.templated, false);\n\n assert_eq!(link.hreflang, Some(\"en-US\".to_string()));\n\n assert_eq!(\n\n link.deprecation,\n\n Some(\"https://www.google.com/deprecation\".to_string())\n\n );\n\n assert_eq!(link.title, Some(\"Google Search\".to_string()));\n\n}\n", "file_path": "src/tests/link.rs", "rank": 1, "score": 88766.45005429215 }, { "content": "#[test]\n\nfn ensure_href_gets_deserialized() {\n\n let link: HalLink = from_str(r#\"{\"href\":\"https://test.com\"}\"#).unwrap();\n\n assert_eq!(link.href, \"https://test.com\");\n\n}\n\n\n", "file_path": "src/tests/link.rs", "rank": 2, "score": 85228.58169117357 }, { "content": "#[test]\n\nfn ensure_templated_gets_deserialized() {\n\n let link: HalLink = from_str(r#\"{\"href\":\"https://test.com\",\"templated\":true}\"#).unwrap();\n\n assert_eq!(link.href, \"https://test.com\");\n\n assert_eq!(link.templated, true);\n\n}\n\n\n", "file_path": "src/tests/link.rs", "rank": 3, "score": 85228.58169117357 }, { "content": "#[test]\n\nfn check_curies_get_serialized_in_links() {\n\n let r1 = HalResource::new(Test1 {\n\n a: \"Test\".to_string(),\n\n }).with_curie(\"cur\", \"https://curie.org\")\n\n .with_link(\"self\", \"https://self.com\");\n\n let s = to_string(&r1).unwrap();\n\n let target = \"{\\\"_links\\\":{\\\"curies\\\":[{\\\"href\\\":\\\"https://curie.org\\\",\\\"templated\\\":true,\\\"name\\\":\\\"cur\\\"}],\\\"self\\\":{\\\"href\\\":\\\"https://self.com\\\"}},\\\"a\\\":\\\"Test\\\"}\";\n\n assert_eq!(s, target);\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 4, "score": 82374.52553912053 }, { "content": "#[test]\n\nfn check_data_gets_serialized() {\n\n let f: HalResource = HalResource::new(Test1 {\n\n a: \"Test\".to_string(),\n\n });\n\n let s = to_string(&f).unwrap();\n\n assert_eq!(s, r#\"{\"a\":\"Test\"}\"#);\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 5, "score": 66277.26752170922 }, { "content": "#[test]\n\nfn check_extra_fields_get_deserialized() {\n\n let source = r#\"{ \"_links\":{\"self\":{\"href\": \"https://www.test.com\"}}, \"a\": \"123\", \"b\":456}\"#;\n\n let hal: HalResource = from_str(source).unwrap();\n\n assert_eq!(hal.get_extra_data::<i32>(\"b\").unwrap(), 456);\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 6, "score": 63995.402670622105 }, { "content": "#[test]\n\nfn check_simple_resource_gets_deserialized() {\n\n let source = r#\"{ \"_links\":{\"self\":{\"href\": \"https://www.test.com\"}}, \"a\": \"123\"}\"#;\n\n let hal: HalResource = from_str(source).unwrap();\n\n assert_eq!(hal.get_self(), 
Some(&HalLink::new(\"https://www.test.com\")));\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 7, "score": 63995.402670622105 }, { "content": "#[test]\n\nfn ensure_array_gets_deserialized() {\n\n let s = r#\"{\"oom\":[\"test\",\"test2\"]}\"#;\n\n let boh: Boh = from_str(s).unwrap();\n\n assert_eq!(2, boh.oom.len());\n\n assert_eq!(\"test\", boh.oom.many()[0]);\n\n assert_eq!(\"test2\", boh.oom.many()[1]);\n\n\n\n}", "file_path": "src/tests/one_or_many.rs", "rank": 8, "score": 63995.402670622105 }, { "content": "#[test]\n\nfn check_links_get_fully_serialized() {\n\n let f = HalResource::new(Test1 {\n\n a: \"Test\".to_string(),\n\n }).with_link(\n\n \"self\",\n\n HalLink::new(\"https://self.com\")\n\n .with_title(\"Self Link\")\n\n .with_name(\"moi\")\n\n .with_deprecation(\"http://explain.com/why\")\n\n );\n\n let s = to_string(&f).unwrap();\n\n assert_eq!(s, \"{\\\"_links\\\":{\\\"self\\\":{\\\"href\\\":\\\"https://self.com\\\",\\\"deprecation\\\":\\\"http://explain.com/why\\\",\\\"name\\\":\\\"moi\\\",\\\"title\\\":\\\"Self Link\\\"}},\\\"a\\\":\\\"Test\\\"}\");\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 9, "score": 62774.62791924361 }, { "content": "#[test]\n\nfn check_link_arrays_get_serialized() {\n\n let f = HalResource::new(Test1 {\n\n a: \"Test\".to_string(),\n\n }).with_link(\"self\", \"https://self.com\")\n\n .with_link(\"alfa\", \"https://self.com/beta\")\n\n .with_link(\"alfa\", \"https://self.com/gamma\");\n\n\n\n let s = to_string(&f).unwrap();\n\n assert_eq!(s, r#\"{\"_links\":{\"alfa\":[{\"href\":\"https://self.com/beta\"},{\"href\":\"https://self.com/gamma\"}],\"self\":{\"href\":\"https://self.com\"}},\"a\":\"Test\"}\"#);\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 10, "score": 62774.62791924361 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct Test1 {\n\n a: String\n\n}\n\n\n", "file_path": "src/tests/serde_types.in.rs", "rank": 11, "score": 61638.988866575164 }, { "content": "#[test]\n\nfn check_link_gets_serialized_without_empty_attributes() {\n\n let f = HalResource::new(Test1 {\n\n a: \"Test\".to_string(),\n\n }).with_link(\"self\", \"https://self.com\");\n\n let s = to_string(&f).unwrap();\n\n assert_eq!(\n\n s,\n\n r#\"{\"_links\":{\"self\":{\"href\":\"https://self.com\"}},\"a\":\"Test\"}\"#\n\n );\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 12, "score": 58913.52479083072 }, { "content": "fn speedy_serialisation(c: &mut Criterion) {\n\n let source = r#\"{ \"_links\":{\"self\":{\"href\": \"https://www.test.com\"}}, \"a\": \"123\", \"b\":456}\"#;\n\n let hal: HalResource = from_str(source).unwrap();\n\n c.bench_function(\"simple serialisation\", move |b| {\n\n b.iter(|| to_string(&hal).unwrap())\n\n });\n\n}\n\n\n\ncriterion_group!(benches, speedy_serialisation);\n\ncriterion_main!(benches);\n", "file_path": "benches/simple_serialisation.rs", "rank": 13, "score": 57365.28548330864 }, { "content": "#[test]\n\nfn check_extra_fields_get_serialized() {\n\n let f = HalResource::new(Test1 {\n\n a: \"Test\".to_string(),\n\n }).with_extra_data(\"int\", 123)\n\n .with_extra_data(\"string\", \"Hello!?\");\n\n let s = to_string(&f).unwrap();\n\n assert_eq!(s, r#\"{\"a\":\"Test\",\"int\":123,\"string\":\"Hello!?\"}\"#);\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 14, "score": 44468.08420930156 }, { "content": "#[test]\n\nfn check_embedded_resource_gets_serialized() {\n\n let r1 = HalResource::new(Test1 {\n\n a: \"Test2\".to_string(),\n\n }).with_link(\"self\", \"https://self2.com\");\n\n\n\n let f 
= HalResource::new(Test1 {\n\n a: \"Test\".to_string(),\n\n }).with_link(\"self\", \"https://self.com\")\n\n .with_resource(\"child\", r1);\n\n\n\n let s = to_string(&f).unwrap();\n\n let target = \"{\\\"_links\\\":{\\\"self\\\":{\\\"href\\\":\\\"https://self.com\\\"}},\\\"_embedded\\\":{\\\"child\\\":{\\\"_links\\\":{\\\"self\\\":{\\\"href\\\":\\\"https://self2.com\\\"}},\\\"a\\\":\\\"Test2\\\"}},\\\"a\\\":\\\"Test\\\"}\";\n\n assert_eq!(s, target);\n\n}\n\n\n", "file_path": "src/tests/resource.rs", "rank": 15, "score": 44468.08420930156 }, { "content": "#[test]\n\nfn ensure_two_objects_get_serialized_as_array() {\n\n\n\n let boh = Boh { oom: OneOrMany::new().with(&\"test\".to_owned()).with(&\"test2\".to_owned()) };\n\n assert_eq!(to_string(&boh).unwrap(), r#\"{\"oom\":[\"test\",\"test2\"]}\"#);\n\n}\n\n\n\n\n", "file_path": "src/tests/one_or_many.rs", "rank": 16, "score": 41895.33421569497 }, { "content": "#[test]\n\nfn ensure_one_object_gets_serialized_as_one() {\n\n let boh = Boh { oom: OneOrMany::new().with(&\"test\".to_owned()) };\n\n assert_eq!(to_string(&boh).unwrap(), r#\"{\"oom\":\"test\"}\"#);\n\n}\n\n\n", "file_path": "src/tests/one_or_many.rs", "rank": 17, "score": 41895.33421569497 }, { "content": "#[test]\n\nfn check_force_many_serializes_to_empty_array_if_empty_resources() {\n\n let resource = HalResource::new(\"\").with_resources(\"empty_array\", Vec::new());\n\n\n\n let s = to_string(&resource).unwrap();\n\n assert_eq!(s, r#\"{\"_embedded\":{\"empty_array\":[]}}\"#);\n\n}\n", "file_path": "src/tests/resource.rs", "rank": 18, "score": 40740.13934572168 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct Boh\n\n{\n\n oom: OneOrMany<String>\n\n}\n\n\n", "file_path": "src/tests/one_or_many.rs", "rank": 19, "score": 39630.539621209406 }, { "content": "#[cfg(not(feature = \"serde_codegen\"))]\n\nfn main() {\n\n // do nothing\n\n}\n", "file_path": "build.rs", "rank": 20, "score": 30887.25538844214 }, { "content": "use serde_json::Error as JsonError;\n\nuse std::error::Error;\n\nuse std::fmt;\n\n\n\n#[derive(Debug)]\n\npub enum HalError {\n\n Json(JsonError),\n\n Custom(String),\n\n}\n\n\n\npub type HalResult<T> = Result<T, HalError>;\n\n\n\nimpl fmt::Display for HalError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n HalError::Json(ref e) => write!(f, \"JSON Error: {}\", e),\n\n HalError::Custom(ref s) => write!(f, \"Notify error: {}\", s),\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 21, "score": 25254.30242383248 }, { "content": "\n\nimpl Error for HalError {\n\n fn description(&self) -> &str {\n\n match *self {\n\n HalError::Json(_) => \"Error in json processing\",\n\n HalError::Custom(_) => \"Internal Hal Error\",\n\n }\n\n }\n\n}\n\n\n\nimpl From<JsonError> for HalError {\n\n fn from(error: JsonError) -> Self {\n\n HalError::Json(error)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 22, "score": 25250.71855325583 }, { "content": " /// If the value is a URI Template then the Link Object SHOULD have a\n\n /// \"templated\" attribute whose value is true.\n\n pub href: String,\n\n\n\n /// The \"templated\" property is OPTIONAL.\n\n ///\n\n /// Its value is boolean and SHOULD be true when the Link Object's \"href\"\n\n /// property is a URI Template.\n\n ///\n\n /// Its value SHOULD be considered false if it is undefined or any other\n\n /// value than true.\n\n #[serde(skip_serializing_if = \"is_not\", default)]\n\n pub templated: bool,\n\n /// The \"type\" property is OPTIONAL.\n\n ///\n\n /// Its value is a string 
used as a hint to indicate the media type\n\n /// expected when dereferencing the target resource.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub media_type: Option<String>,\n\n /// The \"deprecation\" property is OPTIONAL.\n", "file_path": "src/link.rs", "rank": 23, "score": 23700.093886719107 }, { "content": "use std::convert::{From, Into};\n\n\n\n/// A Link object for linking HAL Resources.\n\n///\n\n/// The link represents a related resource.\n\n/// If follows [the HAL Draft Spec](https://tools.ietf.org/html/draft-kelly-json-hal-08#section-5)\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// use rustic_hal::HalLink;\n\n///\n\n/// let link = HalLink::new(\"http://sowewhere.com\");\n\n/// ```\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct HalLink {\n\n /// The \"href\" property is REQUIRED.\n\n ///\n\n /// Its value is either a URI [RFC3986] or a URI Template [RFC6570].\n\n ///\n", "file_path": "src/link.rs", "rank": 24, "score": 23698.719833434167 }, { "content": " /// The \"profile\" property is OPTIONAL.\n\n ///\n\n /// Its value is a string which is a URI that hints about the profile (as\n\n /// defined by [I-D.wilde-profile-link]) of the target resource.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub profile: Option<String>,\n\n /// The \"title\" property is OPTIONAL.\n\n ///\n\n /// Its value is a string and is intended for labelling the link with a\n\n /// human-readable identifier (as defined by [RFC5988]).\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub title: Option<String>,\n\n /// The \"hreflang\" property is OPTIONAL.\n\n ///\n\n /// Its value is a string and is intended for indicating the language of\n\n /// the target resource (as defined by [RFC5988]).\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub hreflang: Option<String>,\n\n}\n\n\n", "file_path": "src/link.rs", "rank": 25, "score": 23696.77271078138 }, { "content": " ///\n\n /// Its presence indicates that the link is to be deprecated (i.e.\n\n /// removed) at a future date. Its value is a URL that SHOULD provide\n\n /// further information about the deprecation.\n\n ///\n\n /// A client SHOULD provide some notification (for example, by logging a\n\n /// warning message) whenever it traverses over a link that has this\n\n /// property. 
The notification SHOULD include the deprecation property's\n\n /// value so that a client manitainer can easily find information about\n\n /// the deprecation.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub deprecation: Option<String>,\n\n\n\n /// The \"name\" property is OPTIONAL.\n\n ///\n\n /// Its value MAY be used as a secondary key for selecting Link Objects\n\n /// which share the same relation type.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub name: Option<String>,\n\n\n", "file_path": "src/link.rs", "rank": 26, "score": 23696.68070524682 }, { "content": " {\n\n HalLink {\n\n href: href.into(),\n\n templated: false,\n\n media_type: None,\n\n deprecation: None,\n\n name: None,\n\n profile: None,\n\n title: None,\n\n hreflang: None,\n\n }\n\n }\n\n\n\n pub fn templated(mut self, templated: bool) -> Self {\n\n self.templated = templated;\n\n self\n\n }\n\n\n\n chainable_string!(media_type, with_media_type);\n\n chainable_string!(deprecation, with_deprecation);\n", "file_path": "src/link.rs", "rank": 27, "score": 23695.734154033486 }, { "content": " chainable_string!(name, with_name);\n\n chainable_string!(profile, with_profile);\n\n chainable_string!(title, with_title);\n\n chainable_string!(hreflang, with_hreflang);\n\n}\n\n\n\nimpl<T> From<T> for HalLink\n\nwhere\n\n T: Into<String>,\n\n{\n\n fn from(s: T) -> Self {\n\n HalLink::new(s)\n\n }\n\n}\n\n\n\n/// Two links are the same if their href is the same\n\n/// The rest is immaterial\n\nimpl PartialEq for HalLink {\n\n fn eq(&self, other: &HalLink) -> bool {\n\n self.href == other.href\n\n }\n\n}\n", "file_path": "src/link.rs", "rank": 28, "score": 23694.204514650748 }, { "content": "#[derive(Serialize, Deserialize)]\n", "file_path": "src/tests/serde_types.in.rs", "rank": 29, "score": 22998.08734694254 }, { "content": "//use serde::de::Deserialize;\n\nuse super::super::link::HalLink;\n\nuse serde_json::from_str;\n\n\n\n#[test]\n", "file_path": "src/tests/link.rs", "rank": 30, "score": 22591.9698516277 }, { "content": "//use serde::de::Deserialize;\n\nuse super::super::link::HalLink;\n\nuse super::super::resource::OneOrMany;\n\nuse serde_json::{from_str, to_string};\n\nuse serde::*;\n\n\n\n#[derive(Serialize, Deserialize)]\n", "file_path": "src/tests/one_or_many.rs", "rank": 42, "score": 13.777378818724117 }, { "content": "use super::{\n\n super::{resource::*, HalLink},\n\n Test1,\n\n};\n\nuse serde_json::{from_str, to_string};\n\n\n\n//#[derive(Serialize, Deserialize)]\n\n// struct Test1 {\n\n// a: String\n\n//}\n\n\n\n#[test]\n", "file_path": "src/tests/resource.rs", "rank": 46, "score": 11.672692596243332 }, { "content": "//! # }\n\n//! ```\n\n//!\n\n//! ## Credits\n\n//!\n\n//! This library is heavily inspired by the [hal-rs](https://github.com/hjr3/hal-rs) library by Herman J. Radtke III.\n\n//!\n\n\n\nextern crate serde;\n\nextern crate serde_json;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\npub mod error;\n\npub mod link;\n\npub mod resource;\n\n\n\npub use self::error::{HalError, HalResult};\n\npub use self::link::HalLink;\n\npub use self::resource::HalResource;\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/lib.rs", "rank": 48, "score": 10.617981067283383 }, { "content": "//! extern crate rustic_hal;\n\n//! extern crate serde_json;\n\n//! extern crate serde;\n\n//! #[macro_use] extern crate serde_derive;\n\n//!\n\n//! use rustic_hal::*;\n\n//! use serde::Serialize;\n\n//! use serde_json::to_string;\n\n//!\n\n//! #[derive(Serialize)]\n\n//! pub struct MyResource {\n\n//! 
pub test: String\n\n//! }\n\n//!\n\n//! # fn main() {\n\n//! let mr = MyResource { test: \"Hello, World!\".to_string() };\n\n//! let hal_res = HalResource::new(mr).with_link(\"self\", \"/api/myresource/0\");\n\n//!\n\n//! println!(\"json representation: {}\", to_string(&hal_res).unwrap());\n\n//!\n", "file_path": "src/lib.rs", "rank": 51, "score": 9.974156894135973 }, { "content": "# Rustic Hal\n\n\n\nA simple library for serializing (and deserializing coming soon) resources following the [HAL Spec](https://tools.ietf.org/html/draft-kelly-json-hal-08)\n\n\n\n[![Clippy Linting Result](https://clippy.bashy.io/github/pduval/rustic_hal/master/badge.svg)](https://clippy.bashy.io/github/pduval/rustic_hal/master/log)\n\n[![Build Status](https://travis-ci.org/pduval/rustic_hal.svg)](https://travis-ci.org/pduval/rustic_hal)\n\n[![](http://meritbadge.herokuapp.com/rustic_hal)](https://crates.io/crates/rustic_hal)\n\n\n\n## Usage\n\n\n\n### On stable rust (>= 1.15)\n\n\n\nAdd the dependency to your Cargo.toml:\n\n\n\n```toml\n\n\n\n[dependencies]\n\nrustic_hal=\"0.2\"\n\nserde=\"1.0\"\n\nserde_json=\"1.0\"\n\nserde_derive=\"1.0\"\n\n\n\n```\n\nand to use:\n\n\n\n```rust\n\n\n\nextern crate rustic_hal;\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde_json;\n\n\n\nuse rustic_hal::*;\n\nuse serde_json::to_string;\n\n\n\n#[derive(Serialize)]\n\npub struct MyResource {\n\n pub test: String,\n\n}\n\n\n\nfn main() {\n\n let mr = MyResource {\n\n test: \"Hello, World!\".to_string(),\n\n };\n\n let hal_res = HalResource::new(mr).with_link(\"self\", \"/api/myresource/0\");\n\n println!(\"json representation: {:?}\", to_string(&hal_res));\n\n}\n\n\n\n```\n\n## Documentation\n\n\n\nsee [https://pduval.github.io/rustic_hal/rustic_hal/](https://pduval.github.io/rustic_hal/rustic_hal/) for the cargo-doc pages.\n\n\n\n## Credits\n\n\n\nThis library is heavily inspired by (read copied from) the [hal-rs](https://github.com/hjr3/hal-rs) library by Herman J. Radtke III.\n\n\n", "file_path": "README.md", "rank": 52, "score": 8.32353927186617 }, { "content": "//! # Rustic Hal\n\n//!\n\n//! A simple library for serializing (and deserializing coming soon) resources following the [HAL Spec](https://tools.ietf.org/html/draft-kelly-json-hal-08)\n\n//!\n\n//! ## Usage\n\n//!\n\n//! Add the dependency to your Cargo.toml:\n\n//!\n\n//! ```toml\n\n//!\n\n//! [dependencies]\n\n//! rustic_hal=\"0.2.0\"\n\n//! serde=\"1.0\"\n\n//! serde_json=\"1.0\"\n\n//! serde_derive=\"1.0\"\n\n//!\n\n//! ```\n\n//! and to use:\n\n//!\n\n//! ```rust\n", "file_path": "src/lib.rs", "rank": 53, "score": 7.72216435283886 }, { "content": "include!(\"serde_types.in.rs\");\n\n\n\npub mod link;\n\npub mod resource;\n\npub mod one_or_many;", "file_path": "src/tests/mod.rs", "rank": 54, "score": 6.6685658805208945 }, { "content": "#[macro_use]\n\nextern crate criterion;\n\n\n\nextern crate rustic_hal;\n\nextern crate serde_json;\n\n\n\nuse rustic_hal::resource::*;\n\nuse rustic_hal::HalLink;\n\nuse serde_json::{from_str, to_string};\n\n\n\nuse criterion::Criterion;\n\n\n", "file_path": "benches/simple_serialisation.rs", "rank": 55, "score": 6.44647144110148 }, { "content": "#[cfg(feature = \"serde_codegen\")]\n", "file_path": "build.rs", "rank": 56, "score": 2.5099276277298728 } ]
Rust
src/poly/comp.rs
HColeman127/algeo
bf2ac1514041ff00a1e571732ac01647329ece72
use std::marker::PhantomData; use itertools::{EitherOrBoth, Itertools}; use crate::core::num::Field; use super::elts::*; use super::mdeg::MultiDegree; use super::ord::MonomialOrder; pub struct Computer<F: Field, O: MonomialOrder> { _marker: PhantomData<(F, O)>, } impl<F: Field, O: MonomialOrder> Computer<F, O> { pub fn sort_terms(f: &Polynomial<F>) -> Polynomial<F> { Polynomial::new_unchecked( f.terms() .sorted_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)) .cloned() .collect(), ) } pub fn leading_term(f: &Polynomial<F>) -> Option<&Term<F>> { f.terms().max_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)) } pub fn leading_coef(f: &Polynomial<F>) -> Option<F> { Some(Self::leading_term(f)?.coef) } pub fn divide( f: &Polynomial<F>, divs: &[Polynomial<F>], ) -> (Polynomial<F>, Vec<Polynomial<F>>) { let m = divs.len(); let mut quotients = vec![Polynomial::<F>::zero(); m]; let mut remainder = Polynomial::<F>::zero(); let mut f = f.clone(); 'outer: while let Some(lt_f) = Self::leading_term(&f).cloned() { for (g, q) in divs.iter().zip(quotients.iter_mut()) { if let Some(lt_g) = Self::leading_term(&g) { if let Some(a) = lt_f.try_div(lt_g) { *q = &*q + &a; f = f - a * g; continue 'outer; } } } f.terms.sort_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)); if let Some(lt_f) = f.terms.pop() { remainder = remainder + lt_f; } } (remainder, quotients) } pub fn monic_lcm(s: &Term<F>, t: &Term<F>) -> Term<F> { if s.is_zero() || t.is_zero() { return Term::zero(); } Term::monic(MultiDegree( s.mdeg .degs() .zip_longest(t.mdeg.degs()) .map(|pair| match pair { EitherOrBoth::Both(a, b) => *a.max(b), EitherOrBoth::Left(a) => *a, EitherOrBoth::Right(b) => *b, }) .collect(), )) } pub fn try_reduce(f: &Polynomial<F>, g: &Polynomial<F>) -> Option<Polynomial<F>> { let lt_f = Self::leading_term(f)?; let lt_g = Self::leading_term(g)?; let lcm = Self::monic_lcm(&lt_f, &lt_g); Some(lcm.try_div(&lt_f)? * f - lcm.try_div(&lt_g)? * g) } pub fn buchberger_criterion(generators: &[Polynomial<F>]) -> bool { let m = generators.len(); for (i, j) in (0..m).cartesian_product(0..m).filter(|&(f, g)| f != g) { if let Some(h) = Self::try_reduce(&generators[i], &generators[j]) { if !Self::divide(&h, generators).0.is_zero() { return false; } } else { panic!("should not happen. yeah i know, joseph, 'parse don't validate'"); } } true } fn buchberger_extend(generators: &mut Vec<Polynomial<F>>) { let mut i = 0; 'outer: while i < generators.len() { for j in 0..i { let (r, _) = Self::divide( &Self::try_reduce(&generators[i], &generators[j]) .expect("should not happen. 
yeah i know, joseph, 'parse don't validate'"), generators, ); if let Some(lc_r) = Self::leading_coef(&r) { generators.push(&r / lc_r); i = 0; continue 'outer; } } i += 1; } } fn buchberger_minimize(generators: &mut Vec<Polynomial<F>>) { 'outer: loop { break { for (i, j) in pairs_iter(generators.len()) { if Self::leading_term(&generators[j]) .unwrap() .divides(&Self::leading_term(&generators[i]).unwrap()) { generators.remove(i); continue 'outer; } } }; } } fn buchberger_reduce(generators: &mut Vec<Polynomial<F>>) { 'outer: loop { break { for (i, j) in pairs_iter(generators.len()) { for t in generators[i].terms() { if Self::leading_term(&generators[j]).unwrap().divides(t) { let g = generators.remove(i); let (g_rem, _) = Self::divide(&g, &generators); generators.push(g_rem); continue 'outer; } } } }; } } pub fn buchberger_algorithm(generators: &mut Vec<Polynomial<F>>) { Self::buchberger_extend(generators); Self::buchberger_minimize(generators); Self::buchberger_reduce(generators); } } #[inline] fn pairs_iter(n: usize) -> impl Iterator<Item = (usize, usize)> { (0..n).cartesian_product(0..n).filter(|(i, j)| i != j) } #[cfg(test)] mod tests { use std::ops::Add; use super::*; use crate::{ core::num::Rational, poly::{elts::Polynomial, elts::Term}, }; use super::super::ord::Lex; type Poly = Polynomial<f64>; type Comp = Computer<f64, Lex>; #[allow(unused)] macro_rules! pp { ($poly:expr) => { println!("{} = {}", stringify!($poly), &$poly); }; } #[allow(unused)] macro_rules! pps { ($polys:expr) => { for i in 0..($polys.len()) { println!("{}[{}] = {}", stringify!($polys), i, &$polys[i]); } }; } #[allow(unused)] #[test] fn dbg_stuff() { let c = |coef: f64| Poly::from(coef); fn_vars! { f64: x y z } /* let f = y(1); let divs = [ Poly::zero(), y(1) + c(1.0), ]; println!("{} / [{}, {}]", &f, &divs[0], &divs[1]); let (r, q) = Computer::<Lex, f64>::divide(&f, &divs); pp!(r); pp!(q[0]); pp!(q[1]); pp!(&q[0] * &divs[0]); pp!(&q[1] * &divs[1]); */ let f = x(2) + x(3) + x(1) + x(2) + x(0) + x(3); pp!(f); } #[test] fn division() { let poly_iter = polys(2, 3, 1) .into_iter() .cartesian_product(polys(2, 1, 2).into_iter().cartesian_product(polys(2, 1, 2))); for (f, (g1, g2)) in poly_iter { test_result_equality(&f, &[g1, g2]); } } fn polys(vars: i32, max_deg: u8, max_coef: i32) -> Vec<Polynomial<f64>> { (0..vars) .map(|_| (0..=max_deg).rev()) .multi_cartesian_product() .map(MultiDegree::from_vec) .filter(|mdeg| mdeg.total_deg() <= max_deg) .map(|mdeg| { (0..=max_coef) .map(f64::from) .map(move |coef| Term::new(coef, mdeg.clone())) }) .multi_cartesian_product() .map(|v| Poly::new_unchecked(v.into_iter().filter(|t| !t.is_zero()).collect())) .collect() } #[cfg(test)] fn poly_assert_eq(f1: &Poly, f2: &Poly) { assert_eq!(Comp::sort_terms(f1).terms, Comp::sort_terms(f2).terms); } #[cfg(test)] fn test_result_equality(f: &Poly, g: &[Poly]) { let (r, q) = Comp::divide(f, g); let f2 = q.iter().zip_eq(g).map(|(qi, gi)| qi * gi).fold(r, Add::add); poly_assert_eq(f, &f2); } #[test] fn reduction() { let c = |coef| Polynomial::from(Rational::new_i64(coef, 1)); fn_vars! { Rational: x y } type CompQ = Computer<Rational, Lex>; let f = c(5) * x(4) * y(3) + c(2) * x(2) * y(1) + c(3) * x(1) * y(2) + y(2) + c(3); let g = c(8) * x(5) * y(2) + x(3) + c(3) * x(2) * y(2) + y(4) + c(6); pp!(f); pp!(g); println!(); if let Some(h) = CompQ::try_reduce(&f, &g) { pp!(h); } else { println!("could not reduce"); } } #[test] fn buchberger() { let q = |a, b| Polynomial::from(Rational::new_i64(a, b)); let c = |coef| q(coef, 1); fn_vars! 
{ Rational: x y } type CompQ = Computer<Rational, Lex>; fn print_buchberger(g: &[Polynomial<Rational>]) -> Vec<Polynomial<Rational>> { let mut g = Vec::from(g); println!("initial:"); pps!(g); CompQ::buchberger_algorithm(&mut g); println!("\nresult:"); pps!(g); println!("--------------------"); g } print_buchberger(&[x(3) * y(1) - x(1) * y(2) + c(1), x(2) * y(2) - y(3) - c(1)]); print_buchberger(&[ x(2) + x(1) * y(5) + y(4), x(1) * y(6) - x(1) * y(3) + y(5) - y(2), x(1) * y(5) - x(1) * y(2), ]); } }
use std::marker::PhantomData; use itertools::{EitherOrBoth, Itertools}; use crate::core::num::Field; use super::elts::*; use super::mdeg::MultiDegree; use super::ord::MonomialOrder; pub struct Computer<F: Field, O: MonomialOrder> { _marker: PhantomData<(F, O)>, } impl<F: Field, O: MonomialOrder> Computer<F, O> { pub fn sort_terms(f: &Polynomial<F>) -> Polynomial<F> { Polynomial::new_unchecked( f.terms() .sorted_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)) .cloned() .collect(), ) } pub fn leading_term(f: &Polynomial<F>) -> Option<&Term<F>> { f.terms().max_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)) } pub fn leading_coef(f: &Polynomial<F>) -> Option<F> { Some(Self::leading_term(f)?.coef) } pub fn divide( f: &Polynomial<F>, divs: &[Polynomial<F>], ) -> (Polynomial<F>, Vec<Polynomial<F>>) { let m = divs.len(); let mut quotients = vec![Polynomial::<F>::zero(); m]; let mut remainder = Polynomial::<F>::zero(); let mut f = f.clone(); 'outer: while let Some(lt_f) = Self::leading_term(&f).cloned() { for (g, q) in divs.iter().zip(quotients.iter_mut()) { if let Some(lt_g) = Self::leading_term(&g) { if let Some(a) = lt_f.try_div(lt_g) { *q = &*q + &a; f = f - a * g; continue 'outer; } } } f.terms.sort_by(|s, t| O::cmp(&s.mdeg, &t.mdeg)); if let Some(lt_f) = f.terms.pop() { remainder = remainder + lt_f; } } (remainder, quotients) } pub fn monic_lcm(s: &Term<F>, t: &Term<F>) -> Term<F> { if s.is_zero() || t.is_zero() { return Term::zero(); } Term::monic(MultiDegree( s.mdeg .degs() .zip_longest(t.mdeg.degs()) .map(|pair| match pair { EitherOrBoth::Both(a, b) => *a.max(b), EitherOrBoth::Left(a) => *a, EitherOrBoth::Right(b) => *b, }) .collect(), )) } pub fn try_reduce(f: &Polynomial<F>, g: &Polynomial<F>) -> Option<Polynomial<F>> { let lt_f = Self::leading_term(f)?; let lt_g = Self::leading_term(g)?; let lcm = Self::monic_lcm(&lt_f, &lt_g); Some(lcm.try_div(&lt_f)? * f - lcm.try_div(&lt_g)? * g) } pub fn buchberger_criterion(generators: &[Polynomial<F>]) -> bool { let m = generators.len(); for (i, j) in (0..m).cartesian_product(0..m).filter(|&(f, g)| f != g) { if let Some(h) = Self::try_reduce(&generators[i], &generators[j]) { if !Self::divide(&h, generators).0.is_zero() { return false; } } else { panic!("should not happen. yeah i know, joseph, 'parse don't validate'"); } } true } fn buchberger_extend(generators: &mut Vec<Polynomial<F>>) { let mut i = 0; 'outer: while i < generators.len() { for j in 0..i { let (r, _) = Self::divide( &Self::try_reduce(&generators[i], &generators[j]) .expect("should not happen. 
yeah i know, joseph, 'parse don't validate'"), generators, ); if let Some(lc_r) = Self::leading_coef(&r) { generators.push(&r / lc_r); i = 0; continue 'outer; } } i += 1; } } fn buchberger_minimize(generators: &mut Vec<Polynomial<F>>) { 'outer: loop { break { for (i, j) in pairs_iter(generators.len()) { if Self::leading_term(&generators[j]) .unwrap() .divides(&Self::leading_term(&generators[i]).unwrap()) { generators.remove(i); continue 'outer; } } }; } } fn buchberger_reduce(generators: &mut Vec<Polynomial<F>>) { 'outer: loop { break { for (i, j) in pairs_iter(generators.len()) { for t in generators[i].terms() { if Self::leading_term(&generators[j]).unwrap().divides(t) { let g = generators.remove(i); let (g_rem, _) = Self::divide(&g, &generators); generators.push(g_rem); continue 'outer; } } } }; } } pub fn buchberger_algorithm(generators: &mut Vec<Polynomial<F>>) { Self::buchberger_extend(generators); Self::buchberger_minimize(generators); Self::buchberger_reduce(generators); } } #[inline] fn pairs_iter(n: usize) -> impl Iterator<Item = (usize, usize)> { (0..n).cartesian_product(0..n).filter(|(i, j)| i != j) } #[cfg(test)] mod tests { use std::ops::Add; use super::*; use crate::{ core::num::Rational, poly::{elts::Polynomial, elts::Term}, }; use super::super::ord::Lex; type Poly = Polynomial<f64>; type Comp = Computer<f64, Lex>; #[allow(unused)] macro_rules! pp { ($poly:expr) => { println!("{} = {}", stringify!($poly), &$poly); }; } #[allow(unused)] macro_rules! pps { ($polys:expr) => { for i in 0..($polys.len()) { println!("{}[{}] = {}", stringify!($polys), i, &$polys[i]); } }; } #[allow(unused)] #[test] fn dbg_stuff() { let c = |coef: f64| Poly::from(coef); fn_vars! { f64: x y z } /* let f = y(1); let divs = [ Poly::zero(), y(1) + c(1.0), ]; println!("{} / [{}, {}]", &f, &divs[0], &divs[1]); let (r, q) = Computer::<Lex, f64>::divide(&f, &divs); pp!(r); pp!(q[0]); pp!(q[1]); pp!(&q[0] * &divs[0]); pp!(&q[1] * &divs[1]); */ let f = x(2) + x(3) + x(1) + x(2) + x(0) + x(3); pp!(f); } #[test] fn division() { let poly_iter = polys(2, 3, 1) .into_iter() .cartesian_product(polys(2, 1, 2).into_iter().cartesian_product(polys(2, 1, 2))); for (f, (g1, g2)) in poly_iter { test_result_equality(&f, &[g1, g2]); } } fn polys(vars: i32, max_deg: u8, max_coef: i32) -> Vec<Polynomial<f64>> { (0..vars) .map(|_| (0..=max_deg).rev()) .multi_cartesian_product() .map(MultiDegree::from_vec) .filter(|mdeg| mdeg.total_deg() <= max_deg) .map(|mdeg| { (0..=max_coef) .map(f64::from) .map(move |coef| Term::new(coef, mdeg.clone())) }) .multi_cartesian_product() .map(|v| Poly::new_unchecked(v.into_iter().filter(|t| !t.is_zero()).collect())) .collect() } #[cfg(test)] fn poly_assert_eq(f1: &Poly, f2: &Poly) { assert_eq!(Comp::sort_terms(f1).terms, Comp::sort_terms(f2).terms); } #[cfg(test)] fn test_result_equality(f: &Poly, g: &[Poly]) { let (r, q) = Comp::divide(f, g); let f2 = q.iter().zip_eq(g).map(|(qi, gi)| qi * gi).fold(r, Add::add); poly_assert_eq(f, &f2); } #[test] fn reduction() { let c = |coef| Polynomial::from(Rational::new_i64(coef, 1)); fn_vars! { Rational: x y } type CompQ = Computer<Rational, Lex>; let f = c(5) * x(4) * y(3) + c(2) * x(2) * y(1) + c(3) * x(1) * y(2) + y(2) + c(3); let g = c(8) * x(5) * y(2) + x(3) + c(3) * x(2) * y(2) + y(4) + c(6); pp!(f); pp!(g); println!(); if let Some(h) = CompQ::try_reduce(&f, &g) { pp!(h); } else { println!("could not reduce"); } } #[test] fn buchberger() { let q = |a, b| Polynomial::from(Rational::new_i64(a, b)); let c = |coef| q(coef, 1); fn_vars! 
{ Rational: x y } type CompQ = Computer<Rational, Lex>; fn print_buchberger(g: &[Polynomial<Rational>]) -> Vec<Polynomial<Rational>> { let mut g = Vec::from(g); println!("initial:"); pps!(g); CompQ::buchberger_algorithm(&mut g); println!("\nresult:"); pps!(g); println!("--------------------"); g } print_buchberger(&[x(3) * y(1) - x(1) * y(2) + c(1), x(2) * y(2) - y(3) - c(1)]);
; } }
print_buchberger(&[
    x(2) + x(1) * y(5) + y(4),
    x(1) * y(6) - x(1) * y(3) + y(5) - y(2),
    x(1) * y(5) - x(1) * y(2),
])
call_expression
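The `suffix` and `middle` fields above isolate the second `print_buchberger` call as the completion target, which is what the `call_expression` strategy label records. Before the retrieved context that follows, a quick orientation: the file in this row centers on two routines, multivariate polynomial division and Buchberger's algorithm. The sketch below exercises the division routine in the style of the file's own `division` test; the `algeo::...` paths and the sample polynomials are assumptions rather than anything taken verbatim from the repository, and the identity in the comment is the one that test verifies.

```rust
// Module paths are assumed from the repository layout (HColeman127/algeo).
use algeo::poly::comp::Computer;
use algeo::poly::elts::Polynomial;
use algeo::poly::ord::Lex;

fn main() {
    // Indeterminates x, y of F[x, y] over f64, built as in the tests above.
    let x = |d| Polynomial::<f64>::var(0, d);
    let y = |d| Polynomial::<f64>::var(1, d);
    let c = |k: f64| Polynomial::from(k);

    // Illustrative dividend and divisors (chosen for this sketch).
    let f = x(2) * y(1) + x(1) * y(2) + y(2);
    let divs = [x(1) * y(1) - c(1.0), y(2) - c(1.0)];

    // Division with remainder with respect to the Lex order. Afterwards
    // f == q[0] * divs[0] + q[1] * divs[1] + r, which is exactly the
    // identity the `division` test in comp.rs checks.
    let (r, q) = Computer::<f64, Lex>::divide(&f, &divs);

    println!("r  = {}", r);
    println!("q0 = {}", q[0]);
    println!("q1 = {}", q[1]);
}
```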
[ { "content": "pub fn mat_iterator<'a, F: Field>(n: usize, m: usize, values: &'a [F])-> impl 'a + Iterator<Item=Mat<F>>{\n\n\tMatIterator {\n\n\t\tvalues,\n\n\t\tn,\n\n\t\tm,\n\n\t\tnum: 0\n\n\t}\n\n}\n\n\n\npub struct MatIterator<'a, F: Field> {\n\n\tvalues: &'a [F],\n\n\tn: usize,\n\n\tm: usize,\n\n\tnum: usize,\n\n}\n\n\n\nimpl<'a, F: Field> Iterator for MatIterator<'a, F> {\n\n type Item = Mat<F>;\n\n\n\n fn next(&mut self) -> Option<Mat<F>> {\n", "file_path": "src/linalg/util.rs", "rank": 0, "score": 149149.6069762912 }, { "content": "type ReducedRowEchelonForm<'a, F> = RowEquivalentForm<'a, F, true, true>;\n\n\n\n\n\nimpl<'a, F: Field + EpsilonEquality, const R: bool> RowEquivalentForm<'a, F, true, R> {\n\n\n\n\t/// Computes the nullity (dimension of the kernel)\n\n\tpub fn nullity(&self) -> usize {\n\n\t\tself.b.cols() - self.rank()\n\n\t}\n\n\n\n\t/// Computes the rank (dimension of the range)\n\n\tpub fn rank(&self) -> usize {\n\n\t\tself.b.rows() - self.b.zero_rows()\n\n\t}\n\n}\n\n\n\n\n\nimpl<F: Field + StabilityCmp + EpsilonEquality> Mat<F> {\n\n\n\n\t/// Computes row echelon form\n", "file_path": "src/linalg/algorithm.rs", "rank": 2, "score": 125671.55363755304 }, { "content": "pub fn superscript(n: u8) -> String {\n\n match n {\n\n 0 => String::from(\"⁰\"),\n\n 1 => String::from(\"¹\"),\n\n 2 => String::from(\"²\"),\n\n 3 => String::from(\"³\"),\n\n 4 => String::from(\"⁴\"),\n\n 5 => String::from(\"⁵\"),\n\n 6 => String::from(\"⁶\"),\n\n 7 => String::from(\"⁷\"),\n\n 8 => String::from(\"⁸\"),\n\n 9 => String::from(\"⁹\"),\n\n _ => superscript(n / 10) + &superscript(n % 10),\n\n }\n\n}\n\n\n\n// implementations -------------------------------------------------------------\n\n\n\nimpl Display for MultiDegree {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n", "file_path": "src/poly/display.rs", "rank": 3, "score": 122627.14664652469 }, { "content": "struct LUDecompositionInternal<F: Field> {\n\n pub p: Vec<usize>,\n\n pub l: Mat<F>,\n\n pub u: Mat<F>,\n\n}\n\n\n\nimpl<F: Field> LUDecompositionInternal<F> {\n\n // assumes src < target\n\n fn permute(&mut self, src: usize, target: usize) {\n\n self.p.swap(src, target);\n\n self.u.permute_rows(src, target);\n\n self.l.permute_under_diagonal(src, target);\n\n }\n\n\n\n fn scale_row(&mut self, r: usize, scalar: F) {\n\n self.l[(r, r)] = scalar;\n\n self.u.scale_row(r, F::ONE / scalar);\n\n }\n\n\n\n fn replace_col_under_row(&mut self, r: usize, c: usize) {\n", "file_path": "src/linalg/algorithm.rs", "rank": 4, "score": 109853.50529267953 }, { "content": "pub fn num_to_seq(n: usize, base: usize) -> impl Iterator<Item = usize> {\n\n\tBaseSeqIterator::new(n, base)\n\n}\n\n\n", "file_path": "src/linalg/util.rs", "rank": 5, "score": 108236.03213642986 }, { "content": "pub fn get_max_index<T, I, C>(slice: I, le: C) -> usize where\n\n\tI: Iterator<Item=T>,\n\n\tC: Fn(&T, &T)->Option<Ordering> {\n\n\n\n\tslice\n\n\t\t.enumerate()\n\n .max_by(|(_n, x), (_m, y)| le(x, y).unwrap())\n\n .unwrap()\n\n\t\t.0\n\n}\n\n\n", "file_path": "src/linalg/util.rs", "rank": 6, "score": 107271.77376130765 }, { "content": "/// type of the degrees in a multidegree\n\n///\n\n/// may need to be changed to `u8` as many of the computations are turning out to require nonnegative degrees\n\ntype D = u8;\n\n\n\n/// Multidegree for a monomial; wraps a `Vec<D>`.\n\n///\n\n/// This is the treatment of multidegrees as (nonnegative) integer tuples.\n\n#[derive(Clone, PartialEq, Eq, Debug)]\n\npub struct MultiDegree(pub Vec<D>);\n\n\n\nimpl MultiDegree {\n\n /// 
Returns an empty multidegree.\n\n ///\n\n /// In any mathematical context, you should prefer `MDeg::zero`.\n\n #[inline]\n\n pub fn new() -> Self {\n\n MultiDegree::zero()\n\n }\n\n\n\n /// Quick fix for sanitizing a multidegree.\n\n /// \n\n /// Should be avoided if possible; prefer clean operations.\n", "file_path": "src/poly/mdeg.rs", "rank": 7, "score": 105337.16785345867 }, { "content": "// (0,0), (0,1), ..., (0,cols)\n\n// (1,0), (1,1), ..., (1,cols)\n\n// ... ...\n\n// (row,0) ...\n\npub fn get_box_iter(rows: usize, cols: usize) -> impl Iterator<Item=(usize, usize)>{\n\n\t(0..rows).map(move |r| ((0..cols).map(move |c| (r,c) ) ) ).flatten()\n\n}\n\n\n", "file_path": "src/linalg/util.rs", "rank": 8, "score": 103214.64610931312 }, { "content": "type RowEchelonForm<'a, F> = RowEquivalentForm<'a, F, true, false>;\n", "file_path": "src/linalg/algorithm.rs", "rank": 9, "score": 97225.62337564302 }, { "content": "/// type for indexing the indeterminates\n\ntype I = usize;\n\n\n", "file_path": "src/poly/mdeg.rs", "rank": 10, "score": 92089.66998353068 }, { "content": "/// The graded order on multidegrees.\n\n///\n\n/// Simply compares the total degrees.\n\n///\n\n/// This is the usual grading on a univariate polynomial ring.\n\n///\n\n/// Important note: this is not a 'monomial order' as it is not antisymmetric;\n\n/// should probably be moved or hidden to avoid confusion\n\npub fn grad(a: &MultiDegree, b: &MultiDegree) -> Ordering {\n\n a.total_deg().cmp(&b.total_deg())\n\n}\n\n\n\n/// The [Graded Lexicographic Order](https://w.wiki/3zwp) on multidegrees.\n\n///\n\n/// applies the graded order; if equal, applies lexicographic\n\npub struct GrLex;\n\n\n\nimpl MonomialOrder for GrLex {\n\n fn cmp(a: &MultiDegree, b: &MultiDegree) -> Ordering {\n\n match grad(a, b) {\n\n Ordering::Equal => Lex::cmp(a, b),\n\n lt_or_gt => lt_or_gt,\n\n }\n\n }\n\n}\n\n\n\n/// The [Graded Reverse Lexicographic Order](https://w.wiki/3zwq) on\n\n/// multidegrees.\n", "file_path": "src/poly/ord.rs", "rank": 11, "score": 87445.94980430372 }, { "content": "pub trait Field:\n\n Sized\n\n + Copy\n\n + std::fmt::Debug\n\n + std::fmt::Display\n\n + PartialEq\n\n + OverloadAddition\n\n + OverloadSubtraction\n\n + Zero\n\n + OverloadMultiplication\n\n + OverloadDivision\n\n + One\n\n{\n\n fn powi32(&self, p: i32) -> Self;\n\n /// multiplicative inverse\n\n fn inv(self) -> Self {\n\n Self::ONE / self\n\n }\n\n}\n\n\n", "file_path": "src/core/num/traits.rs", "rank": 12, "score": 77953.79060273425 }, { "content": "/// More possible orders:\n\n/// - https://en.wikipedia.org/wiki/Monomial_order\n\n/// - https://faculty.math.illinois.edu/Macaulay2/doc/Macaulay2-1.15/share/doc/Macaulay2/Macaulay2Doc/html/_monomial_sporderings.html\n\n///\n\nstruct _PlaceHolder;\n\n\n\n#[cfg(test)]\n\nmod order_tests {\n\n use itertools::Itertools;\n\n use std::cmp::Ordering;\n\n\n\n use super::*;\n\n use crate::poly::mdeg::MultiDegree;\n\n\n\n macro_rules! mdeg {\n\n ($( $deg:expr ),* $(,)?) 
=> {\n\n &MultiDegree::from_vec(vec![ $( $deg ),* ])\n\n };\n\n }\n\n\n\n fn _dbg_suite(ord: fn(&MultiDegree, &MultiDegree) -> Ordering) {\n\n let d = |s: &[u8]| format!(\"{:?}{:?}{:?}\", s[0], s[1], s[2]);\n\n\n\n let c = |o: Ordering| match o {\n", "file_path": "src/poly/ord.rs", "rank": 13, "score": 65445.87689325272 }, { "content": "/// A [Monomial Order](https://en.wikipedia.org/wiki/Monomial_order) for multidegrees.\n\n///\n\n/// Necessary conditions not checked by rust:\n\n/// - well-order\n\n/// - respects multiplication: $\\alpha \\leq \\beta \\implies \\alpha + \\gamma \\leq \\beta + \\gamma$, for all multidegrees $\\alpha, \\beta, \\gamma \\in \\Z^n$.\n\npub trait MonomialOrder {\n\n fn cmp(a: &MultiDegree, b: &MultiDegree) -> Ordering;\n\n}\n\n\n\n/// The [Lexicographic Order](https://w.wiki/3zwi) on multidegrees.\n\n///\n\n/// if a != b, compares first unequal degrees from the left\n\n///\n\n/// e.g., a < b iff ∃k s.t. a_k < b_k and a_i = b_i, for i = 0,...,k-1\n\npub struct Lex;\n\n\n\nimpl MonomialOrder for Lex {\n\n fn cmp(a: &MultiDegree, b: &MultiDegree) -> Ordering {\n\n let mut iter_a = a.degs();\n\n let mut iter_b = b.degs();\n\n\n\n loop {\n\n match (iter_a.next(), iter_b.next()) {\n\n (None, None) => break Ordering::Equal,\n\n (None, Some(_)) => break Ordering::Less, // might need to check rhs nonzero\n", "file_path": "src/poly/ord.rs", "rank": 14, "score": 58215.52170496134 }, { "content": "#[macro_use]\n\npub mod macros;\n\n\n\n/// Computation related to finding Gröbner bases.\n\npub mod comp;\n\n/// Monomial orders.\n\npub mod ord;\n\n/// Struct and trait implementations relating to the polynomial ring $F[x_1, \\dots, x_n]$\n\npub mod elts;\n\n\n\npub mod mdeg;\n\nmod display;\n\n\n\n\n\n/* #[doc(inline)]\n\npub use ring::{u, v, w, x, y, z, Polynomial, Term}; */\n", "file_path": "src/poly/mod.rs", "rank": 15, "score": 52314.65880330191 }, { "content": "fn main() {\n\n let x = |d| Polynomial::<Rational>::var(0, d);\n\n let y = |d| Polynomial::<Rational>::var(1, d);\n\n\n\n let p = x(1) + y(1);\n\n let q = x(1) - y(1);\n\n\n\n println!(\"\\n({})({}) = {}\", p, q, &p * &q);\n\n}\n", "file_path": "src/main.rs", "rank": 36, "score": 45032.68821089004 }, { "content": "struct BaseSeqIterator {\n\n\tn: usize,\n\n\tbase: usize\n\n}\n\n\n\nimpl BaseSeqIterator {\n\n\tpub fn new(n: usize, base: usize) -> BaseSeqIterator {\n\n\t\tBaseSeqIterator {\n\n\t\t\tn: n*base,\n\n\t\t\tbase\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Iterator for BaseSeqIterator {\n\n\ttype Item = usize;\n\n\n\n\tfn next(&mut self) -> Option<Self::Item> {\n\n\t\tself.n /= self.base;\n\n\n\n\t\tif self.n == 0 {\n\n\t\t\tNone\n\n\t\t} else {\n\n\t\t\tSome(self.n % self.base)\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/linalg/util.rs", "rank": 37, "score": 42296.36027663024 }, { "content": "/// Partial order to indicate which element has greater stability.\n\n/// More precisely, it is used to determine what number is best to divide by.\n\n/// For lu decomposition (or just Gaussian elimination in general), it is\n\n/// always best to divide by large floats, since this will lead to the least\n\n/// rounding/ float point problems.\n\n///\n\n/// Needs to satisfy 0<= every number, (and every number !<= 0) in order\n\n/// to avoid divide by 0.\n\npub trait StabilityCmp {\n\n fn stability_cmp(&self, other: &Self) -> Option<Ordering>;\n\n}\n\n\n\nmacro_rules! 
stability_cmp_impl {\n\n ($($t:ty)*) => {\n\n $(\n\n impl StabilityCmp for $t {\n\n fn stability_cmp(&self, other: &Self) -> Option<Ordering> {\n\n self.abs().partial_cmp(&other.abs())\n\n }\n\n }\n\n )*\n\n }\n\n}\n\n\n\nstability_cmp_impl! { f32 f64 }\n\n\n\n/// stability cmp for Rational. First priority is making it so that 0 is leq than\n\n/// everything in order for 0 to never be chosen as max value to divide by\n", "file_path": "src/core/num/traits.rs", "rank": 38, "score": 36358.06131822058 }, { "content": "pub trait EpsilonEquality {\n\n fn epsilon_equals(&self, other: &Self) -> bool;\n\n}\n\n\n\nmacro_rules! epsilon_equality_impl {\n\n ($($t:ty)*) => {\n\n $(\n\n impl EpsilonEquality for $t {\n\n fn epsilon_equals(&self, other: &Self) -> bool {\n\n <$t>::abs(self - other) < <$t>::EPSILON\n\n }\n\n }\n\n )*\n\n }\n\n}\n\n\n\nepsilon_equality_impl! { f32 f64 }\n\n\n\nimpl EpsilonEquality for Rational {\n\n fn epsilon_equals(&self, other: &Self) -> bool {\n\n self == other\n\n }\n\n}\n\n\n\n// Numerical Stability Norm ----------------------------------------------------\n\n\n", "file_path": "src/core/num/traits.rs", "rank": 39, "score": 36352.849363712 }, { "content": "pub trait One: Sized + Mul {\n\n /// if this doesn't work use function, like joseph said\n\n const ONE: Self;\n\n fn is_one(&self) -> bool;\n\n}\n\n\n\nmacro_rules! impl_one_for_primitives {\n\n ($($t:ty)*) => {\n\n $(\n\n impl One for $t {\n\n const ONE: Self = 1 as Self;\n\n\n\n #[inline]\n\n fn is_one(&self) -> bool {\n\n *self == Self::ONE\n\n }\n\n }\n\n )*\n\n }\n\n}\n", "file_path": "src/core/num/traits.rs", "rank": 40, "score": 32957.264452818235 }, { "content": "pub trait Zero: Sized + Add<Self, Output = Self> {\n\n /// if this doesn't work use function, like joseph said\n\n const ZERO: Self;\n\n fn is_zero(&self) -> bool;\n\n}\n\n\n\nmacro_rules! 
impl_zero_for_primitives {\n\n ($($t:ty)*) => {\n\n $(\n\n impl Zero for $t {\n\n const ZERO: Self = 0 as Self;\n\n\n\n #[inline]\n\n fn is_zero(&self) -> bool {\n\n *self == Self::ZERO\n\n }\n\n }\n\n )*\n\n }\n\n}\n", "file_path": "src/core/num/traits.rs", "rank": 41, "score": 27974.87007117336 }, { "content": "pub mod mat;\n\npub mod util;\n\npub mod algorithm;\n\npub mod cone;", "file_path": "src/linalg/mod.rs", "rank": 42, "score": 27724.7741121195 }, { "content": "pub mod num;\n", "file_path": "src/core/mod.rs", "rank": 43, "score": 27723.42956375434 }, { "content": "mod int;\n\nmod rat;\n\nmod traits;\n\n\n\npub use int::Integer;\n\npub use rat::Rational;\n\npub use traits::{EpsilonEquality, Field, One, StabilityCmp, Zero};\n", "file_path": "src/core/num/mod.rs", "rank": 44, "score": 26104.11783770916 }, { "content": "// div\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Div<F> for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[allow(clippy::suspicious_arithmetic_impl)]\n\n #[inline]\n\n fn div(self, rhs: F) -> Self::Output {\n\n self * rhs.inv()\n\n }\n\n}\n\n\n\n// tests -----------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/poly/elts.rs", "rank": 45, "score": 24603.991050540746 }, { "content": "\n\n println!(\"\\nc^2 * t^2 = {}\", d);\n\n println!(\"c^2 * t^2 = {:?}\", d);\n\n }\n\n\n\n #[test]\n\n fn test_poly() {\n\n let c = |coef| Term::<f64>::constant_unchecked(coef);\n\n let x = |deg| Term::<f64>::var(0, deg);\n\n let y = |deg| Term::<f64>::var(1, deg);\n\n let z = |deg| Term::<f64>::var(2, deg);\n\n\n\n let trm = |coef, [i, j, k]: [u8; 3]| c(coef) * x(i) * y(j) * z(k);\n\n\n\n println!(\"x^4 = {}\", x(4));\n\n println!(\"y^2 = {}\", y(2));\n\n println!(\"z^7 = {}\", z(7));\n\n\n\n let p = trm(5.0, [1, 2, 3]);\n\n let q = trm(7.0, [4, 0, 2]);\n", "file_path": "src/poly/elts.rs", "rank": 46, "score": 24603.022595870923 }, { "content": "use std::ops::{Add, Div, Mul, Neg, Sub};\n\nuse xops::binop;\n\n\n\nuse super::mdeg::MultiDegree;\n\nuse crate::core::num::Field;\n\n\n\n// structs ---------------------------------------------------------------------\n\n\n\n/// single term of a multivariate polynomial with coefficients in `F`.\n\n#[derive(Clone, Debug)]\n\npub struct Term<F: Field> {\n\n pub coef: F,\n\n pub mdeg: MultiDegree,\n\n}\n\n\n\n/// a multivariate polynomial with coefficients in the field `F`\n\n#[derive(Clone, Debug)]\n\npub struct Polynomial<F: Field> {\n\n // pub? 
yeah pub.\n\n pub terms: Vec<Term<F>>,\n", "file_path": "src/poly/elts.rs", "rank": 47, "score": 24602.966005523587 }, { "content": "\n\nimpl<F: Field> From<&Term<F>> for Polynomial<F> {\n\n #[inline]\n\n fn from(term: &Term<F>) -> Self {\n\n Polynomial::from(term.clone())\n\n }\n\n}\n\n\n\n// equality --------------------------------------------------------------------\n\n\n\nimpl<F: Field> PartialEq for Term<F> {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.coef == other.coef && self.mdeg == other.mdeg\n\n }\n\n}\n\n\n\nimpl<F: Field> Eq for Term<F> {}\n\n\n\n// operations ------------------------------------------------------------------\n\n\n", "file_path": "src/poly/elts.rs", "rank": 48, "score": 24599.2667947348 }, { "content": " /// maximum index variable of `self`.\n\n #[inline]\n\n pub fn eval(&self, x: &[F]) -> F {\n\n self.terms().map(|t| t.eval(x)).sum()\n\n }\n\n}\n\n\n\n// defaults --------------------------------------------------------------------\n\n\n\nimpl_zero_default! { Term<F> where F: Field }\n\nimpl_zero_default! { Polynomial<F> where F: Field }\n\n\n\n// conversions -----------------------------------------------------------------\n\n\n\nimpl<F: Field> From<F> for Term<F> {\n\n #[inline]\n\n fn from(coef: F) -> Self {\n\n Term::constant_unchecked(coef)\n\n }\n\n}\n", "file_path": "src/poly/elts.rs", "rank": 49, "score": 24597.447958573885 }, { "content": "impl<F: Field> Neg for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[inline]\n\n fn neg(self) -> Self::Output {\n\n // trust that `self` has clean zeros, taking negative won't spoil\n\n Polynomial::new_unchecked(self.terms().map(Neg::neg).collect())\n\n }\n\n}\n\n\n\nimpl<F: Field> Neg for Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[inline]\n\n fn neg(mut self) -> Self::Output {\n\n // trust that `self` has clean zeros, taking negative won't spoil\n\n for t in self.terms_mut() {\n\n t.coef = -t.coef;\n\n }\n\n self\n", "file_path": "src/poly/elts.rs", "rank": 50, "score": 24597.339439249903 }, { "content": " ///\n\n /// WARNING: Does not sanitize zeros.\n\n ///\n\n /// but that doesn't really mean much except for stability stuff\n\n #[inline]\n\n pub fn constant_unchecked(coef: F) -> Self {\n\n Self::term_unchecked(Term::constant_unchecked(coef))\n\n }\n\n\n\n pub fn var(idx: usize, deg: u8) -> Self {\n\n Self::term_unchecked(Term::var(idx, deg))\n\n }\n\n\n\n /// returns the polynomial with only the constant term `1`\n\n #[inline]\n\n pub fn one() -> Self {\n\n Self::term_unchecked(Term::one())\n\n }\n\n\n\n /// Returns an iterator over (immutably) borrowed terms: `Item = &Term<F>`.\n", "file_path": "src/poly/elts.rs", "rank": 51, "score": 24597.19654781771 }, { "content": "use std::fmt::{Display, Formatter, Result};\n\n\n\nuse super::{\n\n mdeg::MultiDegree,\n\n elts::{Polynomial, Term},\n\n};\n\nuse crate::core::num::Field;\n\n\n\n// helpers ---------------------------------------------------------------------\n\n\n\nimpl MultiDegree {\n\n fn write_var(&self, f: &mut Formatter, idx: usize, ident: &str) -> Result {\n\n if let Some(&deg) = self.0.get(idx) {\n\n if deg == 1 {\n\n write!(f, \"{}\", ident)?;\n\n } else if deg != 0 {\n\n write!(f, \"{}{}\", ident, superscript(deg))?;\n\n\n\n // bonus latex mode\n\n // write!(f, \"{}^{{{}}}\", ident, deg)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/poly/display.rs", "rank": 52, "score": 24596.889463025407 }, { "content": "}\n\n\n\n// implementations 
-------------------------------------------------------------\n\n\n\nimpl<F: Field> Term<F> {\n\n /// Returns the term with the given coefficient and multidegree.\n\n ///\n\n /// WARNING: Does not sanitize zeros.\n\n #[inline]\n\n pub fn new_unchecked(coef: F, mdeg: MultiDegree) -> Self {\n\n Term { coef, mdeg }\n\n }\n\n\n\n /// Returns the term with the given coefficient and multidegree.\n\n ///\n\n /// NOTE: Sanitizes zeros.\n\n ///\n\n /// This is the most basic way one should create a new term struct.\n\n pub fn new(coef: F, mdeg: MultiDegree) -> Self {\n\n if coef == F::ZERO {\n", "file_path": "src/poly/elts.rs", "rank": 53, "score": 24596.500280316533 }, { "content": " #[inline]\n\n pub fn zero() -> Self {\n\n Self::new_unchecked(Vec::new())\n\n }\n\n\n\n /// should not be used until we can guarantee that zero terms get filtered\n\n #[inline]\n\n pub fn is_zero(&self) -> bool {\n\n self.terms.is_empty()\n\n }\n\n\n\n /// Returns the polynomial with only the term `t`\n\n ///\n\n /// WARNING: Does not sanitize zeros.\n\n #[inline]\n\n pub fn term_unchecked(t: Term<F>) -> Self {\n\n Self::new_unchecked(vec![t])\n\n }\n\n\n\n /// Returns `coef` as a polynomial\n", "file_path": "src/poly/elts.rs", "rank": 54, "score": 24596.453806666155 }, { "content": " pub fn try_div(&self, other: &Term<F>) -> Option<Term<F>> {\n\n if self.is_zero() {\n\n return Some(Term::zero());\n\n }\n\n if other.is_zero() {\n\n return None;\n\n }\n\n // `self` and `other` guaranteed nonzero\n\n\n\n Some(Term::new_unchecked(\n\n self.coef / other.coef,\n\n self.mdeg.checked_sub(&other.mdeg)?,\n\n ))\n\n }\n\n}\n\n\n\nimpl<F: Field> Polynomial<F> {\n\n /// Returns the polynomial with the given terms.\n\n ///\n\n /// WARNING: Does not sanitize zeros.\n", "file_path": "src/poly/elts.rs", "rank": 55, "score": 24596.44069050195 }, { "content": " /// used for constant terms, i.e., terms without indeterminates\n\n #[inline]\n\n pub fn zero() -> Self {\n\n MultiDegree(Vec::new())\n\n }\n\n\n\n // Check if `self` is the zero multidegree.\n\n pub fn is_zero(&self) -> bool {\n\n self.0.is_empty()\n\n }\n\n\n\n /// Returns the multidegree with the only nonzero entry of `deg` at `idx`.\n\n /// \n\n /// This is the multidegree of the indeterminate variable `x_idx^deg`.\n\n pub fn var(idx: I, deg: D) -> Self {\n\n if deg == 0 {\n\n MultiDegree::zero()\n\n } else {\n\n let mut degs = vec![0; idx];\n\n degs.push(deg);\n", "file_path": "src/poly/mdeg.rs", "rank": 56, "score": 24595.71706093733 }, { "content": " Term::zero()\n\n } else {\n\n Term::new_unchecked(coef, mdeg)\n\n }\n\n }\n\n\n\n /// returns a term with the given coefficient and multidegree `MDeg::0`\n\n ///\n\n /// WARNING: Does not sanitize zeros.\n\n ///\n\n /// this is used for interpreting elements of the field as possible terms\n\n /// in polynomials over the field\n\n #[inline]\n\n pub fn constant_unchecked(coef: F) -> Self {\n\n Term::new_unchecked(coef, MultiDegree::zero())\n\n }\n\n\n\n /// Returns the constant zero term.\n\n #[inline]\n\n pub fn zero() -> Self {\n", "file_path": "src/poly/elts.rs", "rank": 57, "score": 24595.35532950488 }, { "content": " /// should be changed to return some form of `Result<F, EvaluationError>`\n\n pub fn eval(&self, x: &[F]) -> F {\n\n if x.len() < self.mdeg.len() {\n\n panic!(\n\n \"incorrectly sized argument {:?} passed to term {:?}\",\n\n x, self\n\n );\n\n }\n\n\n\n self.mdeg\n\n .degs()\n\n .zip(x)\n\n .map(|(&deg, val)| val.powi32(deg.into()))\n\n .fold(self.coef, Mul::mul)\n\n }\n\n\n\n pub fn divides(&self, other: 
&Term<F>) -> bool {\n\n !self.is_zero() && self.mdeg.is_succ(&other.mdeg)\n\n }\n\n\n", "file_path": "src/poly/elts.rs", "rank": 58, "score": 24595.132631580127 }, { "content": " fn test_mdeg() {\n\n dbg!(MultiDegree::zero());\n\n\n\n /* let a = MDeg::from_pairs(&[(0, 3), (3, 5), (4, 2)]);\n\n let b = MDeg::from_pairs(&[(0, 1), (1, 2), (4, 1), (5, 2)]);\n\n\n\n let c = MDeg::from_pairs(&[(0, 4), (1, 2), (3, 5), (4, 3), (5, 2)]);\n\n\n\n println!(\"\\na = {}\", a);\n\n println!(\"b = {}\", b);\n\n\n\n let d = &a + &b;\n\n\n\n println!(\"\\nc = {}\", d);\n\n\n\n assert_eq!(a + b, c); */\n\n }\n\n\n\n #[test]\n\n fn test_term() {\n", "file_path": "src/poly/elts.rs", "rank": 59, "score": 24594.942237274434 }, { "content": " Term::constant_unchecked(F::ZERO)\n\n }\n\n\n\n /// Checks whether the term is zero.\n\n #[inline]\n\n pub fn is_zero(&self) -> bool {\n\n self.coef == F::ZERO\n\n }\n\n\n\n /// Returns the constant one term\n\n #[inline]\n\n pub fn one() -> Self {\n\n Term::constant_unchecked(F::ONE)\n\n }\n\n\n\n /// returns the monic term of given multidegree: `Term { coef: 1, mdeg }`\n\n ///\n\n /// WARNING: Does not sanitize zeros.\n\n pub fn monic(mdeg: MultiDegree) -> Self {\n\n Term::new_unchecked(F::ONE, mdeg)\n", "file_path": "src/poly/elts.rs", "rank": 60, "score": 24594.502737556944 }, { "content": " if self.len() <= 5 {\n\n self.write_var(f, 0, \"x\")?;\n\n self.write_var(f, 1, \"y\")?;\n\n self.write_var(f, 2, \"z\")?;\n\n self.write_var(f, 3, \"u\")?;\n\n self.write_var(f, 4, \"v\")?;\n\n self.write_var(f, 5, \"w\")?;\n\n } else {\n\n write!(f, \"X{:?}\", self.0)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<F: Field> Display for Term<F> {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n if self.mdeg.is_zero() {\n\n write!(f, \"{}\", self.coef)\n\n } else if self.coef == F::ONE {\n\n write!(f, \"{}\", self.mdeg)\n", "file_path": "src/poly/display.rs", "rank": 61, "score": 24594.19463102804 }, { "content": " // could be that `self.coef == -rhs.coef`; must sanitize zeros\n\n Term::new(coef, self.mdeg.clone()).into()\n\n } else {\n\n // know that `self` and `rhs` are nonzero with different multidegrees\n\n Polynomial::new_unchecked(vec![self.clone(), rhs.clone()])\n\n }\n\n }\n\n}\n\n\n\n#[binop(commute, derefs)]\n\nimpl<F: Field> Add<&Term<F>> for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n fn add(self, rhs: &Term<F>) -> Self::Output {\n\n if self.is_zero() {\n\n // could be that `rhs == 0`; must sanitize zeros\n\n return rhs.into();\n\n }\n\n if rhs.is_zero() {\n\n // trust that `self` has clean zeros\n", "file_path": "src/poly/elts.rs", "rank": 62, "score": 24594.16411379692 }, { "content": "///\n\n/// This runs the lexicographic order with the indices reversed; not to be\n\n/// confused with simply calling `Ordering::reverse` on the result of [`Lex::cmp`].\n\npub struct RevLex;\n\n\n\nimpl MonomialOrder for RevLex {\n\n fn cmp(a: &MultiDegree, b: &MultiDegree) -> Ordering {\n\n match a.len().cmp(&b.len()) {\n\n Ordering::Equal => {\n\n for (deg_a, deg_b) in a.degs().zip(b.degs()).rev() {\n\n match deg_a.cmp(deg_b) {\n\n Ordering::Equal => continue,\n\n lt_or_gt => return lt_or_gt,\n\n }\n\n }\n\n }\n\n lt_or_gt => return lt_or_gt,\n\n }\n\n Ordering::Equal\n\n }\n\n}\n\n\n", "file_path": "src/poly/ord.rs", "rank": 63, "score": 24593.991835584922 }, { "content": " return None;\n\n }\n\n // `self` and `other` are guaranteed nonzero\n\n\n\n let mut degs = Vec::with_capacity(self.len());\n\n let mut zero_cache = 0;\n\n\n\n for pair in 
self.degs().zip_longest(rhs.degs()) {\n\n let c = match pair {\n\n EitherOrBoth::Both(a, b) => match a.cmp(b) {\n\n // `a - b < 0`; subtraction has failed\n\n Ordering::Less => return None,\n\n // `a - b == 0`; increment zero cache\n\n Ordering::Equal => {\n\n zero_cache += 1;\n\n continue;\n\n }\n\n // `a - b > 0`; carry on with subtraction\n\n Ordering::Greater => a - b,\n\n },\n", "file_path": "src/poly/mdeg.rs", "rank": 64, "score": 24593.913560526456 }, { "content": " fn default() -> Self {\n\n Self::zero()\n\n }\n\n}\n\n\n\n#[binop(derefs)]\n\nimpl Add for &MultiDegree {\n\n type Output = MultiDegree;\n\n\n\n fn add(self, rhs: &MultiDegree) -> Self::Output {\n\n let mut degs = Vec::with_capacity(self.len().max(rhs.len()));\n\n let mut zero_cache = 0;\n\n\n\n for pair in self.degs().zip_longest(rhs.degs()) {\n\n let c = match pair {\n\n EitherOrBoth::Both(0, 0) | EitherOrBoth::Left(0) | EitherOrBoth::Right(0) => {\n\n zero_cache += 1;\n\n continue;\n\n }\n\n EitherOrBoth::Both(a, b) => a + b,\n", "file_path": "src/poly/mdeg.rs", "rank": 65, "score": 24593.76854039763 }, { "content": " ///\n\n /// Should replace return with in-house iterator struct.\n\n #[inline]\n\n pub fn terms(&self) -> std::slice::Iter<Term<F>> {\n\n self.terms.iter()\n\n }\n\n\n\n /// Returns an iterator over mutably borrowed terms: `Item = &mut Term<F>`.\n\n ///\n\n /// Should replace return with in-house iterator struct.\n\n #[inline]\n\n pub fn terms_mut(&mut self) -> std::slice::IterMut<Term<F>> {\n\n self.terms.iter_mut()\n\n }\n\n\n\n /// Evaluates `self` in `F[x_1, ..., x_n]` at the point `x` in `F^m`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Method panics if `m < n`, i.e., if `x` does not provide values up the\n", "file_path": "src/poly/elts.rs", "rank": 66, "score": 24593.717484894572 }, { "content": " pub fn $var<F: Field>(deg: u8) -> Term<F> {\n\n Term::var($idx, deg)\n\n }\n\n };\n\n (@doc_of $var:expr, $idx:expr) => {\n\n concat!(\n\n \"Shorthand for $\",\n\n $var,\n\n \" = x_\",\n\n $idx,\n\n \"\\\\in F[x_1, \\\\dots, x_n]$.\\n\\n\",\n\n \"As a Term, `\",\n\n $var,\n\n \"(d)` is monic with multidegree `{ \",\n\n $idx,\n\n \": d }`.\",\n\n \"\"\n\n )\n\n };\n\n ($var:ident -> $idx:literal) => {\n", "file_path": "src/poly/macros.rs", "rank": 67, "score": 24593.68564280694 }, { "content": " write!(\n\n f,\n\n \" - {}\",\n\n Term::new_unchecked(-term.coef, term.mdeg.clone())\n\n )?;\n\n } else {\n\n write!(f, \" + {}\", term)?;\n\n }\n\n }\n\n\n\n // return\n\n Result::Ok(())\n\n }\n\n}\n\n\n\nimpl Display for Polynomial<crate::core::num::Rational> {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n let mut term_iter = self.terms.iter();\n\n\n\n if let Some(term) = term_iter.next() {\n", "file_path": "src/poly/display.rs", "rank": 68, "score": 24593.506845020387 }, { "content": "\n\n// neg\n\n\n\nimpl<F: Field> Neg for Term<F> {\n\n type Output = Term<F>;\n\n\n\n fn neg(self) -> Self::Output {\n\n // trust that `self` has clean zeros, taking negative won't spoil\n\n Term::new_unchecked(-self.coef, self.mdeg)\n\n }\n\n}\n\n\n\nimpl<F: Field> Neg for &Term<F> {\n\n type Output = Term<F>;\n\n\n\n fn neg(self) -> Self::Output {\n\n -self.clone()\n\n }\n\n}\n\n\n", "file_path": "src/poly/elts.rs", "rank": 69, "score": 24593.1969258221 }, { "content": " }\n\n\n\n /// returns a term representing a single variable/indeterminate:\n\n /// - `var(0, k) = x_0^k = x^k`\n\n /// - `var(1, k) = x_1^k = y^k`\n\n /// - `var(2, k) = x_2^k = z^k`\n\n /// - `var(j, k) = x_j^k`\n\n pub fn var(idx: usize, deg: u8) -> Self 
{\n\n if deg == 0 {\n\n Term::one()\n\n } else {\n\n Term::monic(MultiDegree::var(idx, deg))\n\n }\n\n }\n\n\n\n /// maps the polynomial element `self =: f ∈ F[x_1, ..., x_n]` to the\n\n /// corresponding\n\n /// polynomial function `eval_f: F^n -> F`, and the image of `x ∈ F^n`\n\n /// under this function is returned\n\n ///\n", "file_path": "src/poly/elts.rs", "rank": 70, "score": 24593.120841613276 }, { "content": "\n\nimpl<F: Field> From<F> for Polynomial<F> {\n\n fn from(coef: F) -> Self {\n\n if coef == F::ZERO {\n\n Polynomial::zero()\n\n } else {\n\n Polynomial::constant_unchecked(coef)\n\n }\n\n }\n\n}\n\n\n\nimpl<F: Field> From<Term<F>> for Polynomial<F> {\n\n fn from(term: Term<F>) -> Self {\n\n if term.is_zero() {\n\n Polynomial::zero()\n\n } else {\n\n Polynomial::term_unchecked(term)\n\n }\n\n }\n\n}\n", "file_path": "src/poly/elts.rs", "rank": 71, "score": 24592.91989603908 }, { "content": " (Some(_), None) => break Ordering::Greater,\n\n (Some(deg_a), Some(deg_b)) => match deg_a.cmp(deg_b) {\n\n Ordering::Equal => continue,\n\n lt_or_gt => break lt_or_gt,\n\n },\n\n }\n\n }\n\n\n\n /* for (deg_a, deg_b) in a.degs().zip(b.degs()) {\n\n match deg_a.cmp(deg_b) {\n\n Ordering::Equal => continue,\n\n lt_or_gt => return lt_or_gt,\n\n }\n\n }\n\n grad(a, b) */\n\n }\n\n}\n\n\n\n/// The Reverse [Lexicographic Order](https://w.wiki/3zwi) order on\n\n/// multidegrees.\n", "file_path": "src/poly/ord.rs", "rank": 72, "score": 24592.787542581293 }, { "content": " #[inline]\n\n pub fn trim_zeros(&mut self) {\n\n if let Some(idx) = self.degs().rposition(|deg| *deg != 0) {\n\n self.0.truncate(idx + 1);\n\n }\n\n }\n\n\n\n /// Returns the multidegree wrapping the given vector, after truncating off\n\n /// any trailing zeros\n\n pub fn from_vec(mut vec: Vec<D>) -> Self {\n\n if let Some(idx) = vec.iter().rposition(|deg| *deg != 0) {\n\n vec.truncate(idx + 1);\n\n MultiDegree(vec)\n\n } else {\n\n MultiDegree::zero()\n\n }\n\n }\n\n\n\n /// returns the empty multidegree\n\n ///\n", "file_path": "src/poly/mdeg.rs", "rank": 73, "score": 24592.68797763957 }, { "content": " // this is a clean operation:\n\n // - all terms nonzero, so zeros stay clean\n\n // - all multidegrees change uniformly, so no simplification is possible\n\n Polynomial::new_unchecked(self.terms().map(|t| t * rhs).collect())\n\n }\n\n}\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Mul for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[allow(clippy::suspicious_arithmetic_impl)]\n\n fn mul(self, rhs: &Polynomial<F>) -> Self::Output {\n\n self.terms()\n\n .map(|t| t * rhs)\n\n .reduce(Add::add)\n\n .unwrap_or_default()\n\n }\n\n}\n\n\n", "file_path": "src/poly/elts.rs", "rank": 74, "score": 24592.627437099454 }, { "content": " terms.push(rhs.clone());\n\n }\n\n\n\n // trust that `self` started with clean zeros; adding `rhs` doesn't spoil\n\n Polynomial::new_unchecked(terms)\n\n }\n\n}\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Add for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[inline]\n\n fn add(self, rhs: &Polynomial<F>) -> Self::Output {\n\n // clean operation so long as `Poly + Term` is clean\n\n //\n\n // in fact, if both `self` and `rhs` are trusted to have clean zeros, then this can be improved to only check at the start for zeros, then fold could use a possible `add_nonzero_term` method for polynomials\n\n rhs.terms().fold(self.clone(), Add::add)\n\n }\n\n}\n", "file_path": "src/poly/elts.rs", "rank": 75, "score": 24592.568695119302 }, { "content": " MultiDegree(degs)\n\n }\n\n }\n\n\n\n /// 
returns iterator over the individual degree components\n\n ///\n\n /// Should replace return with in-house iterator struct.\n\n #[inline]\n\n pub fn degs(&self) -> std::slice::Iter<D> {\n\n self.0.iter()\n\n }\n\n\n\n /// returns mutable iterator over the individual degree components\n\n ///\n\n /// Should replace return with in-house iterator struct.\n\n #[inline]\n\n pub fn degs_mut(&mut self) -> std::slice::IterMut<D> {\n\n self.0.iter_mut()\n\n }\n\n\n", "file_path": "src/poly/mdeg.rs", "rank": 76, "score": 24592.474582239574 }, { "content": "///\n\n/// applies the graded order; if equal, applies reverse lexicographic with the result negated\n\npub struct GRevLex;\n\n\n\nimpl MonomialOrder for GRevLex {\n\n fn cmp(a: &MultiDegree, b: &MultiDegree) -> Ordering {\n\n match grad(a, b) {\n\n Ordering::Equal => RevLex::cmp(a, b).reverse(),\n\n lt_or_gt => lt_or_gt,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/poly/ord.rs", "rank": 77, "score": 24592.29485613809 }, { "content": " } else if self.coef == -F::ONE {\n\n write!(f, \"-{}\", self.mdeg)\n\n } else {\n\n write!(f, \"{}{}\", self.coef, self.mdeg)\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Polynomial<f64> {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n let mut term_iter = self.terms.iter();\n\n\n\n if let Some(term) = term_iter.next() {\n\n write!(f, \"{}\", term)?;\n\n } else {\n\n return write!(f, \"0\");\n\n }\n\n\n\n for term in term_iter {\n\n if term.coef.is_sign_negative() {\n", "file_path": "src/poly/display.rs", "rank": 78, "score": 24592.27389451224 }, { "content": " self + &-rhs\n\n }\n\n}\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Sub<&Term<F>> for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[inline]\n\n fn sub(self, rhs: &Term<F>) -> Self::Output {\n\n self + &-rhs\n\n }\n\n}\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Sub for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[inline]\n\n fn sub(self, rhs: &Polynomial<F>) -> Self::Output {\n", "file_path": "src/poly/elts.rs", "rank": 79, "score": 24592.149272910614 }, { "content": " assert_ord_greater!(GRevLex::cmp(mdeg![0, 0, 2], mdeg![1, 0, 0]));\n\n assert_ord_greater!(GRevLex::cmp(mdeg![0, 3, 0], mdeg![1, 0, 1]));\n\n assert_ord_greater!(GRevLex::cmp(mdeg![1, 0, 0], mdeg![0, 1, 0]));\n\n\n\n // dbg_suite(GRevLex::cmp);\n\n }\n\n\n\n #[test]\n\n fn dbg_grlex_vs_grevlex() {\n\n let mut vecs = Vec::new();\n\n\n\n const D: u8 = 5;\n\n\n\n for x in 0..D {\n\n for y in 0..(D - x) {\n\n for z in 0..(D - x - y) {\n\n vecs.push(MultiDegree::from_vec(vec![x, y, z]));\n\n }\n\n }\n\n }\n", "file_path": "src/poly/ord.rs", "rank": 80, "score": 24591.882762388614 }, { "content": " assert_ord_greater!(GrLex::cmp(mdeg![2, 2, 0], mdeg![0, 0, 1]));\n\n assert_ord_greater!(GrLex::cmp(mdeg![0, 0, 2], mdeg![1, 0, 0]));\n\n assert_ord_greater!(GrLex::cmp(mdeg![3, 0, 0], mdeg![1, 0, 2]));\n\n\n\n // dbg_suite(GrLex::cmp);\n\n }\n\n\n\n #[test]\n\n fn test_grevlex() {\n\n use super::GRevLex;\n\n\n\n assert_ord_equal!(GRevLex::cmp(mdeg![0, 0, 0], mdeg![0, 0, 0]));\n\n assert_ord_equal!(GRevLex::cmp(mdeg![1, 0, 0], mdeg![1, 0, 0]));\n\n assert_ord_equal!(GRevLex::cmp(mdeg![1, 2, 3], mdeg![1, 2, 3]));\n\n assert_ord_equal!(GRevLex::cmp(mdeg![0, 0, 1], mdeg![0, 0, 1]));\n\n\n\n assert_ord_less!(GRevLex::cmp(mdeg![2, 2, 0], mdeg![1, 1, 5]));\n\n assert_ord_less!(GRevLex::cmp(mdeg![1, 0, 0], mdeg![0, 1, 1]));\n\n\n\n assert_ord_greater!(GRevLex::cmp(mdeg![2, 2, 0], mdeg![0, 0, 1]));\n", "file_path": "src/poly/ord.rs", "rank": 81, "score": 24591.63631890902 }, { "content": " 
}\n\n}\n\n\n\n// sub\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Sub for &Term<F> {\n\n type Output = Polynomial<F>;\n\n\n\n fn sub(self, rhs: &Term<F>) -> Self::Output {\n\n self + &-rhs\n\n }\n\n}\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Sub<&Polynomial<F>> for &Term<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[inline]\n\n fn sub(self, rhs: &Polynomial<F>) -> Self::Output {\n", "file_path": "src/poly/elts.rs", "rank": 82, "score": 24591.387344609764 }, { "content": " /// returns the 'total degree' of a multidegree, i.e., the sum of the\n\n /// individual degree components\n\n #[inline]\n\n pub fn total_deg(&self) -> D {\n\n self.degs().sum()\n\n }\n\n\n\n /// returns the maximum index for which `self` contains an entry.\n\n ///\n\n /// in other words, this is the minimum value `n` for which we would\n\n /// consider `self` to be an element of the polynomial ring in `n` variables\n\n #[inline]\n\n pub fn len(&self) -> I {\n\n self.0.len()\n\n }\n\n\n\n pub fn is_succ(&self, other: &MultiDegree) -> bool {\n\n if self.len() > other.len() {\n\n return false;\n\n }\n", "file_path": "src/poly/mdeg.rs", "rank": 83, "score": 24590.91695259855 }, { "content": "impl<F: Field> Mul<F> for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[inline]\n\n fn mul(self, rhs: F) -> Self::Output {\n\n self * Term::constant_unchecked(rhs)\n\n }\n\n}\n\n\n\n#[binop(commute, derefs)]\n\nimpl<F: Field> Mul<&Term<F>> for &Polynomial<F> {\n\n type Output = Polynomial<F>;\n\n\n\n #[allow(clippy::suspicious_arithmetic_impl)]\n\n fn mul(self, rhs: &Term<F>) -> Self::Output {\n\n if rhs.is_zero() || self.is_zero() {\n\n return Polynomial::zero();\n\n }\n\n // `self` and `rhs` guaranteed nonzero\n\n\n", "file_path": "src/poly/elts.rs", "rank": 84, "score": 24590.866752865684 }, { "content": "// add\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Add for &Term<F> {\n\n type Output = Polynomial<F>;\n\n\n\n fn add(self, rhs: &Term<F>) -> Self::Output {\n\n if self.is_zero() {\n\n // could be that `rhs == 0`; must sanitize zeros\n\n return rhs.into();\n\n }\n\n if rhs.is_zero() {\n\n // could be that `self == 0`; must sanitize zeros\n\n return self.into();\n\n }\n\n // `self` and `rhs` guaranteed nonzero terms\n\n\n\n if self.mdeg == rhs.mdeg {\n\n let coef = self.coef + rhs.coef;\n\n\n", "file_path": "src/poly/elts.rs", "rank": 85, "score": 24590.38915593076 }, { "content": " #[inline]\n\n pub fn new_unchecked(terms: Vec<Term<F>>) -> Self {\n\n Polynomial { terms }\n\n }\n\n\n\n /// Returns polynomial with terms from `vec`, filtered for zero coefficients\n\n ///\n\n /// NOTE: Sanitizes zeros.\n\n ///\n\n /// Ideally, this should not be necessary; polynomial computations should\n\n /// be careful to keep themselves clean operations should be\n\n /// structured such that no additional filtering is necessary except during\n\n /// creation\n\n pub fn new(terms: Vec<Term<F>>) -> Self {\n\n Self::new_unchecked(terms.into_iter().filter(|t| !t.is_zero()).collect())\n\n }\n\n\n\n /// Returns the zero polynomial.\n\n ///\n\n /// Currently, this is a polynomial with no terms\n", "file_path": "src/poly/elts.rs", "rank": 86, "score": 24590.295702018506 }, { "content": " self + &rhs.neg()\n\n }\n\n}\n\n\n\n// mul\n\n\n\n#[binop(derefs)]\n\nimpl<F: Field> Mul for &Term<F> {\n\n type Output = Term<F>;\n\n\n\n fn mul(self, rhs: &Term<F>) -> Self::Output {\n\n if self.is_zero() || rhs.is_zero() {\n\n return Term::zero();\n\n }\n\n // `self` and `rhs` guaranteed to be nonzero\n\n Term::new_unchecked(self.coef * rhs.coef, &self.mdeg + 
&rhs.mdeg)\n\n }\n\n}\n\n\n\n#[binop(derefs)]\n", "file_path": "src/poly/elts.rs", "rank": 87, "score": 24589.995133135893 }, { "content": " fn_var_term! { @with_doc $var, $idx,\n\n fn_var_term!(@doc_of stringify!($var), stringify!($idx))\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! fn_vars {\n\n (@idx x) => { 0 };\n\n (@idx y) => { 1 };\n\n (@idx z) => { 2 };\n\n (@idx w) => { 3 };\n\n (@idx u) => { 4 };\n\n (@idx v) => { 5 };\n\n ($t:ty: $($var:ident)*) => {\n\n $(\n\n fn $var(deg: u8) -> Polynomial<$t> {\n\n crate::poly::elts::Polynomial::<$t>::var(fn_vars!(@idx $var), deg)\n\n }\n\n )*\n\n };\n\n}\n", "file_path": "src/poly/macros.rs", "rank": 88, "score": 24589.961927840126 }, { "content": "\n\n for (a, b) in self.degs().zip(other.degs()) {\n\n if a > b {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n }\n\n\n\n /// Checked subtraction.\n\n /// Computes `self - rhs`, returning none if `self[i] < rhs[i]` for any `i`.\n\n pub fn checked_sub(&self, rhs: &MultiDegree) -> Option<MultiDegree> {\n\n if rhs.is_zero() {\n\n return Some(self.clone());\n\n }\n\n // `other` is guaranteed nonzero\n\n\n\n if self.is_zero() {\n\n // `self - other` < 0 so subtraction would fail\n", "file_path": "src/poly/mdeg.rs", "rank": 89, "score": 24589.897824449366 }, { "content": " // only `self` degs remaining, last of which is trusted to be nonzero, so just take the rest\n\n EitherOrBoth::Left(a) => *a,\n\n // only `rhs` degs remaining, last of which is trusted to be nonzero, so subtraction has failed\n\n EitherOrBoth::Right(_) => return None,\n\n };\n\n // `c` is guaranteed nonzero\n\n\n\n // catch up with zeros\n\n degs.append(&mut vec![0; zero_cache]);\n\n zero_cache = 0;\n\n\n\n degs.push(c);\n\n }\n\n\n\n Some(MultiDegree(degs))\n\n }\n\n}\n\n\n\nimpl Default for MultiDegree {\n\n #[inline]\n", "file_path": "src/poly/mdeg.rs", "rank": 90, "score": 24589.398392107025 }, { "content": " println!(\"Term::zero() = {}\", Term::<f64>::zero());\n\n println!(\"Term::one() = {}\", Term::<f64>::one());\n\n\n\n let x = Term::<f64>::var(0, 1);\n\n let y = Term::<f64>::var(1, 1);\n\n let z = Term::<f64>::var(2, 1);\n\n\n\n println!(\"x = {}\", x);\n\n println!(\"y = {}\", y);\n\n println!(\"z = {}\", z);\n\n\n\n let t = &(x * y) * z;\n\n\n\n println!(\"\\nt = x * y * z = {}\", t);\n\n\n\n let c = Term::from(37.0);\n\n\n\n println!(\"\\nc = {}\", c);\n\n\n\n let d = (&c * &c) * (&t * &t);\n", "file_path": "src/poly/elts.rs", "rank": 91, "score": 24588.742173050483 }, { "content": " Ordering::Less => '<',\n\n Ordering::Equal => '=',\n\n Ordering::Greater => '>',\n\n };\n\n\n\n let iter = (0..3).map(|_| 0..3).multi_cartesian_product();\n\n\n\n for (ref a, ref b) in iter.clone().cartesian_product(iter) {\n\n let mdeg_a = &MultiDegree::from_vec(a.clone());\n\n let mdeg_b = &MultiDegree::from_vec(b.clone());\n\n\n\n if super::grad(mdeg_a, mdeg_b).is_eq() {\n\n println!(\"{} {} {}\", d(a), c(ord(mdeg_a, mdeg_b)), d(b));\n\n }\n\n }\n\n }\n\n\n\n #[cfg(test)]\n\n macro_rules! 
assert_ord_equal {\n\n ($ord:expr) => {\n", "file_path": "src/poly/ord.rs", "rank": 92, "score": 24588.717815447242 }, { "content": "\n\n for (a, b) in vecs.iter().cartesian_product(&vecs) {\n\n let grlex = GrLex::cmp(a, b);\n\n let grevlex = GRevLex::cmp(a, b);\n\n\n\n if grlex != grevlex {\n\n println!(\n\n \"cmp {} {} => grlex: {:<10} grevlex: {:<10}\",\n\n a,\n\n b,\n\n format!(\"{:?}\", grlex),\n\n format!(\"{:?}\", grevlex)\n\n );\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/poly/ord.rs", "rank": 93, "score": 24588.405149343627 }, { "content": "use itertools::{EitherOrBoth, Itertools};\n\nuse std::cmp::Ordering;\n\nuse std::ops::Add;\n\nuse xops::binop;\n\n\n\n// structs ---------------------------------------------------------------------\n\n\n\n/// type for indexing the indeterminates\n", "file_path": "src/poly/mdeg.rs", "rank": 94, "score": 24587.91645281609 }, { "content": "#![allow(unused)]\n\n\n\n/// Implements the `Default` trait by calling `Self::zero`.\n\n///\n\n/// For hopefully obvious reasons, this only works if the given type has an unambiguous function called `zero`.\n\n// #[macro_export]\n\nmacro_rules! impl_zero_default {\n\n ($Type:ty $(where $($generics:tt)*)?) => {\n\n impl$(<$($generics)*>)? Default for $Type {\n\n #[inline]\n\n fn default() -> Self {\n\n Self::zero()\n\n }\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! fn_var_term {\n\n (@with_doc $var:ident, $idx:literal, $doc_str:expr) => {\n\n #[doc = $doc_str]\n", "file_path": "src/poly/macros.rs", "rank": 95, "score": 24587.022854940886 }, { "content": " return self.clone();\n\n }\n\n // `self` and `rhs` guaranteed nonzero\n\n\n\n let mut terms = self.terms.clone();\n\n\n\n // i think this if/else is a contender for the canonical way to add a nonzero term `rhs` to a polynomial `self`\n\n //\n\n // maybe make into a method of polynomial?\n\n if let Some(i) = terms.iter().position(|t| t.mdeg == rhs.mdeg) {\n\n terms[i].coef += rhs.coef;\n\n\n\n // could be that `self.terms[i] == 0`; must sanitize\n\n if terms[i].is_zero() {\n\n terms.remove(i);\n\n }\n\n } else {\n\n // this is a clean operation:\n\n // - `self` has no nonzero terms of the same multidegree as `rhs`, so no simplifying terms is possible\n\n // - `rhs` is nonzero, so zeros remain clean\n", "file_path": "src/poly/elts.rs", "rank": 96, "score": 24586.878666906527 }, { "content": "use std::cmp::Ordering;\n\n\n\nuse super::mdeg::MultiDegree;\n\n\n\n/// A [Monomial Order](https://en.wikipedia.org/wiki/Monomial_order) for multidegrees.\n\n///\n\n/// Necessary conditions not checked by rust:\n\n/// - well-order\n\n/// - respects multiplication: $\\alpha \\leq \\beta \\implies \\alpha + \\gamma \\leq \\beta + \\gamma$, for all multidegrees $\\alpha, \\beta, \\gamma \\in \\Z^n$.\n", "file_path": "src/poly/ord.rs", "rank": 97, "score": 24586.788715534414 }, { "content": " EitherOrBoth::Left(a) => *a,\n\n EitherOrBoth::Right(b) => *b,\n\n };\n\n // `c` is guaranteed nonzero\n\n\n\n // catch up with zeros\n\n degs.append(&mut vec![0; zero_cache]);\n\n zero_cache = 0;\n\n\n\n degs.push(c);\n\n }\n\n\n\n MultiDegree(degs)\n\n }\n\n}\n", "file_path": "src/poly/mdeg.rs", "rank": 98, "score": 24586.514039824942 }, { "content": " assert_ord_equal!(Lex::cmp(mdeg![1, 2, 3], mdeg![1, 2, 3]));\n\n assert_ord_equal!(Lex::cmp(mdeg![0, 0, 1], mdeg![0, 0, 1]));\n\n\n\n assert_ord_less!(Lex::cmp(mdeg![0, 1, 0], mdeg![1, 0, 1]));\n\n assert_ord_less!(Lex::cmp(mdeg![0, 0, 1], mdeg![1, 0, 0]));\n\n\n\n assert_ord_greater!(Lex::cmp(mdeg![1, 0, 0], mdeg![0, 1, 0]));\n\n 
assert_ord_greater!(Lex::cmp(mdeg![2, 2, 0], mdeg![0, 0, 1]));\n\n\n\n // dbg_suite(Lex::cmp);\n\n }\n\n\n\n #[test]\n\n fn test_revlex() {\n\n assert_ord_equal!(RevLex::cmp(mdeg![0, 0, 0], mdeg![0, 0, 0]));\n\n assert_ord_equal!(RevLex::cmp(mdeg![1, 0, 0], mdeg![1, 0, 0]));\n\n assert_ord_equal!(RevLex::cmp(mdeg![1, 2, 3], mdeg![1, 2, 3]));\n\n assert_ord_equal!(RevLex::cmp(mdeg![0, 0, 1], mdeg![0, 0, 1]));\n\n\n\n assert_ord_less!(RevLex::cmp(mdeg![1, 0, 0], mdeg![0, 1, 0]));\n", "file_path": "src/poly/ord.rs", "rank": 99, "score": 24586.133509418672 } ]
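The retrieved snippets above supply the pieces `comp.rs` builds on: `MultiDegree` arithmetic, the `Lex`/`GrLex`/`GRevLex` orders, and the operator implementations for `Term` and `Polynomial`. Tying them together, here is a sketch of a Gröbner-basis computation over the rationals; the generator polynomials are the first pair used in the file's `buchberger` test, while the `algeo::...` paths are assumptions.

```rust
// Module paths are assumed from the repository layout (HColeman127/algeo).
use algeo::core::num::Rational;
use algeo::poly::comp::Computer;
use algeo::poly::elts::Polynomial;
use algeo::poly::ord::Lex;

fn main() {
    // Constant and indeterminate helpers, mirroring the `buchberger` test.
    let c = |k| Polynomial::from(Rational::new_i64(k, 1));
    let x = |d| Polynomial::<Rational>::var(0, d);
    let y = |d| Polynomial::<Rational>::var(1, d);

    // Generators x^3 y - x y^2 + 1 and x^2 y^2 - y^3 - 1, as in that test.
    let mut gens = vec![
        x(3) * y(1) - x(1) * y(2) + c(1),
        x(2) * y(2) - y(3) - c(1),
    ];

    // Rewrites the generating set in place into a reduced Groebner basis
    // for the Lex order.
    Computer::<Rational, Lex>::buchberger_algorithm(&mut gens);

    for (i, g) in gens.iter().enumerate() {
        println!("g[{}] = {}", i, g);
    }
}
```

As the file above shows, `buchberger_algorithm` is three in-place passes: extend the set until every pairwise reduction (`try_reduce`) leaves remainder zero under division, then discard generators whose leading term is divisible by another generator's, then replace each survivor by its remainder on division by the rest.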
Rust
tests/auth_test.rs
nappa85/rust-etcd
9fd9f3c33687623772abfff3e5a43579ab9aa677
extern crate etcd;
extern crate futures;
extern crate hyper;
extern crate hyper_tls;
extern crate native_tls;
extern crate tokio_core;
extern crate tokio_timer;

use futures::future::Future;
use tokio_core::reactor::Core;

use etcd::{BasicAuth, Client};
use etcd::auth::{self, AuthChange, NewUser, Role, RoleUpdate, UserUpdate};

#[test]
fn auth() {
    let mut core = Core::new().unwrap();
    let client = Client::new(&["http://etcd:2379"], None).unwrap();
    let basic_auth = BasicAuth {
        username: "root".into(),
        password: "secret".into(),
    };
    let authed_client = Client::new(&["http://etcd:2379"], Some(basic_auth)).unwrap();
    let root_user = NewUser::new("root", "secret");

    let work: Box<Future<Item = (), Error = ()>> = Box::new(
        auth::status(&client)
            .then(|res| {
                let response = res.unwrap();
                assert_eq!(response.data, false);
                auth::create_user(&client, root_user)
            })
            .then(|res| {
                let response = res.unwrap();
                assert_eq!(response.data.name(), "root");
                auth::enable(&client)
            })
            .then(|res| {
                let response = res.unwrap();
                assert_eq!(response.data, AuthChange::Changed);
                let mut update_guest = RoleUpdate::new("guest");
                update_guest.revoke_kv_write_permission("/*");
                auth::update_role(&authed_client, update_guest)
            })
            .then(|res| {
                res.unwrap();
                let mut rkt_role = Role::new("rkt");
                rkt_role.grant_kv_read_permission("/rkt/*");
                rkt_role.grant_kv_write_permission("/rkt/*");
                auth::create_role(&authed_client, rkt_role)
            })
            .then(|res| {
                res.unwrap();
                let mut rkt_user = NewUser::new("rkt", "secret");
                rkt_user.add_role("rkt");
                auth::create_user(&authed_client, rkt_user)
            })
            .then(|res| {
                let response = res.unwrap();
                let rkt_user = response.data;
                assert_eq!(rkt_user.name(), "rkt");
                let role_name = &rkt_user.role_names()[0];
                assert_eq!(role_name, "rkt");
                let mut update_rkt_user = UserUpdate::new("rkt");
                update_rkt_user.update_password("secret2");
                update_rkt_user.grant_role("root");
                auth::update_user(&authed_client, update_rkt_user)
            })
            .then(|res| {
                res.unwrap();
                auth::get_role(&authed_client, "rkt")
            })
            .then(|res| {
                let response = res.unwrap();
                let role = response.data;
                assert!(role.kv_read_permissions().contains(&"/rkt/*".to_owned()));
                assert!(role.kv_write_permissions().contains(&"/rkt/*".to_owned()));
                auth::delete_user(&authed_client, "rkt")
            })
            .then(|res| {
                res.unwrap();
                auth::delete_role(&authed_client, "rkt")
            })
            .then(|res| {
                res.unwrap();
                let mut update_guest = RoleUpdate::new("guest");
                update_guest.grant_kv_write_permission("/*");
                auth::update_role(&authed_client, update_guest)
            })
            .then(|res| {
                res.unwrap();
                auth::disable(&authed_client)
            })
            .then(|res| {
                let response = res.unwrap();
                assert_eq!(response.data, AuthChange::Changed);
                Ok(())
            }),
    );

    assert!(core.run(work).is_ok());
}
extern crate etcd;
extern crate futures;
extern crate hyper;
extern crate hyper_tls;
extern crate native_tls;
extern crate tokio_core;
extern crate tokio_timer;

use futures::future::Future;
use tokio_core::reactor::Core;

use etcd::{BasicAuth, Client};
use etcd::auth::{self, AuthChange, NewUser, Role, RoleUpdate, UserUpdate};

#[test]
fn auth() {
    let mut core = Core::new().unwrap();
    let client = Client::new(&["http://etcd:2379"], None).unwrap();
    let basic_auth = BasicAuth {
        username: "root".into(),
        password: "secret".into(),
    };
    let authed_client = Client::new(&["http://etcd:2379"], Some(basic_auth)).unwrap();
    let root_user = NewUser::new("root", "secret");

    let work: Box<Future<Item = (), Error = ()>> = Box::new(
        auth::status(&client)
            .then(|res| {
                let response = res.unwrap();
                assert_eq!(response.data, false);
                auth::create_user(&client, root_user)
            })
            .then(|res| {
                let response = res.unwrap();
                assert_eq!(response.data.name(), "root");
                auth::enable(&client)
            })
            .then(|res| {
                let response = res.unwrap();
                assert_eq!(response.data, AuthChange::Changed);
                let mut update_guest = RoleUpdate::new("guest");
                update_guest.revoke_kv_write_permission("/*");
                auth::update_role(&authed_client, update_guest)
            })
            .then(|res| {
                res.unwrap();
                let mut rkt_role = Role::new("rkt");
                rkt_role.grant_kv_read_permission("/rkt/*");
                rkt_role.grant_kv_write_permission("/rkt/*");
                auth::create_role(&authed_client, rkt_role)
            })
            .then(|res| {
                res.unwrap();
                let mut rkt_user = NewUser::new("rkt", "secret");
                rkt_use
res.unwrap();
                auth::delete_role(&authed_client, "rkt")
            })
            .then(|res| {
                res.unwrap();
                let mut update_guest = RoleUpdate::new("guest");
                update_guest.grant_kv_write_permission("/*");
                auth::update_role(&authed_client, update_guest)
            })
            .then(|res| {
                res.unwrap();
                auth::disable(&authed_client)
            })
            .then(|res| {
                let response = res.unwrap();
                assert_eq!(response.data, AuthChange::Changed);
                Ok(())
            }),
    );

    assert!(core.run(work).is_ok());
}
r.add_role("rkt");
                auth::create_user(&authed_client, rkt_user)
            })
            .then(|res| {
                let response = res.unwrap();
                let rkt_user = response.data;
                assert_eq!(rkt_user.name(), "rkt");
                let role_name = &rkt_user.role_names()[0];
                assert_eq!(role_name, "rkt");
                let mut update_rkt_user = UserUpdate::new("rkt");
                update_rkt_user.update_password("secret2");
                update_rkt_user.grant_role("root");
                auth::update_user(&authed_client, update_rkt_user)
            })
            .then(|res| {
                res.unwrap();
                auth::get_role(&authed_client, "rkt")
            })
            .then(|res| {
                let response = res.unwrap();
                let role = response.data;
                assert!(role.kv_read_permissions().contains(&"/rkt/*".to_owned()));
                assert!(role.kv_write_permissions().contains(&"/rkt/*".to_owned()));
                auth::delete_user(&authed_client, "rkt")
            })
            .then(|res| {
random
[ { "content": "/// Attempts to enable the auth system.\n\npub fn enable<C>(client: &Client<C>) -> Box<Future<Item = Response<AuthChange>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, \"/enable\");\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| {\n\n http_client.put(uri, \"\".to_owned()).map_err(Error::from)\n\n });\n\n\n\n let result = response.and_then(|response| {\n\n let status = response.status();\n", "file_path": "src/auth.rs", "rank": 0, "score": 163790.31635166198 }, { "content": "/// Determines whether or not the auth system is enabled.\n\npub fn status<C>(client: &Client<C>) -> Box<Future<Item = Response<bool>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, \"/enable\");\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.get(uri).map_err(Error::from));\n\n\n\n let result = response.and_then(|response| {\n\n let status = response.status();\n\n let cluster_info = ClusterInfo::from(response.headers());\n\n let body = response.into_body().concat2().map_err(Error::from);\n", "file_path": "src/auth.rs", "rank": 1, "score": 158020.06138713594 }, { "content": "/// Lists the members of the cluster.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\npub fn list<C>(client: &Client<C>) -> Box<Future<Item = Response<Vec<Member>>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, \"\");\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.get(uri).map_err(Error::from));\n\n\n\n let result = response.and_then(|response| {\n\n let status = response.status();\n\n let cluster_info = ClusterInfo::from(response.headers());\n\n let body = response.into_body().concat2().map_err(Error::from);\n", "file_path": "src/members.rs", "rank": 3, "score": 136297.98065389518 }, { "content": "/// Returns statistics about operations handled by each etcd member the client was initialized\n\n/// with.\n\n///\n\n/// Fails if JSON decoding fails, which suggests a bug in our schema.\n\npub fn store_stats<C>(client: &Client<C>) -> Box<Stream<Item = Response<StoreStats>, Error = Error>>\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let futures = client.endpoints().iter().map(|endpoint| {\n\n let url = build_url(&endpoint, \"v2/stats/store\");\n\n let uri = url.parse().map_err(Error::from).into_future();\n\n\n\n client.request(uri)\n\n });\n\n\n\n Box::new(futures_unordered(futures))\n\n}\n\n\n", "file_path": "src/stats.rs", "rank": 4, "score": 121099.49555546633 }, { "content": "/// Returns statistics about each cluster member the client was initialized with.\n\n///\n\n/// Fails if JSON decoding fails, which suggests a bug in our 
schema.\n\npub fn self_stats<C>(client: &Client<C>) -> Box<Stream<Item = Response<SelfStats>, Error = Error>>\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let futures = client.endpoints().iter().map(|endpoint| {\n\n let url = build_url(&endpoint, \"v2/stats/self\");\n\n let uri = url.parse().map_err(Error::from).into_future();\n\n\n\n client.request(uri)\n\n });\n\n\n\n Box::new(futures_unordered(futures))\n\n}\n\n\n", "file_path": "src/stats.rs", "rank": 5, "score": 121096.69640257288 }, { "content": "#[test]\n\nfn health() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n\n\n let work = client.health().collect().and_then(|responses| {\n\n for response in responses {\n\n assert_eq!(response.data.health, \"true\");\n\n }\n\n\n\n Ok(())\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n", "file_path": "tests/client_test.rs", "rank": 6, "score": 105923.54758112473 }, { "content": "#[test]\n\nfn versions() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n\n\n let work = client.versions().collect().and_then(|responses| {\n\n for response in responses {\n\n assert_eq!(response.data.cluster_version, \"2.3.0\");\n\n assert_eq!(response.data.server_version, \"2.3.7\");\n\n }\n\n\n\n Ok(())\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n", "file_path": "tests/client_test.rs", "rank": 7, "score": 105923.54758112473 }, { "content": "#[test]\n\nfn get_root() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", Some(60)).and_then(|_| {\n\n kv::get(&inner_client, \"/\", GetOptions::default()).and_then(|res| {\n\n assert_eq!(res.data.action, Action::Get);\n\n\n\n let node = res.data.node;\n\n\n\n assert!(node.created_index.is_none());\n\n assert!(node.modified_index.is_none());\n\n assert_eq!(node.nodes.unwrap().len(), 1);\n\n assert_eq!(node.dir.unwrap(), true);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 8, "score": 103508.15105289934 }, { "content": "/// Deletes an empty directory or a key-value pair at the given key.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the node to delete.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the directory is not empty.\n\npub fn delete_dir<C>(client: &Client<C>, key: &str) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_delete(\n\n client,\n\n key,\n\n DeleteOptions {\n\n dir: Some(true),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 9, "score": 97851.84891699633 }, { "content": "/// Deletes a node.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the node to delete.\n\n/// * recursive: If true, and the key is a directory, the directory and all child key-value\n\n/// pairs and directories will be deleted as well.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the key is a directory and `recursive` is `false`.\n\npub fn delete<C>(client: &Client<C>, key: &str, recursive: bool) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_delete(\n\n client,\n\n key,\n\n DeleteOptions {\n\n recursive: Some(recursive),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 10, "score": 93767.18205698932 }, { "content": 
"#[test]\n\nfn https_without_valid_client_certificate() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::https(core, false);\n\n\n\n let work = kv::set(&client, \"/test/foo\", \"bar\", Some(60));\n\n\n\n assert!(client.run(work).is_err());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 11, "score": 93273.17378504848 }, { "content": "/// Handles all delete operations.\n\nfn raw_delete<C>(client: &Client<C>, key: &str, options: DeleteOptions) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let mut query_pairs = HashMap::new();\n\n\n\n if options.recursive.is_some() {\n\n query_pairs.insert(\"recursive\", format!(\"{}\", options.recursive.unwrap()));\n\n }\n\n\n\n if options.dir.is_some() {\n\n query_pairs.insert(\"dir\", format!(\"{}\", options.dir.unwrap()));\n\n }\n\n\n\n if options.conditions.is_some() {\n\n let conditions = options.conditions.unwrap();\n\n\n\n if conditions.is_empty() {\n\n return Box::new(Err(vec![Error::InvalidConditions]).into_future());\n\n }\n", "file_path": "src/kv.rs", "rank": 12, "score": 93256.1310120592 }, { "content": "/// Handles all set operations.\n\nfn raw_set<C>(client: &Client<C>, key: &str, options: SetOptions) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let mut http_options = vec![];\n\n\n\n if let Some(ref value) = options.value {\n\n http_options.push((\"value\".to_owned(), value.to_string()));\n\n }\n\n\n\n if let Some(ref ttl) = options.ttl {\n\n http_options.push((\"ttl\".to_owned(), ttl.to_string()));\n\n }\n\n\n\n if let Some(ref dir) = options.dir {\n\n http_options.push((\"dir\".to_owned(), dir.to_string()));\n\n }\n\n\n\n if let Some(ref prev_exist) = options.prev_exist {\n\n http_options.push((\"prevExist\".to_owned(), prev_exist.to_string()));\n", "file_path": "src/kv.rs", "rank": 13, "score": 93256.1310120592 }, { "content": "/// Gets the value of a node.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the node to retrieve.\n\n/// * options: Options to customize the behavior of the operation.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the key doesn't exist.\n\npub fn get<C>(client: &Client<C>, key: &str, options: GetOptions) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_get(\n\n client,\n\n key,\n\n InternalGetOptions {\n\n recursive: options.recursive,\n\n sort: Some(options.sort),\n\n strong_consistency: options.strong_consistency,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 14, "score": 92131.2157666097 }, { "content": "/// Handles all get operations.\n\nfn raw_get<C>(client: &Client<C>, key: &str, options: InternalGetOptions) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let mut query_pairs = HashMap::new();\n\n\n\n query_pairs.insert(\"recursive\", format!(\"{}\", options.recursive));\n\n\n\n if options.sort.is_some() {\n\n query_pairs.insert(\"sorted\", format!(\"{}\", options.sort.unwrap()));\n\n }\n\n\n\n if options.wait {\n\n query_pairs.insert(\"wait\", \"true\".to_owned());\n\n }\n\n\n\n if options.wait_index.is_some() {\n\n query_pairs.insert(\"waitIndex\", format!(\"{}\", options.wait_index.unwrap()));\n\n }\n\n\n", "file_path": "src/kv.rs", "rank": 15, "score": 91703.64849802389 }, { "content": "/// Gets all roles.\n\npub fn get_roles<C>(\n\n client: &Client<C>,\n\n) -> Box<Future<Item = Response<Vec<Role>>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = 
client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, \"/roles\");\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.get(uri).map_err(Error::from));\n\n\n\n let result = response.and_then(|response| {\n\n let status = response.status();\n", "file_path": "src/auth.rs", "rank": 16, "score": 89843.82674913718 }, { "content": "/// Creates a new role.\n\npub fn create_role<C>(\n\n client: &Client<C>,\n\n role: Role,\n\n) -> Box<Future<Item = Response<Role>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let body = serde_json::to_string(&role)\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let url = build_url(member, &format!(\"/roles/{}\", role.name));\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let params = uri.join(body);\n", "file_path": "src/auth.rs", "rank": 17, "score": 89843.74399985427 }, { "content": "/// Updates an existing role.\n\npub fn update_role<C>(\n\n client: &Client<C>,\n\n role: RoleUpdate,\n\n) -> Box<Future<Item = Response<Role>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let body = serde_json::to_string(&role)\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let url = build_url(member, &format!(\"/roles/{}\", role.name));\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let params = uri.join(body);\n", "file_path": "src/auth.rs", "rank": 18, "score": 89843.74399985428 }, { "content": "/// Creates a new empty directory.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the directory to create.\n\n/// * ttl: If given, the node will expire after this many seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the key already exists.\n\npub fn create_dir<C>(client: &Client<C>, key: &str, ttl: Option<u64>) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_set(\n\n client,\n\n key,\n\n SetOptions {\n\n dir: Some(true),\n\n prev_exist: Some(false),\n\n ttl: ttl,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 19, "score": 89524.97931021676 }, { "content": "/// Sets the key to an empty directory.\n\n///\n\n/// An existing key-value pair will be replaced, but an existing directory will not.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the directory to set.\n\n/// * ttl: If given, the node will expire after this many seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the node is an existing directory.\n\npub fn set_dir<C>(client: &Client<C>, key: &str, ttl: Option<u64>) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_set(\n\n client,\n\n key,\n\n SetOptions {\n\n dir: Some(true),\n\n ttl: ttl,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 20, "score": 89524.18796170042 }, { "content": "/// Updates a directory.\n\n///\n\n/// If the directory already existed, only the TTL is 
updated. If the key was a key-value pair, its\n\n/// value is removed and its TTL is updated.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the node to update.\n\n/// * ttl: If given, the node will expire after this many seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the node does not exist.\n\npub fn update_dir<C>(client: &Client<C>, key: &str, ttl: Option<u64>) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_set(\n\n client,\n\n key,\n\n SetOptions {\n\n dir: Some(true),\n\n prev_exist: Some(true),\n\n ttl: ttl,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 21, "score": 89524.10994586552 }, { "content": "/// Creates a new key-value pair.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the key-value pair to create.\n\n/// * value: The new value for the node.\n\n/// * ttl: If given, the node will expire after this many seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the key already exists.\n\npub fn create<C>(client: &Client<C>, key: &str, value: &str, ttl: Option<u64>) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_set(\n\n client,\n\n key,\n\n SetOptions {\n\n prev_exist: Some(false),\n\n ttl: ttl,\n\n value: Some(value),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 22, "score": 86149.85855863395 }, { "content": "/// Updates an existing key-value pair.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the key-value pair to update.\n\n/// * value: The new value for the key-value pair.\n\n/// * ttl: If given, the node will expire after this many seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the key does not exist.\n\npub fn update<C>(client: &Client<C>, key: &str, value: &str, ttl: Option<u64>) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_set(\n\n client,\n\n key,\n\n SetOptions {\n\n prev_exist: Some(true),\n\n ttl: ttl,\n\n value: Some(value),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 23, "score": 86149.77688832353 }, { "content": "/// Sets the value of a key-value pair.\n\n///\n\n/// Any previous value and TTL will be replaced.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the key-value pair to set.\n\n/// * value: The new value for the key-value pair.\n\n/// * ttl: If given, the node will expire after this many seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the node is a directory.\n\npub fn set<C>(client: &Client<C>, key: &str, value: &str, ttl: Option<u64>) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_set(\n\n client,\n\n key,\n\n SetOptions {\n\n ttl: ttl,\n\n value: Some(value),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 24, "score": 86149.39518619666 }, { "content": "/// Get a role.\n\npub fn get_role<C, N>(\n\n client: &Client<C>,\n\n name: N,\n\n) -> Box<Future<Item = Response<Role>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n N: Into<String>,\n\n{\n\n let http_client = client.http_client().clone();\n\n let name = name.into();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, &format!(\"/roles/{}\", name));\n\n let uri = 
Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.get(uri).map_err(Error::from));\n", "file_path": "src/auth.rs", "rank": 25, "score": 85871.65161115867 }, { "content": "/// Deletes a role.\n\npub fn delete_role<C, N>(\n\n client: &Client<C>,\n\n name: N,\n\n) -> Box<Future<Item = Response<()>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n N: Into<String>,\n\n{\n\n let http_client = client.http_client().clone();\n\n let name = name.into();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, &format!(\"/roles/{}\", name));\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.delete(uri).map_err(Error::from));\n", "file_path": "src/auth.rs", "rank": 26, "score": 85871.65161115867 }, { "content": "#[test]\n\nfn set() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n\n\n let work = kv::set(&client, \"/test/foo\", \"baz\", None).and_then(|res| {\n\n assert_eq!(res.data.action, Action::Set);\n\n\n\n let node = res.data.node;\n\n\n\n assert_eq!(node.value.unwrap(), \"baz\");\n\n assert!(node.ttl.is_none());\n\n\n\n Ok(())\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 27, "score": 77376.07655009895 }, { "content": "#[test]\n\nfn get() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", Some(60)).and_then(|_| {\n\n kv::get(&inner_client, \"/test/foo\", GetOptions::default()).and_then(|res| {\n\n assert_eq!(res.data.action, Action::Get);\n\n\n\n let node = res.data.node;\n\n\n\n assert_eq!(node.value.unwrap(), \"bar\");\n\n assert_eq!(node.ttl.unwrap(), 60);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 28, "score": 77376.07655009895 }, { "content": "#[test]\n\nfn list() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n\n\n let work = members::list(&client).and_then(|res| {\n\n let members = res.data;\n\n let member = &members[0];\n\n\n\n assert_eq!(member.name, \"default\");\n\n\n\n Ok(())\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n", "file_path": "tests/members_test.rs", "rank": 29, "score": 77376.07655009895 }, { "content": "#[test]\n\nfn https() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::https(core, true);\n\n\n\n let work = kv::set(&client, \"/test/foo\", \"bar\", Some(60));\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 30, "score": 77376.07655009895 }, { "content": "#[test]\n\nfn update() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|_| {\n\n kv::update(&inner_client, \"/test/foo\", \"blah\", Some(30)).and_then(|res| {\n\n assert_eq!(res.data.action, Action::Update);\n\n\n\n let node = res.data.node;\n\n\n\n assert_eq!(node.value.unwrap(), \"blah\");\n\n assert_eq!(node.ttl.unwrap(), 30);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n 
assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 31, "score": 77376.07655009895 }, { "content": "#[test]\n\nfn create() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", Some(60)).and_then(|res| {\n\n let node = res.data.node;\n\n\n\n assert_eq!(res.data.action, Action::Create);\n\n assert_eq!(node.value.unwrap(), \"bar\");\n\n assert_eq!(node.ttl.unwrap(), 60);\n\n\n\n Ok(())\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 32, "score": 77376.07655009895 }, { "content": "#[test]\n\nfn watch() {\n\n let (tx, rx) = channel();\n\n\n\n let child = spawn(move || {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n let inner_client = client.clone();\n\n\n\n let work = rx.then(|_| kv::set(&inner_client, \"/test/foo\", \"baz\", None));\n\n\n\n assert!(client.run(work).is_ok());\n\n });\n\n\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None)\n\n .map_err(|errors| WatchError::Other(errors))\n\n .and_then(move |_| {\n", "file_path": "tests/kv_test.rs", "rank": 33, "score": 77376.07655009895 }, { "content": "#[test]\n\nfn delete() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|_| {\n\n kv::delete(&inner_client, \"/test/foo\", false).and_then(|res| {\n\n assert_eq!(res.data.action, Action::Delete);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 34, "score": 77376.07655009895 }, { "content": "#[test]\n\nfn test_compare_and_swap() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|res| {\n\n let index = res.data.node.modified_index;\n\n\n\n kv::compare_and_swap(\n\n &inner_client,\n\n \"/test/foo\",\n\n \"baz\",\n\n Some(100),\n\n Some(\"bar\"),\n\n index,\n\n ).and_then(|res| {\n\n assert_eq!(res.data.action, Action::CompareAndSwap);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 35, "score": 77178.80370411286 }, { "content": "#[test]\n\nfn watch_cancel() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None)\n\n .map_err(|errors| WatchError::Other(errors))\n\n .and_then(move |_| {\n\n kv::watch(\n\n &inner_client,\n\n \"/test/foo\",\n\n WatchOptions {\n\n timeout: Some(Duration::from_millis(1)),\n\n ..Default::default()\n\n },\n\n )\n\n });\n\n\n\n match client.run(work) {\n\n Ok(_) => panic!(\"expected WatchError::Timeout\"),\n\n Err(WatchError::Timeout) => {}\n\n Err(_) => panic!(\"expected WatchError::Timeout\"),\n\n }\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 36, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn leader_stats() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n\n\n let work = stats::leader_stats(&client);\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", 
"file_path": "tests/stats_test.rs", "rank": 37, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn get_recursive() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::set(&client, \"/test/dir/baz\", \"blah\", None).and_then(|_| {\n\n kv::get(\n\n &inner_client,\n\n \"/test\",\n\n GetOptions {\n\n recursive: true,\n\n sort: true,\n\n ..Default::default()\n\n },\n\n ).and_then(|res| {\n\n let nodes = res.data.node.nodes.unwrap();\n\n\n\n assert_eq!(\n\n nodes[0].clone().nodes.unwrap()[0].clone().value.unwrap(),\n\n \"blah\"\n\n );\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 38, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn compare_and_swap() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|_| {\n\n kv::compare_and_swap(&inner_client, \"/test/foo\", \"baz\", None, Some(\"bar\"), None).and_then(\n\n |res| {\n\n assert_eq!(res.data.action, Action::CompareAndSwap);\n\n\n\n Ok(())\n\n },\n\n )\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 39, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn self_stats() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n\n\n let work = stats::self_stats(&client).collect().and_then(|_| Ok(()));\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/stats_test.rs", "rank": 40, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn watch_index() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::set(&client, \"/test/foo\", \"bar\", None)\n\n .map_err(|errors| WatchError::Other(errors))\n\n .and_then(move |res| {\n\n let index = res.data.node.modified_index;\n\n\n\n kv::watch(\n\n &inner_client,\n\n \"/test/foo\",\n\n WatchOptions {\n\n index: index,\n\n ..Default::default()\n\n },\n\n ).and_then(move |res| {\n\n let node = res.data.node;\n\n\n\n assert_eq!(node.modified_index, index);\n\n assert_eq!(node.value.unwrap(), \"bar\");\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 41, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn delete_dir() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create_dir(&client, \"/test/dir\", None).and_then(|_| {\n\n kv::delete_dir(&inner_client, \"/test/dir\").and_then(|res| {\n\n assert_eq!(res.data.action, Action::Delete);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 42, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn store_stats() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n\n\n let work = stats::store_stats(&client).collect().and_then(|_| Ok(()));\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n", "file_path": "tests/stats_test.rs", "rank": 43, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn watch_recursive() {\n\n let (tx, rx) = channel();\n\n\n\n let child = spawn(move || {\n\n let core = Core::new().unwrap();\n\n let mut client 
= TestClient::no_destructor(core);\n\n let inner_client = client.clone();\n\n\n\n let work = rx.then(|_| kv::set(&inner_client, \"/test/foo/bar\", \"baz\", None));\n\n\n\n assert!(client.run(work).is_ok());\n\n });\n\n\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n\n\n tx.send(()).unwrap();\n\n\n\n let work = kv::watch(\n\n &client,\n", "file_path": "tests/kv_test.rs", "rank": 44, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn create_dir() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n\n\n let work = kv::create_dir(&client, \"/test/dir\", None).and_then(|res| {\n\n assert_eq!(res.data.action, Action::Create);\n\n\n\n let node = res.data.node;\n\n\n\n assert!(node.dir.is_some());\n\n assert!(node.value.is_none());\n\n\n\n Ok(())\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 45, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn update_dir() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create_dir(&client, \"/test\", None).and_then(|_| {\n\n kv::update_dir(&inner_client, \"/test\", Some(60)).and_then(|res| {\n\n assert_eq!(res.data.node.ttl.unwrap(), 60);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 46, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn create_in_order() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n\n\n let requests: Vec<FutureKeyValueInfo> = (1..4)\n\n .map(|_| kv::create_in_order(&client, \"/test/foo\", \"bar\", None))\n\n .collect();\n\n\n\n let work = join_all(requests).and_then(|res: Vec<Response<KeyValueInfo>>| {\n\n let mut kvis: Vec<KeyValueInfo> = res.into_iter().map(|response| response.data).collect();\n\n kvis.sort_by_key(|ref kvi| kvi.node.modified_index);\n\n\n\n let keys: Vec<String> = kvis.into_iter().map(|kvi| kvi.node.key.unwrap()).collect();\n\n\n\n assert!(keys[0] < keys[1]);\n\n assert!(keys[1] < keys[2]);\n\n\n\n Ok(())\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 47, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn compare_and_delete() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|res| {\n\n let index = res.data.node.modified_index;\n\n\n\n kv::compare_and_delete(&inner_client, \"/test/foo\", Some(\"bar\"), index).and_then(|res| {\n\n assert_eq!(res.data.action, Action::CompareAndDelete);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 48, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn set_dir() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::set_dir(&client, \"/test\", None).and_then(|_| {\n\n kv::set_dir(&inner_client, \"/test\", None)\n\n .then(|result| match result {\n\n Ok(_) => panic!(\"set_dir should fail on an existing dir\"),\n\n Err(_) => Ok(()),\n\n })\n\n .and_then(|_| {\n\n kv::set(&inner_client, \"/test/foo\", \"bar\", None)\n\n .and_then(|_| kv::set_dir(&inner_client, \"/test/foo\", None))\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", 
"file_path": "tests/kv_test.rs", "rank": 49, "score": 75110.07120578586 }, { "content": "#[test]\n\nfn compare_and_delete_only_index() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|res| {\n\n let index = res.data.node.modified_index;\n\n\n\n kv::compare_and_delete(&inner_client, \"/test/foo\", None, index).and_then(|res| {\n\n assert_eq!(res.data.action, Action::CompareAndDelete);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 50, "score": 73007.65170708435 }, { "content": "#[test]\n\nfn get_non_recursive() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = join_all(vec![\n\n kv::set(&client, \"/test/dir/baz\", \"blah\", None),\n\n kv::set(&client, \"/test/foo\", \"bar\", None),\n\n ]).and_then(|_| {\n\n kv::get(\n\n &inner_client,\n\n \"/test\",\n\n GetOptions {\n\n sort: true,\n\n ..Default::default()\n\n },\n\n ).and_then(|res| {\n\n let node = res.data.node;\n\n\n\n assert_eq!(node.dir.unwrap(), true);\n", "file_path": "tests/kv_test.rs", "rank": 51, "score": 73007.65170708435 }, { "content": "#[test]\n\nfn compare_and_swap_only_index() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|res| {\n\n let index = res.data.node.modified_index;\n\n\n\n kv::compare_and_swap(&inner_client, \"/test/foo\", \"baz\", None, None, index).and_then(|res| {\n\n assert_eq!(res.data.action, Action::CompareAndSwap);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 52, "score": 73007.65170708435 }, { "content": "#[test]\n\nfn compare_and_delete_only_value() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|_| {\n\n kv::compare_and_delete(&inner_client, \"/test/foo\", Some(\"bar\"), None).and_then(|res| {\n\n assert_eq!(res.data.action, Action::CompareAndDelete);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 53, "score": 73007.65170708435 }, { "content": "#[test]\n\nfn create_does_not_replace_existing_key() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", Some(60)).and_then(move |_| {\n\n kv::create(&inner_client, \"/test/foo\", \"bar\", Some(60)).then(|result| {\n\n match result {\n\n Ok(_) => panic!(\"expected EtcdError due to pre-existing key\"),\n\n Err(errors) => for error in errors {\n\n match error {\n\n Error::Api(ref error) => assert_eq!(error.message, \"Key already exists\"),\n\n _ => panic!(\"expected EtcdError due to pre-existing key\"),\n\n }\n\n },\n\n }\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 54, "score": 71051.09225090354 }, { "content": "#[test]\n\nfn update_dir_replaces_key() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = 
client.clone();\n\n\n\n let work = kv::set(&client, \"/test/foo\", \"bar\", None).and_then(|_| {\n\n kv::update_dir(&inner_client, \"/test/foo\", Some(60)).and_then(|res| {\n\n let node = res.data.node;\n\n\n\n assert_eq!(node.value.unwrap(), \"\");\n\n assert_eq!(node.ttl.unwrap(), 60);\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 55, "score": 71051.09225090354 }, { "content": "#[test]\n\nfn compare_and_delete_requires_conditions() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|_| {\n\n kv::compare_and_delete(&inner_client, \"/test/foo\", None, None).then(|result| match result {\n\n Ok(_) => panic!(\"expected Error::InvalidConditions\"),\n\n Err(errors) => if errors.len() == 1 {\n\n match errors[0] {\n\n Error::InvalidConditions => Ok(()),\n\n _ => panic!(\"expected Error::InvalidConditions\"),\n\n }\n\n } else {\n\n panic!(\"expected a single error: Error::InvalidConditions\");\n\n },\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 56, "score": 71051.09225090354 }, { "content": "#[test]\n\nfn update_requires_existing_key() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n\n\n let work = kv::update(&client, \"/test/foo\", \"bar\", None).then(|result| {\n\n match result {\n\n Err(ref errors) => match errors[0] {\n\n Error::Api(ref error) => assert_eq!(error.message, \"Key not found\"),\n\n _ => panic!(\"expected EtcdError due to missing key\"),\n\n },\n\n _ => panic!(\"expected EtcdError due to missing key\"),\n\n }\n\n\n\n let result: Result<(), ()> = Ok(());\n\n\n\n result\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 57, "score": 71051.09225090354 }, { "content": "#[test]\n\nfn compare_and_swap_requires_conditions() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|_| {\n\n kv::compare_and_swap(&inner_client, \"/test/foo\", \"baz\", None, None, None).then(\n\n |result| match result {\n\n Ok(_) => panic!(\"expected Error::InvalidConditions\"),\n\n Err(errors) => if errors.len() == 1 {\n\n match errors[0] {\n\n Error::InvalidConditions => Ok(()),\n\n _ => panic!(\"expected Error::InvalidConditions\"),\n\n }\n\n } else {\n\n panic!(\"expected a single error: Error::InvalidConditions\");\n\n },\n\n },\n\n )\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 58, "score": 71051.09225090354 }, { "content": "#[test]\n\nfn update_dir_requires_existing_dir() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::no_destructor(core);\n\n\n\n let work = kv::update_dir(&client, \"/test\", None);\n\n\n\n assert!(client.run(work).is_err());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 59, "score": 69225.21251393757 }, { "content": "#[test]\n\nfn create_in_order_must_operate_on_a_directory() {\n\n let core = Core::new().unwrap();\n\n let mut client = TestClient::new(core);\n\n let inner_client = client.clone();\n\n\n\n let work = kv::create(&client, \"/test/foo\", \"bar\", None).and_then(|_| {\n\n kv::create_in_order(&inner_client, \"/test/foo\", \"baz\", None).then(|result| 
{\n\n assert!(result.is_err());\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 60, "score": 69225.21251393757 }, { "content": "extern crate etcd;\n\nextern crate futures;\n\nextern crate hyper;\n\nextern crate hyper_tls;\n\nextern crate native_tls;\n\nextern crate tokio_core;\n\n\n\nuse futures::{Future, Stream};\n\nuse tokio_core::reactor::Core;\n\n\n\nuse test::TestClient;\n\n\n\nmod test;\n\n\n\n#[test]\n", "file_path": "tests/client_test.rs", "rank": 61, "score": 67120.71425192605 }, { "content": "/// Attempts to disable the auth system.\n\npub fn disable<C>(\n\n client: &Client<C>,\n\n) -> Box<Future<Item = Response<AuthChange>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, \"/enable\");\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.delete(uri).map_err(Error::from));\n\n\n\n let result = response.and_then(|response| {\n\n let status = response.status();\n", "file_path": "src/auth.rs", "rank": 68, "score": 65043.50476113112 }, { "content": "/// Gets all users.\n\npub fn get_users<C>(\n\n client: &Client<C>,\n\n) -> Box<Future<Item = Response<Vec<UserDetail>>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, \"/users\");\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.get(uri).map_err(Error::from));\n\n\n\n let result = response.and_then(|response| {\n\n let status = response.status();\n", "file_path": "src/auth.rs", "rank": 69, "score": 62781.67435417681 }, { "content": "/// Updates an existing user.\n\npub fn update_user<C>(\n\n client: &Client<C>,\n\n user: UserUpdate,\n\n) -> Box<Future<Item = Response<User>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let body = serde_json::to_string(&user)\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let url = build_url(member, &format!(\"/users/{}\", user.name));\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let params = uri.join(body);\n", "file_path": "src/auth.rs", "rank": 70, "score": 62781.67435417681 }, { "content": "/// Creates a new user.\n\npub fn create_user<C>(\n\n client: &Client<C>,\n\n user: NewUser,\n\n) -> Box<Future<Item = Response<User>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let body = serde_json::to_string(&user)\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let url = build_url(member, &format!(\"/users/{}\", user.name));\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let params = uri.join(body);\n", "file_path": "src/auth.rs", "rank": 71, "score": 
62781.67435417681 }, { "content": "#[derive(Debug, Clone, Deserialize, Eq, Hash, PartialEq)]\n\nstruct Roles {\n\n roles: Option<Vec<Role>>,\n\n}\n\n\n\n/// Parameters used to update an existing authorization role.\n\n#[derive(Debug, Clone, Eq, Hash, PartialEq, Serialize)]\n\npub struct RoleUpdate {\n\n /// The name of the role.\n\n #[serde(rename = \"role\")]\n\n name: String,\n\n /// Permissions being added to the role.\n\n #[serde(rename = \"grant\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n grants: Option<Permissions>,\n\n /// Permissions being removed from the role.\n\n #[serde(rename = \"revoke\")]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n revocations: Option<Permissions>,\n\n}\n\n\n", "file_path": "src/auth.rs", "rank": 72, "score": 60539.232395760235 }, { "content": "/// Get a user.\n\npub fn get_user<C, N>(\n\n client: &Client<C>,\n\n name: N,\n\n) -> Box<Future<Item = Response<UserDetail>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n N: Into<String>,\n\n{\n\n let http_client = client.http_client().clone();\n\n let name = name.into();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, &format!(\"/users/{}\", name));\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.get(uri).map_err(Error::from));\n", "file_path": "src/auth.rs", "rank": 73, "score": 59919.433685490236 }, { "content": "/// Deletes a user.\n\npub fn delete_user<C, N>(\n\n client: &Client<C>,\n\n name: N,\n\n) -> Box<Future<Item = Response<()>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n N: Into<String>,\n\n{\n\n let http_client = client.http_client().clone();\n\n let name = name.into();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, &format!(\"/users/{}\", name));\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.delete(uri).map_err(Error::from));\n", "file_path": "src/auth.rs", "rank": 74, "score": 59919.433685490236 }, { "content": "/// Constructs the full URL for the versions API call.\n\nfn build_url(endpoint: &Uri, path: &str) -> String {\n\n format!(\"{}{}\", endpoint, path)\n\n}\n", "file_path": "src/client.rs", "rank": 75, "score": 55285.10778661762 }, { "content": "/// Constructs the full URL for an API call.\n\nfn build_url(endpoint: &Uri, path: &str) -> String {\n\n format!(\"{}v2/auth{}\", endpoint, path)\n\n}\n", "file_path": "src/auth.rs", "rank": 76, "score": 54936.82662809697 }, { "content": "/// Watches a node for changes and returns the new value as soon as a change takes place.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the node to watch.\n\n/// * options: Options to customize the behavior of the operation.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if `options.index` is too old and has been flushed out of etcd's internal store of the\n\n/// most recent change events. 
In this case, the key should be queried for its latest\n\n/// \"modified index\" value and that should be used as the new `options.index` on a subsequent\n\n/// `watch`.\n\n///\n\n/// Fails if a timeout is specified and the duration lapses without a response from the etcd\n\n/// cluster.\n\npub fn watch<C>(\n\n client: &Client<C>,\n\n key: &str,\n\n options: WatchOptions,\n\n) -> Box<Future<Item = Response<KeyValueInfo>, Error = WatchError> + Send>\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let work = raw_get(\n\n client,\n\n key,\n\n InternalGetOptions {\n\n recursive: options.recursive,\n\n wait_index: options.index,\n\n wait: true,\n\n ..Default::default()\n\n },\n\n ).map_err(|errors| WatchError::Other(errors));\n\n\n\n if let Some(duration) = options.timeout {\n\n Box::new(Timeout::new(work, duration)\n\n .map_err(|e| match e.into_inner() {\n\n Some(we) => we,\n\n None => WatchError::Timeout,\n\n }))\n\n } else {\n\n Box::new(work)\n\n }\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 77, "score": 38187.50984877146 }, { "content": "/// Deletes a member from the cluster.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * id: The unique identifier of the member to delete.\n\npub fn delete<C>(\n\n client: &Client<C>,\n\n id: String,\n\n) -> Box<Future<Item = Response<()>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, &format!(\"/{}\", id));\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n\n .into_future();\n\n\n\n let http_client = http_client.clone();\n\n\n\n let response = uri.and_then(move |uri| http_client.delete(uri).map_err(Error::from));\n\n\n\n let result = response.and_then(|response| {\n", "file_path": "src/members.rs", "rank": 78, "score": 38185.03597227993 }, { "content": "/// Adds a new member to the cluster.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * peer_urls: URLs exposing this cluster member's peer API.\n\npub fn add<C>(\n\n client: &Client<C>,\n\n peer_urls: Vec<String>,\n\n) -> Box<Future<Item = Response<()>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let peer_urls = PeerUrls { peer_urls };\n\n\n\n let body = match serde_json::to_string(&peer_urls) {\n\n Ok(body) => body,\n\n Err(error) => return Box::new(Err(vec![Error::Serialization(error)]).into_future()),\n\n };\n\n\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, \"\");\n\n let uri = Uri::from_str(url.as_str())\n\n .map_err(Error::from)\n", "file_path": "src/members.rs", "rank": 79, "score": 38184.62191092997 }, { "content": "/// Updates the peer URLs of a member of the cluster.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * id: The unique identifier of the member to update.\n\n/// * peer_urls: URLs exposing this cluster member's peer API.\n\npub fn update<C>(\n\n client: &Client<C>,\n\n id: String,\n\n peer_urls: Vec<String>,\n\n) -> Box<Future<Item = Response<()>, Error = Vec<Error>>>\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let peer_urls = PeerUrls { peer_urls };\n\n\n\n let body = match serde_json::to_string(&peer_urls) {\n\n Ok(body) => body,\n\n Err(error) => return Box::new(Err(vec![Error::Serialization(error)]).into_future()),\n\n 
};\n\n\n\n let http_client = client.http_client().clone();\n\n\n\n let result = first_ok(client.endpoints().to_vec(), move |member| {\n\n let url = build_url(member, &format!(\"/{}\", id));\n\n let uri = Uri::from_str(url.as_str())\n", "file_path": "src/members.rs", "rank": 80, "score": 38184.12686140284 }, { "content": "extern crate etcd;\n\nextern crate futures;\n\nextern crate hyper;\n\nextern crate hyper_tls;\n\nextern crate native_tls;\n\nextern crate tokio_core;\n\n\n\nuse etcd::stats;\n\nuse futures::{Future, Stream};\n\nuse tokio_core::reactor::Core;\n\n\n\nuse test::TestClient;\n\n\n\nmod test;\n\n\n\n#[test]\n", "file_path": "tests/stats_test.rs", "rank": 81, "score": 37172.940785511084 }, { "content": "extern crate etcd;\n\nextern crate futures;\n\nextern crate hyper;\n\nextern crate hyper_tls;\n\nextern crate native_tls;\n\nextern crate tokio_core;\n\nextern crate tokio_timer;\n\n\n\n\n\nuse futures::future::Future;\n\nuse tokio_core::reactor::Core;\n\nuse etcd::members;\n\n\n\nuse test::TestClient;\n\n\n\nmod test;\n\n\n\n#[test]\n", "file_path": "tests/members_test.rs", "rank": 82, "score": 37172.76899405697 }, { "content": "extern crate etcd;\n\nextern crate futures;\n\nextern crate hyper;\n\nextern crate hyper_tls;\n\nextern crate native_tls;\n\nextern crate tokio_core;\n\nextern crate tokio_timer;\n\n\n\nuse std::thread::spawn;\n\nuse std::time::Duration;\n\n\n\nuse futures::future::{join_all, Future};\n\nuse futures::sync::oneshot::channel;\n\nuse tokio_core::reactor::Core;\n\nuse etcd::{Error, Response};\n\nuse etcd::kv::{\n\n self,\n\n Action,\n\n FutureKeyValueInfo,\n\n GetOptions,\n", "file_path": "tests/kv_test.rs", "rank": 83, "score": 37168.14220701445 }, { "content": "use std::fs::File;\n\nuse std::io::Read;\n\nuse std::ops::Deref;\n\n\n\nuse etcd::{kv, Client};\n\nuse futures::Future;\n\nuse hyper::client::{Client as Hyper, HttpConnector};\n\nuse hyper::client::connect::Connect;\n\nuse hyper_tls::HttpsConnector;\n\nuse native_tls::{Certificate, TlsConnector, Identity};\n\nuse tokio_core::reactor::Core;\n\n\n\n/// Wrapper around Client that automatically cleans up etcd after each test.\n\npub struct TestClient<C>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n c: Client<C>,\n\n core: Core,\n\n run_destructor: bool,\n", "file_path": "tests/test/mod.rs", "rank": 84, "score": 37163.400847535784 }, { "content": "where\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n #[allow(dead_code)]\n\n pub fn run<W, T, E>(&mut self, work: W) -> Result<T, E>\n\n where\n\n W: Future<Item = T, Error = E>,\n\n {\n\n self.core.run(work)\n\n }\n\n}\n\n\n\nimpl<C> Drop for TestClient<C>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n fn drop(&mut self) {\n\n if self.run_destructor {\n\n let work = kv::delete(&self.c, \"/test\", true);\n\n self.core.run(work).unwrap();\n", "file_path": "tests/test/mod.rs", "rank": 85, "score": 37159.153094843976 }, { "content": " .identity(Identity::from_pkcs12(&pkcs12_buffer, \"secret\").unwrap());\n\n }\n\n\n\n let tls_connector = builder.build().unwrap();\n\n\n\n let mut http_connector = HttpConnector::new(1);\n\n http_connector.enforce_http(false);\n\n let https_connector = HttpsConnector::from((http_connector, tls_connector));\n\n\n\n let hyper = Hyper::builder().build(https_connector);\n\n\n\n TestClient {\n\n c: Client::custom(hyper, &[\"https://etcdsecure:2379\"], None).unwrap(),\n\n core,\n\n run_destructor: use_client_cert,\n\n }\n\n }\n\n}\n\n\n\nimpl<C> TestClient<C>\n", "file_path": "tests/test/mod.rs", "rank": 86, 
"score": 37158.085122478566 }, { "content": " }\n\n }\n\n\n\n /// Creates a new HTTPS client for a test.\n\n #[allow(dead_code)]\n\n pub fn https(core: Core, use_client_cert: bool) -> TestClient<HttpsConnector<HttpConnector>> {\n\n let mut ca_cert_file = File::open(\"/source/tests/ssl/ca.der\").unwrap();\n\n let mut ca_cert_buffer = Vec::new();\n\n ca_cert_file.read_to_end(&mut ca_cert_buffer).unwrap();\n\n\n\n let mut builder = TlsConnector::builder();\n\n builder\n\n .add_root_certificate(Certificate::from_der(&ca_cert_buffer).unwrap());\n\n\n\n if use_client_cert {\n\n let mut pkcs12_file = File::open(\"/source/tests/ssl/client.p12\").unwrap();\n\n let mut pkcs12_buffer = Vec::new();\n\n pkcs12_file.read_to_end(&mut pkcs12_buffer).unwrap();\n\n\n\n builder\n", "file_path": "tests/test/mod.rs", "rank": 87, "score": 37157.08749347018 }, { "content": "}\n\n\n\nimpl TestClient<HttpConnector> {\n\n /// Creates a new client for a test.\n\n #[allow(dead_code)]\n\n pub fn new(core: Core) -> TestClient<HttpConnector> {\n\n TestClient {\n\n c: Client::new(&[\"http://etcd:2379\"], None).unwrap(),\n\n core: core,\n\n run_destructor: true,\n\n }\n\n }\n\n\n\n /// Creates a new client for a test that will not clean up the key space afterwards.\n\n #[allow(dead_code)]\n\n pub fn no_destructor(core: Core) -> TestClient<HttpConnector> {\n\n TestClient {\n\n c: Client::new(&[\"http://etcd:2379\"], None).unwrap(),\n\n core: core,\n\n run_destructor: false,\n", "file_path": "tests/test/mod.rs", "rank": 88, "score": 37156.38302929461 }, { "content": " KeyValueInfo,\n\n WatchError,\n\n WatchOptions,\n\n};\n\n\n\nuse test::TestClient;\n\n\n\nmod test;\n\n\n\n#[test]\n", "file_path": "tests/kv_test.rs", "rank": 89, "score": 37155.15873162244 }, { "content": " tx.send(()).unwrap();\n\n\n\n kv::watch(&inner_client, \"/test/foo\", WatchOptions::default()).and_then(|res| {\n\n assert_eq!(res.data.node.value.unwrap(), \"baz\");\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n\n\n child.join().ok().unwrap();\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 90, "score": 37149.86928455141 }, { "content": " \"/test\",\n\n WatchOptions {\n\n recursive: true,\n\n ..Default::default()\n\n },\n\n ).and_then(|res| {\n\n let node = res.data.node;\n\n\n\n assert_eq!(node.key.unwrap(), \"/test/foo/bar\");\n\n assert_eq!(node.value.unwrap(), \"baz\");\n\n\n\n Ok(())\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n\n\n child.join().ok().unwrap();\n\n}\n", "file_path": "tests/kv_test.rs", "rank": 91, "score": 37149.47118204142 }, { "content": "\n\n let nodes = node.nodes.unwrap();\n\n\n\n assert_eq!(nodes[0].clone().key.unwrap(), \"/test/dir\");\n\n assert_eq!(nodes[0].clone().dir.unwrap(), true);\n\n assert_eq!(nodes[1].clone().key.unwrap(), \"/test/foo\");\n\n assert_eq!(nodes[1].clone().value.unwrap(), \"bar\");\n\n\n\n Ok(())\n\n })\n\n });\n\n\n\n assert!(client.run(work).is_ok());\n\n}\n\n\n", "file_path": "tests/kv_test.rs", "rank": 92, "score": 37149.16200257243 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<C> Deref for TestClient<C>\n\nwhere\n\n C: Clone + Connect + Sync + 'static,\n\n{\n\n type Target = Client<C>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.c\n\n }\n\n}\n", "file_path": "tests/test/mod.rs", "rank": 93, "score": 37148.455755267554 }, { "content": "/// Deletes a node only if the given current value and/or current modified index match.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the node to 
delete.\n\n/// * current_value: If given, the node must currently have this value for the operation to\n\n/// succeed.\n\n/// * current_modified_index: If given, the node must currently be at this modified index for the\n\n/// operation to succeed.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the conditions didn't match or if no conditions were given.\n\npub fn compare_and_delete<C>(\n\n client: &Client<C>,\n\n key: &str,\n\n current_value: Option<&str>,\n\n current_modified_index: Option<u64>,\n\n) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_delete(\n\n client,\n\n key,\n\n DeleteOptions {\n\n conditions: Some(ComparisonConditions {\n\n value: current_value,\n\n modified_index: current_modified_index,\n\n }),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 94, "score": 37075.7527880616 }, { "content": "/// Updates a node only if the given current value and/or current modified index\n\n/// match.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the node to update.\n\n/// * value: The new value for the node.\n\n/// * ttl: If given, the node will expire after this many seconds.\n\n/// * current_value: If given, the node must currently have this value for the operation to\n\n/// succeed.\n\n/// * current_modified_index: If given, the node must currently be at this modified index for the\n\n/// operation to succeed.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the conditions didn't match or if no conditions were given.\n\npub fn compare_and_swap<C>(\n\n client: &Client<C>,\n\n key: &str,\n\n value: &str,\n\n ttl: Option<u64>,\n\n current_value: Option<&str>,\n\n current_modified_index: Option<u64>,\n\n) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_set(\n\n client,\n\n key,\n\n SetOptions {\n\n conditions: Some(ComparisonConditions {\n\n value: current_value,\n\n modified_index: current_modified_index,\n\n }),\n\n ttl: ttl,\n\n value: Some(value),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 95, "score": 37075.13465031469 }, { "content": "/// Creates a new key-value pair in a directory with a numeric key name larger than any of its\n\n/// sibling key-value pairs.\n\n///\n\n/// For example, the first value created with this function under the directory \"/foo\" will have a\n\n/// key name like \"00000000000000000001\" automatically generated. 
The second value created with\n\n/// this function under the same directory will have a key name like \"00000000000000000002\".\n\n///\n\n/// This behavior is guaranteed by the server.\n\n///\n\n/// # Parameters\n\n///\n\n/// * client: A `Client` to use to make the API call.\n\n/// * key: The name of the directory to create a key-value pair in.\n\n/// * value: The new value for the key-value pair.\n\n/// * ttl: If given, the node will expire after this many seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// Fails if the key already exists and is not a directory.\n\npub fn create_in_order<C>(\n\n client: &Client<C>,\n\n key: &str,\n\n value: &str,\n\n ttl: Option<u64>,\n\n) -> FutureKeyValueInfo\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n raw_set(\n\n client,\n\n key,\n\n SetOptions {\n\n create_in_order: true,\n\n ttl: ttl,\n\n value: Some(value),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kv.rs", "rank": 96, "score": 37074.504964586384 }, { "content": "/// Returns statistics about the leader member of a cluster.\n\n///\n\n/// Fails if JSON decoding fails, which suggests a bug in our schema.\n\npub fn leader_stats<C>(\n\n client: &Client<C>,\n\n) -> Box<Future<Item = Response<LeaderStats>, Error = Error>>\n\nwhere\n\n C: Clone + Connect,\n\n{\n\n let url = build_url(&client.endpoints()[0], \"v2/stats/leader\");\n\n let uri = url.parse().map_err(Error::from).into_future();\n\n\n\n client.request(uri)\n\n}\n\n\n", "file_path": "src/stats.rs", "rank": 97, "score": 37069.39751779878 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct AuthStatus {\n\n /// Whether or not the auth system is enabled.\n\n pub enabled: bool,\n\n}\n\n\n\n/// The type returned when the auth system is successfully enabled or disabled.\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\npub enum AuthChange {\n\n /// The auth system was successfully enabled or disabled.\n\n Changed,\n\n /// The auth system was already in the desired state.\n\n Unchanged,\n\n}\n\n\n\n/// An existing etcd user with a list of their granted roles.\n\n#[derive(Debug, Clone, Deserialize, Eq, Hash, PartialEq)]\n\npub struct User {\n\n /// The user's name.\n\n #[serde(rename = \"user\")]\n\n name: String,\n", "file_path": "src/auth.rs", "rank": 98, "score": 36202.943406857856 }, { "content": "//! Contains etcd error types.\n\n\n\nuse std::convert::From;\n\nuse std::error::Error as StdError;\n\nuse std::fmt::{Display, Error as FmtError, Formatter};\n\n\n\nuse hyper::{Error as HttpError, StatusCode};\n\nuse hyper_http::uri::InvalidUri;\n\n#[cfg(feature = \"tls\")]\n\nuse native_tls::Error as TlsError;\n\nuse serde_json::Error as SerializationError;\n\nuse tokio_timer::timeout::Error as TokioTimeoutError;\n\nuse url::ParseError as UrlError;\n\n\n\n/// An error returned by an etcd API endpoint.\n\n///\n\n/// This is a logical error, as opposed to other types of errors that may occur when using this\n\n/// crate, such as network or serialization errors. See `Error` for the other types of errors.\n\n#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq)]\n\npub struct ApiError {\n", "file_path": "src/error.rs", "rank": 99, "score": 32449.186763656082 } ]
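For readers skimming the etcd snippets in the row above, a hypothetical driver sketch showing how the compare_and_swap call whose signature appears there would be invoked; the endpoint URL and key names are placeholders, and the Client::new / tokio-core usage is copied from the tests/test/mod.rs snippet in the same row, so treat it as an assumption about that crate version rather than a definitive recipe.

extern crate etcd;
extern crate tokio_core;

use etcd::{kv, Client};
use tokio_core::reactor::Core;

fn main() {
    // Single-threaded reactor, as in the test harness shown above.
    let mut core = Core::new().unwrap();
    // Plain HTTP client against one endpoint (placeholder URL).
    let client = Client::new(&["http://etcd:2379"], None).unwrap();

    // Swap /test/foo to "new" only if it currently holds "old";
    // ttl and current_modified_index are left unset.
    let work = kv::compare_and_swap(&client, "/test/foo", "new", None, Some("old"), None);
    let result = core.run(work);
    println!("compare-and-swap succeeded: {}", result.is_ok());
}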
Rust
src/order_service.rs
alank976/restaurant-order
a623ebede260f11c324982d9831136f2a4e8254a
use std::collections::HashMap; use std::sync::{Arc, RwLock}; use crate::order_item::OrderItem; pub struct OrderService(Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>); impl OrderService { pub fn new() -> Self { OrderService(Arc::new(RwLock::new(HashMap::new()))) } fn new_for_test(m: Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>) -> Self { OrderService(m) } pub fn add(&self, table_id: u8, item: OrderItem) -> Result<(), ()> { match table_id { 1..=100 => { let mut items_by_table_id = self.0 .write() .unwrap(); if let Some(items) = items_by_table_id.get_mut(&table_id) { items.push(item); Ok(()) } else { items_by_table_id.insert(table_id, vec![item]); Ok(()) } } _ => Err(()) } } pub fn get_items(&self, table_id: u8) -> Result<Vec<OrderItem>, ()> { match table_id { 1..=100 => Ok( self.0 .read() .unwrap() .get(&table_id) .map(|it| it.clone()) .unwrap_or(vec![]) ), _ => Err(()) } } pub fn cancel_item(&self, table_id: u8, item_name: String) -> Result<(), ()> { match table_id { 1..=100 => { if let Some(items) = self.0 .write() .unwrap() .get_mut(&table_id) { items.retain(|item| item_name != *item.name); } Ok(()) } _ => Err(()) } } } #[cfg(test)] mod tests { use super::*; fn new_service_and_inner_map() -> (OrderService, Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>) { let m: HashMap<u8, Vec<OrderItem>> = HashMap::new(); let arc_rw = Arc::new(RwLock::new(m)); let svc = OrderService::new_for_test(arc_rw.clone()); (svc, arc_rw.clone()) } #[test] fn it_returns_items_from_hashmap() { let (svc, rw_map) = new_service_and_inner_map(); rw_map .write() .unwrap() .insert(10, vec![OrderItem::new("sushi".to_string())]); let items = svc.get_items(10); assert!(items.is_ok()); let items = items.unwrap(); assert_eq!(1, items.len()); let item = items.get(0).unwrap(); assert_eq!("sushi".to_string(), item.name); } #[test] fn it_returns_nothing_when_no_table_id_found() { let (svc, _) = new_service_and_inner_map(); let items = svc.get_items(2); assert!(items.is_ok()); assert!(items.unwrap().is_empty()); } #[test] fn it_saves_item_into_map() { let (svc, rw_map) = new_service_and_inner_map(); let result = svc.add(1, OrderItem::new("french fries".to_string())); assert!(result.is_ok()); let inner_map = rw_map .read() .unwrap(); let items = inner_map.get(&1); assert!(items.is_some()); let items = items.unwrap(); assert_eq!(1, items.len()); assert_eq!("french fries", items .first() .unwrap() .name() ); } #[test] fn it_saves_items_with_same_name() { let (svc, rw_map) = new_service_and_inner_map(); for _ in 0..2 { let result = svc.add(1, OrderItem::new("french fries".to_string())); assert!(result.is_ok()); } let inner_map = rw_map .read() .unwrap(); let mut items = inner_map.get(&1); assert!(items.is_some()); let items = items.as_mut().unwrap(); assert_eq!(2, items.len()); assert!(items.iter() .map(|item: &OrderItem| item.name()) .all(|name| "french fries" == name)); } #[test] fn it_deletes_item() { let (svc, rw_map) = new_service_and_inner_map(); rw_map .write() .unwrap() .insert(1, vec![OrderItem::new("ramen".to_string())]); svc.cancel_item(1, "ramen".to_string()).unwrap(); assert!(rw_map .read() .unwrap() .get(&1) .unwrap() .is_empty()); } #[test] fn it_rejects_when_table_id_greater_100() { let (svc, _) = new_service_and_inner_map(); let items = svc.get_items(200); assert!(items.is_err()); } }
use std::collections::HashMap; use std::sync::{Arc, RwLock}; use crate::order_item::OrderItem; pub struct OrderService(Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>); impl OrderService { pub fn new() -> Self { OrderService(Arc::new(RwLock::new(HashMap::new()))) } fn new_for_test(m: Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>) -> Self { OrderService(m) } pub fn add(&self, table_id: u8, item: OrderItem) -> Result<(), ()> { match table_id { 1..=100 => { let mut items_by_table_id = self.0 .write() .unwrap(); if let Some(items) = items_by_table_id.get_mut(&table_id) { items.push(item); Ok(()) } else { items_by_table_id.insert(table_id, vec![item]); Ok(()) } } _ => Err(()) } } pub fn get_items(&self, table_id: u8) -> Result<Vec<OrderItem>, ()> { match table_id { 1..=100 => Ok( self.0 .read() .unwrap() .get(&table_id) .map(|it| it.clone()) .unwrap_or(vec![]) ), _ => Err(()) } } pub fn cancel_item(&self, table_id: u8, item_name: String) -> Result<(), ()> { match table_id { 1..=100 => { if let Some(items) = self.0 .write() .unwrap() .get_mut(&table_id) { items.retain(|item| item_name != *item.name); } Ok(()) } _ => Err(()) } } } #[cfg(test)] mod tests { use super::*; fn new_service_and_inner_map() -> (OrderService, Arc<RwLock<HashMap<u8, Vec<OrderItem>>>>) { let m: HashMap<u8, Vec<OrderItem>> = HashMap::new(); let arc_rw = Arc::new(RwLock::new(m)); let svc = OrderService::new_for_test(arc_rw.clone()); (svc, arc_rw.clone()) } #[test] fn it_returns_items_from_hashmap() { let (svc, rw_map) = new_service_and_inner_map(); rw_map .write() .unwrap() .insert(10, vec![OrderItem::new("sushi".to_string())]); let items = svc.get_items(10); assert!(items.is_ok()); let items = items.unwrap(); assert_eq!(1, items.len()); let item = items.get(0).unwrap(); assert_eq!("sushi".to_string(), item.name); } #[test] fn it_returns_nothing_when_no_table_id_found() { let (svc, _) = new_service_and_inner_map(); let items = svc.get_items(2); assert!(items.is_ok()); assert!(items.unwrap().is_empty()); } #[test] fn it_saves_item_into_map() { let (svc, rw_map) = new_service_and_inner_map(); let result = svc.add(1, OrderItem::new("french fries".to_string())); assert!(result.is_ok()); let inner_map = rw_map .read() .unwrap(); let items = inner_map.get(&1); assert!(items.is_some()); let items = items.unwrap(); assert_eq!(1, items.len()); assert_eq!("french fries", items .first() .unwrap() .name() ); } #[test] fn it_saves_items_with_same_name() { let (svc, rw_map) = new_service_and_inner_map(); for _ in 0..2 { let result = svc.add(1, OrderItem::new("french fries".to_string())); assert!(result.is_ok()); } let inner_map = rw_map .read() .unwrap(); let mut items = inner_map.get(&1); assert!(items.is_some()); let items = items.as_mut().unwrap(); assert_eq!(2, items.len()); assert!(items.iter() .map(|item: &OrderItem| item.name()) .all(|name| "french fries" == name)); } #[test] fn it_deletes_item() { let (s
em::new("ramen".to_string())]); svc.cancel_item(1, "ramen".to_string()).unwrap(); assert!(rw_map .read() .unwrap() .get(&1) .unwrap() .is_empty()); } #[test] fn it_rejects_when_table_id_greater_100() { let (svc, _) = new_service_and_inner_map(); let items = svc.get_items(200); assert!(items.is_err()); } }
vc, rw_map) = new_service_and_inner_map(); rw_map .write() .unwrap() .insert(1, vec![OrderIt
function_block-random_span
[ { "content": "#[cfg(test)]\n\nmod tests {\n\n use std::thread;\n\n use std::time::{Duration, Instant};\n\n\n\n use restaurant_order::*;\n\n use restaurant_order::clients::*;\n\n\n\n #[test]\n\n fn integration_test() {\n\n thread::spawn(move || WebServer::new().start(8001));\n\n\n\n thread::sleep(Duration::from_secs(1));\n\n\n\n let client = LocalClient::new(8001);\n\n assert!(client.get_items(1).is_empty(), \"items ordered already in the beginning\");\n\n\n\n // order a breakfast\n\n client.order_item(1, \"bacon_and_eggs\");\n\n\n", "file_path": "tests/integration_test.rs", "rank": 0, "score": 21166.41860350551 }, { "content": " // check if ordered breakfast is there\n\n assert_eq!(1, client.get_items(1).len());\n\n\n\n // customer just changes his/her mind\n\n client.cancel_order(1, \"bacon_and_eggs\");\n\n\n\n // this customer has ordered nothing now\n\n assert!(client.get_items(1).is_empty(), \"item has not been deleted\");\n\n }\n\n\n\n #[test]\n\n fn load_test() {\n\n thread::spawn(move || WebServer::new().start(8002));\n\n\n\n let now = Instant::now();\n\n clients_busy_with_orders(8002, 10, 5, (1, 10));\n\n let elapsed_time = now.elapsed().as_secs();\n\n assert!(elapsed_time < 1);\n\n }\n\n}\n", "file_path": "tests/integration_test.rs", "rank": 1, "score": 21162.622300165385 }, { "content": "fn main() {\n\n raw_run();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 2, "score": 20531.50066972192 }, { "content": "fn run_with_client() {\n\n thread::spawn(move || WebServer::new().start(8000));\n\n\n\n clients_busy_with_orders(8000, 100, 500, (1, 10));\n\n}", "file_path": "src/main.rs", "rank": 3, "score": 19472.52442830249 }, { "content": "fn raw_run() {\n\n WebServer::new().start(8000)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 19472.52442830249 }, { "content": " pub fn name(&self) -> &str {\n\n self.name.as_str()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_randomize_cook_time() {\n\n let min = Duration::from_secs(60 * 5);\n\n let max = Duration::from_secs(60 * 15);\n\n\n\n\n\n let item = OrderItem::new(\"ramen\".to_string());\n\n let time_to_cook = item.time_to_cook;\n\n\n\n assert!(time_to_cook <= max && time_to_cook >= min);\n\n }\n\n}", "file_path": "src/order_item.rs", "rank": 5, "score": 15752.01562842901 }, { "content": "use std::time::Duration;\n\n\n\nuse rand::Rng;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)]\n\npub struct OrderItem {\n\n pub(crate) name: String,\n\n time_to_cook: Duration,\n\n}\n\n\n\nimpl OrderItem {\n\n pub fn new(name: String) -> Self {\n\n let cook_mins = rand::thread_rng().gen_range(5, 15);\n\n OrderItem {\n\n name,\n\n time_to_cook: Duration::from_secs(60 * cook_mins),\n\n }\n\n }\n\n\n", "file_path": "src/order_item.rs", "rank": 6, "score": 15751.135246938466 }, { "content": " let result = stateful_service.add(table_id.into_inner(), order_item.into_inner());\n\n match result {\n\n Ok(_) => HttpResponse::Ok(),\n\n _ => HttpResponse::BadRequest(),\n\n }\n\n }\n\n\n\n fn handle_delete_item(path_vars: web::Path<(u8, String)>, stateful_service: web::Data<OrderService>) -> impl Responder {\n\n let (table_id, item_name) = path_vars.into_inner();\n\n match stateful_service.cancel_item(table_id, item_name) {\n\n Ok(_) => HttpResponse::Ok(),\n\n _ => HttpResponse::BadRequest()\n\n }\n\n }\n\n}\n\n\n\npub mod clients {\n\n use std::thread;\n\n use rand::Rng;\n\n use reqwest::Client;\n", "file_path": "src/lib.rs", "rank": 13, "score": 
12.832087797665839 }, { "content": " use std::time::Instant;\n\n use crate::order_item::OrderItem;\n\n\n\n pub struct LocalClient { base_url: String, client: Client }\n\n\n\n impl LocalClient {\n\n pub fn new(port: u64) -> Self {\n\n LocalClient {\n\n base_url: format!(\"http://localhost:{}\", port),\n\n client: Client::new(),\n\n }\n\n }\n\n pub fn order_item(&self, table_id: u8, item_name: &str) {\n\n let url = format!(\"{}/tables/{}/order-items\", self.base_url, table_id);\n\n let resp = self.client\n\n .post(url.as_str())\n\n .json(&OrderItem::new(item_name.to_string()))\n\n .send()\n\n .unwrap();\n\n assert!(resp.status().is_success());\n", "file_path": "src/lib.rs", "rank": 15, "score": 11.105857263846046 }, { "content": "use actix_web::{App, HttpResponse, HttpServer, Responder, web};\n\n\n\nuse order_item::OrderItem;\n\nuse order_service::OrderService;\n\n\n\npub mod order_item;\n\npub mod order_service;\n\n\n\n\n\npub struct WebServer {}\n\n\n\nimpl WebServer {\n\n pub fn new() -> Self { WebServer {} }\n\n\n\n pub fn start(&self, port: u64) {\n\n let shared_data = web::Data::new(OrderService::new());\n\n\n\n HttpServer::new(move || {\n\n App::new()\n\n .register_data(shared_data.clone())\n", "file_path": "src/lib.rs", "rank": 17, "score": 9.4275093577122 }, { "content": " }\n\n\n\n pub fn get_items(&self, table_id: u8) -> Vec<OrderItem> {\n\n let url = format!(\"{}/tables/{}/order-items\", self.base_url, table_id);\n\n let mut resp = self.client\n\n .get(url.as_str())\n\n .send()\n\n .unwrap();\n\n assert!(resp.status().is_success(), \"fail to get items\");\n\n let body: Vec<OrderItem> = resp.json().unwrap();\n\n body\n\n }\n\n\n\n pub fn cancel_order(&self, table_id: u8, item_name: &str) {\n\n let url = format!(\"{}/tables/{}/order-items/{}\", self.base_url, table_id, item_name);\n\n let resp = self.client\n\n .delete(url.as_str())\n\n .send()\n\n .unwrap();\n\n assert!(resp.status().is_success(), \"fail to delete item\");\n", "file_path": "src/lib.rs", "rank": 18, "score": 7.1438749336861065 }, { "content": "# restaurant-order\n\n\n\n## used libraries/tools\n\n### web server\n\n- actix: rank 1st in techempower benchmark; great docs\n\n### Serialize/Deserialize\n\n- serde\n\n### REST client\n\n- reqwest: for client in integration test\n\n### CI\n\n- github action: brand new, free, well-integrated\n\n### other dev tools\n\n- vscode devcontainer: always keep DEV environment as declared in JSON\n\n\n\n## Design\n\nInstead of using an actual database, this application works with in memory hashmap for simplicity.\n\nA stateful struct called `OrderService` wraps the hashmap in order to segregate the responsibilities of HTTP request handling and business logics about order management application.\n\nWith actix-web, it allows share state across multiple worker threads so the `OrderService` is being shared.\n\n\n\nIn terms of the choice of data structures, although there is finite number of tables (i.e. 100), scarce store reserves memory such that hash map is chosen. \n\nThe values are used with `Vector` because the food name can be duplicated by common sense. 
\n\n\n\n\n\n## To run locally\n\n- `cargo run`\n\n- default without (benchmarking) clients\n\n- HTTP server is hosted at `http://localhost:8000`\n\n- endpoints are:\n\n - `GET /tables/{id}/order-items` to get ordered items of a table\n\n - `POST /tables/{id}/order-items` create order for a table\n\n - `DELETE /tables/{id}/order-items/{name}`\n\n- To run with clients, please go to `load_test()` or switch the main fn to `run_with_client()`\n\n\n\n## clients and server interaction\n\n- done in integration test for happy path: `cargo test`\n", "file_path": "README.md", "rank": 19, "score": 5.396427628943638 }, { "content": " }\n\n }\n\n\n\n pub fn clients_busy_with_orders(port: u64,\n\n n_thread_per_action: u64,\n\n n_action_per_thread: u64,\n\n table_range: (u8, u8)) {\n\n let mut thread_handles = vec![];\n\n let now = Instant::now();\n\n\n\n for _ in 0..n_thread_per_action {\n\n thread_handles.push(thread::spawn(move || {\n\n let client = LocalClient::new(port);\n\n for _ in 0..n_action_per_thread {\n\n let table_id = rand::thread_rng().gen_range(table_range.0, table_range.1);\n\n client.get_items(table_id);\n\n }\n\n }))\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 20, "score": 4.659560365315326 }, { "content": " .service(\n\n web::scope(\"/tables/{id}\")\n\n .route(\"/order-items\", web::get().to(WebServer::handle_get_items))\n\n .route(\"/order-items\", web::post().to(WebServer::handle_add_item))\n\n .route(\"/order-items/{name}\", web::delete().to(WebServer::handle_delete_item)),\n\n )\n\n })\n\n .bind(format!(\"127.0.0.1:{}\", port).as_str())\n\n .expect(format!(\"Can not bind to port {}\", port).as_str())\n\n .run()\n\n .unwrap();\n\n }\n\n\n\n fn handle_get_items(table_id: web::Path<u8>, stateful_service: web::Data<OrderService>) -> impl Responder {\n\n stateful_service\n\n .get_items(table_id.into_inner())\n\n .map(|items| web::Json(items))\n\n }\n\n\n\n fn handle_add_item((table_id, order_item): (web::Path<u8>, web::Json<OrderItem>), stateful_service: web::Data<OrderService>) -> impl Responder {\n", "file_path": "src/lib.rs", "rank": 21, "score": 4.65943536238852 }, { "content": " for h in thread_handles {\n\n h.join().unwrap();\n\n }\n\n let time_elapsed = now.elapsed().as_secs();\n\n\n\n let client = LocalClient::new(port);\n\n\n\n for i in (table_range.0)..(table_range.1 + 1) {\n\n println!(\"table {} has items: {:?}\", i, client.get_items(i));\n\n }\n\n\n\n println!(\"Clients spent {}s on messing up the above orders.\", time_elapsed);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 22, "score": 3.015478981552076 }, { "content": "use restaurant_order::WebServer;\n\nuse std::thread;\n\nuse restaurant_order::clients::clients_busy_with_orders;\n\n\n\n\n", "file_path": "src/main.rs", "rank": 23, "score": 2.1610281435716168 }, { "content": " for _ in 0..n_thread_per_action {\n\n thread_handles.push(thread::spawn(move || {\n\n let client = LocalClient::new(port);\n\n for _ in 0..n_action_per_thread {\n\n let table_id = rand::thread_rng().gen_range(table_range.0, table_range.1);\n\n client.order_item(table_id, \"bacon\");\n\n }\n\n }))\n\n }\n\n\n\n for _ in 0..n_thread_per_action {\n\n thread_handles.push(thread::spawn(move || {\n\n let client = LocalClient::new(port);\n\n for _ in 0..n_action_per_thread {\n\n let table_id = rand::thread_rng().gen_range(table_range.0, table_range.1);\n\n client.cancel_order(table_id, \"bacon\");\n\n }\n\n }))\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 24, "score": 1.4220824959103109 } ]
Rust
parser/src/lib.rs
codeanonorg/ULP
1412606cf2456a183a33f3ef5a5defe94675b41b
mod report; mod spanned; mod token; use crate::token::Token; use chumsky::{prelude::*, Stream}; use report::Report; use report::{report_of_char_error, report_of_token_error}; use token::lexer; #[derive(Clone, Debug, PartialEq)] pub enum Lit { Num(String), List(Vec<Self>), } #[derive(Clone, Debug, PartialEq)] pub enum Sym { CombS, CombK, CombD, CombI, Map, Iota, Len, Reduce, Filter, Neg, And, Or, Eq, Add, Literal(Lit), Var(u32), Lambda(Vec<Self>), } impl Sym { pub fn lambda<I: IntoIterator<Item = Option<Self>>>(inner: I) -> Option<Self> { Some(Self::Lambda(inner.into_iter().collect::<Option<Vec<_>>>()?)) } } fn literal() -> impl Parser<Token, Lit, Error = Simple<Token>> { use Token::*; use token::Dir::*; let int = filter_map(|span, tok| match tok { Num(n) => Ok(Lit::Num(n)), t => Err(Simple::expected_input_found(span, vec![], Some(t))), }); recursive(|lit| lit.repeated().at_least(1).delimited_by(Bracket(L), Bracket(R)).map(Lit::List).or(int)) } fn parser() -> impl Parser<Token, Option<Vec<Sym>>, Error = Simple<Token>> { use token::Dir::*; use Token::*; let var = filter_map(|span, tok| match tok { Var(i) => Ok(Sym::Var(i)), t => Err(Simple::expected_input_found(span, vec![], Some(t))), }); let lit = literal().map(Sym::Literal); recursive(|instr| { instr .delimited_by(Brace(L), Brace(R)) .map(|v| Sym::lambda(v)) .or(just(Ident("K".to_string())) .to(Sym::CombK) .or(just(Ident("S".to_string())).to(Sym::CombS)) .or(just(Ident("I".to_string())).to(Sym::CombI)) .or(just(Ident("D".to_string())).to(Sym::CombD)) .or(just(Op("i".to_string())).to(Sym::Iota)) .or(just(Op("$".to_string())).to(Sym::Map)) .or(just(Op("+".to_string())).to(Sym::Add)) .or(just(Op("#".to_string())).to(Sym::Len)) .or(just(Op("=".to_string())).to(Sym::Eq)) .or(just(Op("/".to_string())).to(Sym::Reduce)) .or(just(Op("&".to_string())).to(Sym::And)) .or(just(Op("|".to_string())).to(Sym::Or)) .or(just(Op("!".to_string())).to(Sym::Neg)) .or(just(Op("\\".to_string())).to(Sym::Filter)) .or(lit) .or(var) .map(Some)) .recover_with(nested_delimiters(Brace(L), Brace(R), [], |_| None)) .repeated() }) .map(|v| v.into_iter().collect::<Option<Vec<_>>>()) } pub fn parse(src_id: impl Into<String>, input: &str) -> (Option<Vec<Sym>>, Vec<Report>) { let src_id = src_id.into(); let slen = input.len(); let (tokens, tokerr) = lexer().then_ignore(end()).parse_recovery(input); let tokerr = tokerr.into_iter().map({ let src_id = src_id.clone(); move |err| report_of_char_error(src_id.clone(), err) }); if let Some(tokens) = tokens { let (instrs, err) = parser() .then_ignore(end()) .parse_recovery(Stream::from_iter( slen..slen + 1, tokens.into_iter().map(Into::into), )); let tokerr = tokerr .chain( err.into_iter() .map(move |err| report_of_token_error(src_id.clone(), err)), ) .collect(); if let Some(Some(instrs)) = instrs { (Some(instrs), tokerr) } else { (None, tokerr) } } else { (None, tokerr.collect()) } } #[cfg(test)] mod tests { use super::*; use Sym::*; macro_rules! assert_parse { ($input:expr, [$($e:expr),*]) => { { use ariadne::Source; let input = $input; let (res, err) = parse("<test>", input); for report in err { report.eprint(("<test>".into(), Source::from(input))).unwrap(); } assert_eq!(res, Some(vec![$($e),*])); } }; } #[test] fn test_ski() { assert_parse!("S K I", [CombS, CombK, CombI]); } #[test] fn test_lambda() { assert_parse!("{D w1 I}", [Lambda(vec![CombD, Var(1), CombI])]) } #[test] fn test_nested_lambda() { assert_parse!( "{D {+ w1 w2} I}", [Lambda(vec![ CombD, Lambda(vec![Add, Var(1), Var(2)]), CombI ])] ); } }
mod report; mod spanned; mod token; use crate::token::Token; use chumsky::{prelude::*, Stream}; use report::Report; use report::{report_of_char_error, report_of_token_error}; use token::lexer; #[derive(Clone, Debug, PartialEq)] pub enum Lit { Num(String), List(Vec<Self>), } #[derive(Clone, Debug, PartialEq)] pub enum Sym { CombS, CombK, CombD, CombI, Map, Iota, Len, Reduce, Filter, Neg, And, Or, Eq, Add, Literal(Lit), Var(u32), Lambda(Vec<Self>), } impl Sym { pub fn lambda<I: IntoIterator<Item = Option<Self>>>(inner: I) -> Option<Self> { Some(Self::Lambda(inner.into_iter().collect::<Option<Vec<_>>>()?)) } } fn literal() -> impl Parser<Token, Lit, Error = Simple<Token>> { use Token::*;
fn parser() -> impl Parser<Token, Option<Vec<Sym>>, Error = Simple<Token>> { use token::Dir::*; use Token::*; let var = filter_map(|span, tok| match tok { Var(i) => Ok(Sym::Var(i)), t => Err(Simple::expected_input_found(span, vec![], Some(t))), }); let lit = literal().map(Sym::Literal); recursive(|instr| { instr .delimited_by(Brace(L), Brace(R)) .map(|v| Sym::lambda(v)) .or(just(Ident("K".to_string())) .to(Sym::CombK) .or(just(Ident("S".to_string())).to(Sym::CombS)) .or(just(Ident("I".to_string())).to(Sym::CombI)) .or(just(Ident("D".to_string())).to(Sym::CombD)) .or(just(Op("i".to_string())).to(Sym::Iota)) .or(just(Op("$".to_string())).to(Sym::Map)) .or(just(Op("+".to_string())).to(Sym::Add)) .or(just(Op("#".to_string())).to(Sym::Len)) .or(just(Op("=".to_string())).to(Sym::Eq)) .or(just(Op("/".to_string())).to(Sym::Reduce)) .or(just(Op("&".to_string())).to(Sym::And)) .or(just(Op("|".to_string())).to(Sym::Or)) .or(just(Op("!".to_string())).to(Sym::Neg)) .or(just(Op("\\".to_string())).to(Sym::Filter)) .or(lit) .or(var) .map(Some)) .recover_with(nested_delimiters(Brace(L), Brace(R), [], |_| None)) .repeated() }) .map(|v| v.into_iter().collect::<Option<Vec<_>>>()) } pub fn parse(src_id: impl Into<String>, input: &str) -> (Option<Vec<Sym>>, Vec<Report>) { let src_id = src_id.into(); let slen = input.len(); let (tokens, tokerr) = lexer().then_ignore(end()).parse_recovery(input); let tokerr = tokerr.into_iter().map({ let src_id = src_id.clone(); move |err| report_of_char_error(src_id.clone(), err) }); if let Some(tokens) = tokens { let (instrs, err) = parser() .then_ignore(end()) .parse_recovery(Stream::from_iter( slen..slen + 1, tokens.into_iter().map(Into::into), )); let tokerr = tokerr .chain( err.into_iter() .map(move |err| report_of_token_error(src_id.clone(), err)), ) .collect(); if let Some(Some(instrs)) = instrs { (Some(instrs), tokerr) } else { (None, tokerr) } } else { (None, tokerr.collect()) } } #[cfg(test)] mod tests { use super::*; use Sym::*; macro_rules! assert_parse { ($input:expr, [$($e:expr),*]) => { { use ariadne::Source; let input = $input; let (res, err) = parse("<test>", input); for report in err { report.eprint(("<test>".into(), Source::from(input))).unwrap(); } assert_eq!(res, Some(vec![$($e),*])); } }; } #[test] fn test_ski() { assert_parse!("S K I", [CombS, CombK, CombI]); } #[test] fn test_lambda() { assert_parse!("{D w1 I}", [Lambda(vec![CombD, Var(1), CombI])]) } #[test] fn test_nested_lambda() { assert_parse!( "{D {+ w1 w2} I}", [Lambda(vec![ CombD, Lambda(vec![Add, Var(1), Var(2)]), CombI ])] ); } }
use token::Dir::*; let int = filter_map(|span, tok| match tok { Num(n) => Ok(Lit::Num(n)), t => Err(Simple::expected_input_found(span, vec![], Some(t))), }); recursive(|lit| lit.repeated().at_least(1).delimited_by(Bracket(L), Bracket(R)).map(Lit::List).or(int)) }
function_block-function_prefix_line
[ { "content": "/// Check that an ULP program is well formed and returns its associated\n\n/// computation tree\n\npub fn check(mut prog: Vec<Sym>) -> Result<ComputationTree, &'static str> {\n\n if prog.len() == 0 {\n\n Err(\"No symbols\")\n\n } else {\n\n prog.reverse();\n\n linear_check(&prog)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use parser::*;\n\n\n\n use crate::check;\n\n\n\n #[test]\n\n pub fn test_linear_check1() {\n\n let prog = vec![\n\n Sym::Literal(Lit::Num(\"1\".to_string())),\n\n Sym::Map,\n", "file_path": "computations/src/lib.rs", "rank": 3, "score": 59385.65277879838 }, { "content": "pub trait SpannedExt: Sized {\n\n fn spanned(self, span: Span) -> Spanned<Self> {\n\n Spanned { span, value: self }\n\n }\n\n}\n\n\n\nimpl<T> SpannedExt for T {}\n", "file_path": "parser/src/spanned.rs", "rank": 4, "score": 52745.3655761124 }, { "content": "// Check that a sequence of symbols is well formed\n\nfn linear_check(prog: &[Sym]) -> Result<ComputationTree, &'static str> {\n\n let first = Accumulator::new(&prog[0])?;\n\n let next = &prog[1..];\n\n next.iter()\n\n .try_fold(first, |acc, s| acc.next(s))\n\n .and_then(|a| {\n\n // println!(\"debug {:?}\", a);\n\n a.finish().ok_or(\"Symbols remaining\")\n\n })\n\n}\n\n\n", "file_path": "computations/src/lib.rs", "rank": 5, "score": 43345.19929148449 }, { "content": "// Check that a sequence of symbols is well formed (in the context of a lambda)\n\nfn non_linear_check(_prog: &[Sym]) -> Result<ComputationTree, &'static str> {\n\n Err(\"TODO: Lambdas not supported\")\n\n}\n\n\n", "file_path": "computations/src/lib.rs", "rank": 6, "score": 42368.19798548112 }, { "content": "type Report = ariadne::Report<SrcId>;\n\n\n", "file_path": "repl/src/main.rs", "rank": 7, "score": 38565.754347587106 }, { "content": "#[derive(Debug)]\n\nenum State {\n\n WaitForOp,\n\n WaitForVal(BinOp),\n\n}\n\n\n\n// A datastructure to represent the current\n\n// Computation tree together with the current state\n", "file_path": "computations/src/lib.rs", "rank": 8, "score": 36075.53362248157 }, { "content": "fn main() {\n\n let mut rl = Editor::<()>::new();\n\n rl.load_history(HISTORYFILE).unwrap_or(());\n\n\n\n loop {\n\n match rl.readline(\"ULP> \") {\n\n Ok(line) => {\n\n rl.add_history_entry(&line);\n\n let (ast, errors) = parse(\"<repl>\", &line);\n\n for err in errors {\n\n err.eprint((\"<repl>\".to_string(), Source::from(&line))).unwrap();\n\n }\n\n if let Some(ast) = ast {\n\n match check(ast) {\n\n Ok(comp) => println!(\"Computation {:#?}\", comp),\n\n Err(err) => Report::build(ReportKind::Error, \"<repl>\", 0)\n\n .with_message(format!(\"Check error: {}\", err))\n\n .with_note(\"The structure is correct, however ULP could not figure out how to compute the expression.\")\n\n .finish()\n\n .eprint((\"<repl>\".to_string(), Source::from(&line)))\n", "file_path": "repl/src/main.rs", "rank": 9, "score": 31786.185245616074 }, { "content": "use std::ops::{Range, Deref};\n\n\n\npub type Span = Range<usize>;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Spanned<T> {\n\n pub span: Span,\n\n pub value: T,\n\n}\n\n\n\nimpl<T> Deref for Spanned<T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.value\n\n }\n\n}\n\n\n\nimpl<T> From<Spanned<T>> for (T, Span) {\n\n fn from(s: Spanned<T>) -> Self {\n\n (s.value, s.span)\n\n }\n\n}\n\n\n", "file_path": "parser/src/spanned.rs", "rank": 10, "score": 20702.552737306498 }, { "content": "use ariadne::{Color, Fmt};\n\nuse chumsky::{error::SimpleReason, prelude::Simple};\n\nuse 
std::ops::Range;\n\n\n\nuse crate::token::Token;\n\n\n\npub type SrcId = (String, Range<usize>);\n\npub type Report = ariadne::Report<SrcId>;\n\npub type Label = ariadne::Label<SrcId>;\n\n\n\npub(crate) fn report_of_char_error(src_id: impl Into<String>, err: Simple<char>) -> Report {\n\n let id = (src_id.into(), err.span());\n\n let report = Report::build(ariadne::ReportKind::Error, id.0.clone(), err.span().start);\n\n match err.reason() {\n\n SimpleReason::Unclosed { span, delimiter } => report\n\n .with_message(format!(\n\n \"Unclosed delimiter {}\",\n\n delimiter.fg(Color::Yellow)\n\n ))\n\n .with_label(\n", "file_path": "parser/src/report.rs", "rank": 11, "score": 20184.214910616214 }, { "content": " .with_message(msg)\n\n .finish(),\n\n }\n\n}\n\n\n\npub(crate) fn report_of_token_error(src_id: impl Into<String>, err: Simple<Token>) -> Report {\n\n let id = (src_id.into(), err.span());\n\n let report = Report::build(ariadne::ReportKind::Error, id.0.clone(), err.span().start);\n\n match err.reason() {\n\n SimpleReason::Unclosed { span, delimiter } => report\n\n .with_message(format!(\n\n \"Unclosed delimiter {}\",\n\n delimiter.fg(Color::Yellow)\n\n ))\n\n .with_label(\n\n Label::new((id.0.clone(), span.clone()))\n\n .with_color(Color::Blue)\n\n .with_message(format!(\"Unclosed delimiter is here\")),\n\n )\n\n .finish(),\n", "file_path": "parser/src/report.rs", "rank": 12, "score": 20181.93880250436 }, { "content": " Label::new((id.0.clone(), span.clone()))\n\n .with_color(Color::Blue)\n\n .with_message(format!(\"Unclosed delimiter is here\")),\n\n )\n\n .finish(),\n\n SimpleReason::Unexpected => report\n\n .with_message(if let Some(found) = err.found() {\n\n format!(\"Unexpected input {}\", found.fg(Color::Red))\n\n } else {\n\n format!(\"Unexpected input\")\n\n })\n\n .with_label(Label::new(id).with_color(Color::Blue).with_message(format!(\n\n \"Unexpected token {}\",\n\n err.expected()\n\n .map(|t| t.fg(Color::Cyan).to_string())\n\n .collect::<Vec<_>>()\n\n .join(\", \")\n\n )))\n\n .finish(),\n\n SimpleReason::Custom(msg) => report\n", "file_path": "parser/src/report.rs", "rank": 13, "score": 20176.891799832505 }, { "content": " SimpleReason::Unexpected => report\n\n .with_message(if let Some(found) = err.found() {\n\n format!(\"Unexpected input {}\", found.fg(Color::Red))\n\n } else {\n\n format!(\"Unexpected input\")\n\n })\n\n .with_label(Label::new(id).with_color(Color::Blue).with_message(format!(\n\n \"Expecting one of {}\",\n\n err.expected()\n\n .map(|v| v.fg(Color::Cyan).to_string())\n\n .collect::<Vec<_>>()\n\n .join(\", \")\n\n )))\n\n .finish(),\n\n SimpleReason::Custom(msg) => report\n\n .with_message(msg)\n\n .finish(),\n\n }\n\n}\n", "file_path": "parser/src/report.rs", "rank": 14, "score": 20174.97429100792 }, { "content": "use core::fmt;\n\nuse std::ops::Range;\n\n\n\nuse chumsky::{\n\n error::{Error, Simple},\n\n prelude::*,\n\n text::{ident, int, TextParser},\n\n Parser,\n\n};\n\npub use Dir::*;\n\npub use Token::*;\n\n\n\nuse crate::spanned::{Spanned, SpannedExt};\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum Dir {\n\n L,\n\n R,\n\n}\n\n\n", "file_path": "parser/src/token.rs", "rank": 15, "score": 20050.23291536463 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Token {\n\n Op(String),\n\n Ident(String),\n\n Num(String),\n\n Var(u32),\n\n Paren(Dir),\n\n Bracket(Dir),\n\n Brace(Dir),\n\n}\n\n\n\nimpl fmt::Display for Token {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Op(s) | 
Ident(s) | Num(s) => write!(f, \"{}\", s),\n\n Var(i) => write!(f, \"w{}\", i),\n\n Paren(L) => write!(f, \"(\"),\n\n Paren(R) => write!(f, \")\"),\n\n Bracket(L) => write!(f, \"[\"),\n\n Bracket(R) => write!(f, \"]\"),\n", "file_path": "parser/src/token.rs", "rank": 16, "score": 20043.72167168497 }, { "content": " Brace(L) => write!(f, \"{{\"),\n\n Brace(R) => write!(f, \"}}\"),\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn lexer() -> impl Parser<char, Vec<Spanned<Token>>, Error = Simple<char>> {\n\n let op = filter(char::is_ascii_punctuation)\n\n .map(|c| Op(c.to_string()));\n\n let ident = ident().map(Ident);\n\n let num = int(10).map(Num);\n\n let parens = just('(').to(Paren(L)).or(just(')').to(Paren(R)));\n\n let braces = just('{').to(Brace(L)).or(just('}').to(Brace(R)));\n\n let brackets = just('[').to(Bracket(L)).or(just(']').to(Bracket(R)));\n\n let var = just('w')\n\n .ignore_then(int(10))\n\n .try_map(|s, Range { start, end }| {\n\n s.parse().map(Var).map_err(|_| {\n\n Simple::custom(start - 1..end, format!(\"Could not parse variable index\"))\n\n .with_label(\"Variables are of the form w# where # is a natural number\")\n", "file_path": "parser/src/token.rs", "rank": 17, "score": 20043.638870920582 }, { "content": " })\n\n });\n\n braces\n\n .or(parens)\n\n .or(brackets)\n\n .or(op)\n\n .or(num)\n\n .or(var)\n\n .or(ident)\n\n .padded()\n\n .map_with_span(|tok, span| tok.spanned(span))\n\n .recover_with(nested_delimiters(\n\n '(',\n\n ')',\n\n [('[', ']'), ('{', '}')],\n\n |span| Paren(R).spanned(span),\n\n ))\n\n .recover_with(nested_delimiters(\n\n '[',\n\n ']',\n", "file_path": "parser/src/token.rs", "rank": 18, "score": 20041.335227957294 }, { "content": " [('(', ')'), ('{', '}')],\n\n |span| Bracket(R).spanned(span),\n\n ))\n\n .recover_with(nested_delimiters(\n\n '{',\n\n '}',\n\n [('(', ')'), ('[', ']')],\n\n |span| Brace(R).spanned(span),\n\n ))\n\n .repeated()\n\n}\n", "file_path": "parser/src/token.rs", "rank": 19, "score": 20039.86726956081 }, { "content": "}\n\n\n\n/// Type of binary operation symbols\n\n#[derive(Clone, PartialEq, Debug)]\n\npub enum BinOp {\n\n Map,\n\n Filter,\n\n Reduce,\n\n Eq,\n\n Add,\n\n And,\n\n Or,\n\n}\n\n\n\n/// Type of unary operation symbols\n\n#[derive(Clone, PartialEq, Debug)]\n\npub enum UnOp {\n\n Len,\n\n Neg,\n\n Iota,\n", "file_path": "computations/src/trees.rs", "rank": 22, "score": 16.906244116745583 }, { "content": " _ => unreachable!(),\n\n }\n\n }\n\n\n\n fn to_binary(s: &Sym) -> BinOp {\n\n match s {\n\n Sym::Map => BinOp::Map,\n\n Sym::Eq => BinOp::Eq,\n\n Sym::Add => BinOp::Add,\n\n Sym::And => BinOp::And,\n\n Sym::Reduce => BinOp::Reduce,\n\n Sym::Filter => BinOp::Filter,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n fn to_comb(s: &Sym) -> Combinator {\n\n match s {\n\n Sym::CombS => Combinator::S,\n\n Sym::CombK => Combinator::K,\n", "file_path": "computations/src/lib.rs", "rank": 23, "score": 13.832194640018798 }, { "content": " println!(\"result: {:?}\", err);\n\n assert!(err.is_err())\n\n }\n\n\n\n #[test]\n\n pub fn test_linear_check3() {\n\n let prog = vec![\n\n Sym::Add,\n\n Sym::Reduce,\n\n Sym::Len,\n\n Sym::Map,\n\n Sym::Iota,\n\n Sym::Literal(Lit::Num(\"2\".to_string())),\n\n ];\n\n let err = check(prog);\n\n println!(\"result: {:?}\", err);\n\n assert!(err.is_ok())\n\n }\n\n}\n", "file_path": "computations/src/lib.rs", "rank": 24, "score": 13.796554744213582 }, { "content": " Sym::Add,\n\n Sym::Map,\n\n Sym::Iota,\n\n Sym::Literal(Lit::Num(\"2\".to_string())),\n\n ];\n\n let res = check(prog);\n\n println!(\"result: 
{:?}\", res);\n\n assert!(res.is_ok())\n\n }\n\n\n\n #[test]\n\n pub fn test_linear_check2() {\n\n let prog = vec![\n\n Sym::Map,\n\n Sym::Add,\n\n Sym::Map,\n\n Sym::Iota,\n\n Sym::Literal(Lit::Num(\"2\".to_string()))\n\n ];\n\n let err = check(prog);\n", "file_path": "computations/src/lib.rs", "rank": 25, "score": 13.775643588692251 }, { "content": " Ok(ComputationTree::CombOp(Self::to_comb(s)))\n\n }\n\n Sym::Map | Sym::Eq | Sym::Add | Sym::And | Sym::Or | Sym::Filter | Sym::Reduce => {\n\n Ok(ComputationTree::BinOpSym(Self::to_binary(s)))\n\n }\n\n Sym::Iota | Sym::Len | Sym::Neg => Ok(ComputationTree::UnOpSym(Self::to_unary(s))),\n\n Sym::Literal(n) => Ok(ComputationTree::Lit(n.clone().into())),\n\n Sym::Var(v) => Ok(ComputationTree::Lit(Literal::Var(v.clone()))),\n\n Sym::Lambda(prog) => non_linear_check(prog).and_then(|body| {\n\n Ok(ComputationTree::Lambda {\n\n vars: Self::count_variables(prog),\n\n body: Box::new(body),\n\n })\n\n }),\n\n }\n\n }\n\n\n\n /// Given an accumulator and a symbol, next computes a new accumulator\n\n /// and consume the symbol to extend the current computation tree.\n\n fn next(self, s: &Sym) -> Result<Accumulator, &'static str> {\n", "file_path": "computations/src/lib.rs", "rank": 26, "score": 13.017027941189275 }, { "content": "use std::fmt;\n\n\n\n/// Type of literals\n\n/// Literals are constant values\n\n#[derive(Clone, PartialEq, Debug)]\n\npub enum Literal {\n\n True,\n\n False,\n\n Num(String),\n\n Var(u32),\n\n Table(Vec<Literal>),\n\n}\n\n\n\nimpl From<parser::Lit> for Literal {\n\n fn from(lit: parser::Lit) -> Self {\n\n match lit {\n\n parser::Lit::Num(n) => Self::Num(n),\n\n parser::Lit::List(v) => Self::Table(v.into_iter().map(|n| n.into()).collect()),\n\n }\n\n }\n", "file_path": "computations/src/trees.rs", "rank": 27, "score": 12.515831840472917 }, { "content": "}\n\n\n\n/// Type of combinator symbols\n\n#[derive(Clone, PartialEq, Debug)]\n\npub enum Combinator {\n\n S,\n\n K,\n\n I,\n\n D,\n\n}\n\n\n\n/// Type of Computation Trees\n\n/// A computation tree represents a structured sequence\n\n/// of computations to perform\n\n#[derive(Clone, Debug)]\n\npub enum ComputationTree {\n\n BinOpSym(BinOp),\n\n UnOpSym(UnOp),\n\n CombOp(Combinator),\n\n Lit(Literal),\n", "file_path": "computations/src/trees.rs", "rank": 29, "score": 10.560895553605727 }, { "content": " match self.state {\n\n State::WaitForOp => match s {\n\n Sym::Map | Sym::Eq | Sym::Filter | Sym::Reduce | Sym::Add | Sym::And | Sym::Or => {\n\n Ok(Accumulator {\n\n state: State::WaitForVal(Self::to_binary(s)),\n\n ..self\n\n })\n\n }\n\n Sym::Iota | Sym::Len => Ok(Accumulator {\n\n state: State::WaitForOp,\n\n acc: ComputationTree::UnaryOp {\n\n op: Self::to_unary(s),\n\n lhs: Box::new(self.acc),\n\n },\n\n }),\n\n _ => Err(\"Expected operator\"),\n\n },\n\n State::WaitForVal(op) => Self::to_tree(s).and_then(|tree| {\n\n Ok(Accumulator {\n\n state: State::WaitForOp,\n", "file_path": "computations/src/lib.rs", "rank": 30, "score": 9.872724784529254 }, { "content": " fn is_done(&self) -> bool {\n\n match self.state {\n\n State::WaitForOp => true,\n\n State::WaitForVal(_) => false,\n\n }\n\n }\n\n\n\n fn finish(self) -> Option<ComputationTree> {\n\n if self.is_done() {\n\n Some(self.acc)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn to_unary(s: &Sym) -> UnOp {\n\n match s {\n\n Sym::Len => UnOp::Len,\n\n Sym::Neg => UnOp::Neg,\n\n Sym::Iota => UnOp::Iota,\n", "file_path": "computations/src/lib.rs", "rank": 31, "score": 8.39511670667028 }, { "content": "#[allow(dead_code)]\n\nmod 
trees;\n\n\n\nuse parser::Sym;\n\nuse trees::{ComputationTree, Literal, UnOp};\n\n\n\nuse crate::trees::{BinOp, Combinator};\n\n\n\n// States for our state machine\n\n// WaitForOp -> We are waiting for an operator as the next symbol\n\n// WaitForVal(op) -> We are waiting for at least one symbol\n\n// This symbol will be the second parameter for the binary operator \"op\"\n\n#[derive(Debug)]\n", "file_path": "computations/src/lib.rs", "rank": 32, "score": 7.217511494565908 }, { "content": "use ariadne::{ReportKind, Source};\n\nuse computations::check;\n\nuse parser::parse;\n\nuse rustyline::{Editor, error::ReadlineError};\n\nuse std::ops::Range;\n\n\n\nconst HISTORYFILE: &'static str = \"/tmp/ulp-repl.history\";\n\n\n", "file_path": "repl/src/main.rs", "rank": 33, "score": 6.643652459248333 }, { "content": " Sym::CombD => Combinator::D,\n\n Sym::CombI => Combinator::I,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n /// TODO : captures in lambdas ??\n\n fn count_variables(prog: &Vec<Sym>) -> u32 {\n\n prog.iter()\n\n .map(|s| match s {\n\n Sym::Var(_) => 1,\n\n _ => 0,\n\n })\n\n .sum()\n\n }\n\n\n\n /// Convert an arbitrary symbol to a computation tree\n\n fn to_tree<'a>(s: &Sym) -> Result<ComputationTree, &'static str> {\n\n match s {\n\n Sym::CombS | Sym::CombK | Sym::CombD | Sym::CombI => {\n", "file_path": "computations/src/lib.rs", "rank": 34, "score": 6.570242970084699 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Literal::True => write!(f, \"true\"),\n\n Literal::False => write!(f, \"false\"),\n\n Literal::Num(n) => write!(f, \"{}\", n),\n\n Literal::Table(v) => {\n\n write!(f, \"[\")?;\n\n if v.len() != 0 {\n\n write!(f, \"{}\", v[0])?;\n\n for i in &v[1..] {\n\n write!(f, \" {}\", i)?;\n\n }\n\n }\n\n write!(f, \"]\")\n\n }\n\n Literal::Var(u) => write!(f, \"x{}\", u),\n\n }\n\n }\n\n}\n", "file_path": "computations/src/trees.rs", "rank": 38, "score": 4.394846882117076 }, { "content": " BinaryOp {\n\n op: BinOp,\n\n lhs: Box<Self>,\n\n rhs: Box<Self>,\n\n },\n\n UnaryOp {\n\n op: UnOp,\n\n lhs: Box<Self>,\n\n },\n\n Lambda {\n\n vars: u32,\n\n body: Box<ComputationTree>,\n\n },\n\n App {\n\n lhs: Box<ComputationTree>,\n\n rhs: Box<ComputationTree>,\n\n },\n\n}\n\n\n\nimpl fmt::Display for Literal {\n", "file_path": "computations/src/trees.rs", "rank": 39, "score": 2.7732972246204013 }, { "content": " .unwrap(),\n\n }\n\n }\n\n },\n\n Err(ReadlineError::Interrupted) => {},\n\n Err(ReadlineError::Eof) => break,\n\n Err(err) => eprintln!(\"Fatal error: {}\", err),\n\n }\n\n }\n\n\n\n rl.save_history(HISTORYFILE).unwrap_or(());\n\n}\n", "file_path": "repl/src/main.rs", "rank": 40, "score": 2.6664921281850105 }, { "content": "#[derive(Debug)]\n\nstruct Accumulator {\n\n state: State,\n\n acc: ComputationTree,\n\n}\n\n\n\nimpl Accumulator {\n\n // Build a new accumulator containing only the symbol \"s\" viewed\n\n // as a computation tree\n\n fn new(s: &Sym) -> Result<Self, &'static str> {\n\n Self::to_tree(s).and_then(|acc| {\n\n Ok(Accumulator {\n\n state: State::WaitForOp,\n\n acc,\n\n })\n\n })\n\n }\n\n\n\n // Check if the state described by the accumulator is\n\n // accepting or not.\n\n // If we are waiting for an\n", "file_path": "computations/src/lib.rs", "rank": 41, "score": 2.519130011187534 }, { "content": "# ULP\n\n\n\n**U**n **L**angage de **P**rogrammation (ULP) est un\n\nlangage jouet inspiré de [APL](https://fr.wikipedia.org/wiki/APL_(langage)).\n\n\n\n## Objectifs\n\n\n\n+ Un langage à tableau style APL\n\n+ Fortement 
*point free*\n\n+ Statiquement et fortement typé\n\n+ Compilation native\n\n+ Syntaxe simple (sans unicode !)\n\n\n\n## Syntaxe\n\n\n\n### Combinateurs de base\n\n\n\n```\n\nF ` flip (λf.λx.λy.f y x)\n\nD ` generalized substitution (λf.λg.λh.λx. f (g x) (h x))\n\nK ` constant function λx.λy.x\n\nI ` identity function λx.x\n\nS ` substitution (λf.λg.λh.λx. f (g x) (h x)) = {D w I}\n\n```\n\n\n\n### Lambdas\n\n\n\n```\n\n{f w} ` anonymous function of parameter w\n\n{g w1 w2} ` anonymous function of parameters w1 w2\n\n```\n\n\n\n### Opérateurs de base\n\n\n\n```\n\n+ ` addition\n\n- ` soustraction\n\n* ` multiplication\n\n: ` division\n\n% ` modulo\n\n```\n\n\n\n### Tableaux\n\n\n\n```\n\n# ` longueurs\n\n$ ` map\n\n/ ` reduce\n\n\\ ` filter\n\nM ` max\n\nm ` min\n\n```\n\n\n\n### Booléens\n\n\n\n```\n\n= ` égalité\n\n! ` négation\n\n& ` et\n\n| ` ou\n\n< ` Plus petit\n\n> ` Plus grand\n\n```\n\n\n\n```\n\nLE <- D | < =\n\nLE 0 [1 2 3] = [True True True]\n\n```\n\n\n\n\n\n```\n\n+/#$[[1 2 3 5] [1 2 3 4]] ` calculer le nombre d'éléments dans une matrice\n\n```\n\n\n", "file_path": "README.md", "rank": 42, "score": 2.1829861046673953 } ]
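To show how the parser/src/lib.rs row above is consumed downstream, a hypothetical snippet mirroring the repl crate's use of parse() and check() from the context items in the same row; the input string is the matrix-element-count example from that row's README, and whether check() accepts it is not asserted here.

use ariadne::Source;
use computations::check;
use parser::parse;

fn main() {
    let input = "+/#$[[1 2 3 5] [1 2 3 4]]";
    let (ast, errors) = parse("<example>", input);
    // Pretty-print any lexer/parser diagnostics, as the repl does.
    for report in errors {
        report
            .eprint(("<example>".to_string(), Source::from(input)))
            .unwrap();
    }
    // If parsing produced symbols, try to build a computation tree.
    if let Some(ast) = ast {
        match check(ast) {
            Ok(tree) => println!("{:#?}", tree),
            Err(msg) => eprintln!("check error: {}", msg),
        }
    }
}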
Rust
src/resolver/storage.rs
casper-ecosystem/caspiler
69874a86537fb6f1a138f03e382686f03e46751e
use num_bigint::BigInt; use num_traits::FromPrimitive; use num_traits::One; use num_traits::Zero; use super::cfg::{ControlFlowGraph, Instr, Vartable}; use super::expression::{cast, expression, Expression}; use output::Output; use parser::pt; use resolver; pub fn array_offset( loc: &pt::Loc, start: Expression, index: Expression, elem_ty: resolver::Type, ns: &resolver::Namespace, ) -> Expression { let elem_size = elem_ty.storage_slots(ns); if elem_size == BigInt::one() { Expression::Add(*loc, Box::new(start), Box::new(index)) } else if (elem_size.clone() & (elem_size.clone() - BigInt::one())) == BigInt::zero() { Expression::ShiftLeft( *loc, Box::new(start), Box::new(Expression::ShiftLeft( *loc, Box::new(index), Box::new(Expression::NumberLiteral( *loc, 256, BigInt::from_usize(elem_size.bits()).unwrap(), )), )), ) } else { Expression::Add( *loc, Box::new(start), Box::new(Expression::Multiply( *loc, Box::new(index), Box::new(Expression::NumberLiteral(*loc, 256, elem_size)), )), ) } } pub fn delete( loc: &pt::Loc, var: &pt::Expression, cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let (var_expr, var_ty) = expression(var, cfg, contract_no, ns, vartab, errors)?; let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, "cannot use ‘delete’ in constant expression".to_string(), )); return Err(()); } }; if let resolver::Type::StorageRef(ty) = &var_ty { if ty.is_mapping() { errors.push(Output::error( *loc, "‘delete’ cannot be applied to mapping type".to_string(), )); return Err(()); } cfg.writes_contract_storage = true; cfg.add( tab, Instr::ClearStorage { ty: ty.as_ref().clone(), storage: var_expr, }, ); } else { errors.push(Output::error( *loc, "argument to ‘delete’ should be storage reference".to_string(), )); return Err(()); } Ok((Expression::Poison, resolver::Type::Undef)) } pub fn array_push( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, ty: &resolver::Type, args: &[pt::Expression], cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; if args.len() > 1 { errors.push(Output::error( func.loc, "method ‘push()’ takes at most 1 argument".to_string(), )); return Err(()); } let slot_ty = resolver::Type::Uint(256); let length_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: length_pos, expr: Expression::StorageLoad(*loc, slot_ty.clone(), Box::new(var_expr.clone())), }, ); let elem_ty = ty.storage_deref(); let entry_pos = tab.temp_anonymous(&slot_ty); cfg.writes_contract_storage = true; cfg.add( tab, Instr::Set { res: entry_pos, expr: array_offset( loc, Expression::Keccak256(*loc, vec![(var_expr.clone(), slot_ty.clone())]), Expression::Variable(*loc, length_pos), elem_ty.clone(), ns, ), }, ); if args.len() == 1 { let (val_expr, val_ty) = expression(&args[0], cfg, contract_no, ns, &mut Some(tab), errors)?; let pos = tab.temp_anonymous(&elem_ty); cfg.add( tab, Instr::Set { res: pos, expr: cast( &args[0].loc(), val_expr, &val_ty, &elem_ty.deref(), true, ns, errors, )?, }, ); cfg.add( tab, Instr::SetStorage { ty: elem_ty.clone(), local: pos, storage: 
Expression::Variable(*loc, entry_pos), }, ); } let new_length = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: new_length, expr: Expression::Add( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::one())), ), }, ); cfg.add( tab, Instr::SetStorage { ty: slot_ty, local: new_length, storage: var_expr, }, ); if args.is_empty() { Ok((Expression::Variable(*loc, entry_pos), elem_ty)) } else { Ok((Expression::Poison, resolver::Type::Undef)) } } pub fn array_pop( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, ty: &resolver::Type, args: &[pt::Expression], cfg: &mut ControlFlowGraph, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; if !args.is_empty() { errors.push(Output::error( func.loc, "method ‘pop()’ does not take any arguments".to_string(), )); return Err(()); } let slot_ty = resolver::Type::Uint(256); let length_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: length_pos, expr: Expression::StorageLoad(*loc, slot_ty.clone(), Box::new(var_expr.clone())), }, ); let empty_array = cfg.new_basic_block("empty_array".to_string()); let has_elements = cfg.new_basic_block("has_elements".to_string()); cfg.writes_contract_storage = true; cfg.add( tab, Instr::BranchCond { cond: Expression::Equal( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::zero())), ), true_: empty_array, false_: has_elements, }, ); cfg.set_basic_block(empty_array); cfg.add(tab, Instr::AssertFailure { expr: None }); cfg.set_basic_block(has_elements); let new_length = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: new_length, expr: Expression::Subtract( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::one())), ), }, ); let elem_ty = ty.storage_deref().deref().clone(); let entry_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: entry_pos, expr: array_offset( loc, Expression::Keccak256(*loc, vec![(var_expr.clone(), slot_ty.clone())]), Expression::Variable(*loc, new_length), elem_ty.clone(), ns, ), }, ); let res_pos = tab.temp_anonymous(&elem_ty); cfg.add( tab, Instr::Set { res: res_pos, expr: Expression::StorageLoad( *loc, elem_ty.clone(), Box::new(Expression::Variable(*loc, entry_pos)), ), }, ); cfg.add( tab, Instr::ClearStorage { ty: elem_ty.clone(), storage: Expression::Variable(*loc, entry_pos), }, ); cfg.add( tab, Instr::SetStorage { ty: slot_ty, local: new_length, storage: var_expr, }, ); Ok((Expression::Variable(*loc, res_pos), elem_ty)) } pub fn bytes_push( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, args: &[pt::Expression], cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; cfg.writes_contract_storage = true; let val = match args.len() { 0 => Expression::NumberLiteral(*loc, 8, BigInt::zero()), 1 => { let (val_expr, val_ty) = expression(&args[0], cfg, 
contract_no, ns, &mut Some(tab), errors)?; cast( &args[0].loc(), val_expr, &val_ty, &resolver::Type::Bytes(1), true, ns, errors, )? } _ => { errors.push(Output::error( func.loc, "method ‘push()’ takes at most 1 argument".to_string(), )); return Err(()); } }; if args.is_empty() { Ok(( Expression::StorageBytesPush(*loc, Box::new(var_expr), Box::new(val)), resolver::Type::Bytes(1), )) } else { Ok(( Expression::StorageBytesPush(*loc, Box::new(var_expr), Box::new(val)), resolver::Type::Undef, )) } } pub fn bytes_pop( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, args: &[pt::Expression], cfg: &mut ControlFlowGraph, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { cfg.writes_contract_storage = true; if !args.is_empty() { errors.push(Output::error( func.loc, "method ‘pop()’ does not take any arguments".to_string(), )); return Err(()); } Ok(( Expression::StorageBytesPop(*loc, Box::new(var_expr)), resolver::Type::Bytes(1), )) } pub fn mapping_subscript( loc: &pt::Loc, mapping: Expression, mapping_ty: &resolver::Type, index: &pt::Expression, cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let (key_ty, value_ty) = match mapping_ty.deref() { resolver::Type::Mapping(k, v) => (k, v), _ => unreachable!(), }; let (index_expr, index_ty) = expression(index, cfg, contract_no, ns, vartab, errors)?; let index_expr = cast( &index.loc(), index_expr, &index_ty, key_ty, true, ns, errors, )?; let slot_ty = resolver::Type::Uint(256); let index_ty = if let resolver::Type::Enum(n) = index_ty { ns.enums[n].ty.clone() } else { index_ty }; let slot = Expression::Keccak256(*loc, vec![(mapping, slot_ty), (index_expr, index_ty)]); Ok((slot, resolver::Type::StorageRef(value_ty.clone()))) }
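The `array_offset` helper in the record above builds a symbolic storage-slot expression of the form start + index * element_size, with fast paths for an element size of one slot and for power-of-two sizes (the `(elem_size & (elem_size - 1)) == 0` comparison is the usual bit trick for that test). As rough orientation only, here is a standalone plain-integer sketch of that arithmetic; it is not the Expression IR built in the record, and the function and variable names are my own assumptions.

// Illustrative sketch only: a plain-integer version of the offset arithmetic that the
// record's array_offset builds symbolically. Not part of the dataset or of solang.
fn slot_offset(start: u128, index: u128, elem_size: u128) -> u128 {
    if elem_size == 1 {
        // one storage slot per element, so the offset is just start + index
        start + index
    } else if elem_size.is_power_of_two() {
        // multiply by a power of two with a shift; trailing_zeros() == log2(elem_size)
        start + (index << elem_size.trailing_zeros())
    } else {
        // general case: plain multiplication
        start + index * elem_size
    }
}

fn main() {
    assert_eq!(slot_offset(100, 3, 1), 103);
    assert_eq!(slot_offset(100, 3, 4), 112); // 100 + (3 << 2)
    assert_eq!(slot_offset(100, 3, 6), 118);
}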
use num_bigint::BigInt; use num_traits::FromPrimitive; use num_traits::One; use num_traits::Zero; use super::cfg::{ControlFlowGraph, Instr, Vartable}; use super::expression::{cast, expression, Expression}; use output::Output; use parser::pt; use resolver; pub fn array_offset( loc: &pt::Loc, start: Expression, index: Expression, elem_ty: resolver::Type, ns: &resolver::Namespace, ) -> Expression { let elem_size = elem_ty.storage_slots(ns); if elem_size == BigInt::one() { Expression::Add(*loc, Box::new(start), Box::new(index)) } else if (elem_size.clone() & (elem_size.clone() - BigInt::one())) == BigInt::zero() { Expression::ShiftLeft( *loc, Box::new(start), Box::new(Expression::ShiftLeft( *loc, Box::new(index), Box::new(Expression::NumberLiteral( *loc, 256, BigInt::from_usize(elem_size.bits()).unwrap(), )), )), ) } else { Expression::Add( *loc, Box::new(start), Box::new(Expression::Multiply( *loc, Box::new(index), Box::new(Expression::NumberLiteral(*loc, 256, elem_size)), )), ) } } pub fn delete( loc: &pt::Loc, var: &pt::Expression, cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let (var_expr, var_ty) = expression(var, cfg, contract_no, ns, vartab, errors)?; let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, "cannot use ‘delete’ in constant expression".to_string(), )); return Err(()); } }; if let resolver::Type::StorageRef(ty) = &var_ty { if ty.is_mapping() { errors.push(Output::error( *loc, "‘delete’ cannot be applied to mapping type".to_string(), )); return Err(()); } cfg.writes_contract_storage = true; cfg.add( tab, Instr::ClearStorage { ty: ty.as_ref().clone(), storage: var_expr, }, ); } else { errors.push(Output::error( *loc, "argument to ‘delete’ should be storage reference".to_string(), )); return Err(()); } Ok((Expression::Poison, resolver::Type::Undef)) } pub fn array_push( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, ty: &resolver::Type, args: &[pt::Expression], cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; if args.len() > 1 { errors.push(Output::error( func.loc, "method ‘push()’ takes at most 1 argument".to_string(), )); return Err(()); } let slot_ty = resolver::Type::Uint(256); let length_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: length_pos, expr: Expression::StorageLoad(*loc, slot_ty.clone(), Box::new(var_expr.clone())), }, ); let elem_ty = ty.storage_deref(); let entry_pos = tab.temp_anonymous(&slot_ty); cfg.writes_contract_storage = true; cfg.add( tab, Instr::Set { res: entry_pos, expr: array_offset( loc, Expression::Keccak256(*loc, vec![(var_expr.clone(), slot_ty.clone())]), Expression::Variable(*loc, length_pos), elem_ty.clone(), ns, ), }, ); if args.len() == 1 { let (val_expr, val_ty) = expression(&args[0], cfg, contract_no, ns, &mut Some(tab), errors)?; let pos = tab.temp_anonymous(&elem_ty); cfg.add( tab, Instr::Set { res: pos, expr: cast( &args[0].loc(), val_expr, &val_ty, &elem_ty.deref(), true, ns, errors, )?, }, ); cfg.add( tab, Instr::SetStorage { ty: elem_ty.clone(), local: pos, storage: 
Expression::Variable(*loc, entry_pos), }, ); } let new_length = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: new_length, expr: Expression::Add( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::one())), ), }, ); cfg.add( tab, Instr::SetStorage { ty: slot_ty, local: new_length, storage: var_expr, }, ); if args.is_empty() { Ok((Expression::Variable(*loc, entry_pos), elem_ty)) } else { Ok((Expression::Poison, resolver::Type::Undef)) } } pub fn array_pop( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, ty: &resolver::Type, args: &[pt::Expression], cfg: &mut ControlFlowGraph, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; if !args.is_empty() { errors.push(Output::error( func.loc, "method ‘pop()’ does not take any arguments".to_string(), )); return Err(()); } let slot_ty = resolver::Type::Uint(256); let length_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: length_pos, expr: Expression::StorageLoad(*loc, slot_ty.clone(), Box::new(var_expr.clone())), }, ); let empty_array = cfg.new_basic_block("empty_array".to_string()); let has_elements = cfg.new_basic_block("has_elements".to_string()); cfg.writes_contract_storage = true; cfg.add( tab, Instr::BranchCond { cond: Expression::Equal( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::zero())), ), true_: empty_array, false_: has_elements, }, ); cfg.set_basic_block(empty_array); cfg.add(tab, Instr::AssertFailure { expr: None }); cfg.set_basic_block(has_elements); let new_length = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: new_length, expr: Expression::Subtract( *loc, Box::new(Expression::Variable(*loc, length_pos)), Box::new(Expression::NumberLiteral(*loc, 256, BigInt::one())), ), }, ); let elem_ty = ty.storage_deref().deref().clone(); let entry_pos = tab.temp_anonymous(&slot_ty); cfg.add( tab, Instr::Set { res: entry_pos, expr: array_offset( loc, Expression::Keccak256(*loc, vec![(var_expr.clone(), slot_ty.clone())]), Expression::Variable(*loc, new_length), elem_ty.clone(), ns, ), }, ); let res_pos = tab.temp_anonymous(&elem_ty); cfg.add( tab, Instr::Set { res: res_pos, expr: Expression::StorageLoad( *loc, elem_ty.clone(), Box::new(Expression::Variable(*loc, entry_pos)), ), }, ); cfg.add( tab, Instr::ClearStorage { ty: elem_ty.clone(), storage: Expression::Variable(*loc, entry_pos), }, ); cfg.add( tab, Instr::SetStorage { ty: slot_ty, local: new_length, storage: var_expr, }, ); Ok((Expression::Variable(*loc, res_pos), elem_ty)) } pub fn bytes_push( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, args: &[pt::Expression], cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let tab = match vartab { &mut Some(ref mut tab) => tab, None => { errors.push(Output::error( *loc, format!("cannot call method ‘{}’ in constant expression", func.name), )); return Err(()); } }; cfg.writes_contract_storage = true; let val = match args.len() { 0 => Expression::NumberLiteral(*loc, 8, BigInt::zero()), 1 => { let (val_expr, val_ty) = expression(&args[0], cfg, 
contract_no, ns, &mut Some(tab), errors)?; cast( &args[0].loc(), val_expr, &val_ty, &resolver::Type::Bytes(1), true, ns, errors, )? } _ => { errors.push(Output::error( func.loc, "method ‘push()’ takes at most 1 argument".to_string(), )); return Err(()); } };
} pub fn bytes_pop( loc: &pt::Loc, var_expr: Expression, func: &pt::Identifier, args: &[pt::Expression], cfg: &mut ControlFlowGraph, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { cfg.writes_contract_storage = true; if !args.is_empty() { errors.push(Output::error( func.loc, "method ‘pop()’ does not take any arguments".to_string(), )); return Err(()); } Ok(( Expression::StorageBytesPop(*loc, Box::new(var_expr)), resolver::Type::Bytes(1), )) } pub fn mapping_subscript( loc: &pt::Loc, mapping: Expression, mapping_ty: &resolver::Type, index: &pt::Expression, cfg: &mut ControlFlowGraph, contract_no: Option<usize>, ns: &resolver::Namespace, vartab: &mut Option<&mut Vartable>, errors: &mut Vec<Output>, ) -> Result<(Expression, resolver::Type), ()> { let (key_ty, value_ty) = match mapping_ty.deref() { resolver::Type::Mapping(k, v) => (k, v), _ => unreachable!(), }; let (index_expr, index_ty) = expression(index, cfg, contract_no, ns, vartab, errors)?; let index_expr = cast( &index.loc(), index_expr, &index_ty, key_ty, true, ns, errors, )?; let slot_ty = resolver::Type::Uint(256); let index_ty = if let resolver::Type::Enum(n) = index_ty { ns.enums[n].ty.clone() } else { index_ty }; let slot = Expression::Keccak256(*loc, vec![(mapping, slot_ty), (index_expr, index_ty)]); Ok((slot, resolver::Type::StorageRef(value_ty.clone()))) }
if args.is_empty() { Ok(( Expression::StorageBytesPush(*loc, Box::new(var_expr), Box::new(val)), resolver::Type::Bytes(1), )) } else { Ok(( Expression::StorageBytesPush(*loc, Box::new(var_expr), Box::new(val)), resolver::Type::Undef, )) }
if_condition
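For orientation, the fields shown above fit together as a fill-in-the-middle record: `prefix` and `suffix` are the code on either side of a masked span, `middle` is the masked span itself (here the closing `if args.is_empty()` return of `bytes_push`), the record's `strategy` field is `if_condition`, and the three parts should concatenate back to `file_code`, possibly up to the whitespace normalization of this dump. A minimal sketch under those assumptions; the type and method names are mine, not part of the dataset schema.

// Hypothetical helper types for working with one record of this dataset; the field and
// method names below are assumptions for illustration, not the dataset's actual schema.
struct FimRecord {
    file_code: String, // full source file at the recorded commit
    prefix: String,    // code before the masked span
    middle: String,    // the masked span the model is asked to produce
    suffix: String,    // code after the masked span
    strategy: String,  // how the span was picked, e.g. "if_condition"
}

impl FimRecord {
    /// Rebuild the original file from the three parts.
    fn reassemble(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }

    /// Lossless-split sanity check (may need whitespace normalization first
    /// if the fields were flattened onto single lines, as in this dump).
    fn is_consistent(&self) -> bool {
        self.reassemble() == self.file_code
    }
}

fn main() {
    let rec = FimRecord {
        file_code: "fn f() { if x { 1 } else { 2 } }".into(),
        prefix: "fn f() { ".into(),
        middle: "if x { 1 } else { 2 }".into(),
        suffix: " }".into(),
        strategy: "if_condition".into(),
    };
    assert!(rec.is_consistent());
    println!("strategy = {}", rec.strategy);
}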
[ { "content": "/// Cast from one type to another, which also automatically derefs any Type::Ref() type.\n\n/// if the cast is explicit (e.g. bytes32(bar) then implicit should be set to false.\n\npub fn cast(\n\n loc: &pt::Loc,\n\n expr: Expression,\n\n from: &resolver::Type,\n\n to: &resolver::Type,\n\n implicit: bool,\n\n ns: &resolver::Namespace,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<Expression, ()> {\n\n if from == to {\n\n return Ok(expr);\n\n }\n\n\n\n // First of all, if we have a ref then derefence it\n\n if let resolver::Type::Ref(r) = from {\n\n return cast(\n\n loc,\n\n Expression::Load(*loc, Box::new(expr)),\n\n r,\n\n to,\n", "file_path": "src/resolver/expression.rs", "rank": 0, "score": 344194.5579587738 }, { "content": "/// Resolve the type of a variable declaration\n\npub fn resolve_var_decl_ty(\n\n ty: &pt::Expression,\n\n storage: &Option<pt::StorageLocation>,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<resolver::Type, ()> {\n\n let mut var_ty = ns.resolve_type(contract_no, false, &ty, errors)?;\n\n\n\n if let Some(storage) = storage {\n\n if !var_ty.can_have_data_location() {\n\n errors.push(Output::error(\n\n *storage.loc(),\n\n format!(\n\n \"data location ‘{}’ only allowed for array, struct or mapping type\",\n\n storage\n\n ),\n\n ));\n\n return Err(());\n\n }\n", "file_path": "src/resolver/cfg.rs", "rank": 4, "score": 332294.71393942874 }, { "content": "/// Resolve function call\n\npub fn function_call_expr(\n\n loc: &pt::Loc,\n\n ty: &pt::Expression,\n\n args: &[pt::Expression],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (ty, call_args, call_args_loc) = collect_call_args(ty, errors)?;\n\n\n\n match ty {\n\n pt::Expression::MemberAccess(_, member, func) => method_call_pos_args(\n\n loc,\n\n member,\n\n func,\n\n args,\n\n &call_args,\n\n call_args_loc,\n", "file_path": "src/resolver/expression.rs", "rank": 5, "score": 327225.8409301441 }, { "content": "/// Function call arguments\n\npub fn collect_call_args<'a>(\n\n expr: &'a pt::Expression,\n\n errors: &mut Vec<Output>,\n\n) -> Result<\n\n (\n\n &'a pt::Expression,\n\n Vec<&'a pt::NamedArgument>,\n\n Option<pt::Loc>,\n\n ),\n\n (),\n\n> {\n\n let mut named_arguments = Vec::new();\n\n let mut expr = expr;\n\n let mut loc: Option<pt::Loc> = None;\n\n\n\n while let pt::Expression::FunctionCallBlock(_, e, block) = expr {\n\n match block.as_ref() {\n\n pt::Statement::Args(_, args) => {\n\n if let Some(l) = loc {\n\n loc = Some(pt::Loc(l.0, block.loc().1));\n", "file_path": "src/resolver/expression.rs", "rank": 6, "score": 320886.97278821666 }, { "content": "/// Resolve function call expression with named arguments\n\npub fn named_function_call_expr(\n\n loc: &pt::Loc,\n\n ty: &pt::Expression,\n\n args: &[pt::NamedArgument],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (ty, call_args, call_args_loc) = collect_call_args(ty, errors)?;\n\n\n\n match ty {\n\n pt::Expression::MemberAccess(_, member, func) => method_call_with_named_args(\n\n loc,\n\n member,\n\n func,\n\n args,\n\n &call_args,\n\n cfg,\n", "file_path": "src/resolver/expression.rs", "rank": 7, "score": 318354.6096917002 }, { 
"content": "pub fn add_builtin_function(ns: &mut Namespace, contract_no: usize) {\n\n add_assert(ns, contract_no);\n\n add_print(ns, contract_no);\n\n add_revert(ns, contract_no);\n\n add_require(ns, contract_no);\n\n add_selfdestruct(ns, contract_no);\n\n add_crypto_hash(ns, contract_no);\n\n}\n\n\n", "file_path": "src/resolver/builtin.rs", "rank": 8, "score": 317802.2528726249 }, { "content": "/// Resolve a method call with positional arguments\n\nfn method_call_pos_args(\n\n loc: &pt::Loc,\n\n var: &pt::Expression,\n\n func: &pt::Identifier,\n\n args: &[pt::Expression],\n\n call_args: &[&pt::NamedArgument],\n\n call_args_loc: Option<pt::Loc>,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (var_expr, var_ty) = expression(var, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n if let resolver::Type::StorageRef(ty) = &var_ty {\n\n match ty.as_ref() {\n\n resolver::Type::Array(_, dim) => {\n\n if let Some(loc) = call_args_loc {\n\n errors.push(Output::error(\n", "file_path": "src/resolver/expression.rs", "rank": 13, "score": 288928.7975091408 }, { "content": "fn method_call_with_named_args(\n\n loc: &pt::Loc,\n\n var: &pt::Expression,\n\n func_name: &pt::Identifier,\n\n args: &[pt::NamedArgument],\n\n call_args: &[&pt::NamedArgument],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (var_expr, var_ty) = expression(var, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n if let resolver::Type::Contract(external_contract_no) = &var_ty.deref() {\n\n let call_args = parse_call_args(&call_args, true, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n let mut arguments = HashMap::new();\n\n\n\n for arg in args {\n", "file_path": "src/resolver/expression.rs", "rank": 14, "score": 288906.5884630058 }, { "content": "/// Resolve type(x).foo\n\npub fn type_name_expr(\n\n loc: &pt::Loc,\n\n args: &[pt::Expression],\n\n field: &pt::Identifier,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n if args.is_empty() {\n\n errors.push(Output::error(\n\n *loc,\n\n \"missing argument to type()\".to_string(),\n\n ));\n\n return Err(());\n\n }\n\n\n\n if args.len() > 1 {\n\n errors.push(Output::error(\n\n *loc,\n\n format!(\"got {} arguments to type(), only one expected\", args.len(),),\n", "file_path": "src/resolver/expression.rs", "rank": 15, "score": 285560.802870739 }, { "content": "/// Resolve an new contract expression with named arguments\n\npub fn constructor_named_args(\n\n loc: &pt::Loc,\n\n ty: &pt::Expression,\n\n args: &[pt::NamedArgument],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (ty, call_args, _) = collect_call_args(ty, errors)?;\n\n\n\n let call_args = parse_call_args(&call_args, false, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n let no = match ns.resolve_type(contract_no, false, ty, errors)? 
{\n\n resolver::Type::Contract(n) => n,\n\n _ => {\n\n errors.push(Output::error(*loc, \"contract expected\".to_string()));\n\n return Err(());\n\n }\n", "file_path": "src/resolver/expression.rs", "rank": 16, "score": 285558.28902755637 }, { "content": "fn ty_to_abi(ty: &resolver::Type, ns: &resolver::Namespace, registry: &mut Registry) -> ParamType {\n\n match ty {\n\n /* clike_enums are broken in polkadot. Use u8 for now.\n\n resolver::Type::Enum(n) => ParamType {\n\n ty: registry.builtin_enum_type(&contract.enums[*n]),\n\n display_name: vec![registry.string(&contract.enums[*n].name)],\n\n },\n\n */\n\n resolver::Type::Enum(_) => ParamType {\n\n ty: registry.builtin_type(\"u8\"),\n\n display_name: vec![registry.string(\"u8\")],\n\n },\n\n resolver::Type::Bytes(n) => {\n\n let elem = registry.builtin_type(\"u8\");\n\n ParamType {\n\n ty: registry.builtin_array_type(elem, *n as usize),\n\n display_name: vec![],\n\n }\n\n }\n\n resolver::Type::Undef => unreachable!(),\n", "file_path": "src/abi/substrate.rs", "rank": 17, "score": 274562.1377721917 }, { "content": "/// The parser generates parameter lists for lists. Sometimes this needs to be a\n\n/// simple expression list.\n\npub fn parameter_list_to_expr_list<'a>(\n\n e: &'a pt::Expression,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<Vec<&'a pt::Expression>, ()> {\n\n if let pt::Expression::List(_, v) = &e {\n\n let mut list = Vec::new();\n\n let mut broken = false;\n\n\n\n for e in v {\n\n match &e.1 {\n\n None => {\n\n errors.push(Output::error(e.0, \"stray comma\".to_string()));\n\n broken = true;\n\n }\n\n Some(pt::Parameter {\n\n name: Some(name), ..\n\n }) => {\n\n errors.push(Output::error(name.loc, \"single value expected\".to_string()));\n\n broken = true;\n\n }\n", "file_path": "src/resolver/expression.rs", "rank": 18, "score": 271846.29443008284 }, { "content": "fn add_print(ns: &mut Namespace, contract_no: usize) {\n\n let id = pt::Identifier {\n\n loc: pt::Loc(0, 0),\n\n name: \"print\".to_owned(),\n\n };\n\n\n\n let mut assert = FunctionDecl::new(\n\n pt::Loc(0, 0),\n\n \"print\".to_owned(),\n\n vec![],\n\n pt::FunctionTy::Function,\n\n None,\n\n None,\n\n pt::Visibility::Private(pt::Loc(0, 0)),\n\n vec![Parameter {\n\n name: \"arg0\".to_owned(),\n\n ty: resolver::Type::String,\n\n }],\n\n vec![],\n\n ns,\n", "file_path": "src/resolver/builtin.rs", "rank": 19, "score": 269955.8630811663 }, { "content": "fn add_assert(ns: &mut Namespace, contract_no: usize) {\n\n let id = pt::Identifier {\n\n loc: pt::Loc(0, 0),\n\n name: \"assert\".to_owned(),\n\n };\n\n\n\n let mut assert = FunctionDecl::new(\n\n pt::Loc(0, 0),\n\n \"assert\".to_owned(),\n\n vec![],\n\n pt::FunctionTy::Function,\n\n None,\n\n None,\n\n pt::Visibility::Private(pt::Loc(0, 0)),\n\n vec![Parameter {\n\n name: \"arg0\".to_owned(),\n\n ty: resolver::Type::Bool,\n\n }],\n\n vec![],\n\n ns,\n", "file_path": "src/resolver/builtin.rs", "rank": 20, "score": 269955.8630811663 }, { "content": "fn add_selfdestruct(ns: &mut Namespace, contract_no: usize) {\n\n let id = pt::Identifier {\n\n loc: pt::Loc(0, 0),\n\n name: \"selfdestruct\".to_owned(),\n\n };\n\n\n\n let mut selfdestruct = FunctionDecl::new(\n\n pt::Loc(0, 0),\n\n \"selfdestruct\".to_owned(),\n\n vec![],\n\n pt::FunctionTy::Function,\n\n None,\n\n None,\n\n pt::Visibility::Private(pt::Loc(0, 0)),\n\n vec![Parameter {\n\n name: \"recipient\".to_owned(),\n\n ty: resolver::Type::Address(true),\n\n }],\n\n vec![],\n\n ns,\n", "file_path": "src/resolver/builtin.rs", "rank": 21, "score": 269955.8630811663 }, 
{ "content": "fn add_require(ns: &mut Namespace, contract_no: usize) {\n\n let id = pt::Identifier {\n\n loc: pt::Loc(0, 0),\n\n name: \"require\".to_owned(),\n\n };\n\n\n\n let mut require = FunctionDecl::new(\n\n pt::Loc(0, 0),\n\n \"require\".to_owned(),\n\n vec![],\n\n pt::FunctionTy::Function,\n\n None,\n\n None,\n\n pt::Visibility::Private(pt::Loc(0, 0)),\n\n vec![\n\n Parameter {\n\n name: \"condition\".to_owned(),\n\n ty: resolver::Type::Bool,\n\n },\n\n Parameter {\n", "file_path": "src/resolver/builtin.rs", "rank": 22, "score": 269955.8630811663 }, { "content": "fn add_revert(ns: &mut Namespace, contract_no: usize) {\n\n let id = pt::Identifier {\n\n loc: pt::Loc(0, 0),\n\n name: \"revert\".to_owned(),\n\n };\n\n\n\n let mut revert = FunctionDecl::new(\n\n pt::Loc(0, 0),\n\n \"revert\".to_owned(),\n\n vec![],\n\n pt::FunctionTy::Function,\n\n None,\n\n None,\n\n pt::Visibility::Private(pt::Loc(0, 0)),\n\n vec![Parameter {\n\n name: \"ReasonCode\".to_owned(),\n\n ty: resolver::Type::String,\n\n }],\n\n vec![],\n\n ns,\n", "file_path": "src/resolver/builtin.rs", "rank": 23, "score": 269955.8630811663 }, { "content": "pub fn expression(\n\n expr: &pt::Expression,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n match expr {\n\n pt::Expression::ArrayLiteral(loc, exprs) => {\n\n resolve_array_literal(loc, exprs, cfg, contract_no, ns, vartab, errors)\n\n }\n\n pt::Expression::BoolLiteral(loc, v) => {\n\n Ok((Expression::BoolLiteral(*loc, *v), resolver::Type::Bool))\n\n }\n\n pt::Expression::StringLiteral(v) => {\n\n // Concatenate the strings\n\n let mut result = Vec::new();\n\n let mut loc = pt::Loc(v[0].loc.0, 0);\n\n\n", "file_path": "src/resolver/expression.rs", "rank": 24, "score": 269737.9223707304 }, { "content": "/// Unescape a string literal\n\nfn unescape(literal: &str, start: usize, errors: &mut Vec<output::Output>) -> String {\n\n let mut s = String::new();\n\n let mut indeces = literal.char_indices();\n\n\n\n while let Some((_, ch)) = indeces.next() {\n\n if ch != '\\\\' {\n\n s.push(ch);\n\n continue;\n\n }\n\n\n\n match indeces.next() {\n\n Some((_, '\\n')) => (),\n\n Some((_, '\\\\')) => s.push('\\\\'),\n\n Some((_, '\\'')) => s.push('\\''),\n\n Some((_, '\"')) => s.push('\"'),\n\n Some((_, 'b')) => s.push('\\u{0008}'),\n\n Some((_, 'f')) => s.push('\\u{000c}'),\n\n Some((_, 'n')) => s.push('\\n'),\n\n Some((_, 'r')) => s.push('\\r'),\n\n Some((_, 't')) => s.push('\\t'),\n", "file_path": "src/resolver/expression.rs", "rank": 25, "score": 265174.8663032741 }, { "content": "fn add_crypto_hash(ns: &mut Namespace, contract_no: usize) {\n\n struct HashFunction {\n\n function_name: &'static str,\n\n hash_ty: HashTy,\n\n ret_ty: resolver::Type,\n\n target: Option<Target>,\n\n };\n\n\n\n for hash in &[\n\n HashFunction {\n\n function_name: \"keccak256\",\n\n hash_ty: HashTy::Keccak256,\n\n ret_ty: resolver::Type::Bytes(32),\n\n target: None,\n\n },\n\n HashFunction {\n\n function_name: \"ripemd160\",\n\n hash_ty: HashTy::Ripemd160,\n\n ret_ty: resolver::Type::Bytes(20),\n\n target: None,\n", "file_path": "src/resolver/builtin.rs", "rank": 26, "score": 265154.34929780336 }, { "content": "pub fn generate_cfg(\n\n ast_f: &pt::FunctionDefinition,\n\n resolve_f: &resolver::FunctionDecl,\n\n contract_no: usize,\n\n ns: &resolver::Namespace,\n\n errors: &mut Vec<output::Output>,\n\n) -> 
Result<Box<ControlFlowGraph>, ()> {\n\n let mut cfg = Box::new(ControlFlowGraph::new());\n\n\n\n let mut vartab = Vartable::new();\n\n let mut loops = LoopScopes::new();\n\n\n\n // first add function parameters\n\n for (i, p) in ast_f.params.iter().enumerate() {\n\n let p = p.1.as_ref().unwrap();\n\n if let Some(ref name) = p.name {\n\n if let Some(pos) = vartab.add(name, resolve_f.params[i].ty.clone(), errors) {\n\n ns.check_shadowing(contract_no, name, errors);\n\n\n\n cfg.add(\n", "file_path": "src/resolver/cfg.rs", "rank": 27, "score": 263755.6532561851 }, { "content": "/// Resolve an new expression\n\npub fn new(\n\n loc: &pt::Loc,\n\n ty: &pt::Expression,\n\n args: &[pt::Expression],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (ty, call_args, call_args_loc) = collect_call_args(ty, errors)?;\n\n\n\n let ty = ns.resolve_type(contract_no, false, ty, errors)?;\n\n\n\n match &ty {\n\n resolver::Type::Array(ty, dim) => {\n\n if dim.last().unwrap().is_some() {\n\n errors.push(Output::error(\n\n *loc,\n\n format!(\n", "file_path": "src/resolver/expression.rs", "rank": 28, "score": 256924.84500285418 }, { "content": "// When generating shifts, llvm wants both arguments to have the same width. We want the\n\n// result of the shift to be left argument, so this function coercies the right argument\n\n// into the right length.\n\nfn cast_shift_arg(\n\n loc: &pt::Loc,\n\n expr: Expression,\n\n from_width: u16,\n\n ty: &resolver::Type,\n\n ns: &resolver::Namespace,\n\n) -> Expression {\n\n let to_width = ty.bits(ns);\n\n\n\n if from_width == to_width {\n\n expr\n\n } else if from_width < to_width && ty.signed() {\n\n Expression::SignExt(*loc, ty.clone(), Box::new(expr))\n\n } else if from_width < to_width && !ty.signed() {\n\n Expression::ZeroExt(*loc, ty.clone(), Box::new(expr))\n\n } else {\n\n Expression::Trunc(*loc, ty.clone(), Box::new(expr))\n\n }\n\n}\n\n\n", "file_path": "src/resolver/expression.rs", "rank": 29, "score": 255209.60477485365 }, { "content": "/// Parse call arguments for external calls\n\nfn parse_call_args(\n\n call_args: &[&pt::NamedArgument],\n\n external_call: bool,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<Output>,\n\n) -> Result<CallArgs, ()> {\n\n let mut args: HashMap<&String, &pt::NamedArgument> = HashMap::new();\n\n\n\n for arg in call_args {\n\n if let Some(prev) = args.get(&arg.name.name) {\n\n errors.push(Output::error_with_note(\n\n arg.loc,\n\n format!(\"‘{}’ specified multiple times\", arg.name.name),\n\n prev.loc,\n\n format!(\"location of previous declaration of ‘{}’\", arg.name.name),\n\n ));\n\n return Err(());\n", "file_path": "src/resolver/expression.rs", "rank": 30, "score": 255030.78209578016 }, { "content": "/// Resolve a function call with positional arguments\n\nfn function_call_pos_args(\n\n loc: &pt::Loc,\n\n id: &pt::Identifier,\n\n args: &[pt::Expression],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n // Try to resolve as a function call\n\n let funcs = ns.resolve_func(contract_no.unwrap(), &id, errors)?;\n\n\n\n let mut resolved_args = Vec::new();\n\n let mut resolved_types 
= Vec::new();\n\n\n\n for arg in args {\n\n let (expr, expr_type) = expression(arg, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n resolved_args.push(Box::new(expr));\n", "file_path": "src/resolver/expression.rs", "rank": 31, "score": 247081.76574677211 }, { "content": "/// Resolve a function call with named arguments\n\nfn function_call_with_named_args(\n\n loc: &pt::Loc,\n\n id: &pt::Identifier,\n\n args: &[pt::NamedArgument],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n // Try to resolve as a function call\n\n let funcs = ns.resolve_func(contract_no.unwrap(), &id, errors)?;\n\n\n\n let mut arguments = HashMap::new();\n\n\n\n for arg in args {\n\n if arguments.contains_key(&arg.name.name) {\n\n errors.push(Output::error(\n\n arg.name.loc,\n\n format!(\"duplicate argument with name ‘{}’\", arg.name.name),\n", "file_path": "src/resolver/expression.rs", "rank": 32, "score": 247081.76574677211 }, { "content": "pub fn gen_abi(contract_no: usize, ns: &resolver::Namespace) -> Metadata {\n\n let mut registry = Registry::new();\n\n\n\n let fields = ns.contracts[contract_no]\n\n .variables\n\n .iter()\n\n .filter(|v| !v.is_storage())\n\n .map(|v| StructField {\n\n name: registry.string(&v.name),\n\n ty: ty_to_abi(&v.ty, ns, &mut registry).ty,\n\n })\n\n .collect();\n\n\n\n let storagety = registry.struct_type(\"storage\", fields);\n\n\n\n let fields = ns.contracts[contract_no]\n\n .variables\n\n .iter()\n\n .filter_map(|v| {\n\n if let resolver::ContractVariableType::Storage(storage) = &v.var {\n", "file_path": "src/abi/substrate.rs", "rank": 33, "score": 246679.61819979514 }, { "content": "/// Resolve an expression where a compile-time constant is expected\n\npub fn eval_number_expression(\n\n expr: &Expression,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Loc, BigInt), ()> {\n\n match expr {\n\n Expression::Add(loc, l, r) => Ok((\n\n *loc,\n\n eval_number_expression(l, errors)?.1 + eval_number_expression(r, errors)?.1,\n\n )),\n\n Expression::Subtract(loc, l, r) => Ok((\n\n *loc,\n\n eval_number_expression(l, errors)?.1 - eval_number_expression(r, errors)?.1,\n\n )),\n\n Expression::Multiply(loc, l, r) => Ok((\n\n *loc,\n\n eval_number_expression(l, errors)?.1 * eval_number_expression(r, errors)?.1,\n\n )),\n\n Expression::UDivide(loc, l, r) | Expression::SDivide(loc, l, r) => {\n\n let divisor = eval_number_expression(r, errors)?.1;\n\n\n", "file_path": "src/resolver/eval.rs", "rank": 34, "score": 243734.41861529794 }, { "content": "/// Parse return statement with values\n\nfn return_with_values(\n\n returns: &pt::Expression,\n\n loc: &pt::Loc,\n\n f: &resolver::FunctionDecl,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: usize,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Vartable,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<bool, ()> {\n\n let returns = parameter_list_to_expr_list(returns, errors)?;\n\n\n\n let no_returns = f.returns.len();\n\n\n\n if no_returns > 0 && returns.is_empty() {\n\n errors.push(Output::error(\n\n *loc,\n\n format!(\n\n \"missing return value, {} return values expected\",\n\n no_returns\n", "file_path": "src/resolver/cfg.rs", "rank": 35, "score": 218663.8454894606 }, { "content": "fn check_return(\n\n f: &pt::FunctionDefinition,\n\n cfg: &mut ControlFlowGraph,\n\n vartab: &Vartable,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(), ()> {\n\n let current = 
cfg.current;\n\n let bb = &mut cfg.bb[current];\n\n\n\n let num_instr = bb.instr.len();\n\n\n\n if num_instr > 0 {\n\n if let Instr::Return { .. } = bb.instr[num_instr - 1] {\n\n return Ok(());\n\n }\n\n }\n\n\n\n if f.returns.is_empty() || !vartab.returns.is_empty() {\n\n bb.add(Instr::Return {\n\n value: vartab\n", "file_path": "src/resolver/cfg.rs", "rank": 36, "score": 218658.16769771295 }, { "content": "/// Do casting between types (no literals)\n\nfn cast_types(\n\n loc: &pt::Loc,\n\n expr: Expression,\n\n from_conv: resolver::Type,\n\n to_conv: resolver::Type,\n\n from: &resolver::Type,\n\n to: &resolver::Type,\n\n implicit: bool,\n\n ns: &resolver::Namespace,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<Expression, ()> {\n\n let address_bits = ns.address_length as u16 * 8;\n\n\n\n #[allow(clippy::comparison_chain)]\n\n match (from_conv, to_conv) {\n\n (resolver::Type::Bytes(1), resolver::Type::Uint(8)) => Ok(expr),\n\n (resolver::Type::Uint(8), resolver::Type::Bytes(1)) => Ok(expr),\n\n (resolver::Type::Uint(from_len), resolver::Type::Uint(to_len)) => {\n\n match from_len.cmp(&to_len) {\n\n Ordering::Greater => {\n", "file_path": "src/resolver/expression.rs", "rank": 37, "score": 218624.77135022747 }, { "content": "/// Resolve an assignment with an operator\n\nfn assign_expr(\n\n loc: &pt::Loc,\n\n var: &pt::Expression,\n\n expr: &pt::Expression,\n\n e: &pt::Expression,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (set, set_type) = expression(e, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n let op = |assign: Expression,\n\n ty: &resolver::Type,\n\n errors: &mut Vec<output::Output>|\n\n -> Result<Expression, ()> {\n\n let set = match expr {\n\n pt::Expression::AssignShiftLeft(_, _, _)\n\n | pt::Expression::AssignShiftRight(_, _, _) => {\n", "file_path": "src/resolver/expression.rs", "rank": 38, "score": 218624.7279339027 }, { "content": "/// Parse and resolve the Solidity source code provided in src, for the target chain as specified in target.\n\n/// The result is a list of resolved contracts (if successful) and a list of compiler warnings, errors and\n\n/// informational messages like `found contact N`.\n\n///\n\n/// Note that multiple contracts can be specified in on solidity source file.\n\npub fn parse_and_resolve(\n\n src: &str,\n\n target: Target,\n\n) -> (Option<resolver::Namespace>, Vec<output::Output>) {\n\n let ast = match parser::parse(src) {\n\n Ok(s) => s,\n\n Err(errors) => {\n\n return (None, errors);\n\n }\n\n };\n\n\n\n // resolve\n\n resolver::resolver(ast, target)\n\n}\n", "file_path": "src/lib.rs", "rank": 39, "score": 212376.20214381535 }, { "content": "pub fn gen_abi(contract_no: usize, ns: &Namespace) -> Vec<ABI> {\n\n fn parameter_to_abi(name: &str, ty: &Type, ns: &Namespace) -> ABIParam {\n\n let components = if let Some(n) = ty.is_struct_or_array_of_struct() {\n\n ns.structs[n]\n\n .fields\n\n .iter()\n\n .map(|f| parameter_to_abi(&f.name, &f.ty, ns))\n\n .collect::<Vec<ABIParam>>()\n\n } else {\n\n Vec::new()\n\n };\n\n\n\n ABIParam {\n\n name: name.to_string(),\n\n ty: ty.to_signature_string(ns),\n\n internal_ty: ty.to_string(ns),\n\n components,\n\n }\n\n }\n\n\n", "file_path": "src/abi/ethereum.rs", "rank": 40, "score": 212239.17268769076 }, { "content": "/// Convert a constructor call expression to CFG in expression context\n\nfn emit_constructor_call(\n\n 
expr: Expression,\n\n expr_ty: resolver::Type,\n\n cfg: &mut ControlFlowGraph,\n\n vartab: &mut Option<&mut Vartable>,\n\n) -> (Expression, resolver::Type) {\n\n let tab = match vartab {\n\n &mut Some(ref mut tab) => tab,\n\n None => unreachable!(),\n\n };\n\n\n\n match expr {\n\n Expression::Constructor {\n\n loc,\n\n contract_no,\n\n constructor_no,\n\n args,\n\n value,\n\n gas,\n\n salt,\n", "file_path": "src/resolver/expression.rs", "rank": 41, "score": 211596.56715647978 }, { "content": "/// Convert a function call expression to CFG in expression context\n\nfn emit_function_call(\n\n expr: Expression,\n\n expr_ty: resolver::Type,\n\n contract_no: usize,\n\n cfg: &mut ControlFlowGraph,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n) -> Vec<(Expression, resolver::Type)> {\n\n let tab = match vartab {\n\n &mut Some(ref mut tab) => tab,\n\n None => unreachable!(),\n\n };\n\n\n\n match expr {\n\n Expression::LocalFunctionCall(_, func, args) => {\n\n let ftype = &ns.contracts[contract_no].functions[func];\n\n\n\n if !ftype.returns.is_empty() {\n\n let mut res = Vec::new();\n\n let mut returns = Vec::new();\n", "file_path": "src/resolver/expression.rs", "rank": 42, "score": 211596.56715647978 }, { "content": "/// Resolve a parsed struct definition. The return value will be true if the entire\n\n/// definition is valid; however, whatever could be parsed will be added to the resolved\n\n/// contract, so that we can continue producing compiler messages for the remainder\n\n/// of the contract, even if the struct contains an invalid definition.\n\npub fn struct_decl(\n\n def: &pt::StructDefinition,\n\n contract_no: Option<usize>,\n\n ns: &mut Namespace,\n\n errors: &mut Vec<Output>,\n\n) -> Option<Vec<StructField>> {\n\n let mut valid = true;\n\n let mut fields: Vec<StructField> = Vec::new();\n\n\n\n for field in &def.fields {\n\n let ty = match ns.resolve_type(contract_no, false, &field.ty, errors) {\n\n Ok(s) => s,\n\n Err(()) => {\n\n valid = false;\n\n continue;\n\n }\n\n };\n\n\n\n if let Some(other) = fields.iter().find(|f| f.name == field.name.name) {\n\n errors.push(Output::error_with_note(\n", "file_path": "src/resolver/types.rs", "rank": 43, "score": 207248.32157756097 }, { "content": "/// Resolve a parsed struct definition. 
The return value will be true if the entire\n\n/// definition is valid; however, whatever could be parsed will be added to the resolved\n\n/// contract, so that we can continue producing compiler messages for the remainder\n\n/// of the contract, even if the struct contains an invalid definition.\n\npub fn struct_decl(\n\n def: &pt::StructDefinition,\n\n contract_no: Option<usize>,\n\n ns: &mut Namespace,\n\n errors: &mut Vec<Output>,\n\n) -> bool {\n\n let mut valid = true;\n\n let mut fields: Vec<StructField> = Vec::new();\n\n\n\n for field in &def.fields {\n\n let ty = match ns.resolve_type(contract_no, &field.ty, errors) {\n\n Ok(s) => s,\n\n Err(()) => {\n\n valid = false;\n\n continue;\n\n }\n\n };\n\n\n\n if let Some(other) = fields.iter().find(|f| f.name == field.name.name) {\n\n errors.push(Output::error_with_note(\n", "file_path": "src/resolver/structs.rs", "rank": 44, "score": 207248.32157756097 }, { "content": "pub fn contract_variables(\n\n def: &pt::ContractDefinition,\n\n contract_no: usize,\n\n ns: &mut Namespace,\n\n errors: &mut Vec<Output>,\n\n) -> bool {\n\n let mut broken = false;\n\n let mut vartab = Vartable::new();\n\n let mut cfg = ControlFlowGraph::new();\n\n\n\n for parts in &def.parts {\n\n if let pt::ContractPart::ContractVariableDefinition(ref s) = parts {\n\n if !var_decl(s, contract_no, ns, &mut cfg, &mut vartab, errors) {\n\n broken = true;\n\n }\n\n }\n\n }\n\n\n\n cfg.add(&mut vartab, Instr::Return { value: Vec::new() });\n\n\n\n cfg.vars = vartab.drain();\n\n\n\n ns.contracts[contract_no].initializer = cfg;\n\n\n\n broken\n\n}\n\n\n", "file_path": "src/resolver/variables.rs", "rank": 45, "score": 207235.62261557888 }, { "content": "pub fn function_decl(\n\n f: &pt::FunctionDefinition,\n\n i: usize,\n\n contract_no: usize,\n\n ns: &mut Namespace,\n\n errors: &mut Vec<Output>,\n\n) -> bool {\n\n let mut success = true;\n\n\n\n // The parser allows constructors to have return values. 
This is so that we can give a\n\n // nicer error message than \"returns unexpected\"\n\n match f.ty {\n\n pt::FunctionTy::Function => {\n\n // Function name cannot be the same as the contract name\n\n if let Some(n) = &f.name {\n\n if n.name == ns.contracts[contract_no].name {\n\n errors.push(Output::error(\n\n f.loc,\n\n \"function cannot have same name as the contract\".to_string(),\n\n ));\n", "file_path": "src/resolver/functions.rs", "rank": 46, "score": 207235.62261557888 }, { "content": "#[test]\n\nfn test_cast_errors() {\n\n let (_, errors) = parse_and_resolve(\n\n \"contract test {\n", "file_path": "tests/substrate_expressions/mod.rs", "rank": 47, "score": 206400.61557271227 }, { "content": "/// check if from creates to, recursively\n\nfn circular_reference(from: usize, to: usize, ns: &resolver::Namespace) -> bool {\n\n let creates = ns.contracts[from].creates.borrow();\n\n\n\n if creates.contains(&to) {\n\n return true;\n\n }\n\n\n\n creates.iter().any(|n| circular_reference(*n, to, &ns))\n\n}\n\n\n", "file_path": "src/resolver/expression.rs", "rank": 48, "score": 206240.6181364765 }, { "content": "pub fn load(bs: &str) -> Result<Metadata, serde_json::error::Error> {\n\n serde_json::from_str(bs)\n\n}\n\n\n", "file_path": "src/abi/substrate.rs", "rank": 49, "score": 203589.75963355825 }, { "content": "pub fn no_errors(errors: Vec<output::Output>) {\n\n assert!(\n\n errors\n\n .iter()\n\n .filter(|m| m.level == output::Level::Error)\n\n .count()\n\n == 0\n\n );\n\n}\n", "file_path": "tests/substrate.rs", "rank": 50, "score": 192273.17336579636 }, { "content": "pub fn generate_abi(contract_no: usize, ns: &Namespace, verbose: bool) -> (String, &'static str) {\n\n match ns.target {\n\n Target::Ewasm | Target::Sabre => {\n\n if verbose {\n\n eprintln!(\n\n \"info: Generating Ethereum ABI for contract {}\",\n\n ns.contracts[contract_no].name\n\n );\n\n }\n\n\n\n let abi = ethereum::gen_abi(contract_no, ns);\n\n\n\n (serde_json::to_string(&abi).unwrap(), \"abi\")\n\n }\n\n Target::Substrate => {\n\n if verbose {\n\n eprintln!(\n\n \"info: Generating Substrate ABI for contract {}\",\n\n ns.contracts[contract_no].name\n\n );\n\n }\n\n\n\n let abi = substrate::gen_abi(contract_no, ns);\n\n\n\n (serde_json::to_string_pretty(&abi).unwrap(), \"json\")\n\n }\n\n }\n\n}\n", "file_path": "src/abi/mod.rs", "rank": 51, "score": 187540.3142873748 }, { "content": "/// Do we have any errors\n\npub fn any_errors(output: &[Output]) -> bool {\n\n output.iter().any(|m| m.level == Level::Error)\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct LocJson {\n\n pub file: String,\n\n pub start: usize,\n\n pub end: usize,\n\n}\n\n\n\n#[derive(Serialize)]\n\n#[allow(non_snake_case)]\n\npub struct OutputJson {\n\n pub sourceLocation: LocJson,\n\n #[serde(rename = \"type\")]\n\n pub ty: String,\n\n pub component: String,\n\n pub severity: String,\n\n pub message: String,\n\n pub formattedMessage: String,\n\n}\n\n\n", "file_path": "src/output.rs", "rank": 52, "score": 187101.18285729285 }, { "content": "/// Resolve the return values\n\nfn resolve_returns(\n\n f: &pt::FunctionDefinition,\n\n storage_allowed: bool,\n\n contract_no: usize,\n\n ns: &mut Namespace,\n\n errors: &mut Vec<Output>,\n\n) -> (Vec<Parameter>, bool) {\n\n let mut returns = Vec::new();\n\n let mut success = true;\n\n\n\n for r in &f.returns {\n\n let r = match r {\n\n (_, Some(p)) => p,\n\n (loc, None) => {\n\n errors.push(Output::error(*loc, \"missing return type\".to_owned()));\n\n success = false;\n\n continue;\n\n }\n\n };\n\n\n", "file_path": 
"src/resolver/functions.rs", "rank": 53, "score": 186352.24197133625 }, { "content": "/// Try to convert a BigInt into a Expression::NumberLiteral. This checks for sign,\n\n/// width and creates to correct Type.\n\nfn bigint_to_expression(\n\n loc: &pt::Loc,\n\n n: &BigInt,\n\n errors: &mut Vec<Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n // Return smallest type\n\n let bits = n.bits();\n\n\n\n let int_size = if bits < 7 { 8 } else { (bits + 7) & !7 } as u16;\n\n\n\n if n.sign() == Sign::Minus {\n\n if bits > 255 {\n\n errors.push(Output::error(*loc, format!(\"{} is too large\", n)));\n\n Err(())\n\n } else {\n\n Ok((\n\n Expression::NumberLiteral(*loc, int_size, n.clone()),\n\n resolver::Type::Int(int_size),\n\n ))\n\n }\n", "file_path": "src/resolver/expression.rs", "rank": 54, "score": 186250.01470635828 }, { "content": "/// Parse if-then-no-else\n\nfn if_then(\n\n cond: &pt::Expression,\n\n then_stmt: &pt::Statement,\n\n f: &resolver::FunctionDecl,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: usize,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Vartable,\n\n loops: &mut LoopScopes,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<bool, ()> {\n\n let (expr, expr_ty) = expression(cond, cfg, Some(contract_no), ns, &mut Some(vartab), errors)?;\n\n\n\n let then = cfg.new_basic_block(\"then\".to_string());\n\n let endif = cfg.new_basic_block(\"endif\".to_string());\n\n\n\n cfg.add(\n\n vartab,\n\n Instr::BranchCond {\n\n cond: cast(\n", "file_path": "src/resolver/cfg.rs", "rank": 55, "score": 185495.19274276614 }, { "content": "/// Given an parsed literal array, ensure that it is valid. All the elements in the array\n\n/// must of the same type. The array might be a multidimensional array; all the leaf nodes\n\n/// must match.\n\nfn resolve_array_literal(\n\n loc: &pt::Loc,\n\n exprs: &[pt::Expression],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let mut dims = Box::new(Vec::new());\n\n let mut flattened = Vec::new();\n\n\n\n check_subarrays(exprs, &mut Some(&mut dims), &mut flattened, errors)?;\n\n\n\n if flattened.is_empty() {\n\n errors.push(Output::error(\n\n *loc,\n\n \"array requires at least one element\".to_string(),\n\n ));\n\n return Err(());\n", "file_path": "src/resolver/expression.rs", "rank": 56, "score": 181290.36521363386 }, { "content": "pub fn first_error(errors: Vec<output::Output>) -> String {\n\n match errors.iter().find(|m| m.level == output::Level::Error) {\n\n Some(m) => m.message.to_owned(),\n\n None => panic!(\"no errors found\"),\n\n }\n\n}\n\n\n", "file_path": "tests/substrate.rs", "rank": 57, "score": 180934.19906784428 }, { "content": "/// Resolve a statement, which might be a block of statements or an entire body of a function\n\nfn statement(\n\n stmt: &pt::Statement,\n\n f: &resolver::FunctionDecl,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: usize,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Vartable,\n\n loops: &mut LoopScopes,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<bool, ()> {\n\n match stmt {\n\n pt::Statement::VariableDefinition(_, decl, init) => {\n\n let var_ty =\n\n resolve_var_decl_ty(&decl.ty, &decl.storage, Some(contract_no), ns, errors)?;\n\n\n\n let e_t = if let Some(init) = init {\n\n let (expr, init_ty) =\n\n expression(init, cfg, Some(contract_no), ns, &mut Some(vartab), errors)?;\n\n\n\n Some(cast(\n", "file_path": 
"src/resolver/cfg.rs", "rank": 58, "score": 179458.00053751978 }, { "content": "/// Parse if-then-else\n\nfn if_then_else(\n\n cond: &pt::Expression,\n\n then_stmt: &pt::Statement,\n\n else_stmt: &pt::Statement,\n\n f: &resolver::FunctionDecl,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: usize,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Vartable,\n\n loops: &mut LoopScopes,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<bool, ()> {\n\n let (expr, expr_ty) = expression(cond, cfg, Some(contract_no), ns, &mut Some(vartab), errors)?;\n\n\n\n let then = cfg.new_basic_block(\"then\".to_string());\n\n let else_ = cfg.new_basic_block(\"else\".to_string());\n\n let endif = cfg.new_basic_block(\"endif\".to_string());\n\n\n\n cfg.add(\n\n vartab,\n", "file_path": "src/resolver/cfg.rs", "rank": 59, "score": 179452.7889393689 }, { "content": "/// Resolve an new contract expression with positional arguments\n\nfn constructor(\n\n loc: &pt::Loc,\n\n no: usize,\n\n args: &[pt::Expression],\n\n call_args: CallArgs,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n // The current contract cannot be constructed with new. In order to create\n\n // the contract, we need the code hash of the contract. Part of that code\n\n // will be code we're emitted here. So we end up with a crypto puzzle.\n\n let contract_no = match contract_no {\n\n Some(n) if n == no => {\n\n errors.push(Output::error(\n\n *loc,\n\n format!(\n\n \"new cannot construct current contract ‘{}’\",\n", "file_path": "src/resolver/expression.rs", "rank": 60, "score": 178974.06201579777 }, { "content": "/// Resolve an assignment\n\nfn assign(\n\n loc: &pt::Loc,\n\n var: &pt::Expression,\n\n e: &pt::Expression,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n // is it a destructuring assignment\n\n if let pt::Expression::List(_, var) = var {\n\n destructuring(loc, var, e, cfg, contract_no, ns, vartab, errors)\n\n } else {\n\n let (expr, expr_type) = expression(e, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n assign_single(\n\n loc,\n\n var,\n\n expr,\n\n expr_type,\n\n cfg,\n\n contract_no,\n\n ns,\n\n vartab,\n\n errors,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/resolver/expression.rs", "rank": 61, "score": 178963.61099076437 }, { "content": "/// Resolve an destructuring assignment\n\nfn destructuring(\n\n loc: &pt::Loc,\n\n var: &[(pt::Loc, Option<pt::Parameter>)],\n\n e: &pt::Expression,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let vartab = match vartab {\n\n &mut Some(ref mut tab) => tab,\n\n None => {\n\n errors.push(Output::error(\n\n *loc,\n\n \"assignment not allowed in constant context\".to_string(),\n\n ));\n\n return Err(());\n\n }\n\n };\n", "file_path": "src/resolver/expression.rs", "rank": 62, "score": 178963.49921971792 }, { "content": "/// Test for equality; first check string equality, then integer equality\n\nfn equal(\n\n loc: &pt::Loc,\n\n l: &pt::Expression,\n\n r: &pt::Expression,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n 
vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<Expression, ()> {\n\n let (left, left_type) = expression(l, cfg, contract_no, ns, vartab, errors)?;\n\n let (right, right_type) = expression(r, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n // Comparing stringliteral against stringliteral\n\n if let (Expression::BytesLiteral(_, l), Expression::BytesLiteral(_, r)) = (&left, &right) {\n\n return Ok(Expression::BoolLiteral(*loc, l == r));\n\n }\n\n\n\n // compare string against literal\n\n match (&left, &right_type.deref()) {\n", "file_path": "src/resolver/expression.rs", "rank": 63, "score": 178957.78748804983 }, { "content": "/// Try string concatenation\n\nfn addition(\n\n loc: &pt::Loc,\n\n l: &pt::Expression,\n\n r: &pt::Expression,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (left, left_type) = expression(l, cfg, contract_no, ns, vartab, errors)?;\n\n let (right, right_type) = expression(r, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n // Concatenate stringliteral with stringliteral\n\n if let (Expression::BytesLiteral(_, l), Expression::BytesLiteral(_, r)) = (&left, &right) {\n\n let mut c = Vec::with_capacity(l.len() + r.len());\n\n c.extend_from_slice(l);\n\n c.extend_from_slice(r);\n\n let length = c.len();\n\n return Ok((\n", "file_path": "src/resolver/expression.rs", "rank": 64, "score": 178957.78748804983 }, { "content": "fn coerce(\n\n l: &resolver::Type,\n\n l_loc: &pt::Loc,\n\n r: &resolver::Type,\n\n r_loc: &pt::Loc,\n\n ns: &resolver::Namespace,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<resolver::Type, ()> {\n\n let l = match l {\n\n resolver::Type::Ref(ty) => ty,\n\n resolver::Type::StorageRef(ty) => ty,\n\n _ => l,\n\n };\n\n let r = match r {\n\n resolver::Type::Ref(ty) => ty,\n\n resolver::Type::StorageRef(ty) => ty,\n\n _ => r,\n\n };\n\n\n\n if *l == *r {\n\n return Ok(l.clone());\n\n }\n\n\n\n coerce_int(l, l_loc, r, r_loc, true, ns, errors)\n\n}\n\n\n", "file_path": "src/resolver/expression.rs", "rank": 65, "score": 178957.78748804983 }, { "content": "struct CallArgs {\n\n gas: Box<Expression>,\n\n salt: Option<Box<Expression>>,\n\n value: Option<Box<Expression>>,\n\n}\n\n\n", "file_path": "src/resolver/expression.rs", "rank": 66, "score": 178861.38557868009 }, { "content": "#[test]\n\nfn local_destructure_call() {\n\n let mut runtime = build_solidity(\n\n r##\"\n\n contract c {\n", "file_path": "tests/substrate_calls/mod.rs", "rank": 67, "score": 178460.58424715349 }, { "content": "// For a given primitive, give the name as Substrate would like it (i.e. 64 bits\n\n// signed int is i64, not int64).\n\nfn primitive_to_string(ty: resolver::Type) -> String {\n\n match ty {\n\n resolver::Type::Bool => \"bool\".into(),\n\n resolver::Type::Uint(n) => format!(\"u{}\", n),\n\n resolver::Type::Int(n) => format!(\"i{}\", n),\n\n resolver::Type::Contract(_) | resolver::Type::Address(_) => \"address\".into(),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/abi/substrate.rs", "rank": 68, "score": 177783.14855355606 }, { "content": "/// Returns an address in https://github.com/ethereum/EIPs/blob/master/EIPS/eip-55.md format\n\n/// Assumes the src is hex number, starting with 0x, no underscores and 40 hexdigits long,\n\n/// i.e. 
an ethereum address.\n\npub fn to_hexstr_eip55(src: &str) -> String {\n\n assert!(src.starts_with(\"0x\"));\n\n assert!(src.chars().skip(2).all(|c| c.is_ascii_hexdigit()));\n\n\n\n let address: String = src\n\n .chars()\n\n .skip(2)\n\n .map(|c| c.to_ascii_lowercase())\n\n .collect();\n\n\n\n let hash = keccak256(address.as_bytes());\n\n\n\n \"0x\".chars()\n\n .chain(address.chars().enumerate().map(|(i, c)| {\n\n match c {\n\n '0'..='9' => c,\n\n 'a'..='f' => {\n\n // hash is 32 bytes; find the i'th \"nibble\"\n\n let nibble = hash[i >> 1] >> if (i & 1) != 0 { 0 } else { 4 };\n\n\n", "file_path": "src/resolver/address.rs", "rank": 69, "score": 177607.41633572383 }, { "content": " function set(bytes index, int64 val) public {\n\n v[index] = val;\n\n }\n\n\n", "file_path": "tests/substrate_mappings/mod.rs", "rank": 70, "score": 175233.86417835084 }, { "content": " function set(uint64 index, int32 val) public {\n\n v[index] = val;\n\n }\n\n\n", "file_path": "tests/substrate_mappings/mod.rs", "rank": 71, "score": 175233.86417835084 }, { "content": " function set(bar index, int32 val) public {\n\n v[index] = val;\n\n }\n\n\n", "file_path": "tests/substrate_mappings/mod.rs", "rank": 72, "score": 175233.86417835084 }, { "content": "#[test]\n\nfn array_push_delete() {\n\n // ensure that structs and fixed arrays are wiped by delete\n\n let mut runtime = build_solidity(\n\n r#\"\n\n pragma solidity 0;\n\n\n\n contract foo {\n\n uint32[] bar;\n\n\n", "file_path": "tests/ewasm.rs", "rank": 73, "score": 175148.0221085315 }, { "content": "fn var_decl(\n\n s: &pt::ContractVariableDefinition,\n\n contract_no: usize,\n\n ns: &mut Namespace,\n\n cfg: &mut ControlFlowGraph,\n\n vartab: &mut Vartable,\n\n errors: &mut Vec<Output>,\n\n) -> bool {\n\n let ty = match ns.resolve_type(Some(contract_no), false, &s.ty, errors) {\n\n Ok(s) => s,\n\n Err(()) => {\n\n return false;\n\n }\n\n };\n\n\n\n let mut is_constant = false;\n\n let mut visibility: Option<pt::Visibility> = None;\n\n\n\n for attr in &s.attrs {\n\n match &attr {\n", "file_path": "src/resolver/variables.rs", "rank": 74, "score": 174074.88111498425 }, { "content": "/// Resolve try catch statement\n\nfn try_catch(\n\n try: &pt::Statement,\n\n f: &resolver::FunctionDecl,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: usize,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Vartable,\n\n loops: &mut LoopScopes,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<bool, ()> {\n\n if let pt::Statement::Try(_, expr, returns_and_ok, error_stmt, catch_stmt) = &try {\n\n let mut expr = expr;\n\n let mut ok = None;\n\n\n\n while let pt::Expression::FunctionCallBlock(_, e, block) = expr {\n\n if ok.is_some() {\n\n errors.push(Output::error(\n\n block.loc(),\n\n \"unexpected code block\".to_string(),\n\n ));\n", "file_path": "src/resolver/cfg.rs", "rank": 75, "score": 173901.45665438173 }, { "content": "/// Resolve a function call with positional arguments\n\nfn struct_literal(\n\n loc: &pt::Loc,\n\n struct_no: usize,\n\n args: &[pt::Expression],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let struct_def = &ns.structs[struct_no];\n\n\n\n if args.len() != struct_def.fields.len() {\n\n errors.push(Output::error(\n\n *loc,\n\n format!(\n\n \"struct ‘{}’ has {} fields, not {}\",\n\n struct_def.name,\n\n struct_def.fields.len(),\n\n args.len()\n", "file_path": 
"src/resolver/expression.rs", "rank": 76, "score": 173437.3511518474 }, { "content": "/// Resolve an array subscript expression\n\nfn member_access(\n\n loc: &pt::Loc,\n\n e: &pt::Expression,\n\n id: &pt::Identifier,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n // is of the form \"contract_name.enum_name.enum_value\"\n\n if let pt::Expression::MemberAccess(_, e, enum_name) = e {\n\n if let pt::Expression::Variable(contract_name) = e.as_ref() {\n\n if let Some(contract_no) = ns.resolve_contract(contract_name) {\n\n if let Some(e) = ns.resolve_enum(Some(contract_no), enum_name) {\n\n return match ns.enums[e].values.get(&id.name) {\n\n Some((_, val)) => Ok((\n\n Expression::NumberLiteral(\n\n *loc,\n\n ns.enums[e].ty.bits(ns),\n", "file_path": "src/resolver/expression.rs", "rank": 77, "score": 173431.9368451508 }, { "content": "/// Resolve an array subscript expression\n\nfn array_subscript(\n\n loc: &pt::Loc,\n\n array: &pt::Expression,\n\n index: &pt::Expression,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let (mut array_expr, array_ty) = expression(array, cfg, contract_no, ns, vartab, errors)?;\n\n\n\n if array_ty.is_mapping() {\n\n return mapping_subscript(\n\n loc,\n\n array_expr,\n\n &array_ty,\n\n index,\n\n cfg,\n\n contract_no,\n", "file_path": "src/resolver/expression.rs", "rank": 78, "score": 173431.9368451508 }, { "content": "/// Resolve an assignment\n\nfn assign_single(\n\n loc: &pt::Loc,\n\n var: &pt::Expression,\n\n expr: Expression,\n\n expr_type: resolver::Type,\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n match var {\n\n pt::Expression::Variable(id) => {\n\n let vartab = match vartab {\n\n &mut Some(ref mut tab) => tab,\n\n None => {\n\n errors.push(Output::error(\n\n *loc,\n\n format!(\n\n \"cannot access variable ‘{}’ in constant expression\",\n", "file_path": "src/resolver/expression.rs", "rank": 79, "score": 173426.55958866372 }, { "content": "fn coerce_int(\n\n l: &resolver::Type,\n\n l_loc: &pt::Loc,\n\n r: &resolver::Type,\n\n r_loc: &pt::Loc,\n\n allow_bytes: bool,\n\n ns: &resolver::Namespace,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<resolver::Type, ()> {\n\n let l = match l {\n\n resolver::Type::Ref(ty) => ty,\n\n resolver::Type::StorageRef(ty) => ty,\n\n _ => l,\n\n };\n\n let r = match r {\n\n resolver::Type::Ref(ty) => ty,\n\n resolver::Type::StorageRef(ty) => ty,\n\n _ => r,\n\n };\n\n\n", "file_path": "src/resolver/expression.rs", "rank": 80, "score": 173420.73608594917 }, { "content": "/// Compile a solidity file to list of wasm files and their ABIs. The filename is only used for error messages;\n\n/// the contents of the file is provided in the `src` argument.\n\n///\n\n/// This function only produces a single contract and abi, which is compiled for the `target` specified. 
Any\n\n/// compiler warnings, errors and informational messages are also provided.\n\n///\n\n/// The ctx is the inkwell llvm context.\n\npub fn compile(\n\n src: &str,\n\n filename: &str,\n\n opt: OptimizationLevel,\n\n target: Target,\n\n) -> (Vec<(Vec<u8>, String)>, Vec<output::Output>) {\n\n let ctx = inkwell::context::Context::create();\n\n\n\n let ast = match parser::parse(src) {\n\n Ok(s) => s,\n\n Err(errors) => {\n\n return (Vec::new(), errors);\n\n }\n\n };\n\n\n\n // resolve\n\n let (ns, errors) = match resolver::resolver(ast, target) {\n\n (None, errors) => {\n\n return (Vec::new(), errors);\n\n }\n", "file_path": "src/lib.rs", "rank": 81, "score": 171969.48918832449 }, { "content": " function set_index(uint32 index, bytes1 val) public returns (bytes) {\n\n bytes bar = hex\"aabbccddeeff\";\n\n\n\n bar[index] = val;\n\n\n\n return bar;\n\n }\n\n }\"##,\n\n );\n\n\n\n runtime.constructor(0, Vec::new());\n\n\n\n runtime.function(\"set_index\", Arg(1, 0x33).encode());\n\n\n\n assert_eq!(\n\n runtime.vm.scratch,\n\n Ret(vec!(0xaa, 0x33, 0xcc, 0xdd, 0xee, 0xff)).encode()\n\n );\n\n\n\n let mut runtime = build_solidity(\n\n r##\"\n\n contract foo {\n", "file_path": "tests/substrate_strings/mod.rs", "rank": 82, "score": 171546.91916208967 }, { "content": "#[test]\n\nfn storage_delete() {\n\n let (_, errors) = parse_and_resolve(\n\n r#\"\n\n contract foo {\n\n int32[] bar;\n\n\n", "file_path": "tests/substrate_arrays/mod.rs", "rank": 83, "score": 169884.22909021185 }, { "content": "/// Traverse the literal looking for sub arrays. Ensure that all the sub\n\n/// arrays are the same length, and returned a flattened array of elements\n\nfn check_subarrays<'a>(\n\n exprs: &'a [pt::Expression],\n\n dims: &mut Option<&mut Vec<u32>>,\n\n flatten: &mut Vec<&'a pt::Expression>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(), ()> {\n\n if let Some(pt::Expression::ArrayLiteral(_, first)) = exprs.get(0) {\n\n // ensure all elements are array literals of the same length\n\n check_subarrays(first, dims, flatten, errors)?;\n\n\n\n for (i, e) in exprs.iter().enumerate().skip(1) {\n\n if let pt::Expression::ArrayLiteral(_, other) = e {\n\n if other.len() != first.len() {\n\n errors.push(Output::error(\n\n e.loc(),\n\n format!(\n\n \"array elements should be identical, sub array {} has {} elements rather than {}\", i + 1, other.len(), first.len()\n\n ),\n\n ));\n\n return Err(());\n", "file_path": "src/resolver/expression.rs", "rank": 84, "score": 169869.63112294406 }, { "content": "#[test]\n\nfn args_and_returns() {\n\n #[derive(Debug, PartialEq, Encode, Decode)]\n\n struct Val32(i32);\n\n\n\n let src = \"\n\n contract args {\n", "file_path": "tests/substrate_functions/mod.rs", "rank": 85, "score": 169537.6842510486 }, { "content": "#[test]\n\nfn abi_call_return_test() {\n\n let mut runtime = build_solidity(\n\n r##\"\n\ncontract test {\n", "file_path": "tests/ewasm.rs", "rank": 86, "score": 169366.7410716142 }, { "content": "/// Resolve a struct literal with named fields\n\nfn named_struct_literal(\n\n loc: &pt::Loc,\n\n struct_no: usize,\n\n args: &[pt::NamedArgument],\n\n cfg: &mut ControlFlowGraph,\n\n contract_no: Option<usize>,\n\n ns: &resolver::Namespace,\n\n vartab: &mut Option<&mut Vartable>,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(Expression, resolver::Type), ()> {\n\n let struct_def = &ns.structs[struct_no];\n\n\n\n if args.len() != struct_def.fields.len() {\n\n errors.push(Output::error(\n\n *loc,\n\n format!(\n\n \"struct ‘{}’ has {} fields, not {}\",\n\n 
struct_def.name,\n\n struct_def.fields.len(),\n\n args.len()\n", "file_path": "src/resolver/expression.rs", "rank": 87, "score": 168316.8700523829 }, { "content": "fn get_int_length(\n\n l: &resolver::Type,\n\n l_loc: &pt::Loc,\n\n allow_bytes: bool,\n\n ns: &resolver::Namespace,\n\n errors: &mut Vec<output::Output>,\n\n) -> Result<(u16, bool), ()> {\n\n match l {\n\n resolver::Type::Uint(n) => Ok((*n, false)),\n\n resolver::Type::Int(n) => Ok((*n, true)),\n\n resolver::Type::Bytes(n) if allow_bytes => Ok((*n as u16 * 8, false)),\n\n resolver::Type::Enum(n) => {\n\n errors.push(Output::error(\n\n *l_loc,\n\n format!(\"type enum {} not allowed\", ns.enums[*n].print_to_string(),),\n\n ));\n\n Err(())\n\n }\n\n resolver::Type::Struct(n) => {\n\n errors.push(Output::error(\n", "file_path": "src/resolver/expression.rs", "rank": 88, "score": 168311.36946723214 }, { "content": "/// Get the hex digits for an escaped \\x or \\u. Returns either the value or\n\n/// or the offset of the last character\n\nfn get_digits(input: &mut std::str::CharIndices, len: usize) -> Result<u32, usize> {\n\n let mut n = 0;\n\n let offset;\n\n\n\n for _ in 0..len {\n\n if let Some((_, ch)) = input.next() {\n\n if let Some(v) = ch.to_digit(16) {\n\n n = (n << 4) + v;\n\n continue;\n\n }\n\n offset = match input.next() {\n\n Some((i, _)) => i,\n\n None => std::usize::MAX,\n\n };\n\n } else {\n\n offset = std::usize::MAX;\n\n }\n\n\n\n return Err(offset);\n\n }\n\n\n\n Ok(n)\n\n}\n\n\n", "file_path": "src/resolver/expression.rs", "rank": 89, "score": 167865.9082622988 }, { "content": "pub fn first_warning(errors: Vec<output::Output>) -> String {\n\n match errors.iter().find(|m| m.level == output::Level::Warning) {\n\n Some(m) => m.message.to_owned(),\n\n None => panic!(\"no warnings found\"),\n\n }\n\n}\n\n\n", "file_path": "tests/substrate.rs", "rank": 90, "score": 167032.52974612074 }, { "content": "#[test]\n\nfn test_cast_errors() {\n\n let (_, errors) = parse_and_resolve(\n\n \"contract test {\n\n enum state { foo, bar, baz }\n", "file_path": "tests/substrate_enums/mod.rs", "rank": 91, "score": 165167.68565946788 }, { "content": "#[test]\n\nfn storage_ref_var() {\n\n let mut runtime = build_solidity(\n\n r##\"\n\n contract storage_refs {\n\n int32[10] a;\n\n int32[10] b;\n\n \n", "file_path": "tests/substrate_arrays/mod.rs", "rank": 92, "score": 165094.8651748742 }, { "content": "#[test]\n\nfn storage_ref_arg() {\n\n let mut runtime = build_solidity(\n\n r##\"\n\n contract storage_refs {\n\n int32[10] a;\n\n int32[10] b;\n\n \n", "file_path": "tests/substrate_arrays/mod.rs", "rank": 93, "score": 165080.31755024087 }, { "content": "#[test]\n\nfn named_argument_call() {\n\n let src = \"\n\n contract args {\n", "file_path": "tests/substrate_functions/mod.rs", "rank": 94, "score": 165006.131170489 }, { "content": "#[test]\n\nfn positional_argument_call() {\n\n let src = \"\n\n contract args {\n", "file_path": "tests/substrate_functions/mod.rs", "rank": 95, "score": 165006.131170489 }, { "content": "#[test]\n\nfn storage_ref_returns() {\n\n let mut runtime = build_solidity(\n\n r##\"\n\n contract storage_refs {\n\n int32[10] a;\n\n int32[10] b;\n\n \n", "file_path": "tests/substrate_arrays/mod.rs", "rank": 96, "score": 164689.85130011983 }, { "content": "#[test]\n\nfn return_from_struct_storage() {\n\n #[derive(Debug, PartialEq, Encode, Decode)]\n\n struct Foo {\n\n f1: [u8; 3],\n\n f2: u32,\n\n };\n\n\n\n let mut runtime = build_solidity(\n\n r##\"\n\n struct foo {\n\n bytes3 f1;\n\n uint32 f2;\n\n }\n\n contract 
test_struct_parsing {\n\n foo bar;\n\n\n\n constructor() public {\n\n bar.f1 = \"png\";\n\n bar.f2 = 0x89abcdef;\n\n }\n\n\n", "file_path": "tests/substrate_structs/mod.rs", "rank": 97, "score": 164689.85130011983 }, { "content": "#[test]\n\nfn abi_call_pass_return_test() {\n\n let mut runtime = build_solidity(\n\n r##\"\n\n contract x {\n", "file_path": "tests/ewasm.rs", "rank": 98, "score": 164598.10673518424 }, { "content": " function or(uint32 index, bytes1 val) public returns (bytes) {\n\n bytes bar = hex\"deadcafe\";\n\n\n\n bar[index] |= val;\n\n\n\n return bar;\n\n }\n\n\n", "file_path": "tests/substrate_strings/mod.rs", "rank": 99, "score": 164153.71900858567 } ]
Rust
src/main.rs
drgmr/t8bar
eb0d8db31b5e7aba58ac439ae06cd6ca6dad01ca
#[macro_use] extern crate log; use std::env; use std::fs::File; use std::io::{copy, Read, Write}; use std::path::PathBuf; use std::process::{Command, Stdio}; use rubrail::ItemId; use rubrail::TTouchbar; use rubrail::Touchbar; use serde::Deserialize; #[derive(Debug, Deserialize)] struct Target { hostname: String, github: String, } fn main() { fruitbasket::create_logger(".t8bar.log", fruitbasket::LogDir::Home, 5, 2).unwrap(); let mut nsapp = fruitbasket::Trampoline::new("t8bar", "t8bar", "com.drgmr.t8bar") .version(env!("CARGO_PKG_VERSION")) .plist_key("LSBackgroundOnly", "1") .build(fruitbasket::InstallDir::Custom("target/".to_string())) .unwrap(); nsapp.set_activation_policy(fruitbasket::ActivationPolicy::Prohibited); let stopper = nsapp.stopper(); let mut touchbar = Touchbar::alloc("t8bar"); setup(&mut touchbar, stopper); nsapp .run(fruitbasket::RunPeriod::Forever) .expect("Failed to launch app"); } fn setup(touchbar: &mut Touchbar, stopper: fruitbasket::FruitStopper) { let targets = targets_from_config(); let mut root_bar = touchbar.create_bar(); let mut button_ids = Vec::<ItemId>::new(); let quit_stopper = stopper.clone(); let quit_button_id = touchbar.create_button( None, Some("Quit"), Box::new(move |_| { info!("Exit requested by user"); quit_stopper.stop(); }), ); button_ids.push(quit_button_id); let image_base_path = PathBuf::from(env::var_os("TMPDIR").unwrap()); for target in targets { info!("Building data for {} - {}", target.hostname, target.github); let filepath = image_base_path .clone() .join(format!("{}.png", target.github)); let mut image_file = File::create(filepath.clone()).unwrap(); let mut request = reqwest::get(&format!("https://github.com/{}.png", target.github)).unwrap(); copy(&mut request, &mut image_file).unwrap(); let image = touchbar.create_image_from_path(filepath.to_str().unwrap()); let hostname = target.hostname.clone(); let target_button_id = touchbar.create_button( Some(&image), None, Box::new(move |_| { info!("Button clicked - hostname: {}", hostname); let child = Command::new("osascript") .stdin(Stdio::piped()) .spawn() .unwrap(); info!("Spawned osascript"); let mut stdin = child.stdin.unwrap(); let script = format!( r#"tell application "Screen Sharing" activate tell application "System Events" keystroke "{}.local" keystroke return delay 1 tell application "System Events" click (radio button 1 of radio group 1 of window 1) of application process "Screen Sharing" keystroke return end tell end tell end tell"#, hostname); info!("Sending script"); stdin.write(&script.as_bytes()).unwrap(); info!("Done"); }), ); touchbar.update_button_width(&target_button_id, 50); button_ids.push(target_button_id); } info!("Done building data for buttons"); touchbar.add_items_to_bar(&mut root_bar, button_ids); touchbar.set_bar_as_root(root_bar); } fn targets_from_config() -> Vec<Target> { let home_path = env::var_os("HOME").unwrap(); let config_path = PathBuf::from(home_path) .join(".config") .join("t8bar") .join("config.json"); info!("Expected config path: {:?}", config_path); let mut file = File::open(config_path).unwrap(); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); let result: Vec<Target> = serde_json::from_str(&contents).unwrap(); info!("Configuration acquired: {:#?}", result); result }
#[macro_use] extern crate log; use std::env; use std::fs::File; use std::io::{copy, Read, Write}; use std::path::PathBuf; use std::process::{Command, Stdio}; use rubrail::ItemId; use rubrail::TTouchbar; use rubrail::Touchbar; use serde::Deserialize; #[derive(Debug, Deserialize)] struct Target { hostname: String, github: String, } fn main() { fruitbasket::create_logger(".t8bar.log", fruitbasket::LogDir::Home, 5, 2).unwrap(); let mut nsapp = fruitbasket::Trampoline::new("t8bar", "t8bar", "com.drgmr.t8bar") .version(env!("CARGO_PKG_VERSION")) .plist_key("LSBackgroundOnly", "1") .build(fruitbasket::InstallDir::Custom("target/".to_string())) .unwrap(); nsapp.set_activation_policy(fruitbasket::ActivationPolicy::Prohibited); let stopper = nsapp.stopper(); let mut touchbar = Touchbar::alloc("t8bar"); setup(&mut touchbar, stopper); nsapp .run(fruitbasket::RunPeriod::Forever) .expect("Failed to launch app"); } fn setup(touchbar: &mut Touchbar, stopper: fruitbasket::FruitStopper) { let targets = targets_from_config(); let mut root_bar = touchbar.create_bar(); let mut button_ids = Vec::<ItemId>::new(); let quit_stopper = stopper.clone(); let quit_button_id = touchbar.create_button( None, Some("Quit"), Box::new(move |_| { info!("Exit requested by user"); quit_stopper.stop(); }), ); button_ids.push(quit_button_id); let image_base_path = PathBuf::from(env::var_os("TMPDIR").unwrap()); for target in targets { info!("Building data for {} - {}", target.hostname, target.github); let filepath = image_base_path .clone() .join(format!("{}.png", target.github)); let mut image_file = File::create(filepath.clone()).unwrap(); let mut request = reqwest::get(&format!("https://github.com/{}.png", target.github)).unwrap(); copy(&mut request, &mut image_file).unwrap(); let image = touchbar.create_image_from_path(filepath.to_str().unwrap()); let hostname = target.hostname.clone(); let target_button_id = touchbar.create_button( Some(&image), None, Box::new(move |_| { info!("Button clicked - hostname: {}", hostname); let child = Command::new("osascript") .stdin(Stdio::piped()) .spawn() .unwrap(); info!("Spawned osascript"); let mut stdin = child.stdin.unwrap(); let script = format!( r#"tell application "Screen Sharing" activate tell application "System Events" keystroke "{}.local" keystroke return delay 1 tell application "System Events" click (radio button 1 of radio group 1 of window 1) of application process "Screen Sharing" keystroke return end tell end tell end tell"#, hostname); info!("Sending script"); stdin.write(&script.as_bytes()).unwrap(); info!("Done"); }), ); touchbar.update_button_width(&target_button_id, 50); button_ids.push(target_button_id); } info!("Done building data for buttons"); touchbar.add_items_to_bar(&mut root_bar, button_ids); touchbar.set_bar_as_root(root_bar); } fn targets_from_config() -> Vec<Target> { let home_path = env::var_os("HOME").unwrap(); let config_path = PathBuf::from(home_path) .join(".config") .join("t8bar") .join("config.json"); info!("Expected config path: {:?}", config_path); let mut file = File::open(config_path).unwra
p(); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); let result: Vec<Target> = serde_json::from_str(&contents).unwrap(); info!("Configuration acquired: {:#?}", result); result }
function_block-function_prefixed
[ { "content": "# t8bar\n\n\n\nA Screen Sharing touch bar utility.\n\n\n\n![Example Image](/media/screenshot.png)\n\n\n\n## Current State\n\n\n\nRelatively simple but arguably bad code is working - this was mostly an\n\nexperiment of building something that's actually useful using Rust. Code\n\nimprovements and bug fixes will come - eventually.\n\n\n\n## Usage\n\n\n\nCreate your configuration file with local hostnames and GitHub names of your\n\n mates, run the binary and enjoy.\n\n\n\nIn `~/.config/t8bar/config.json`:\n\n```json\n\n[\n\n {\n\n \"hostname\": \"cunha.local\",\n\n \"github\": \"drgmr\"\n\n }\n\n]\n\n```\n\n\n\n## License\n\n\n\nThe MIT License (MIT)\n\n\n\nCopyright (c) 2019 Eduardo Cunha\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\n\nthis software and associated documentation files (the \"Software\"), to deal in\n\nthe Software without restriction, including without limitation the rights to\n\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\n\nthe Software, and to permit persons to whom the Software is furnished to do so,\n\nsubject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\nFOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\n\nCOPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\nIN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\nCONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "README.md", "rank": 4, "score": 13456.922323937228 } ]
Rust
logger/src/lib.rs
graham/trillium
eac4620156275b5eec41744bd2015c26208a3bab
#![forbid(unsafe_code)] #![warn( rustdoc::missing_crate_level_docs, missing_docs, nonstandard_style, unused_qualifications )] /*! Welcome to the trillium logger! */ pub use crate::formatters::{apache_combined, apache_common, dev_formatter}; use std::fmt::Display; use trillium::{async_trait, Conn, Handler, Info}; /** Components with which common log formats can be constructed */ pub mod formatters; /** A configuration option that determines if format will be colorful. The default is [`ColorMode::Auto`], which only enables color if stdout is detected to be a shell terminal (tty). If this detection is incorrect, you can explicitly set it to [`ColorMode::On`] or [`ColorMode::Off`] **Note**: The actual colorization of output is determined by the log formatters, so it is possible for this to be correctly enabled but for the output to have no colored components. */ #[derive(Clone, Copy, Debug)] #[non_exhaustive] pub enum ColorMode { Auto, On, Off, } impl ColorMode { pub(crate) fn is_enabled(&self) -> bool { match self { ColorMode::Auto => atty::is(atty::Stream::Stdout), ColorMode::On => true, ColorMode::Off => false, } } } impl Default for ColorMode { fn default() -> Self { Self::Auto } } /** Specifies where the logger output should be sent The default is [`Target::Stdout`]. */ #[derive(Clone, Copy, Debug)] #[non_exhaustive] pub enum Target { /** Send trillium logger output to a log crate backend. See [`log`] for output options */ Logger(log::Level), /** Send trillium logger output to stdout */ Stdout, } impl Target { pub(crate) fn write(&self, data: impl Display) { match self { Target::Logger(level) => { log::log!(*level, "{}", data); } Target::Stdout => { println!("{}", data); } } } } impl Default for Target { fn default() -> Self { Self::Stdout } } /** The interface to format a &[`Conn`] as a [`Display`]-able output In general, the included loggers provide a mechanism for composing these, so top level formats like [`dev_formatter`], [`apache_common`] and [`apache_combined`] are composed in terms of component formatters like [`formatters::method`], [`formatters::ip`], [`formatters::timestamp`], and many others (see [`formatters`] for a full list) When implementing this trait, note that [`Display::fmt`] is called on [`LogFormatter::Output`] _after_ the response has been fully sent, but that the [`LogFormatter::format`] is called _before_ the response has been sent. If you need to perform timing-sensitive calculations that represent the full http cycle, move whatever data is needed to make the calculation into a new type that implements Display, ensuring that it is calculated at the right time. ## Implementations ### Tuples LogFormatter is implemented for all tuples of other LogFormatter types, from 2-26 formatters long. The output of these formatters is concatenated with no space between. ### `&'static str` LogFormatter is implemented for &'static str, allowing for interspersing spaces and other static formatting details into tuples. ```rust use trillium_logger::{Logger, formatters}; let handler = Logger::new() .with_formatter(("-> ", formatters::method, " ", formatters::url)); ``` ### `Fn(&Conn, bool) -> impl Display` LogFormatter is implemented for all functions that conform to this signature. 
```rust # use trillium_logger::{Logger, dev_formatter}; # use trillium::Conn; # use std::borrow::Cow; # struct User(String); impl User { fn name(&self) -> &str { &self.0 } } fn user(conn: &Conn, color: bool) -> Cow<'static, str> { match conn.state::<User>() { Some(user) => String::from(user.name()).into(), None => "guest".into() } } let handler = Logger::new().with_formatter((dev_formatter, " ", user)); ``` */ pub trait LogFormatter: Send + Sync + 'static { /** The display type for this formatter For a simple formatter, this will likely be a String, or even better, a lightweight type that implements Display. */ type Output: Display + Send + Sync + 'static; /** Extract Output from this Conn */ fn format(&self, conn: &Conn, color: bool) -> Self::Output; } /** The trillium handler for this crate, and the core type */ pub struct Logger<F> { format: F, color_mode: ColorMode, target: Target, } impl Logger<()> { /** Builds a new logger Defaults: * formatter: [`dev_formatter`] * color mode: [`ColorMode::Auto`] * target: [`Target::Stdout`] */ pub fn new() -> Logger<impl LogFormatter> { Logger { format: dev_formatter, color_mode: ColorMode::Auto, target: Target::Stdout, } } } impl<T> Logger<T> { /** replace the formatter with any type that implements [`LogFormatter`] see the trait documentation for [`LogFormatter`] for more details. note that this can be chained with [`Logger::with_target`] and [`Logger::with_color_mode`] ``` use trillium_logger::{Logger, apache_common}; Logger::new().with_formatter(apache_common("-", "-")); ``` */ pub fn with_formatter<Formatter: LogFormatter>( self, formatter: Formatter, ) -> Logger<Formatter> { Logger { format: formatter, color_mode: self.color_mode, target: self.target, } } } impl<F: LogFormatter> Logger<F> { /** specify the color mode for this logger. see [`ColorMode`] for more details. note that this can be chained with [`Logger::with_target`] and [`Logger::with_formatter`] ``` use trillium_logger::{Logger, ColorMode}; Logger::new().with_color_mode(ColorMode::On); ``` */ pub fn with_color_mode(mut self, color_mode: ColorMode) -> Self { self.color_mode = color_mode; self } /** specify the logger target see [`Target`] for more details. note that this can be chained with [`Logger::with_color_mode`] and [`Logger::with_formatter`] ``` use trillium_logger::{Logger, Target}; Logger::new().with_target(Target::Logger(log::Level::Info)); ``` */ pub fn with_target(mut self, target: Target) -> Self { self.target = target; self } } struct LoggerWasRun; #[async_trait] impl<F> Handler for Logger<F> where F: LogFormatter, { async fn init(&mut self, info: &mut Info) { self.target.write(&format!( " 🌱🦀🌱 {} started Listening at {}{} Control-C to quit", info.server_description(), info.listener_description(), info.tcp_socket_addr() .map(|s| format!(" (bound as tcp://{})", s)) .unwrap_or_default(), )); } async fn run(&self, conn: Conn) -> Conn { conn.with_state(LoggerWasRun) } async fn before_send(&self, mut conn: Conn) -> Conn { if conn.state::<LoggerWasRun>().is_some() { let target = self.target; let output = self.format.format(&conn, self.color_mode.is_enabled()); conn.inner_mut().after_send(move |_| target.write(output)); } conn } } pub fn logger() -> Logger<impl LogFormatter> { Logger::new() }
#![forbid(unsafe_code)] #![warn( rustdoc::missing_crate_level_docs, missing_docs, nonstandard_style, unused_qualifications )] /*! Welcome to the trillium logger! */ pub use crate::formatters::{apache_combined, apache_common, dev_formatter}; use std::fmt::Display; use trillium::{async_trait, Conn, Handler, Info}; /** Components with which common log formats can be constructed */ pub mod formatters; /** A configuration option that determines if format will be colorful. The default is [`ColorMode::Auto`], which only enables color if stdout is detected to be a shell terminal (tty). If this detection is incorrect, you can explicitly set it
Self { self.color_mode = color_mode; self } /** specify the logger target see [`Target`] for more details. note that this can be chained with [`Logger::with_color_mode`] and [`Logger::with_formatter`] ``` use trillium_logger::{Logger, Target}; Logger::new().with_target(Target::Logger(log::Level::Info)); ``` */ pub fn with_target(mut self, target: Target) -> Self { self.target = target; self } } struct LoggerWasRun; #[async_trait] impl<F> Handler for Logger<F> where F: LogFormatter, { async fn init(&mut self, info: &mut Info) { self.target.write(&format!( " 🌱🦀🌱 {} started Listening at {}{} Control-C to quit", info.server_description(), info.listener_description(), info.tcp_socket_addr() .map(|s| format!(" (bound as tcp://{})", s)) .unwrap_or_default(), )); } async fn run(&self, conn: Conn) -> Conn { conn.with_state(LoggerWasRun) } async fn before_send(&self, mut conn: Conn) -> Conn { if conn.state::<LoggerWasRun>().is_some() { let target = self.target; let output = self.format.format(&conn, self.color_mode.is_enabled()); conn.inner_mut().after_send(move |_| target.write(output)); } conn } } pub fn logger() -> Logger<impl LogFormatter> { Logger::new() }
to [`ColorMode::On`] or [`ColorMode::Off`] **Note**: The actual colorization of output is determined by the log formatters, so it is possible for this to be correctly enabled but for the output to have no colored components. */ #[derive(Clone, Copy, Debug)] #[non_exhaustive] pub enum ColorMode { Auto, On, Off, } impl ColorMode { pub(crate) fn is_enabled(&self) -> bool { match self { ColorMode::Auto => atty::is(atty::Stream::Stdout), ColorMode::On => true, ColorMode::Off => false, } } } impl Default for ColorMode { fn default() -> Self { Self::Auto } } /** Specifies where the logger output should be sent The default is [`Target::Stdout`]. */ #[derive(Clone, Copy, Debug)] #[non_exhaustive] pub enum Target { /** Send trillium logger output to a log crate backend. See [`log`] for output options */ Logger(log::Level), /** Send trillium logger output to stdout */ Stdout, } impl Target { pub(crate) fn write(&self, data: impl Display) { match self { Target::Logger(level) => { log::log!(*level, "{}", data); } Target::Stdout => { println!("{}", data); } } } } impl Default for Target { fn default() -> Self { Self::Stdout } } /** The interface to format a &[`Conn`] as a [`Display`]-able output In general, the included loggers provide a mechanism for composing these, so top level formats like [`dev_formatter`], [`apache_common`] and [`apache_combined`] are composed in terms of component formatters like [`formatters::method`], [`formatters::ip`], [`formatters::timestamp`], and many others (see [`formatters`] for a full list) When implementing this trait, note that [`Display::fmt`] is called on [`LogFormatter::Output`] _after_ the response has been fully sent, but that the [`LogFormatter::format`] is called _before_ the response has been sent. If you need to perform timing-sensitive calculations that represent the full http cycle, move whatever data is needed to make the calculation into a new type that implements Display, ensuring that it is calculated at the right time. ## Implementations ### Tuples LogFormatter is implemented for all tuples of other LogFormatter types, from 2-26 formatters long. The output of these formatters is concatenated with no space between. ### `&'static str` LogFormatter is implemented for &'static str, allowing for interspersing spaces and other static formatting details into tuples. ```rust use trillium_logger::{Logger, formatters}; let handler = Logger::new() .with_formatter(("-> ", formatters::method, " ", formatters::url)); ``` ### `Fn(&Conn, bool) -> impl Display` LogFormatter is implemented for all functions that conform to this signature. ```rust # use trillium_logger::{Logger, dev_formatter}; # use trillium::Conn; # use std::borrow::Cow; # struct User(String); impl User { fn name(&self) -> &str { &self.0 } } fn user(conn: &Conn, color: bool) -> Cow<'static, str> { match conn.state::<User>() { Some(user) => String::from(user.name()).into(), None => "guest".into() } } let handler = Logger::new().with_formatter((dev_formatter, " ", user)); ``` */ pub trait LogFormatter: Send + Sync + 'static { /** The display type for this formatter For a simple formatter, this will likely be a String, or even better, a lightweight type that implements Display. 
*/ type Output: Display + Send + Sync + 'static; /** Extract Output from this Conn */ fn format(&self, conn: &Conn, color: bool) -> Self::Output; } /** The trillium handler for this crate, and the core type */ pub struct Logger<F> { format: F, color_mode: ColorMode, target: Target, } impl Logger<()> { /** Builds a new logger Defaults: * formatter: [`dev_formatter`] * color mode: [`ColorMode::Auto`] * target: [`Target::Stdout`] */ pub fn new() -> Logger<impl LogFormatter> { Logger { format: dev_formatter, color_mode: ColorMode::Auto, target: Target::Stdout, } } } impl<T> Logger<T> { /** replace the formatter with any type that implements [`LogFormatter`] see the trait documentation for [`LogFormatter`] for more details. note that this can be chained with [`Logger::with_target`] and [`Logger::with_color_mode`] ``` use trillium_logger::{Logger, apache_common}; Logger::new().with_formatter(apache_common("-", "-")); ``` */ pub fn with_formatter<Formatter: LogFormatter>( self, formatter: Formatter, ) -> Logger<Formatter> { Logger { format: formatter, color_mode: self.color_mode, target: self.target, } } } impl<F: LogFormatter> Logger<F> { /** specify the color mode for this logger. see [`ColorMode`] for more details. note that this can be chained with [`Logger::with_target`] and [`Logger::with_formatter`] ``` use trillium_logger::{Logger, ColorMode}; Logger::new().with_color_mode(ColorMode::On); ``` */ pub fn with_color_mode(mut self, color_mode: ColorMode) ->
random
[ { "content": "pub fn dev_formatter(conn: &Conn, color: bool) -> impl Display + Send + 'static {\n\n (method, \" \", url, \" \", response_time, \" \", status).format(conn, color)\n\n}\n\n\n\n/**\n\nformatter for the peer ip address of the connection\n\n\n\n**note**: this can be modified by handlers prior to logging, such as\n\nwhen running a trillium application behind a reverse proxy or load\n\nbalancer that sets a `forwarded` or `x-forwarded-for` header. this\n\nwill display `\"-\"` if there is no available peer ip address, such as\n\nwhen running on a runtime adapter that does not have access to this\n\ninformation\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 0, "score": 249430.62699993 }, { "content": "pub fn version(conn: &Conn, _color: bool) -> Version {\n\n conn.inner().http_version()\n\n}\n\n\n\nimpl LogFormatter for &'static str {\n\n type Output = Self;\n\n fn format(&self, _conn: &Conn, _color: bool) -> Self::Output {\n\n self\n\n }\n\n}\n\n\n\nimpl LogFormatter for Arc<str> {\n\n type Output = Self;\n\n fn format(&self, _conn: &Conn, _color: bool) -> Self::Output {\n\n Arc::clone(self)\n\n }\n\n}\n\n\n\nimpl LogFormatter for ColoredString {\n\n type Output = String;\n", "file_path": "logger/src/formatters.rs", "rank": 1, "score": 240391.0259359143 }, { "content": "pub fn bytes(conn: &Conn, _color: bool) -> u64 {\n\n conn.response_len().unwrap_or_default()\n\n}\n\n\n\n/**\n\nformatter that prints an emoji if the request is secure as determined\n\nby [`Conn::is_secure`]\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 2, "score": 240391.02593591437 }, { "content": "pub fn url(conn: &Conn, _color: bool) -> String {\n\n match conn.querystring() {\n\n \"\" => conn.path().into(),\n\n query => format!(\"{}?{}\", conn.path(), query),\n\n }\n\n}\n\n\n\nmod response_time_mod {\n\n use super::*;\n\n /**\n\n display output type for the [`response_time`] formatter\n\n */\n\n pub struct ResponseTimeOutput(Instant);\n\n impl Display for ResponseTimeOutput {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_fmt(format_args!(\"{:?}\", Instant::now() - self.0))\n\n }\n\n }\n\n\n\n /**\n", "file_path": "logger/src/formatters.rs", "rank": 3, "score": 240391.02593591437 }, { "content": "pub fn method(conn: &Conn, _color: bool) -> Method {\n\n conn.method()\n\n}\n\n\n\n/**\n\nsimple development-mode formatter\n\n\n\ncomposed of\n\n\n\n`\"`[`method`] [`url`] [`response_time`] [`status`]`\"`\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 4, "score": 240391.02593591437 }, { "content": "pub fn ip(conn: &Conn, _color: bool) -> Cow<'static, str> {\n\n match conn.inner().peer_ip() {\n\n Some(peer) => format!(\"{:?}\", peer).into(),\n\n None => \"-\".into(),\n\n }\n\n}\n\n\n\nmod status_mod {\n\n use super::*;\n\n /**\n\n The display type for [`status`]\n\n */\n\n #[derive(Copy, Clone)]\n\n pub struct StatusOutput(Status, bool);\n\n impl Display for StatusOutput {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let StatusOutput(status, color) = *self;\n\n let status_string = (status as u16).to_string();\n\n if color {\n\n f.write_fmt(format_args!(\n", "file_path": "logger/src/formatters.rs", "rank": 5, "score": 226149.71348254697 }, { "content": "pub fn apache_common(\n\n request_id: impl LogFormatter,\n\n user_id: impl LogFormatter,\n\n) -> impl LogFormatter {\n\n (\n\n ip, \" \", request_id, \" \", user_id, \" [\", timestamp, \"] \\\"\", method, \" \", url, \" \", version,\n\n \"\\\" \", status, \" \", 
bytes,\n\n )\n\n}\n\n\n\n/**\n\nformatter that prints the number of response body bytes as a\n\nnumber. see [`body_len_human`] for a human-readable response body\n\nlength with units\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 7, "score": 221891.28871548126 }, { "content": "pub fn body_len_human(conn: &Conn, _color: bool) -> Cow<'static, str> {\n\n conn.response_len()\n\n .map(|l| Size::to_string(&Size::Bytes(l), Base::Base10, Style::Smart).into())\n\n .unwrap_or_else(|| Cow::from(\"-\"))\n\n}\n\n\n\n/**\n\n[apache common log format][apache]\n\n\n\n[apache]: https://httpd.apache.org/docs/current/logs.html#common\n\n\n\nThis is defined as follows:\n\n\n\n[`ip`] `request_id` `user_id` `\\[`[`timestamp`]`\\]` \"[`method`] [`url`] [`version`]\" [`status`] [`bytes`]\n\n\n\nwhere `request_id` and `user_id` are mandatory formatters provided at time of usage.\n\n\n\n## usage without `request_id` or `user_id`\n\n```\n\n# use trillium_logger::{Logger, apache_common};\n\nLogger::new().with_formatter(apache_common(\"-\", \"-\"));\n\n```\n\n\n\n## usage with app-specific `user_id`\n\n```\n", "file_path": "logger/src/formatters.rs", "rank": 8, "score": 217305.65703754433 }, { "content": "pub fn secure(conn: &Conn, _: bool) -> &'static str {\n\n if conn.is_secure() {\n\n \"🔒\"\n\n } else {\n\n \" \"\n\n }\n\n}\n\n\n\n/**\n\nformatter for the current url or path of the request, including query\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 9, "score": 203563.85152833117 }, { "content": "# use trillium_logger::{Logger, apache_common};\n\n# use trillium::Conn;\n\n# use std::borrow::Cow;\n\n# struct User(String); impl User { fn name(&self) -> &str { &self.0 } }\n\nfn user(conn: &Conn, color: bool) -> Cow<'static, str> {\n\n match conn.state::<User>() {\n\n Some(user) => String::from(user.name()).into(),\n\n None => \"guest\".into()\n\n }\n\n}\n\n\n\nLogger::new().with_formatter(apache_common(\"-\", user));\n\n```\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 10, "score": 201779.81683859433 }, { "content": "/// initialize a handler\n\npub fn init(handler: &mut impl trillium::Handler) {\n\n let mut info = \"testing\".into();\n\n block_on(handler.init(&mut info))\n\n}\n\n\n\n// these exports are used by macros\n\npub use futures_lite;\n\npub use futures_lite::{AsyncRead, AsyncReadExt, AsyncWrite};\n\n\n\ncfg_if::cfg_if! 
{\n\n if #[cfg(feature = \"tokio\")] {\n\n pub use trillium_tokio::block_on;\n\n } else if #[cfg(feature = \"async-std\")] {\n\n pub use trillium_async_std::async_std::task::block_on;\n\n } else if #[cfg(feature = \"smol\")] {\n\n pub use trillium_smol::async_global_executor::block_on;\n\n } else {\n\n compile_error!(\"must enable smol, async-std, or tokio feature\");\n\n pub fn block_on<Fut: std::future::Future<Output = T>, T>(_: Fut) -> T { unreachable!()}\n\n }\n\n}\n", "file_path": "testing/src/lib.rs", "rank": 11, "score": 196727.1557075213 }, { "content": "pub fn app() -> impl trillium::Handler {\n\n (\n\n trillium_logger::Logger::new(),\n\n |conn: trillium::Conn| async move {\n\n let response = async_global_executor::spawn(async {\n\n Timer::after(std::time::Duration::from_millis(10)).await;\n\n \"successfully spawned a task\"\n\n })\n\n .await;\n\n\n\n conn.ok(response)\n\n },\n\n )\n\n}\n", "file_path": "smol/examples/smol.rs", "rank": 12, "score": 190599.76461095473 }, { "content": "pub fn app() -> impl trillium::Handler {\n\n |conn: trillium::Conn| async move {\n\n let response = tokio::task::spawn(async {\n\n tokio::time::sleep(std::time::Duration::from_millis(10)).await;\n\n \"successfully spawned a task\"\n\n })\n\n .await\n\n .unwrap();\n\n conn.ok(response)\n\n }\n\n}\n", "file_path": "tokio/examples/tokio.rs", "rank": 13, "score": 190599.76461095473 }, { "content": "pub fn app() -> impl trillium::Handler {\n\n |conn: trillium::Conn| async move {\n\n let response = task::spawn(async {\n\n task::sleep(std::time::Duration::from_millis(10)).await;\n\n \"successfully spawned a task\"\n\n })\n\n .await;\n\n\n\n conn.ok(response)\n\n }\n\n}\n", "file_path": "async-std/examples/async-std.rs", "rank": 14, "score": 183819.78000634068 }, { "content": "# use trillium::{Conn, Upgrade, Info};\n\n# use std::borrow::Cow;\n\n#[trillium::async_trait]\n\npub trait Handler: Send + Sync + 'static {\n\n async fn run(&self, conn: Conn) -> Conn;\n\n async fn init(&mut self, info: &mut Info); // optional\n\n async fn before_send(&self, conn: Conn); // optional\n\n fn has_upgrade(&self, _upgrade: &Upgrade) -> bool; // optional\n\n async fn upgrade(&self, _upgrade: Upgrade); // mandatory only if has_upgrade returns true\n\n fn name(&self) -> Cow<'static, str>; // optional\n\n}\n\n```\n\nSee each of the function definitions below for advanced implementation.\n\n\n\nFor most application code and even trillium-packaged framework code,\n\n`run` is the only trait function that needs to be implemented.\n\n\n\n*/\n\n\n", "file_path": "trillium/src/handler.rs", "rank": 15, "score": 174323.36430088425 }, { "content": "pub fn header(header_name: &'static str) -> impl LogFormatter {\n\n move |conn: &Conn, _color: bool| {\n\n format!(\"{:?}\", conn.headers().get_str(header_name).unwrap_or(\"\"))\n\n }\n\n}\n\n\n\nmod timestamp_mod {\n\n use super::*;\n\n /**\n\n Display output for [`timestamp`]\n\n */\n\n pub struct Now;\n\n\n\n /**\n\n formatter for the current timestamp. 
this represents the time that the\n\n log is written, not the beginning timestamp of the request\n\n */\n\n pub fn timestamp(_conn: &Conn, _color: bool) -> Now {\n\n Now\n\n }\n", "file_path": "logger/src/formatters.rs", "rank": 18, "score": 164996.62093542505 }, { "content": "fn user_id(conn: &Conn, _color: bool) -> &'static str {\n\n conn.state::<User>().map(User::name).unwrap_or(\"-\")\n\n}\n\n\n", "file_path": "logger/examples/logger.rs", "rank": 19, "score": 157611.51281328924 }, { "content": " pub trait ConnCounterConnExt {\n\n fn conn_number(&self) -> u64;\n\n }\n\n\n\n impl ConnCounterConnExt for Conn {\n\n fn conn_number(&self) -> u64 {\n\n self.state::<ConnNumber>()\n\n .expect(\"conn_number must be called after the handler\")\n\n .0\n\n }\n\n }\n\n}\n\n\n\nuse conn_counter::{ConnCounterConnExt, ConnCounterHandler};\n\nuse trillium::{Conn, Handler};\n\n\n", "file_path": "trillium/examples/state.rs", "rank": 20, "score": 151342.95203375007 }, { "content": "pub fn apache_combined(\n\n request_id: impl LogFormatter,\n\n user_id: impl LogFormatter,\n\n) -> impl LogFormatter {\n\n (\n\n apache_common(request_id, user_id),\n\n \" \",\n\n header(\"referrer\"),\n\n \" \",\n\n header(\"user-agent\"),\n\n )\n\n}\n\n\n\n/**\n\nformatter for the conn's http method that delegates to [`Method`]'s\n\n[`Display`] implementation\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 21, "score": 148804.2103824512 }, { "content": "pub fn run(handler: impl Handler) {\n\n config().run(handler)\n\n}\n\n\n\n/**\n\n# Runs a trillium handler in an async context with default config\n\n\n\nRun the provided trillium handler on an already-running tokio runtime\n\nwith default settings. The defaults are the same as [`crate::run`]. To\n\ncustomize these settings, see [`crate::config`].\n\n\n\nThis function will poll pending until the server shuts down.\n\n\n\n*/\n\npub async fn run_async(handler: impl Handler) {\n\n config().run_async(handler).await\n\n}\n\n\n\n/**\n\n# Configures a server before running it\n", "file_path": "tokio/src/lib.rs", "rank": 22, "score": 146991.71948096127 }, { "content": "pub fn run(handler: impl Handler) {\n\n config().run(handler)\n\n}\n\n\n\n/**\n\n# Runs a trillium handler in an async context with default config\n\n\n\nRun the provided trillium handler on an already-running async-executor\n\nwith default settings. The defaults are the same as [`crate::run`]. To\n\ncustomize these settings, see [`crate::config`].\n\n\n\nThis function will poll pending until the server shuts down.\n\n\n\n*/\n\npub async fn run_async(handler: impl Handler) {\n\n config().run_async(handler).await\n\n}\n\n\n\n/**\n\n# Configures a server before running it\n", "file_path": "smol/src/lib.rs", "rank": 23, "score": 146991.71948096127 }, { "content": "pub fn run(handler: impl Handler) {\n\n runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(run_async(handler));\n\n}\n", "file_path": "aws-lambda/src/lib.rs", "rank": 24, "score": 144795.4525820603 }, { "content": "pub fn run(handler: impl Handler) {\n\n config().run(handler)\n\n}\n\n\n\n/**\n\n# Runs a trillium handler in an async context with default config\n\n\n\nRun the provided trillium handler on an already-running async-std\n\nruntime with default settings. the defaults are the same as\n\n[`crate::run`]. 
To customize these settings, see [`crate::config`].\n\n\n\nThis function will poll pending until the server shuts down.\n\n*/\n\npub async fn run_async(handler: impl Handler) {\n\n config().run_async(handler).await\n\n}\n\n/**\n\n# Configures a server before running it\n\n\n\n## Defaults\n", "file_path": "async-std/src/lib.rs", "rank": 25, "score": 144795.4525820603 }, { "content": "fn websocket_accept_hash(conn: &Conn) -> Option<String> {\n\n let websocket_key = conn.headers().get_str(SecWebsocketKey)?;\n\n\n\n let hash = Sha1::new()\n\n .chain(websocket_key)\n\n .chain(WEBSOCKET_GUID)\n\n .finalize();\n\n\n\n Some(base64::encode(&hash[..]))\n\n}\n", "file_path": "websockets/src/lib.rs", "rank": 26, "score": 144670.0698330329 }, { "content": "fn websocket_protocol(conn: &Conn, protocols: &[String]) -> Option<String> {\n\n conn.headers()\n\n .get_str(SecWebsocketProtocol)\n\n .and_then(|value| {\n\n value\n\n .split(',')\n\n .map(str::trim)\n\n .find(|req_p| protocols.iter().any(|x| x == req_p))\n\n .map(|s| s.to_owned())\n\n })\n\n}\n\n\n", "file_path": "websockets/src/lib.rs", "rank": 27, "score": 137280.7872683425 }, { "content": "fn app() -> impl trillium::Handler {\n\n api(|conn: trillium::Conn, mut s: Struct| async move {\n\n if let Some(numbers) = &mut s.numbers {\n\n numbers.push(100);\n\n }\n\n conn.with_json(&ApiResponse { s })\n\n })\n\n}\n\n\n", "file_path": "api/tests/tests.rs", "rank": 28, "score": 134178.4132296712 }, { "content": "type Conn = trillium_client::Conn<'static, trillium_smol::TcpConnector>;\n\n\n\nlet conn = Conn::\",\n\n stringify!($fn_name),\n\n \"(\\\"http://localhost:8080/some/route\\\");\n\n\n\nassert_eq!(conn.method(), Method::\",\n\n stringify!($method),\n\n \");\n\nassert_eq!(conn.url().to_string(), \\\"http://localhost:8080/some/route\\\");\n\n```\n\n\"\n\n )\n\n );\n\n };\n\n ($fn_name:ident, $method:ident, $doc_comment:expr) => {\n\n #[doc = $doc_comment]\n\n pub fn $fn_name<U>(url: U) -> Self\n\n where\n\n <U as TryInto<Url>>::Error: Debug,\n", "file_path": "client/src/conn.rs", "rank": 29, "score": 129330.19010797917 }, { "content": "fn handler() -> impl Handler {\n\n (ConnCounterHandler::new(), |conn: Conn| async move {\n\n let conn_number = conn.conn_number();\n\n conn.ok(format!(\"conn number was {}\", conn_number))\n\n })\n\n}\n\n\n", "file_path": "trillium/examples/state.rs", "rank": 30, "score": 128836.32852013108 }, { "content": "pub fn main() {\n\n trillium_smol::run((\n\n State::new(User(\"jacob\")),\n\n Logger::new().with_formatter(apache_combined(\"-\", user_id)),\n\n \"ok\",\n\n ));\n\n}\n", "file_path": "logger/examples/logger.rs", "rank": 31, "score": 123407.8006050155 }, { "content": "fn app(forwarding: Forwarding) -> impl trillium::Handler {\n\n (forwarding, |conn: Conn| async move {\n\n let response = format!(\n\n \"{:?} {:?} {:?}\",\n\n conn.inner().is_secure(),\n\n conn.inner().peer_ip(),\n\n conn.inner().host()\n\n );\n\n conn.ok(response)\n\n })\n\n}\n\n\n", "file_path": "forwarding/tests/tests.rs", "rank": 32, "score": 123032.10791465247 }, { "content": "/// Alias for ConnId::new()\n\npub fn conn_id() -> ConnId {\n\n ConnId::new()\n\n}\n", "file_path": "conn-id/src/lib.rs", "rank": 33, "score": 116672.1332189112 }, { "content": "/// Extension trait to retrieve an id generated by the [`ConnId`] handler\n\npub trait ConnIdExt {\n\n /// Retrieves the id for this conn. 
This method will panic if it\n\n /// is run before the [`ConnId`] handler.\n\n fn id(&self) -> &str;\n\n}\n\n\n\nimpl<ConnLike> ConnIdExt for ConnLike\n\nwhere\n\n ConnLike: AsRef<StateSet>,\n\n{\n\n fn id(&self) -> &str {\n\n &*self\n\n .as_ref()\n\n .get::<Id>()\n\n .expect(\"ConnId handler must be run before calling IdConnExt::id\")\n\n }\n\n}\n\n\n\n/// Formatter for the trillium_log crate\n\npub mod log_formatter {\n", "file_path": "conn-id/src/lib.rs", "rank": 34, "score": 115008.900201651 }, { "content": "/// Alias for CookiesHandler::new()\n\npub fn cookies() -> CookiesHandler {\n\n CookiesHandler::new()\n\n}\n", "file_path": "cookies/src/cookies_handler.rs", "rank": 35, "score": 113621.34627280588 }, { "content": "#[trillium::async_trait]\n\npub trait StaticConnExt {\n\n /// Send the file at the provided path. Will send a 404 if the\n\n /// file cannot be resolved or if it is a directory.\n\n async fn send_path<A: AsRef<Path> + Send>(self, path: A) -> Self;\n\n\n\n /// Send the file at the provided path. Will send a 404 if the\n\n /// file cannot be resolved or if it is a directory.\n\n async fn send_file(self, file: File) -> Self;\n\n\n\n /// Send the file at the provided path. Will send a 404 if the\n\n /// file cannot be resolved or if it is a directory.\n\n async fn send_file_with_options(self, file: File, options: &StaticOptions) -> Self;\n\n\n\n /// Send the file at the provided path. Will send a 404 if the\n\n /// file cannot be resolved or if it is a directory.\n\n async fn send_path_with_options<A: AsRef<Path> + Send>(\n\n self,\n\n path: A,\n\n options: &StaticOptions,\n\n ) -> Self;\n", "file_path": "static/src/static_conn_ext.rs", "rank": 36, "score": 112821.05938436025 }, { "content": "pub trait CookiesConnExt {\n\n /// adds a cookie to the cookie jar and returns the conn\n\n fn with_cookie(self, cookie: Cookie<'_>) -> Self;\n\n /// gets a reference to the cookie jar\n\n fn cookies(&self) -> &CookieJar;\n\n /// gets a mutable reference to the cookie jar\n\n fn cookies_mut(&mut self) -> &mut CookieJar;\n\n}\n\n\n\nimpl CookiesConnExt for Conn {\n\n fn cookies(&self) -> &CookieJar {\n\n self.state()\n\n .expect(\"Cookies handler must be executed before calling CookiesExt::cookies\")\n\n }\n\n\n\n fn with_cookie(mut self, cookie: Cookie<'_>) -> Self {\n\n self.cookies_mut().add(cookie.into_owned());\n\n self\n\n }\n\n\n\n fn cookies_mut(&mut self) -> &mut CookieJar {\n\n self.state_mut()\n\n .expect(\"Cookies handler must be executed before calling CookiesExt::cookies_mut\")\n\n }\n\n}\n", "file_path": "cookies/src/cookies_conn_ext.rs", "rank": 37, "score": 112816.19980809916 }, { "content": "pub trait HandlebarsConnExt {\n\n /**\n\n Registers an \"assigns\" value on this Conn for use in a template.\n\n See example usage at [`Handlebars::new`](crate::Handlebars::new)\n\n */\n\n fn assign(self, key: impl Into<Cow<'static, str>> + Sized, data: impl Serialize) -> Self;\n\n\n\n /**\n\n renders a registered template by name with the provided data as\n\n assigns. 
note that this does not use any data accumulated by\n\n [`HandlebarsConnExt::assign`]\n\n\n\n ```\n\n use trillium_handlebars::{HandlebarsHandler, Handlebars, HandlebarsConnExt};\n\n # fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n\n\n #[derive(serde::Serialize)]\n\n struct User { name: &'static str };\n\n\n\n let mut handlebars = Handlebars::new();\n", "file_path": "handlebars/src/handlebars_conn_ext.rs", "rank": 38, "score": 112816.19980809916 }, { "content": "pub trait SessionConnExt {\n\n /**\n\n append a key-value pair to the current session, where the key is a\n\n &str and the value is anything serde-serializable.\n\n */\n\n fn with_session(self, key: &str, value: impl Serialize) -> Self;\n\n\n\n /**\n\n retrieve a reference to the current session\n\n */\n\n fn session(&self) -> &Session;\n\n\n\n /**\n\n retrieve a mutable reference to the current session\n\n */\n\n fn session_mut(&mut self) -> &mut Session;\n\n}\n\n\n\nimpl SessionConnExt for Conn {\n\n fn session(&self) -> &Session {\n", "file_path": "sessions/src/session_conn_ext.rs", "rank": 39, "score": 112816.19980809916 }, { "content": "pub trait TeraConnExt {\n\n /// Adds a key-value pair to the assigns [`Context`], where the key is\n\n /// a &str and the value is any [`Serialize`] type.\n\n fn assign(self, key: &str, value: impl Serialize) -> Self;\n\n\n\n /// Uses the accumulated assigns context to render the template by\n\n /// registered name to the conn body and return the conn. Halts\n\n /// and sets a 200 status on successful render. Must be run\n\n /// downsequence of the [`TeraHandler`], and will panic if the\n\n /// TeraHandler has not already been called.\n\n fn render(self, template: &str) -> Self;\n\n\n\n /// Retrieves a reference to the [`Tera`] instance. Must be called\n\n /// downsequence of the [`TeraHandler`], and will panic if the\n\n /// TeraHandler has not already been called.\n\n fn tera(&self) -> &Tera;\n\n\n\n /// retrieves a reference to the tera assigns context. must be run\n\n /// downsequence of the [`TeraHandler`], and will panic if the\n\n /// TeraHandler has not already been called.\n", "file_path": "tera/src/tera_conn_ext.rs", "rank": 40, "score": 112816.19980809916 }, { "content": "pub trait RouterConnExt {\n\n /**\n\n Retrieves a captured param from the conn. 
Note that this will only\n\n be Some if the exact param is present in the matched route.\n\n\n\n Routefinder params are defined starting with a colon, but the\n\n colon is not needed when fetching the param.\n\n\n\n ```\n\n use trillium::{conn_unwrap, Conn};\n\n use trillium_router::{Router, RouterConnExt};\n\n\n\n let router = Router::new().get(\"/pages/:page_name\", |conn: Conn| async move {\n\n let page_name = conn_unwrap!(conn.param(\"page_name\"), conn);\n\n let content = format!(\"you have reached the page named {}\", page_name);\n\n conn.ok(content)\n\n });\n\n\n\n use trillium_testing::prelude::*;\n\n assert_ok!(\n", "file_path": "router/src/router_conn_ext.rs", "rank": 41, "score": 112816.19980809916 }, { "content": "type ClientConn<'a> = trillium_client::Conn<'a, trillium_smol::TcpConnector>;\n\nlet mut client_conn = ClientConn::get(&*url).execute().await?;\n\n\n\nassert_eq!(client_conn.status().unwrap(), 200);\n\nassert_eq!(client_conn.response_headers().get_str(\"content-length\"), Some(\"11\"));\n\nassert_eq!(\n\n client_conn.response_body().read_string().await?,\n\n \"hello world\"\n\n);\n\n\n\nstopper.stop(); // stop the server after one request\n\nserver_handle.await?; // wait for the server to shut down\n\n# Result::Ok(()) }) }\n\n```\n\n*/\n\n\n\nmod received_body;\n\npub use received_body::ReceivedBody;\n\n\n\n#[cfg(feature = \"unstable\")]\n", "file_path": "http/src/lib.rs", "rank": 42, "score": 112585.86520333387 }, { "content": "/// start a trillium server on a random port for the provided handler,\n\n/// establish a tcp connection, run the provided test function with\n\n/// that tcp stream, and shut down the server.\n\npub fn with_socket<H, Fun, Fut>(handler: H, tests: Fun)\n\nwhere\n\n H: trillium::Handler,\n\n Fun: FnOnce(trillium_http::transport::BoxedTransport) -> Fut,\n\n Fut: std::future::Future<Output = Result<(), Box<dyn std::error::Error>>>,\n\n{\n\n with_server(handler, move |url| async move {\n\n let tcp = tcp_connect(&url).await?;\n\n tests(tcp).await\n\n })\n\n}\n", "file_path": "testing/src/with_server.rs", "rank": 43, "score": 107912.18556850129 }, { "content": "pub fn with_server<H, Fun, Fut>(handler: H, tests: Fun)\n\nwhere\n\n H: Handler,\n\n Fun: FnOnce(Url) -> Fut,\n\n Fut: Future<Output = Result<(), Box<dyn std::error::Error>>>,\n\n{\n\n block_on(async move {\n\n let port = portpicker::pick_unused_port().expect(\"could not pick a port\");\n\n let url = format!(\"http://localhost:{}\", port).parse().unwrap();\n\n let stopper = Stopper::new();\n\n let (s, r) = async_channel::bounded(1);\n\n let init = trillium::Init::new(move |_| {\n\n let s = s.clone();\n\n async move {\n\n s.send(()).await.unwrap();\n\n }\n\n });\n\n\n\n let server_future = spawn(\n\n trillium_smol::config()\n", "file_path": "testing/src/with_server/smol.rs", "rank": 44, "score": 106076.14393740831 }, { "content": "pub fn with_server<H, Fun, Fut>(handler: H, tests: Fun)\n\nwhere\n\n H: Handler,\n\n Fun: FnOnce(Url) -> Fut,\n\n Fut: Future<Output = Result<(), Box<dyn std::error::Error>>>,\n\n{\n\n block_on(async move {\n\n let port = portpicker::pick_unused_port().expect(\"could not pick a port\");\n\n let url = format!(\"http://localhost:{}\", port).parse().unwrap();\n\n let stopper = Stopper::new();\n\n let (s, r) = async_channel::bounded(1);\n\n let init = trillium::Init::new(move |_| {\n\n let s = s.clone();\n\n async move {\n\n s.send(()).await.unwrap();\n\n }\n\n });\n\n\n\n let server_future = spawn(\n\n trillium_tokio::config()\n", "file_path": 
"testing/src/with_server/tokio.rs", "rank": 45, "score": 106076.14393740831 }, { "content": "/// extends trillium conns with the ability to render askama templates\n\npub trait AskamaConnExt {\n\n /// renders an askama template, halting the conn and setting a 200\n\n /// status code. also sets the mime type based on the template\n\n /// extension\n\n fn render(self, template: impl Template) -> Self;\n\n}\n\n\n\nimpl AskamaConnExt for trillium::Conn {\n\n fn render(mut self, template: impl Template) -> Self {\n\n let text = template.render().unwrap();\n\n if let Some(extension) = template.extension() {\n\n if let Some(mime) = mime_db::lookup(extension) {\n\n self.headers_mut().insert(ContentType, mime);\n\n }\n\n }\n\n\n\n self.ok(text)\n\n }\n\n}\n", "file_path": "askama/src/lib.rs", "rank": 46, "score": 105971.39384891931 }, { "content": "#[trillium::async_trait]\n\npub trait ApiConnExt {\n\n /**\n\n Sends a json response body. This sets a status code of 200,\n\n serializes the body with serde_json, sets the content-type to\n\n application/json, and [halts](trillium::Conn::halt) the\n\n conn. If serialization fails, a 500 status code is sent as per\n\n [`trillium::conn_try`]\n\n\n\n\n\n ## Examples\n\n\n\n ```\n\n use trillium_api::{json, ApiConnExt};\n\n async fn handler(conn: trillium::Conn) -> trillium::Conn {\n\n conn.with_json(&json!({ \"json macro\": \"is reexported\" }))\n\n }\n\n\n\n # use trillium_testing::prelude::*;\n\n assert_ok!(\n\n get(\"/\").on(&handler),\n", "file_path": "api/src/lib.rs", "rank": 47, "score": 105966.96374366476 }, { "content": "pub trait SseConnExt {\n\n /**\n\n builds and sets a streaming response body that conforms to the\n\n [server-sent-events\n\n spec](https://html.spec.whatwg.org/multipage/server-sent-events.html#server-sent-events)\n\n from a Stream of any [`Eventable`](crate::Eventable) type (such as\n\n [`Event`](crate::Event), as well as setting appropiate headers for\n\n this response.\n\n */\n\n fn with_sse_stream<S, E>(self, sse_stream: S) -> Self\n\n where\n\n S: Stream<Item = E> + Unpin + Send + Sync + 'static,\n\n E: Eventable;\n\n}\n\n\n\nimpl SseConnExt for Conn {\n\n fn with_sse_stream<S, E>(self, sse_stream: S) -> Self\n\n where\n\n S: Stream<Item = E> + Unpin + Send + Sync + 'static,\n\n E: Eventable,\n", "file_path": "sse/src/lib.rs", "rank": 48, "score": 105962.10416740367 }, { "content": "pub fn with_server<H, Fun, Fut>(handler: H, tests: Fun)\n\nwhere\n\n H: Handler,\n\n Fun: FnOnce(Url) -> Fut,\n\n Fut: Future<Output = Result<(), Box<dyn std::error::Error>>>,\n\n{\n\n block_on(async move {\n\n let port = portpicker::pick_unused_port().expect(\"could not pick a port\");\n\n let url = format!(\"http://localhost:{}\", port).parse().unwrap();\n\n let stopper = Stopper::new();\n\n let (s, r) = async_channel::bounded(1);\n\n let init = trillium::Init::new(move |_| {\n\n let s = s.clone();\n\n async move {\n\n s.send(()).await.unwrap();\n\n }\n\n });\n\n\n\n let server_future = spawn(\n\n trillium_async_std::config()\n", "file_path": "testing/src/with_server/async_std.rs", "rank": 49, "score": 104349.32561611716 }, { "content": "pub trait LambdaConnExt {\n\n /// returns the [`lamedh_runtime::Context`] for this conn\n\n fn lambda_context(&self) -> &Context;\n\n}\n\n\n\nimpl LambdaConnExt for trillium::Conn {\n\n fn lambda_context(&self) -> &Context {\n\n &*self\n\n .state::<LambdaContext>()\n\n .expect(\"lambda context should always be set inside of a lambda server\")\n\n }\n\n}\n", "file_path": "aws-lambda/src/context.rs", "rank": 50, 
"score": 103483.25227452966 }, { "content": "#[async_trait]\n\npub trait ChannelConnExt {\n\n fn channel_client(&self) -> Option<&ChannelClient>;\n\n\n\n fn broadcast(&self, event: impl Into<ChannelEvent>) {\n\n unwrap_or_log_and_return!(self.channel_client()).broadcast(event);\n\n }\n\n\n\n async fn send_event(&self, event: impl Into<ChannelEvent> + Send + 'async_trait) {\n\n unwrap_or_log_and_return!(self.channel_client())\n\n .send_event(event)\n\n .await;\n\n }\n\n\n\n async fn reply_ok(&self, event: &ChannelEvent, response: &(impl Serialize + Sync)) {\n\n unwrap_or_log_and_return!(self.channel_client())\n\n .reply_ok(event, response)\n\n .await;\n\n }\n\n\n\n async fn reply_error(&self, event: &ChannelEvent, error: &(impl Serialize + Sync)) {\n", "file_path": "channels/src/channel_ext.rs", "rank": 51, "score": 103483.25227452966 }, { "content": "#[allow(unused_variables)]\n\n#[trillium::async_trait]\n\npub trait ChannelHandler: Sized + Send + Sync + 'static {\n\n /**\n\n `connect` is called once when each websocket client is connected. The default implementation does nothing.\n\n */\n\n async fn connect(&self, conn: ChannelConn<'_>) {}\n\n\n\n /**\n\n `join_channel` is called when a websocket client sends a\n\n `phx_join` event. There is no default implementation to ensure\n\n that you implement the appropriate access control logic for your\n\n application. If you want clients to be able to connect to any\n\n channel they request, use this definition:\n\n\n\n ```\n\n # use trillium_channels::{ChannelEvent, ChannelConn, ChannelHandler};\n\n # struct MyChannel; #[trillium::async_trait] impl ChannelHandler for MyChannel {\n\n async fn join_channel(&self, conn: ChannelConn<'_>, event: ChannelEvent) {\n\n conn.allow_join(&event, &()).await;\n\n }\n\n # }\n", "file_path": "channels/src/channel_handler.rs", "rank": 52, "score": 102691.95966722819 }, { "content": "#[allow(unused_variables)]\n\n#[async_trait]\n\npub trait WebSocketHandler: Send + Sync + Sized + 'static {\n\n /**\n\n A [`Stream`] type that represents [`Message`]s to be sent to this\n\n client. It is built in your implementation code, in\n\n [`WebSocketHandler::connect`]. Use `Pending<Message>` or another\n\n stream that never returns if you do not need to use this aspect of\n\n the trait.\n\n */\n\n type OutboundStream: Stream<Item = Message> + Unpin + Send + Sync + 'static;\n\n\n\n /**\n\n This interface is the only mandatory function in\n\n WebSocketHandler. It receives an owned WebSocketConn and\n\n optionally returns it along with an `OutboundStream`\n\n type.\n\n */\n\n async fn connect(&self, conn: WebSocketConn) -> Option<(WebSocketConn, Self::OutboundStream)>;\n\n\n\n /**\n\n This interface function is called once with every message received\n", "file_path": "websockets/src/websocket_handler.rs", "rank": 53, "score": 100811.02480842255 }, { "content": "pub trait Server: Send + 'static {\n\n /// an async type like TcpListener or UnixListener. This trait\n\n /// imposes minimal constraints on the type.\n\n type Listener: Send + Sync + 'static;\n\n\n\n /// the individual byte stream (AsyncRead+AsyncWrite) that http\n\n /// will be communicated over. This is often an async \"stream\"\n\n /// like TcpStream or UnixStream.\n\n type Transport: AsyncRead + AsyncWrite + Unpin + Send + Sync + 'static;\n\n\n\n /// The description of this server, to be appended to the Info and potentially logged.\n\n const DESCRIPTION: &'static str;\n\n\n\n /// Asynchronously return a single `Self::Transport` from a\n\n /// `Self::Listener`. 
Must be implemented.\n\n fn accept(\n\n listener: &mut Self::Listener,\n\n ) -> Pin<Box<dyn Future<Output = Result<Self::Transport>> + Send + '_>>;\n\n\n\n /// Optional method to return a peer ip address from a\n", "file_path": "server-common/src/server.rs", "rank": 54, "score": 100470.96858033337 }, { "content": "pub fn channel<CH: ChannelHandler>(channel_handler: CH) -> Channel<CH> {\n\n Channel::new(channel_handler)\n\n}\n", "file_path": "channels/src/lib.rs", "rank": 55, "score": 96662.8453938653 }, { "content": "/// Provides an extension trait for both [`trillium::Headers`] and\n\n/// also [`trillium::Conn`] for setting and getting various parsed\n\n/// caching headers.\n\npub trait CachingHeadersExt: Sized {\n\n /// returns an [`EntityTag`] if these headers contain an `Etag` header.\n\n fn etag(&self) -> Option<EntityTag>;\n\n /// sets an etag header from an EntityTag.\n\n fn set_etag(&mut self, entity_tag: &EntityTag);\n\n\n\n /// returns a parsed timestamp if these headers contain a `Last-Modified` header.\n\n fn last_modified(&self) -> Option<SystemTime>;\n\n /// sets a formatted `Last-Modified` header from a timestamp.\n\n fn set_last_modified(&mut self, system_time: SystemTime);\n\n\n\n /// returns a parsed [`CacheControlHeader`] if these headers\n\n /// include a `Cache-Control` header. Note that if this is called\n\n /// on a [`Conn`], it returns the request [`Cache-Control`]\n\n /// header.\n\n fn cache_control(&self) -> Option<CacheControlHeader>;\n\n /// sets a `Cache-Control` header on these headers. Note that this\n\n /// is valid in both request and response contexts, and specific\n\n /// directives have different meanings.\n\n fn set_cache_control(&mut self, cache_control: impl Into<CacheControlHeader>);\n", "file_path": "caching-headers/src/caching_conn_ext.rs", "rank": 56, "score": 94970.34545847545 }, { "content": "fn get_sse(mut conn: Conn) -> Conn {\n\n let broadcaster = conn_unwrap!(conn.take_state::<Channel>(), conn);\n\n conn.with_sse_stream(broadcaster)\n\n}\n\n\n\nasync fn post_broadcast(mut conn: Conn) -> Conn {\n\n let broadcaster = conn_unwrap!(conn.take_state::<Channel>(), conn);\n\n let body = conn_try!(conn.request_body_string().await, conn);\n\n log_error!(broadcaster.send(&body).await);\n\n conn.ok(\"sent\")\n\n}\n", "file_path": "sse/examples/sse.rs", "rank": 57, "score": 93407.4285870087 }, { "content": "fn get_sse(mut conn: Conn) -> Conn {\n\n let broadcaster = conn_unwrap!(conn.take_state::<Channel>(), conn);\n\n conn.with_sse_stream(broadcaster)\n\n}\n\n\n\nasync fn post_broadcast(mut conn: Conn) -> Conn {\n\n let broadcaster = conn_unwrap!(conn.take_state::<Channel>(), conn);\n\n let body = conn_try!(conn.request_body_string().await, conn);\n\n log_error!(broadcaster.send(&body).await);\n\n conn.ok(\"sent\")\n\n}\n\n\n", "file_path": "sse/src/lib.rs", "rank": 58, "score": 93407.4285870087 }, { "content": "#[async_trait]\n\npub trait Connector: Clone + Send + Sync + 'static {\n\n /// The async read + write type for this connector, often a TcpStream or TlSStream\n\n type Transport: AsyncRead + AsyncWrite + Unpin + Send + Sync + 'static;\n\n\n\n /// A type that can be used to configure this Connector. 
It will be passed into [`Connector::connect`].\n\n type Config: Debug + Default + Send + Sync + Clone;\n\n\n\n /// A SocketAddr representation of the other side of this connection\n\n fn peer_addr(transport: &Self::Transport) -> std::io::Result<std::net::SocketAddr>;\n\n\n\n /**\n\n Initiate a connection to the provided url, using the configuration.\n\n\n\n Async trait signature:\n\n ```rust,ignore\n\n async fn connect(url: &Url, config: &Self::Config) -> std::io::Result<Self::Transport>;\n\n ```\n\n */\n\n async fn connect(url: &Url, config: &Self::Config) -> std::io::Result<Self::Transport>;\n\n\n", "file_path": "tls-common/src/lib.rs", "rank": 59, "score": 92999.10363493189 }, { "content": "#[allow(unused_variables)]\n\n#[async_trait]\n\npub trait JsonWebSocketHandler: Send + Sync + 'static {\n\n /**\n\n A type that can be deserialized from the json sent from the\n\n connected clients\n\n */\n\n type InboundMessage: DeserializeOwned + Send + 'static;\n\n\n\n /**\n\n A serializable type that will be sent in the StreamType and\n\n received by the connected websocket clients\n\n */\n\n type OutboundMessage: Serialize + Send + 'static;\n\n\n\n /**\n\n A type that implements a stream of\n\n [`Self::OutboundMessage`]s. This can be\n\n futures_lite::stream::Pending if you never need to send an\n\n outbound message.\n\n */\n\n type StreamType: Stream<Item = Self::OutboundMessage> + Unpin + Send + Sync + 'static;\n", "file_path": "websockets/src/json.rs", "rank": 60, "score": 92076.02115139097 }, { "content": "#[trillium::async_trait]\n\npub trait ConfigExt<ServerType, AcceptorType>\n\nwhere\n\n ServerType: Server + ?Sized,\n\n{\n\n /// resolve a port for this application, either directly\n\n /// configured, from the environmental variable `PORT`, or a default\n\n /// of `8080`\n\n fn port(&self) -> u16;\n\n\n\n /// resolve the host for this application, either directly from\n\n /// configuration, from the `HOST` env var, or `\"localhost\"`\n\n fn host(&self) -> String;\n\n\n\n /// use the [`ConfigExt::port`] and [`ConfigExt::host`] to resolve\n\n /// a vec of potential socket addrs\n\n fn socket_addrs(&self) -> Vec<SocketAddr>;\n\n\n\n /// returns whether this server should register itself for\n\n /// operating system signals. 
this flag does nothing aside from\n\n /// communicating to the server implementer that this is\n", "file_path": "server-common/src/config_ext.rs", "rank": 61, "score": 91924.75862463063 }, { "content": "fn test_handler(handler: impl Handler) {\n\n trillium_testing::with_socket(handler, |socket| async move {\n\n let (mut client, _) =\n\n async_tungstenite::client_async(\"ws://localhost/some/route\", socket).await?;\n\n\n\n client.send(Message::text(\"hello\")).await?;\n\n let received_message = client.next().await.unwrap()?.into_text()?;\n\n assert_eq!(\n\n \"received your message: hello at path /some/route\",\n\n received_message\n\n );\n\n\n\n client.send(Message::text(\"hey\")).await?;\n\n let received_message = client.next().await.unwrap()?.into_text()?;\n\n assert_eq!(\n\n \"received your message: hey at path /some/route\",\n\n received_message\n\n );\n\n\n\n Ok(())\n\n });\n\n}\n", "file_path": "websockets/tests/test.rs", "rank": 62, "score": 91918.35430044735 }, { "content": "pub fn json_websocket<T>(json_websocket_handler: T) -> WebSocket<JsonHandler<T>>\n\nwhere\n\n T: JsonWebSocketHandler,\n\n{\n\n WebSocket::new_json(json_websocket_handler)\n\n}\n", "file_path": "websockets/src/json.rs", "rank": 63, "score": 91787.84119742524 }, { "content": "#[async_trait]\n\npub trait Acceptor<Input>: Clone + Send + Sync + 'static\n\nwhere\n\n Input: AsyncRead + AsyncWrite + Send + Sync + Unpin + 'static,\n\n{\n\n /// The stream type. For example, TlsStream<Input>\n\n type Output: AsyncRead + AsyncWrite + Send + Sync + Unpin + 'static;\n\n\n\n /// An error type that [`Acceptor::accept`] may return\n\n type Error: Debug + Send + Sync;\n\n\n\n /**\n\n Transform an Input (`AsyncRead + AsyncWrite + Send + Sync + Unpin + 'static`) into Self::Output\n\n\n\n Async trait signature:\n\n ```rust,ignore\n\n async fn accept(&self, input: Input) -> Result<Self::Output, Self::Error>;\n\n ```\n\n */\n\n async fn accept(&self, input: Input) -> Result<Self::Output, Self::Error>;\n\n}\n", "file_path": "tls-common/src/lib.rs", "rank": 64, "score": 89692.13318549498 }, { "content": "/// Alias for [`SessionHandler::new`]\n\npub fn sessions<Store>(store: Store, secret: impl AsRef<[u8]>) -> SessionHandler<Store>\n\nwhere\n\n Store: SessionStore,\n\n{\n\n SessionHandler::new(store, secret)\n\n}\n", "file_path": "sessions/src/session_handler.rs", "rank": 65, "score": 88998.63406457432 }, { "content": "## Logger\n\n\n\n[rustdocs (main)](https://docs.trillium.rs/trillium_logger/)\n\n\n\n```rust,noplaypen\n\n{{#include ../../../logger/examples/logger.rs}}\n\n```\n\n\n\n\n", "file_path": "docs/src/handlers/logger.md", "rank": 66, "score": 88877.2265263882 }, { "content": "/// Alias for [`StaticCompiledHandler::new`]\n\npub fn files(dir: Dir<'static>) -> StaticCompiledHandler {\n\n StaticCompiledHandler::new(dir)\n\n}\n\n\n\n/**\n\nThe preferred interface to build a StaticCompiledHandler\n\n\n\nMacro interface to build a\n\n[`StaticCompiledHandler`]. `static_compiled!(\"assets\")` is\n\nidentical to\n\n`StaticCompiledHandler::new(include_dir!(\"assets\"))`.\n\n\n\n*/\n\n\n\n#[macro_export]\n\nmacro_rules! static_compiled {\n\n ($path:literal) => {\n\n $crate::StaticCompiledHandler::new($crate::include_dir!($path))\n\n };\n\n}\n", "file_path": "static-compiled/src/lib.rs", "rank": 67, "score": 88760.78742673548 }, { "content": "/// Convenience function to build a trillium api handler. 
This is an\n\n/// alias for [`ApiHandler::new`].\n\npub fn api<F, Fut, BodyType>(handler_fn: F) -> ApiHandler<F, BodyType>\n\nwhere\n\n BodyType: DeserializeOwned + Send + Sync + 'static,\n\n F: Fn(Conn, BodyType) -> Fut + Send + Sync + 'static,\n\n Fut: Future<Output = Conn> + Send + 'static,\n\n{\n\n ApiHandler::new(handler_fn)\n\n}\n\n\n\nimpl<F, Fut, BodyType> ApiHandler<F, BodyType>\n\nwhere\n\n BodyType: DeserializeOwned + Send + Sync + 'static,\n\n F: Fn(Conn, BodyType) -> Fut + Send + Sync + 'static,\n\n Fut: Future<Output = Conn> + Send + 'static,\n\n{\n\n /// Build a new API handler for the given async function. This is\n\n /// aliased as [`api`].\n\n pub fn new(handler_fn: F) -> Self {\n\n Self {\n\n handler_fn,\n", "file_path": "api/src/lib.rs", "rank": 68, "score": 88370.07830129728 }, { "content": "```\n\n# use trillium_logger::{Logger, apache_combined};\n\nLogger::new().with_formatter(apache_combined(\"-\", \"-\"));\n\n```\n\n\n\n## usage with an app-specific `user_id`\n\n\n\n```\n\n# use trillium_logger::{Logger, apache_combined};\n\n# use trillium::Conn;\n\n# use std::borrow::Cow;\n\n# struct User(String); impl User { fn name(&self) -> &str { &self.0 } }\n", "file_path": "logger/src/formatters.rs", "rank": 69, "score": 88064.1200048553 }, { "content": "use crate::LogFormatter;\n\nuse chrono::Local;\n\nuse colored::{ColoredString, Colorize};\n\nuse size::{Base, Size, Style};\n\nuse std::{borrow::Cow, fmt::Display, sync::Arc, time::Instant};\n\nuse trillium::{Conn, Method, Status, Version};\n\n\n\n/**\n\n[apache combined log format][apache]\n\n\n\n[apache]: https://httpd.apache.org/docs/current/logs.html#combined\n\n\n\nThis is defined as follows:\n\n\n\n[`apache_combined`](`request_id`, `user_id`) [`header`]`(\"referrer\")` [`header`]`(\"user-agent\")`\n\n\n\nwhere `request_id` and `user_id` are mandatory formatters provided at time of usage.\n\n\n\n\n\n## usage with empty `request_id` and `user_id`\n", "file_path": "logger/src/formatters.rs", "rank": 70, "score": 88062.75448741738 }, { "content": "\n\n | code | color |\n\n |------|--------|\n\n | 2xx | green |\n\n | 3xx | cyan |\n\n | 4xx | yellow |\n\n | 5xx | red |\n\n | ??? | white |\n\n */\n\n pub fn status(conn: &Conn, color: bool) -> StatusOutput {\n\n StatusOutput(conn.status().unwrap_or(Status::NotFound), color)\n\n }\n\n}\n\n\n\npub use status_mod::status;\n\n\n\n/**\n\nformatter-builder for a particular header, formatted wrapped in\n\nquotes. `\"\"` if the header is not present\n\n\n", "file_path": "logger/src/formatters.rs", "rank": 71, "score": 88062.08870162205 }, { "content": " concat!(\"{\",stringify!($name) ,\":}\")\n\n ),*),\n\n $($name = ($name)),*\n\n ))\n\n }\n\n }\n\n\n\n #[allow(non_snake_case)]\n\n impl<$($name),*> LogFormatter for ($($name,)*) where $($name: LogFormatter),* {\n\n type Output = TupleOutput<($($name::Output,)*)>;\n\n fn format(&self, conn: &Conn, color: bool) -> Self::Output {\n\n let ($(ref $name,)*) = *self;\n\n TupleOutput(($(($name).format(conn, color),)*))\n\n }\n\n }\n\n )\n\n }\n\n\n\n impl_formatter_tuple! { A B }\n\n impl_formatter_tuple! 
{ A B C }\n", "file_path": "logger/src/formatters.rs", "rank": 72, "score": 88060.04016421661 }, { "content": " formatter for the wall-time duration with units that this http\n\n request-response cycle took, from the first bytes read to the\n\n completion of the response.\n\n */\n\n pub fn response_time(conn: &Conn, _color: bool) -> ResponseTimeOutput {\n\n ResponseTimeOutput(conn.inner().start_time())\n\n }\n\n}\n\n\n\npub use response_time_mod::response_time;\n\n\n\n/**\n\nformatter for the http version, as delegated to the display\n\nimplementation of [`Version`]\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 73, "score": 88059.69819296047 }, { "content": " fn format(&self, _conn: &Conn, color: bool) -> Self::Output {\n\n if color {\n\n self.to_string()\n\n } else {\n\n (&**self).to_string()\n\n }\n\n }\n\n}\n\n\n\nimpl<F, O> LogFormatter for F\n\nwhere\n\n F: Fn(&Conn, bool) -> O + Send + Sync + 'static,\n\n O: Display + Send + Sync + 'static,\n\n{\n\n type Output = O;\n\n fn format(&self, conn: &Conn, color: bool) -> Self::Output {\n\n self(conn, color)\n\n }\n\n}\n\n\n", "file_path": "logger/src/formatters.rs", "rank": 74, "score": 88058.97060303544 }, { "content": "usage:\n\n\n\n```rust\n\n# use trillium_logger::{Logger, formatters::header};\n\nLogger::new().with_formatter((\"user-agent: \", header(\"user-agent\")));\n\n```\n\n\n\n**note**: this is not a formatter itself, but returns a formatter when\n\ncalled with a header name\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 75, "score": 88058.31323523866 }, { "content": " \"{}\",\n\n status_string.color(match status as u16 {\n\n 200..=299 => \"green\",\n\n 300..=399 => \"cyan\",\n\n 400..=499 => \"yellow\",\n\n 500..=599 => \"red\",\n\n _ => \"white\",\n\n })\n\n ))\n\n } else {\n\n f.write_str(&status_string)\n\n }\n\n }\n\n }\n\n\n\n /**\n\n formatter for the http status\n\n\n\n displays just the numeric code of the\n\n status. when color is enabled, it uses the following color encoding:\n", "file_path": "logger/src/formatters.rs", "rank": 76, "score": 88053.71095445944 }, { "content": "\n\n impl Display for Now {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_fmt(format_args!(\n\n \"{}\",\n\n Local::now().format(\"%d/%b/%Y:%H:%M:%S %z\")\n\n ))\n\n }\n\n }\n\n}\n\npub use timestamp_mod::timestamp;\n\n\n\n/**\n\nformatter for the response body length, represented as a\n\nhuman-readable string like `5 bytes` or `10.1mb`. prints `-` if there\n\nis no response body. see [`bytes`] for the raw number of bytes\n\n*/\n", "file_path": "logger/src/formatters.rs", "rank": 77, "score": 88053.68541972259 }, { "content": "mod tuples {\n\n use super::*;\n\n /**\n\n display output for the tuple implementation\n\n\n\n The Display type of each tuple element is contained in this type, and\n\n it implements [`Display`] for 2-26-arity tuples.\n\n\n\n Please open an issue if you find yourself needing to do something with\n\n this other than [`Display`] it.\n\n */\n\n pub struct TupleOutput<O>(O);\n\n macro_rules! impl_formatter_tuple {\n\n ($($name:ident)+) => (\n\n #[allow(non_snake_case)]\n\n impl<$($name,)*> Display for TupleOutput<($($name,)*)> where $($name: Display + Send + Sync + 'static,)* {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let ($(ref $name,)*) = self.0;\n\n f.write_fmt(format_args!(\n\n concat!($(\n", "file_path": "logger/src/formatters.rs", "rank": 78, "score": 88053.1383075316 }, { "content": " impl_formatter_tuple! 
{ A B C D }\n\n impl_formatter_tuple! { A B C D E }\n\n impl_formatter_tuple! { A B C D E F }\n\n impl_formatter_tuple! { A B C D E F G }\n\n impl_formatter_tuple! { A B C D E F G H }\n\n impl_formatter_tuple! { A B C D E F G H I }\n\n impl_formatter_tuple! { A B C D E F G H I J }\n\n impl_formatter_tuple! { A B C D E F G H I J K }\n\n impl_formatter_tuple! { A B C D E F G H I J K L }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R S }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R S T }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R S T U }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R S T U V }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R S T U V W }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R S T U V W X }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R S T U V W X Y }\n\n impl_formatter_tuple! { A B C D E F G H I J K L M N O P Q R S T U V W X Y Z }\n\n}\n", "file_path": "logger/src/formatters.rs", "rank": 79, "score": 88045.97203301545 }, { "content": "use std::{\n\n fmt::{Display, Formatter, Result},\n\n net::SocketAddr,\n\n};\n\n\n\nconst DEFAULT_SERVER_DESCRIPTION: &str = concat!(\"trillium v\", env!(\"CARGO_PKG_VERSION\"));\n\n\n\n/**\n\nThis struct represents information about the currently connected\n\nserver.\n\n\n\nIt is passed to [`Handler::init`](crate::Handler::init) and the [`Init`](crate::Init) handler.\n\n*/\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Info {\n\n server_description: String,\n\n listener_description: String,\n\n tcp_socket_addr: Option<SocketAddr>,\n\n}\n", "file_path": "trillium/src/info.rs", "rank": 80, "score": 87744.71209330241 }, { "content": "\n\nimpl Info {\n\n /// Returns a user-displayable description of the server. This\n\n /// might be a string like \"trillium x.y.z (trillium-tokio x.y.z)\" or \"my\n\n /// special application\".\n\n pub fn server_description(&self) -> &str {\n\n &self.server_description\n\n }\n\n\n\n /// Returns a user-displayable string description of the location\n\n /// or port the listener is bound to, potentially as a url. Do not\n\n /// rely on the format of this string, as it will vary between\n\n /// server implementations and is intended for user\n\n /// display. 
Instead, use [`Info::tcp_socket_addr`] for any\n\n /// processing.\n\n pub fn listener_description(&self) -> &str {\n\n &self.listener_description\n\n }\n\n\n\n /// Returns the local_addr of a bound tcp listener, if such a\n", "file_path": "trillium/src/info.rs", "rank": 81, "score": 87732.78094091057 }, { "content": "\n\nimpl Default for Info {\n\n fn default() -> Self {\n\n Self {\n\n server_description: DEFAULT_SERVER_DESCRIPTION.into(),\n\n listener_description: \"\".into(),\n\n tcp_socket_addr: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Info {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result {\n\n f.write_fmt(format_args!(\n\n \"{} listening on {}\",\n\n self.server_description(),\n\n self.listener_description(),\n\n ))\n\n }\n\n}\n", "file_path": "trillium/src/info.rs", "rank": 82, "score": 87731.26938351843 }, { "content": " /// thing exists for this server\n\n pub fn tcp_socket_addr(&self) -> Option<&SocketAddr> {\n\n self.tcp_socket_addr.as_ref()\n\n }\n\n\n\n /// obtain a mutable borrow of the server description, suitable\n\n /// for appending information or replacing it\n\n pub fn server_description_mut(&mut self) -> &mut String {\n\n &mut self.server_description\n\n }\n\n\n\n /// obtain a mutable borrow of the listener description, suitable\n\n /// for appending information or replacing it\n\n pub fn listener_description_mut(&mut self) -> &mut String {\n\n &mut self.listener_description\n\n }\n\n}\n\n\n\nimpl From<&str> for Info {\n\n fn from(description: &str) -> Self {\n", "file_path": "trillium/src/info.rs", "rank": 83, "score": 87728.4953000672 }, { "content": " Self {\n\n server_description: String::from(DEFAULT_SERVER_DESCRIPTION),\n\n listener_description: String::from(description),\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl From<SocketAddr> for Info {\n\n fn from(socket_addr: SocketAddr) -> Self {\n\n Self {\n\n server_description: String::from(DEFAULT_SERVER_DESCRIPTION),\n\n listener_description: socket_addr.to_string(),\n\n tcp_socket_addr: Some(socket_addr),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(unix)]\n\nimpl From<std::os::unix::net::SocketAddr> for Info {\n\n fn from(s: std::os::unix::net::SocketAddr) -> Self {\n\n Self {\n\n server_description: String::from(DEFAULT_SERVER_DESCRIPTION),\n\n listener_description: format!(\"{:?}\", s),\n\n tcp_socket_addr: None,\n\n }\n\n }\n\n}\n", "file_path": "trillium/src/info.rs", "rank": 84, "score": 87726.82607618462 }, { "content": "pub fn websocket<H>(websocket_handler: H) -> WebSocket<H>\n\nwhere\n\n H: WebSocketHandler,\n\n{\n\n WebSocket::new(websocket_handler)\n\n}\n\n\n\nimpl<H> WebSocket<H>\n\nwhere\n\n H: WebSocketHandler,\n\n{\n\n /// Build a new WebSocket with an async handler function that\n\n /// receives a [`WebSocketConn`]\n\n pub fn new(handler: H) -> Self {\n\n Self {\n\n handler,\n\n protocols: Default::default(),\n\n }\n\n }\n\n\n", "file_path": "websockets/src/lib.rs", "rank": 85, "score": 87514.38521521544 }, { "content": "#[test]\n\nfn options_star_with_a_star_handler() {\n\n let router = Router::new()\n\n .get(\"*\", \"ok\")\n\n .post(\"/some/specific/route\", \"ok\");\n\n let mut conn = TestConn::build(\"options\", \"*\", ()).on(&router);\n\n assert_status!(&conn, 200);\n\n assert_headers!(&mut conn, \"allow\" => \"GET, POST\");\n\n}\n\n\n", "file_path": "router/tests/options.rs", "rank": 86, "score": 87373.21313449295 }, { "content": "\n\nThe simplest implementation of Handler for a named type looks like this:\n\n```\n\npub struct MyHandler;\n\n#[trillium::async_trait]\n\nimpl 
trillium::Handler for MyHandler {\n\n async fn run(&self, conn: trillium::Conn) -> trillium::Conn {\n\n conn\n\n }\n\n}\n\n\n\nuse trillium_testing::prelude::*;\n\nassert_not_handled!(get(\"/\").on(&MyHandler)); // we did not halt or set a body status\n\n```\n\n\n\n**Temporary Note:** Until rust has true async traits, implementing\n\nhandler requires the use of the async_trait macro, which is reexported\n\nas [`trillium::async_trait`](crate::async_trait).\n\n\n\n## Full trait specification\n\n\n\nUnfortunately, the async_trait macro results in the difficult-to-read\n\ndocumentation at the top of the page, so here is how the trait is\n\nactually defined in trillium code:\n\n\n\n```\n\n# use trillium::{Conn, Upgrade, Info};\n\n# use std::borrow::Cow;\n\n#[trillium::async_trait]\n", "file_path": "trillium/src/handler.rs", "rank": 87, "score": 87194.91563374574 }, { "content": "The simplest handler is an async closure or\n\nasync fn that receives a Conn and returns a Conn, and Handler has a\n\nblanket implementation for any such Fn.\n\n\n\n```\n\n// as a closure\n\nlet handler = |conn: trillium::Conn| async move { conn.ok(\"trillium!\") };\n\n\n\nuse trillium_testing::prelude::*;\n\nassert_ok!(get(\"/\").on(&handler), \"trillium!\");\n\n```\n\n\n\n```\n\n// as an async function\n\nasync fn handler(conn: trillium::Conn) -> trillium::Conn {\n\n conn.ok(\"trillium!\")\n\n}\n\nuse trillium_testing::prelude::*;\n\nassert_ok!(get(\"/\").on(&handler), \"trillium!\");\n\n```\n", "file_path": "trillium/src/handler.rs", "rank": 88, "score": 87188.52862249452 }, { "content": "#[async_trait]\n\nimpl<H: Handler> Handler for Option<H> {\n\n async fn run(&self, conn: Conn) -> Conn {\n\n let handler = crate::conn_unwrap!(self, conn);\n\n handler.run(conn).await\n\n }\n\n\n\n async fn init(&mut self, info: &mut Info) {\n\n if let Some(handler) = self {\n\n handler.init(info).await\n\n }\n\n }\n\n\n\n async fn before_send(&self, conn: Conn) -> Conn {\n\n let handler = crate::conn_unwrap!(self, conn);\n\n handler.before_send(conn).await\n\n }\n\n\n\n fn name(&self) -> Cow<'static, str> {\n\n match self {\n", "file_path": "trillium/src/handler.rs", "rank": 89, "score": 87187.24049597376 }, { "content": "\n\n async fn upgrade(&self, upgrade: Upgrade) {\n\n self.as_ref().upgrade(upgrade).await\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<H: Handler> Handler for Vec<H> {\n\n async fn run(&self, mut conn: Conn) -> Conn {\n\n for handler in self {\n\n log::debug!(\"running {}\", handler.name());\n\n conn = handler.run(conn).await;\n\n if conn.is_halted() {\n\n break;\n\n }\n\n }\n\n conn\n\n }\n\n\n\n async fn init(&mut self, info: &mut Info) {\n", "file_path": "trillium/src/handler.rs", "rank": 90, "score": 87186.57500364991 }, { "content": "use crate::{async_trait, Conn, Info, Upgrade};\n\nuse std::{borrow::Cow, future::Future, sync::Arc};\n\n\n\n/**\n\n# The building block for Trillium applications.\n\n\n\n## Concept\n\n\n\nMany other frameworks have a notion of `middleware` and `endpoints`,\n\nin which the model is that a request passes through a router and then\n\nany number of middlewares, then a single endpoint that returns a\n\nresponse, and then passes a response back through the middleware\n\nstack.\n\n\n\nBecause a Trillium Conn represents both a request and response, there\n\nis no distinction between middleware and endpoints, as all of these\n\ncan be modeled as `Fn(Conn) -> Future<Output = Conn>`.\n\n\n\n## Implementing Handler\n\n\n", "file_path": "trillium/src/handler.rs", "rank": 91, "score": 
87186.41806628674 }, { "content": "\n\n /**\n\n Performs any final modifications to this conn after all handlers\n\n have been run. Although this is a slight deviation from the simple\n\n conn->conn->conn chain represented by most Handlers, it provides\n\n an easy way for libraries to effectively inject a second handler\n\n into a response chain. This is useful for loggers that need to\n\n record information both before and after other handlers have run,\n\n as well as database transaction handlers and similar library code.\n\n\n\n **❗IMPORTANT NOTE FOR LIBRARY AUTHORS:** Please note that this\n\n will run __whether or not the conn has was halted before\n\n [`Handler::run`] was called on a given conn__. This means that if\n\n you want to make your `before_send` callback conditional on\n\n whether `run` was called, you need to put a unit type into the\n\n conn's state and check for that.\n\n\n\n stability note: I don't love this for the exact reason that it\n\n breaks the simplicity of the conn->conn->model, but it is\n\n currently the best compromise between that simplicity and\n", "file_path": "trillium/src/handler.rs", "rank": 92, "score": 87182.93883250005 }, { "content": " }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for Box<dyn Handler> {\n\n async fn run(&self, conn: Conn) -> Conn {\n\n self.as_ref().run(conn).await\n\n }\n\n\n\n async fn init(&mut self, info: &mut Info) {\n\n self.as_mut().init(info).await\n\n }\n\n\n\n async fn before_send(&self, conn: Conn) -> Conn {\n\n self.as_ref().before_send(conn).await\n\n }\n\n\n\n fn name(&self) -> Cow<'static, str> {\n\n self.as_ref().name()\n\n }\n", "file_path": "trillium/src/handler.rs", "rank": 93, "score": 87182.78868122355 }, { "content": " for handler in self {\n\n handler.init(info).await;\n\n }\n\n }\n\n\n\n async fn before_send(&self, mut conn: Conn) -> Conn {\n\n for handler in self.iter().rev() {\n\n conn = handler.before_send(conn).await\n\n }\n\n conn\n\n }\n\n\n\n fn name(&self) -> Cow<'static, str> {\n\n self.iter()\n\n .map(|v| v.name())\n\n .collect::<Vec<_>>()\n\n .join(\",\")\n\n .into()\n\n }\n\n\n", "file_path": "trillium/src/handler.rs", "rank": 94, "score": 87182.44798995061 }, { "content": " if conn.is_halted() { return conn }\n\n )*\n\n conn\n\n }\n\n\n\n #[allow(non_snake_case)]\n\n async fn init(&mut self, info: &mut Info) {\n\n let ($(ref mut $name,)*) = *self;\n\n $(\n\n log::trace!(\"initializing {}\", ($name).name());\n\n ($name).init(info).await;\n\n )*\n\n }\n\n\n\n #[allow(non_snake_case)]\n\n async fn before_send(&self, conn: Conn) -> Conn {\n\n let ($(ref $name,)*) = *self;\n\n reverse_before_send!(conn, $($name)+);\n\n conn\n\n }\n", "file_path": "trillium/src/handler.rs", "rank": 95, "score": 87181.7113750563 }, { "content": " }\n\n\n\n async fn init(&mut self, info: &mut Info) {\n\n Arc::<H>::get_mut(self)\n\n .expect(\"cannot call init when there are already clones of an Arc<Handler>\")\n\n .init(info)\n\n .await\n\n }\n\n\n\n async fn before_send(&self, conn: Conn) -> Conn {\n\n self.as_ref().before_send(conn).await\n\n }\n\n\n\n fn name(&self) -> Cow<'static, str> {\n\n self.as_ref().name()\n\n }\n\n\n\n fn has_upgrade(&self, upgrade: &Upgrade) -> bool {\n\n self.as_ref().has_upgrade(upgrade)\n\n }\n", "file_path": "trillium/src/handler.rs", "rank": 96, "score": 87181.02218604286 }, { "content": " ($conn:ident, $name:ident) => (\n\n let $conn = ($name).before_send($conn).await;\n\n );\n\n\n\n ($conn:ident, $name:ident $($other_names:ident)+) => (\n\n reverse_before_send!($conn, 
$($other_names)*);\n\n reverse_before_send!($conn, $name);\n\n );\n\n}\n\n\n\nmacro_rules! impl_handler_tuple {\n\n ($($name:ident)+) => (\n\n #[async_trait]\n\n impl<$($name),*> Handler for ($($name,)*) where $($name: Handler),* {\n\n #[allow(non_snake_case)]\n\n async fn run(&self, conn: Conn) -> Conn {\n\n let ($(ref $name,)*) = *self;\n\n $(\n\n log::debug!(\"running {}\", ($name).name());\n\n let conn = ($name).run(conn).await;\n", "file_path": "trillium/src/handler.rs", "rank": 97, "score": 87180.79239089628 } ]
Rust
src/parser/record.rs
natir/needletail
3756d79cd3452f178a657387c26114bbbe7a5650
use std::borrow::Cow; use std::io::Write; use memchr::memchr; use crate::errors::ParseError; use crate::parser::fasta::BufferPosition as FastaBufferPosition; use crate::parser::fastq::BufferPosition as FastqBufferPosition; use crate::parser::utils::{Format, LineEnding, Position}; use crate::Sequence; #[derive(Debug, Clone)] enum BufferPositionKind<'a> { Fasta(&'a FastaBufferPosition), Fastq(&'a FastqBufferPosition), } #[derive(Debug, Clone)] pub struct SequenceRecord<'a> { buffer: &'a [u8], buf_pos: BufferPositionKind<'a>, position: &'a Position, line_ending: LineEnding, } impl<'a> SequenceRecord<'a> { pub(crate) fn new_fasta( buffer: &'a [u8], buf_pos: &'a FastaBufferPosition, position: &'a Position, line_ending: Option<LineEnding>, ) -> Self { Self { buffer, position, buf_pos: BufferPositionKind::Fasta(buf_pos), line_ending: line_ending.unwrap_or(LineEnding::Unix), } } pub(crate) fn new_fastq( buffer: &'a [u8], buf_pos: &'a FastqBufferPosition, position: &'a Position, line_ending: Option<LineEnding>, ) -> Self { Self { buffer, position, buf_pos: BufferPositionKind::Fastq(buf_pos), line_ending: line_ending.unwrap_or(LineEnding::Unix), } } #[inline] pub fn format(&self) -> Format { match self.buf_pos { BufferPositionKind::Fasta(_) => Format::Fasta, BufferPositionKind::Fastq(_) => Format::Fastq, } } #[inline] pub fn id(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.id(&self.buffer), BufferPositionKind::Fastq(bp) => bp.id(&self.buffer), } } #[inline] pub fn raw_seq(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.raw_seq(&self.buffer), BufferPositionKind::Fastq(bp) => bp.seq(&self.buffer), } } pub fn seq(&self) -> Cow<[u8]> { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.seq(&self.buffer), BufferPositionKind::Fastq(bp) => bp.seq(&self.buffer).into(), } } #[inline] pub fn qual(&self) -> Option<&[u8]> { match self.buf_pos { BufferPositionKind::Fasta(_) => None, BufferPositionKind::Fastq(bp) => Some(bp.qual(&self.buffer)), } } #[inline] pub fn all(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.all(&self.buffer), BufferPositionKind::Fastq(bp) => bp.all(&self.buffer), } } #[inline] pub fn num_bases(&self) -> usize { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.num_bases(&self.buffer), BufferPositionKind::Fastq(bp) => bp.num_bases(&self.buffer), } } pub fn start_line_number(&self) -> u64 { self.position.line } pub fn line_ending(&self) -> LineEnding { self.line_ending } pub fn write( &self, writer: &mut dyn Write, forced_line_ending: Option<LineEnding>, ) -> Result<(), ParseError> { match self.buf_pos { BufferPositionKind::Fasta(_) => write_fasta( self.id(), self.raw_seq(), writer, forced_line_ending.unwrap_or(self.line_ending), ), BufferPositionKind::Fastq(_) => write_fastq( self.id(), self.raw_seq(), self.qual(), writer, forced_line_ending.unwrap_or(self.line_ending), ), } } } impl<'a> Sequence<'a> for SequenceRecord<'a> { fn sequence(&'a self) -> &'a [u8] { self.raw_seq() } } pub fn mask_header_tabs(id: &[u8]) -> Option<Vec<u8>> { memchr(b'\t', id).map(|_| { id.iter() .map(|x| if *x == b'\t' { b'|' } else { *x }) .collect() }) } pub fn mask_header_utf8(id: &[u8]) -> Option<Vec<u8>> { match String::from_utf8_lossy(id) { Cow::Owned(s) => Some(s.into_bytes()), Cow::Borrowed(_) => None, } } pub fn write_fasta( id: &[u8], seq: &[u8], writer: &mut dyn Write, line_ending: LineEnding, ) -> Result<(), ParseError> { let ending = line_ending.to_bytes(); writer.write_all(b">")?; writer.write_all(id)?; 
writer.write_all(&ending)?; writer.write_all(seq)?; writer.write_all(&ending)?; Ok(()) } pub fn write_fastq( id: &[u8], seq: &[u8], qual: Option<&[u8]>, writer: &mut dyn Write, line_ending: LineEnding, ) -> Result<(), ParseError> { let ending = line_ending.to_bytes(); writer.write_all(b"@")?; writer.write_all(id)?; writer.write_all(&ending)?; writer.write_all(seq)?; writer.write_all(&ending)?; writer.write_all(b"+")?; writer.write_all(&ending)?; if let Some(qual) = qual { writer.write_all(&qual)?; } else { writer.write_all(&vec![b'I'; seq.len()])?; } writer.write_all(&ending)?; Ok(()) }
use std::borrow::Cow; use std::io::Write; use memchr::memchr; use crate::errors::ParseError; use crate::parser::fasta::BufferPosition as FastaBufferPosition; use crate::parser::fastq::BufferPosition as FastqBufferPosition; use crate::parser::utils::{Format, LineEnding, Position}; use crate::Sequence; #[derive(Debug, Clone)] enum BufferPositionKind<'a> { Fasta(&'a FastaBufferPosition), Fastq(&'a FastqBufferPosition), } #[derive(Debug, Clone)] pub struct SequenceRecord<'a> { buffer: &'a [u8], buf_pos: BufferPositionKind<'a>, position: &'a Position, line_ending: LineEnding, } impl<'a> SequenceRecord<'a> { pub(crate) fn new_fasta( buffer: &'a [u8], buf_pos: &'a FastaBufferPosition, position: &'a Position, line_ending: Option<LineEnding>, ) -> Self { Self { buffer, position, buf_pos: BufferPositionKind::Fasta(buf_pos), line_ending: line_ending.unwrap_or(LineEnding::Unix), } } pub(crate) fn new_fastq( buffer: &'a [u8], buf_pos: &'a FastqBufferPosition, position: &'a Position, line_ending: Option<LineEnding>, ) -> Self { Self { buffer, position, buf_pos: BufferPositionKind::Fastq(buf_pos), line_ending: line_ending.unwrap_or(LineEnding::Unix), } } #[inline] pub fn format(&self) -> Format { match self.buf_pos { BufferPositionKind::Fasta(_) => Format::Fasta, BufferPositionKind::Fastq(_) => Format::Fastq, } } #[inline] pub fn id(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.id(&self.buffer), BufferPositionKind::Fastq(bp) => bp.id(&self.buffer), } } #[inline] pub fn raw_seq(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.raw_seq(&self.buffer), BufferPositionKind::Fastq(bp) => bp.seq(&self.buffer), } } pub fn seq(&self) -> Cow<[u8]> { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.seq(&self.buffer), BufferPositionKind::Fastq(bp) => bp.seq(&self.buffer).into(), } } #[inline] pub fn qual(&self) -> Option<&[u8]> { match self.buf_pos { BufferPositionKind::Fasta(_) => None, BufferPositionKind::Fastq(bp) => Some(bp.qual(&self.buffer)), } } #[inline] pub fn all(&self) -> &[u8] { match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.all(&self.buffer), BufferPositionKind::Fastq(bp) => bp.all(&self.buffer), } } #[inline] pub fn num_bases(&self) -> usize {
} pub fn start_line_number(&self) -> u64 { self.position.line } pub fn line_ending(&self) -> LineEnding { self.line_ending } pub fn write( &self, writer: &mut dyn Write, forced_line_ending: Option<LineEnding>, ) -> Result<(), ParseError> { match self.buf_pos { BufferPositionKind::Fasta(_) => write_fasta( self.id(), self.raw_seq(), writer, forced_line_ending.unwrap_or(self.line_ending), ), BufferPositionKind::Fastq(_) => write_fastq( self.id(), self.raw_seq(), self.qual(), writer, forced_line_ending.unwrap_or(self.line_ending), ), } } } impl<'a> Sequence<'a> for SequenceRecord<'a> { fn sequence(&'a self) -> &'a [u8] { self.raw_seq() } } pub fn mask_header_tabs(id: &[u8]) -> Option<Vec<u8>> { memchr(b'\t', id).map(|_| { id.iter() .map(|x| if *x == b'\t' { b'|' } else { *x }) .collect() }) } pub fn mask_header_utf8(id: &[u8]) -> Option<Vec<u8>> { match String::from_utf8_lossy(id) { Cow::Owned(s) => Some(s.into_bytes()), Cow::Borrowed(_) => None, } } pub fn write_fasta( id: &[u8], seq: &[u8], writer: &mut dyn Write, line_ending: LineEnding, ) -> Result<(), ParseError> { let ending = line_ending.to_bytes(); writer.write_all(b">")?; writer.write_all(id)?; writer.write_all(&ending)?; writer.write_all(seq)?; writer.write_all(&ending)?; Ok(()) } pub fn write_fastq( id: &[u8], seq: &[u8], qual: Option<&[u8]>, writer: &mut dyn Write, line_ending: LineEnding, ) -> Result<(), ParseError> { let ending = line_ending.to_bytes(); writer.write_all(b"@")?; writer.write_all(id)?; writer.write_all(&ending)?; writer.write_all(seq)?; writer.write_all(&ending)?; writer.write_all(b"+")?; writer.write_all(&ending)?; if let Some(qual) = qual { writer.write_all(&qual)?; } else { writer.write_all(&vec![b'I'; seq.len()])?; } writer.write_all(&ending)?; Ok(()) }
match self.buf_pos { BufferPositionKind::Fasta(bp) => bp.num_bases(&self.buffer), BufferPositionKind::Fastq(bp) => bp.num_bases(&self.buffer), }
if_condition
[ { "content": "/// Find the lexigraphically smallest substring of `seq` of length `length`\n\n///\n\n/// There's probably a faster algorithm for this somewhere...\n\npub fn minimizer(seq: &[u8], length: usize) -> Cow<[u8]> {\n\n let reverse_complement: Vec<u8> = seq.iter().rev().map(|n| complement(*n)).collect();\n\n let mut minmer = Cow::Borrowed(&seq[..length]);\n\n\n\n for (kmer, rc_kmer) in seq.windows(length).zip(reverse_complement.windows(length)) {\n\n if *kmer < minmer[..] {\n\n minmer = kmer.into();\n\n }\n\n if *rc_kmer < minmer[..] {\n\n minmer = rc_kmer.to_vec().into();\n\n }\n\n }\n\n minmer\n\n}\n\n\n", "file_path": "src/sequence.rs", "rank": 0, "score": 139782.86604720476 }, { "content": "#[inline]\n\npub fn complement(n: u8) -> u8 {\n\n match n {\n\n b'a' => b't',\n\n b'A' => b'T',\n\n b'c' => b'g',\n\n b'C' => b'G',\n\n b'g' => b'c',\n\n b'G' => b'C',\n\n b't' => b'a',\n\n b'T' => b'A',\n\n\n\n // IUPAC codes\n\n b'r' => b'y',\n\n b'y' => b'r',\n\n b'k' => b'm',\n\n b'm' => b'k',\n\n b'b' => b'v',\n\n b'v' => b'b',\n\n b'd' => b'h',\n\n b'h' => b'd',\n", "file_path": "src/sequence.rs", "rank": 1, "score": 138683.47367613614 }, { "content": "/// Taking in a sequence string, return the canonical form of the sequence\n\n/// (e.g. the lexigraphically lowest of either the original sequence or its\n\n/// reverse complement)\n\npub fn canonical(seq: &[u8]) -> Cow<[u8]> {\n\n let mut buf: Vec<u8> = Vec::with_capacity(seq.len());\n\n // enough just keeps our comparisons from happening after they need to\n\n let mut enough = false;\n\n let mut original_was_canonical = false;\n\n\n\n // loop through the kmer and its reverse complement simultaneously\n\n for (rn, n) in seq.iter().rev().map(|n| complement(*n)).zip(seq.iter()) {\n\n buf.push(rn);\n\n if !enough && (*n < rn) {\n\n original_was_canonical = true;\n\n break;\n\n } else if !enough && (rn < *n) {\n\n enough = true;\n\n }\n\n // unstated if branch: if rn == n, keep comparing\n\n }\n\n match (original_was_canonical, enough) {\n\n (true, true) => panic!(\"Bug: should never set original_was_canonical if enough == true\"),\n\n (true, false) => seq.into(),\n\n (false, true) => buf.into(),\n\n // the sequences were completely equal, return the ref\n\n (false, false) => seq.into(),\n\n }\n\n}\n\n\n", "file_path": "src/sequence.rs", "rank": 2, "score": 128989.49585850688 }, { "content": "/// Transform a nucleic acid sequence into its \"normalized\" form.\n\n///\n\n/// The normalized form is:\n\n/// - only AGCTN and possibly - (for gaps)\n\n/// - strip out any whitespace or line endings\n\n/// - lowercase versions of these are uppercased\n\n/// - U is converted to T (make everything a DNA sequence)\n\n/// - some other punctuation is converted to gaps\n\n/// - IUPAC bases may be converted to N's depending on the parameter passed in\n\n/// - everything else is considered a N\n\npub fn normalize(seq: &[u8], allow_iupac: bool) -> Option<Vec<u8>> {\n\n let mut buf: Vec<u8> = Vec::with_capacity(seq.len());\n\n let mut changed: bool = false;\n\n\n\n for n in seq.iter() {\n\n let (new_char, char_changed) = match (*n, allow_iupac) {\n\n c @ (b'A', _)\n\n | c @ (b'C', _)\n\n | c @ (b'G', _)\n\n | c @ (b'T', _)\n\n | c @ (b'N', _)\n\n | c @ (b'-', _) => (c.0, false),\n\n (b'a', _) => (b'A', true),\n\n (b'c', _) => (b'C', true),\n\n (b'g', _) => (b'G', true),\n\n // normalize uridine to thymine\n\n (b't', _) | (b'u', _) | (b'U', _) => (b'T', true),\n\n // normalize gaps\n\n (b'.', _) | (b'~', _) => (b'-', true),\n\n // logic for IUPAC bases (a 
little messy)\n", "file_path": "src/sequence.rs", "rank": 5, "score": 110936.83717369252 }, { "content": "pub fn bitmer_to_bytes(kmer: BitKmer) -> Vec<u8> {\n\n let mut new_kmer = kmer.0;\n\n let mut new_kmer_str = Vec::new();\n\n // we're reading the bases off from the \"high\" end of the integer so we need to do some\n\n // math to figure out where they start (this helps us just pop the bases on the end\n\n // of the working buffer as we read them off \"left to right\")\n\n let offset = (kmer.1 - 1) * 2;\n\n let bitmask = BitKmerSeq::pow(2, u32::from(2 * kmer.1 - 1))\n\n + BitKmerSeq::pow(2, u32::from(2 * kmer.1 - 2));\n\n\n\n for _ in 0..kmer.1 {\n\n let new_char = (new_kmer & bitmask) >> offset;\n\n new_kmer <<= 2;\n\n new_kmer_str.push(match new_char {\n\n 0 => b'A',\n\n 1 => b'C',\n\n 2 => b'G',\n\n 3 => b'T',\n\n _ => panic!(\"Mathematical impossibility\"),\n\n });\n", "file_path": "src/bitkmer.rs", "rank": 6, "score": 107548.5666495753 }, { "content": "pub fn find_line_ending(bytes: &[u8]) -> Option<LineEnding> {\n\n if !bytes.is_empty() {\n\n if let Some(idx) = memchr(b'\\n', &bytes) {\n\n if idx > 0 && bytes[idx - 1] == b'\\r' {\n\n return Some(LineEnding::Windows);\n\n } else {\n\n return Some(LineEnding::Unix);\n\n }\n\n }\n\n }\n\n None\n\n}\n", "file_path": "src/parser/utils.rs", "rank": 7, "score": 102882.67413749266 }, { "content": "/// Find the lexigraphically lowest substring of a given length in the BitKmer\n\npub fn minimizer(kmer: BitKmer, minmer_size: u8) -> BitKmer {\n\n let mut new_kmer = kmer.0;\n\n let mut lowest = !(0 as BitKmerSeq);\n\n let bitmask = (BitKmerSeq::pow(2, u32::from(2 * minmer_size)) - 1) as BitKmerSeq;\n\n for _ in 0..=(kmer.1 - minmer_size) {\n\n let cur = bitmask & new_kmer;\n\n if cur < lowest {\n\n lowest = cur;\n\n }\n\n let cur_rev = reverse_complement((bitmask & new_kmer, kmer.1));\n\n if cur_rev.0 < lowest {\n\n lowest = cur_rev.0;\n\n }\n\n new_kmer >>= 2;\n\n }\n\n (lowest, kmer.1)\n\n}\n\n\n", "file_path": "src/bitkmer.rs", "rank": 9, "score": 99998.11479712356 }, { "content": "fn nuc2bti_lookup_nocheck(nuc: u8) -> Option<u8> {\n\n unsafe { *NUC2BIT_LOOKUP.get_unchecked(nuc as usize) }\n\n}\n\n\n", "file_path": "src/bitkmer.rs", "rank": 10, "score": 91181.91331902539 }, { "content": "/// Returns true if the base is a unambiguous nucleic acid base (e.g. 
ACGT) and\n\n/// false otherwise.\n\nfn is_good_base(chr: u8) -> bool {\n\n match chr as char {\n\n 'a' | 'c' | 'g' | 't' | 'A' | 'C' | 'G' | 'T' => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n/// Generic moving window iterator over sequences to return k-mers\n\n///\n\n/// Iterator returns slices to the original data.\n\npub struct Kmers<'a> {\n\n k: u8,\n\n start_pos: usize,\n\n buffer: &'a [u8],\n\n}\n\n\n\nimpl<'a> Kmers<'a> {\n\n /// Creates a new kmer-izer for a nucleotide/amino acid sequence.\n\n pub fn new(buffer: &'a [u8], k: u8) -> Self {\n\n Kmers {\n", "file_path": "src/kmer.rs", "rank": 13, "score": 81661.59786863466 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]\n\nenum SearchPosition {\n\n Id,\n\n Sequence,\n\n Separator,\n\n Quality,\n\n}\n\n\n\n/// Parser for FASTQ files.\n\n/// Only use this directly if you know your file is FASTQ and that it is not compressed as\n\n/// it does not handle decompression.\n\n/// If you are unsure, it's better to use [parse_fastx_file](fn.parse_fastx_file.html).\n\npub struct Reader<R: io::Read> {\n\n buf_reader: buf_redux::BufReader<R>,\n\n buf_pos: BufferPosition,\n\n search_pos: SearchPosition,\n\n position: Position,\n\n finished: bool,\n\n line_ending: Option<LineEnding>,\n\n}\n\n\n", "file_path": "src/parser/fastq.rs", "rank": 14, "score": 79357.32277338734 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestCase {\n\n filename: String,\n\n // origin: String,\n\n tags: Option<Vec<String>>,\n\n // comments: Option<Vec<String>>,\n\n}\n\n\n", "file_path": "tests/format_specimens.rs", "rank": 15, "score": 79255.93520763148 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestIndex {\n\n valid: Vec<TestCase>,\n\n invalid: Option<Vec<TestCase>>,\n\n}\n\n\n", "file_path": "tests/format_specimens.rs", "rank": 16, "score": 79255.93520763148 }, { "content": "/// Used for the BitNuclKmer iterator to handle skipping invalid bases.\n\nfn update_position(\n\n start_pos: &mut usize,\n\n kmer: &mut BitKmer,\n\n buffer: &[u8],\n\n initial: bool,\n\n) -> bool {\n\n // check if we have enough \"physical\" space for one more kmer\n\n if *start_pos + kmer.1 as usize > buffer.len() {\n\n return false;\n\n }\n\n\n\n let (mut kmer_len, stop_len) = if initial {\n\n (0, (kmer.1 - 1) as usize)\n\n } else {\n\n ((kmer.1 - 1) as usize, kmer.1 as usize)\n\n };\n\n\n\n let mut cur_kmer = kmer;\n\n while kmer_len < stop_len {\n\n if extend_kmer(&mut cur_kmer, buffer[*start_pos + kmer_len]) {\n", "file_path": "src/bitkmer.rs", "rank": 17, "score": 76176.35056014365 }, { "content": "/// Reverse complement a BitKmer (reverses the sequence and swaps A<>T and G<>C)\n\npub fn reverse_complement(kmer: BitKmer) -> BitKmer {\n\n // FIXME: this is not going to work with BitKmers of u128 or u32\n\n // inspired from https://www.biostars.org/p/113640/\n\n let mut new_kmer = kmer.0;\n\n // reverse it\n\n new_kmer = (new_kmer >> 2 & 0x3333_3333_3333_3333) | (new_kmer & 0x3333_3333_3333_3333) << 2;\n\n new_kmer = (new_kmer >> 4 & 0x0F0F_0F0F_0F0F_0F0F) | (new_kmer & 0x0F0F_0F0F_0F0F_0F0F) << 4;\n\n new_kmer = (new_kmer >> 8 & 0x00FF_00FF_00FF_00FF) | (new_kmer & 0x00FF_00FF_00FF_00FF) << 8;\n\n new_kmer = (new_kmer >> 16 & 0x0000_FFFF_0000_FFFF) | (new_kmer & 0x0000_FFFF_0000_FFFF) << 16;\n\n new_kmer = (new_kmer >> 32 & 0x0000_0000_FFFF_FFFF) | (new_kmer & 0x0000_0000_FFFF_FFFF) << 32;\n\n // complement it\n\n new_kmer ^= 0xFFFF_FFFF_FFFF_FFFF;\n\n // shift it to the right size\n\n new_kmer >>= 2 * (32 - kmer.1);\n\n (new_kmer, 
kmer.1)\n\n}\n\n\n", "file_path": "src/bitkmer.rs", "rank": 18, "score": 72500.81801725154 }, { "content": "#[test]\n\nfn test_specimen_fastq() {\n\n let raw_index = fs::read_to_string(\"tests/specimen/FASTQ/index.toml\").unwrap();\n\n let index: TestIndex = toml::from_str(&raw_index).expect(\"Could not deserialize index\");\n\n\n\n for test in index.valid {\n\n if test.filename == \"wrapping_original_sanger.fastq\"\n\n || test.filename == \"longreads_original_sanger.fastq\"\n\n || test.filename == \"tricky.fastq\"\n\n {\n\n // may god have mercy upon us if someone ever tries a file like this\n\n // (sequences are one-line, but quality scores are line-wrapped)\n\n continue;\n\n }\n\n\n\n let path = format!(\"tests/specimen/FASTQ/{}\", test.filename);\n\n assert!(\n\n test_fastx_file(&path).is_ok(),\n\n format!(\"File {} is bad?\", test.filename)\n\n );\n\n }\n", "file_path": "tests/format_specimens.rs", "rank": 19, "score": 70853.3155254237 }, { "content": "#[test]\n\nfn test_specimen_fasta() {\n\n let raw_index = fs::read_to_string(\"tests/specimen/FASTA/index.toml\").unwrap();\n\n let index: TestIndex = toml::from_str(&raw_index).expect(\"Could not deserialize index\");\n\n for test in index.valid {\n\n // what kind of sicko puts comments in FASTAs?\n\n if test\n\n .tags\n\n .unwrap_or_else(Vec::new)\n\n .contains(&String::from(\"comments\"))\n\n {\n\n continue;\n\n }\n\n\n\n let path = format!(\"tests/specimen/FASTA/{}\", test.filename);\n\n assert_eq!(test_fastx_file(&path), Ok(()));\n\n }\n\n}\n\n\n", "file_path": "tests/format_specimens.rs", "rank": 20, "score": 70853.3155254237 }, { "content": "#[pyfunction]\n\npub fn reverse_complement(seq: &str) -> PyResult<String> {\n\n let comp: Vec<u8> = seq\n\n .as_bytes()\n\n .iter()\n\n .rev()\n\n .map(|n| complement(*n))\n\n .collect();\n\n Ok(String::from_utf8_lossy(&comp).to_string())\n\n}\n\n\n\n#[pyproto]\n\nimpl PyIterProtocol for FastxReaderIterator {\n\n fn __next__(slf: PyRef<Self>) -> PyResult<Option<Record>> {\n\n let gil_guard = Python::acquire_gil();\n\n let py = gil_guard.python();\n\n let mut parser: PyRefMut<PyFastxReader> = slf.t.extract(py)?;\n\n if let Some(rec) = parser.reader.next() {\n\n let record = py_try!(rec);\n\n Ok(Some(Record::from_sequence_record(&record)))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/python.rs", "rank": 21, "score": 70366.67553200728 }, { "content": "/// Return the lexigraphically lowest of the BitKmer and its reverse complement and\n\n/// whether the returned kmer is the reverse_complement (true) or the original (false)\n\npub fn canonical(kmer: BitKmer) -> (BitKmer, bool) {\n\n let rc = reverse_complement(kmer);\n\n if kmer.0 > rc.0 {\n\n (rc, true)\n\n } else {\n\n (kmer, false)\n\n }\n\n}\n\n\n", "file_path": "src/bitkmer.rs", "rank": 22, "score": 70366.67553200728 }, { "content": "/// Takes a BitKmer and adds a new base on the end, optionally loping off the\n\n/// first base if the resulting kmer is too long.\n\nfn extend_kmer(kmer: &mut BitKmer, new_char: u8) -> bool {\n\n if let Some(new_char_int) = nuc2bti_lookup_nocheck(new_char) {\n\n let new_kmer = (kmer.0 << 2) + new_char_int as BitKmerSeq;\n\n\n\n // mask out any overflowed bits\n\n kmer.0 = new_kmer & (BitKmerSeq::pow(2, u32::from(2 * kmer.1)) - 1) as BitKmerSeq;\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/bitkmer.rs", "rank": 23, "score": 66229.06130555688 }, { "content": "/// The main entry point of needletail if you're reading from something that impls std::io::Read\n\n/// This 
automatically detects whether the file is:\n\n/// 1. compressed: gzip, bz and xz are supported and will use the appropriate decoder\n\n/// 2. FASTA or FASTQ: the right parser will be automatically instantiated\n\n/// 1 is only available if the `compression` feature is enabled.\n\npub fn parse_fastx_reader<'a, R: 'a + io::Read + Send>(\n\n mut reader: R,\n\n) -> Result<Box<dyn FastxReader + 'a>, ParseError> {\n\n let mut first_two_bytes = [0; 2];\n\n reader.read_exact(&mut first_two_bytes)?;\n\n let first_two_cursor = Cursor::new(first_two_bytes);\n\n let new_reader = first_two_cursor.chain(reader);\n\n\n\n match first_two_bytes {\n\n #[cfg(feature = \"compression\")]\n\n GZ_MAGIC => {\n\n let mut gz_reader = MultiGzDecoder::new(new_reader);\n\n let mut first = [0; 1];\n\n gz_reader.read_exact(&mut first)?;\n\n let r = Cursor::new(first).chain(gz_reader);\n\n get_fastx_reader(r, first[0])\n\n }\n\n #[cfg(feature = \"compression\")]\n\n BZ_MAGIC => {\n\n let mut bz_reader = BzDecoder::new(new_reader);\n", "file_path": "src/parser/mod.rs", "rank": 24, "score": 65290.75935989908 }, { "content": "#[pyfunction]\n\npub fn normalize_seq(seq: &str, iupac: bool) -> PyResult<String> {\n\n if let Some(s) = normalize(seq.as_bytes(), iupac) {\n\n Ok(String::from_utf8_lossy(&s).to_string())\n\n } else {\n\n Ok(seq.to_owned())\n\n }\n\n}\n\n\n", "file_path": "src/python.rs", "rank": 25, "score": 63801.00546627822 }, { "content": "/// The main entry point of needletail if you're reading from stdin.\n\n/// Shortcut to calling `parse_fastx_reader` with `stdin()`\n\npub fn parse_fastx_stdin() -> Result<Box<dyn FastxReader>, ParseError> {\n\n let stdin = stdin();\n\n parse_fastx_reader(stdin)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 26, "score": 62751.433083249 }, { "content": "fn test_fastx_file(path: &str) -> Result<(), ParseError> {\n\n let mut reader = parse_fastx_file(path)?;\n\n while let Some(rec) = reader.next() {\n\n let _ = rec?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/format_specimens.rs", "rank": 27, "score": 55100.55226886361 }, { "content": "/// The main entry point of needletail if you're reading from a file.\n\n/// Shortcut to calling `parse_fastx_reader` with a file\n\npub fn parse_fastx_file<P: AsRef<Path>>(path: P) -> Result<Box<dyn FastxReader>, ParseError> {\n\n parse_fastx_reader(File::open(&path)?)\n\n}\n\n\n\npub use record::{mask_header_tabs, mask_header_utf8, write_fasta, write_fastq, SequenceRecord};\n\nuse std::io;\n\npub use utils::{Format, LineEnding};\n", "file_path": "src/parser/mod.rs", "rank": 28, "score": 50934.37579206069 }, { "content": "/// A generic FASTX record that also abstracts over several logical operations\n\n/// that can be performed on nucleic acid sequences.\n\npub trait Sequence<'a> {\n\n fn sequence(&'a self) -> &'a [u8];\n\n\n\n /// Remove newlines from the sequence; this handles `\\r`, `\\n`, and `\\r\\n`\n\n /// and removes internal newlines in addition to ones at the end.\n\n /// Primarily used for FASTA multiline records, but can also help process\n\n /// (the much rarer) multiline FASTQs. Always use before iteration methods\n\n /// below to ensure no newlines are being returned with e.g. 
`.kmers`.\n\n /// If you are using `normalize`, you do not need to call this function directly.\n\n fn strip_returns(&'a self) -> Cow<'a, [u8]> {\n\n let seq = self.sequence();\n\n\n\n // first part is a fast check to see if we need to do any allocations\n\n let mut i;\n\n match memchr2(b'\\r', b'\\n', &seq) {\n\n Some(break_loc) => i = break_loc,\n\n None => return seq.into(),\n\n }\n\n // we found a newline; create a new buffer and stripping out newlines\n\n // and writing into it\n", "file_path": "src/sequence.rs", "rank": 29, "score": 46957.81604910377 }, { "content": "#[cfg(not(feature = \"compression\"))]\n\n#[test]\n\nfn errors_on_compressed_files() {\n\n for p in &TEST_FILES {\n\n assert!(parse_fastx_file(p).is_err());\n\n }\n\n}\n", "file_path": "tests/test_compressed.rs", "rank": 30, "score": 45134.15424289516 }, { "content": "#[cfg(feature = \"compression\")]\n\n#[test]\n\nfn test_stdin_gz() {\n\n // Generated with `echo \">id1\\nAGTCGTCA\" | gzip -c | xxd -i`\n\n let input: &[u8] = &[\n\n 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xb3, 0xcb, 0x4c, 0x31, 0xe4,\n\n 0x72, 0x74, 0x0f, 0x71, 0x06, 0x22, 0x47, 0x2e, 0x00, 0x9e, 0x3a, 0x32, 0x8c, 0x0e, 0x00,\n\n 0x00, 0x00,\n\n ];\n\n let mut file = tempfile::NamedTempFile::new().unwrap();\n\n file.write_all(input).unwrap();\n\n file.flush().unwrap();\n\n file.seek(SeekFrom::Start(0)).unwrap();\n\n\n\n escargot::CargoBuild::new()\n\n .example(\"stdin_pipe\")\n\n .current_release()\n\n .current_target()\n\n .run()\n\n .unwrap()\n\n .command()\n\n .stdin(file.into_file())\n\n .assert()\n\n .success()\n\n .stdout(contains(\"There are 8 bases in your file\"))\n\n .stdout(contains(\"There are 0 AAAAs in your file\"));\n\n}\n\n\n", "file_path": "tests/test_stdin.rs", "rank": 31, "score": 45134.15424289516 }, { "content": "#[test]\n\nfn test_stdin_no_compression() {\n\n let input: &[u8] = b\">id1\\nAGTCGTCA\";\n\n let mut file = tempfile::NamedTempFile::new().unwrap();\n\n file.write_all(input).unwrap();\n\n file.flush().unwrap();\n\n file.seek(SeekFrom::Start(0)).unwrap();\n\n\n\n escargot::CargoBuild::new()\n\n .example(\"stdin_pipe\")\n\n .current_release()\n\n .current_target()\n\n .run()\n\n .unwrap()\n\n .command()\n\n .stdin(file.into_file())\n\n .assert()\n\n .success()\n\n .stdout(contains(\"There are 8 bases in your file\"))\n\n .stdout(contains(\"There are 0 AAAAs in your file\"));\n\n}\n", "file_path": "tests/test_stdin.rs", "rank": 32, "score": 45134.15424289516 }, { "content": "#[cfg(feature = \"compression\")]\n\n#[test]\n\nfn test_stdin_xz() {\n\n // Generated with `echo \">id1\\nAGTCGTCA\" | xz -c | xxd -i`\n\n let input: &[u8] = &[\n\n 0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00, 0x00, 0x04, 0xe6, 0xd6, 0xb4, 0x46, 0x02, 0x00, 0x21,\n\n 0x01, 0x16, 0x00, 0x00, 0x00, 0x74, 0x2f, 0xe5, 0xa3, 0x01, 0x00, 0x0d, 0x3e, 0x69, 0x64,\n\n 0x31, 0x0a, 0x41, 0x47, 0x54, 0x43, 0x47, 0x54, 0x43, 0x41, 0x0a, 0x00, 0x00, 0x00, 0x12,\n\n 0x0f, 0x91, 0x75, 0xef, 0x7b, 0x63, 0x17, 0x00, 0x01, 0x26, 0x0e, 0x08, 0x1b, 0xe0, 0x04,\n\n 0x1f, 0xb6, 0xf3, 0x7d, 0x01, 0x00, 0x00, 0x00, 0x00, 0x04, 0x59, 0x5a,\n\n ];\n\n let mut file = tempfile::NamedTempFile::new().unwrap();\n\n file.write_all(input).unwrap();\n\n file.flush().unwrap();\n\n file.seek(SeekFrom::Start(0)).unwrap();\n\n\n\n escargot::CargoBuild::new()\n\n .example(\"stdin_pipe\")\n\n .current_release()\n\n .current_target()\n\n .run()\n\n .unwrap()\n\n .command()\n\n .stdin(file.into_file())\n\n .assert()\n\n .success()\n\n .stdout(contains(\"There are 8 bases in your 
file\"))\n\n .stdout(contains(\"There are 0 AAAAs in your file\"));\n\n}\n\n\n", "file_path": "tests/test_stdin.rs", "rank": 33, "score": 45134.15424289516 }, { "content": "#[cfg(feature = \"compression\")]\n\n#[test]\n\nfn test_stdin_bzip() {\n\n // Generated with `echo \">id1\\nAGTCGTCA\" | bzip2 -c | xxd -i`\n\n let input: &[u8] = &[\n\n 0x42, 0x5a, 0x68, 0x39, 0x31, 0x41, 0x59, 0x26, 0x53, 0x59, 0x9f, 0x9d, 0xf9, 0xa2, 0x00,\n\n 0x00, 0x01, 0xcf, 0x00, 0x00, 0x10, 0x20, 0x01, 0x28, 0x80, 0x04, 0x00, 0x04, 0x20, 0x20,\n\n 0x00, 0x22, 0x0c, 0x9a, 0x64, 0x20, 0xc9, 0x88, 0x21, 0x95, 0x8e, 0x82, 0x75, 0x27, 0x8b,\n\n 0xb9, 0x22, 0x9c, 0x28, 0x48, 0x4f, 0xce, 0xfc, 0xd1, 0x00,\n\n ];\n\n let mut file = tempfile::NamedTempFile::new().unwrap();\n\n file.write_all(input).unwrap();\n\n file.flush().unwrap();\n\n file.seek(SeekFrom::Start(0)).unwrap();\n\n\n\n escargot::CargoBuild::new()\n\n .example(\"stdin_pipe\")\n\n .current_release()\n\n .current_target()\n\n .run()\n\n .unwrap()\n\n .command()\n\n .stdin(file.into_file())\n\n .assert()\n\n .success()\n\n .stdout(contains(\"There are 8 bases in your file\"))\n\n .stdout(contains(\"There are 0 AAAAs in your file\"));\n\n}\n\n\n", "file_path": "tests/test_stdin.rs", "rank": 34, "score": 45134.15424289516 }, { "content": "/// The main trait, iterator-like, that the FASTA and FASTQ readers implement\n\npub trait FastxReader: Send {\n\n /// Gets the next record in the stream.\n\n /// This imitates the Iterator API but does not support any iterator functions.\n\n /// This returns None once we reached the EOF.\n\n fn next(&mut self) -> Option<Result<SequenceRecord, ParseError>>;\n\n /// Returns the current line/byte in the stream we are reading from\n\n fn position(&self) -> &Position;\n\n /// Returns whether the current stream uses Windows or Unix style line endings\n\n /// It is `None` only before calling `next`, once `next` has been called it will always\n\n /// return a line ending.\n\n fn line_ending(&self) -> Option<LineEnding>;\n\n}\n", "file_path": "src/parser/utils.rs", "rank": 35, "score": 43210.097258473026 }, { "content": "#[cfg(feature = \"compression\")]\n\n#[test]\n\nfn can_read_compressed_files_automatically() {\n\n use needletail::parser::Format;\n\n for p in &TEST_FILES {\n\n let mut reader = parse_fastx_file(p).unwrap();\n\n let mut i = 0;\n\n while let Some(record) = reader.next() {\n\n let seq = record.unwrap();\n\n assert_eq!(seq.format(), Format::Fasta);\n\n\n\n match i {\n\n 0 => {\n\n assert_eq!(seq.id(), b\"test\");\n\n assert_eq!(seq.raw_seq(), b\"AGCTGATCGA\");\n\n assert_eq!(seq.qual(), None);\n\n }\n\n 1 => {\n\n assert_eq!(seq.id(), b\"test2\");\n\n assert_eq!(seq.raw_seq(), b\"TAGC\");\n\n assert_eq!(seq.qual(), None);\n\n }\n\n _ => unreachable!(\"Too many records\"),\n\n }\n\n i += 1;\n\n }\n\n assert_eq!(i, 2);\n\n }\n\n}\n\n\n", "file_path": "tests/test_compressed.rs", "rank": 36, "score": 43005.90056789285 }, { "content": "/// [⚠️Unstable] A trait to wrap over sequence data that has associated\n\n/// quality information.\n\n///\n\n/// Will be stabilized once we figure out a good way to handle sequences that\n\n/// have _optional_ quality information (like SequenceRecord) because the\n\n/// return trait requires a slice from an immutable reference and\n\n/// SequenceRecords can't return that without modifying themselves.\n\npub trait QualitySequence<'a>: Sequence<'a> {\n\n fn quality(&'a self) -> &'a [u8];\n\n\n\n /// Given a SeqRecord and a quality cutoff, mask out low-quality bases with\n\n /// `N` 
characters.\n\n fn quality_mask(&'a self, score: u8) -> Cow<'a, [u8]> {\n\n let qual = self.quality();\n\n // could maybe speed this up by doing a copy of base and then\n\n // iterating though qual and masking?\n\n let seq: Vec<u8> = self\n\n .sequence()\n\n .iter()\n\n .zip(qual.iter())\n\n .map(|(base, qual)| if *qual < score { b'N' } else { *base })\n\n .collect();\n\n seq.into()\n\n }\n\n}\n\n\n\nimpl<'a> Sequence<'a> for (&'a [u8], &'a [u8]) {\n", "file_path": "src/sequence.rs", "rank": 37, "score": 41451.148525687684 }, { "content": "fn bench_kmer_speed(c: &mut Criterion) {\n\n let ksize = 31;\n\n\n\n let mut data: Vec<u8> = vec![];\n\n let mut f = File::open(\"tests/data/28S.fasta\").unwrap();\n\n let _ = f.read_to_end(&mut data);\n\n\n\n let mut group = c.benchmark_group(\"Kmerizing\");\n\n group.sample_size(10);\n\n\n\n group.bench_function(\"Kmer\", |b| {\n\n use needletail::parser::FastaReader;\n\n b.iter(|| {\n\n let mut n_total = 0;\n\n let mut n_canonical = 0;\n\n let fasta_data = Cursor::new(data.clone());\n\n let mut reader = FastaReader::new(fasta_data);\n\n\n\n while let Some(record) = reader.next() {\n\n let rec = record.unwrap();\n", "file_path": "benches/benchmark.rs", "rank": 38, "score": 39015.021743911624 }, { "content": "fn bench_fasta_file(c: &mut Criterion) {\n\n use bio::io::{fasta as bio_fasta, fasta::FastaRead};\n\n use seq_io::fasta as seq_fasta;\n\n\n\n let mut data: Vec<u8> = vec![];\n\n let mut f = File::open(\"tests/data/28S.fasta\").unwrap();\n\n let _ = f.read_to_end(&mut data);\n\n\n\n let mut group = c.benchmark_group(\"FASTA parsing\");\n\n\n\n group.bench_function(\"RustBio\", |bench| {\n\n bench.iter(|| {\n\n let mut record = bio_fasta::Record::new();\n\n let fastq_data = Cursor::new(data.clone());\n\n let mut reader = bio_fasta::Reader::new(fastq_data);\n\n let mut n_bases = 0;\n\n reader.read(&mut record).expect(\"Failed to parse record\");\n\n while !record.is_empty() {\n\n n_bases += record.seq().len() as u64;\n\n reader.read(&mut record).expect(\"Failed to parse record.\");\n", "file_path": "benches/benchmark.rs", "rank": 39, "score": 39015.021743911624 }, { "content": "fn bench_fastq_file(c: &mut Criterion) {\n\n use bio::io::fastq as bio_fastq;\n\n use bio::io::fastq::FastqRead;\n\n use seq_io::fastq as seq_fastq;\n\n use seq_io::fastq::Record;\n\n\n\n let mut data: Vec<u8> = vec![];\n\n let mut f = File::open(\"tests/data/PRJNA271013_head.fq\").unwrap();\n\n let _ = f.read_to_end(&mut data);\n\n\n\n let mut group = c.benchmark_group(\"FASTQ parsing\");\n\n\n\n group.bench_function(\"RustBio\", |bench| {\n\n bench.iter(|| {\n\n let mut record = bio_fastq::Record::new();\n\n let fastq_data = Cursor::new(data.clone());\n\n let mut reader = bio_fastq::Reader::new(fastq_data);\n\n let mut n_bases = 0;\n\n reader.read(&mut record).expect(\"Failed to parse record\");\n\n while !record.is_empty() {\n", "file_path": "benches/benchmark.rs", "rank": 40, "score": 39015.021743911624 }, { "content": "#[pymodule]\n\nfn needletail(py: Python, m: &PyModule) -> PyResult<()> {\n\n m.add_class::<PyFastxReader>()?;\n\n m.add_wrapped(wrap_pyfunction!(parse_fastx_file))?;\n\n m.add_wrapped(wrap_pyfunction!(parse_fastx_string))?;\n\n m.add_wrapped(wrap_pyfunction!(normalize_seq))?;\n\n m.add_wrapped(wrap_pyfunction!(reverse_complement))?;\n\n m.add(\"NeedletailError\", py.get_type::<NeedletailError>())?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/python.rs", "rank": 41, "score": 34909.27324752886 }, { "content": "#[pyfunction]\n\nfn parse_fastx_string(content: 
&str) -> PyResult<PyFastxReader> {\n\n let reader = py_try!(parse_fastx_reader(Cursor::new(content.to_owned())));\n\n Ok(PyFastxReader { reader })\n\n}\n\n\n\n#[pyclass]\n\npub struct FastxReaderIterator {\n\n t: PyObject,\n\n}\n\n\n\n#[pyproto]\n\nimpl PyIterProtocol for PyFastxReader {\n\n fn __iter__(slf: PyRefMut<Self>) -> PyResult<FastxReaderIterator> {\n\n let gil_guard = Python::acquire_gil();\n\n let py = gil_guard.python();\n\n Ok(FastxReaderIterator { t: slf.into_py(py) })\n\n }\n\n}\n\n\n\n#[pyclass]\n", "file_path": "src/python.rs", "rank": 42, "score": 33513.487319687716 }, { "content": "#[pyfunction]\n\nfn parse_fastx_file(path: &str) -> PyResult<PyFastxReader> {\n\n let reader = py_try!(rs_parse_fastx_file(path));\n\n Ok(PyFastxReader { reader })\n\n}\n\n\n", "file_path": "src/python.rs", "rank": 43, "score": 33513.487319687716 }, { "content": "fn get_fastx_reader<'a, R: 'a + io::Read + Send>(\n\n reader: R,\n\n first_byte: u8,\n\n) -> Result<Box<dyn FastxReader + 'a>, ParseError> {\n\n match first_byte {\n\n b'>' => Ok(Box::new(FastaReader::new(reader))),\n\n b'@' => Ok(Box::new(FastqReader::new(reader))),\n\n _ => Err(ParseError::new_unknown_format(first_byte)),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 44, "score": 33185.44606590193 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut n_bases = 0;\n\n let mut n_valid_kmers = 0;\n\n let mut reader = parse_fastx_stdin().expect(\"valid path/file\");\n\n while let Some(record) = reader.next() {\n\n let seqrec = record.expect(\"invalid record\");\n\n // keep track of the total number of bases\n\n n_bases += seqrec.num_bases();\n\n // normalize to make sure all the bases are consistently capitalized and\n\n // that we remove the newlines since this is FASTA\n\n let norm_seq = seqrec.normalize(false);\n\n // we make a reverse complemented copy of the sequence first for\n\n // `canonical_kmers` to draw the complemented sequences from.\n\n let rc = norm_seq.reverse_complement();\n\n // now we keep track of the number of AAAAs (or TTTTs via\n\n // canonicalization) in the file; note we also get the position (i.0;\n\n // in the event there were `N`-containing kmers that were skipped)\n\n // and whether the sequence was complemented (i.2) in addition to\n\n // the canonical kmer (i.1)\n\n for (_, kmer, _) in norm_seq.canonical_kmers(4, &rc) {\n", "file_path": "examples/stdin_pipe.rs", "rank": 45, "score": 33185.44606590193 }, { "content": "use std::fs;\n\n\n\nuse needletail::errors::ParseError;\n\nuse needletail::parser::parse_fastx_file;\n\nuse serde_derive::Deserialize;\n\nuse toml;\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "tests/format_specimens.rs", "rank": 46, "score": 29914.064792602294 }, { "content": "\n\n for test in index.invalid.unwrap_or_else(Vec::new) {\n\n if test.filename == \"error_diff_ids.fastq\" {\n\n // we don't care if the sequence ID doesn't match the quality id?\n\n continue;\n\n }\n\n\n\n // We don't check for ascii validity since it's a big hit perf wise\n\n // This means some invalid sequences are considered ok but it's not a big issue\n\n // in practice\n\n if test.filename.starts_with(\"error_qual_\")\n\n || test.filename == \"error_spaces.fastq\"\n\n || test.filename == \"error_tabs.fastq\"\n\n {\n\n continue;\n\n }\n\n\n\n let path = format!(\"tests/specimen/FASTQ/{}\", test.filename);\n\n assert!(\n\n test_fastx_file(&path).is_err(),\n\n format!(\"File {} is good?\", test.filename)\n\n );\n\n }\n\n}\n", "file_path": 
"tests/format_specimens.rs", "rank": 47, "score": 29911.48032956727 }, { "content": " pub(crate) sep: usize,\n\n pub(crate) qual: usize,\n\n}\n\n\n\nimpl BufferPosition {\n\n #[inline]\n\n pub(crate) fn is_new(&self) -> bool {\n\n self.end == 0\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn len(&self) -> u64 {\n\n (self.end + 1 - self.start) as u64\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn id<'a>(&'a self, buffer: &'a [u8]) -> &'a [u8] {\n\n trim_cr(&buffer[self.start + 1..self.seq - 1])\n\n }\n\n\n", "file_path": "src/parser/fastq.rs", "rank": 49, "score": 20.038829692439332 }, { "content": " #[inline]\n\n pub(crate) fn seq<'a>(&'a self, buffer: &'a [u8]) -> &'a [u8] {\n\n trim_cr(&buffer[self.seq..self.sep - 1])\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn qual<'a>(&'a self, buffer: &'a [u8]) -> &'a [u8] {\n\n trim_cr(&buffer[self.qual..self.end])\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn num_bases<'a>(&'a self, buffer: &'a [u8]) -> usize {\n\n self.seq(buffer).len()\n\n }\n\n\n\n #[inline]\n\n fn find_line_ending<'a>(&'a self, buffer: &'a [u8]) -> Option<LineEnding> {\n\n find_line_ending(self.all(buffer))\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn all<'a>(&self, buffer: &'a [u8]) -> &'a [u8] {\n\n &buffer[self.start..self.end]\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]\n", "file_path": "src/parser/fastq.rs", "rank": 51, "score": 18.869865746619954 }, { "content": "//! The vast majority of the code is taken from https://github.com/markschl/seq_io/blob/master/src/fastq.rs\n\n\n\nuse std::fs::File;\n\nuse std::io::{self, BufRead};\n\nuse std::path::Path;\n\n\n\nuse crate::errors::{ErrorPosition, ParseError};\n\nuse crate::parser::record::SequenceRecord;\n\nuse crate::parser::utils::{\n\n fill_buf, find_line_ending, grow_to, trim_cr, FastxReader, Format, LineEnding, Position,\n\n BUFSIZE,\n\n};\n\nuse memchr::memchr;\n\n\n\n/// Represents the position of a record within a buffer\n\n#[derive(Debug, Clone, Default)]\n\npub struct BufferPosition {\n\n pub(crate) start: usize,\n\n pub(crate) end: usize,\n\n pub(crate) seq: usize,\n", "file_path": "src/parser/fastq.rs", "rank": 52, "score": 17.921431778689534 }, { "content": "use std::io;\n\n\n\nuse memchr::memchr;\n\n\n\nuse crate::errors::ParseError;\n\nuse crate::parser::record::SequenceRecord;\n\n\n\npub(crate) const BUFSIZE: usize = 64 * 1024;\n\n\n\n/// Remove a final '\\r' from a byte slice\n\n#[inline]\n\npub(crate) fn trim_cr(line: &[u8]) -> &[u8] {\n\n if let Some((&b'\\r', remaining)) = line.split_last() {\n\n remaining\n\n } else {\n\n line\n\n }\n\n}\n\n\n\n/// Standard buffer policy: buffer size\n", "file_path": "src/parser/utils.rs", "rank": 54, "score": 16.816365839191196 }, { "content": " }\n\n }\n\n\n\n pub fn new_unknown_format(byte_found: u8) -> Self {\n\n let msg = format!(\n\n \"Expected '@' or '>' at the start of the file but found '{}'.\",\n\n (byte_found as char).escape_default()\n\n );\n\n ParseError {\n\n kind: ParseErrorKind::UnknownFormat,\n\n msg,\n\n position: ErrorPosition::default(),\n\n format: Some(Format::Fastq),\n\n }\n\n }\n\n\n\n pub fn new_unequal_length(seq_len: usize, qual_len: usize, position: ErrorPosition) -> Self {\n\n let msg = format!(\n\n \"Sequence length is {} but quality length is {}\",\n\n seq_len, qual_len\n", "file_path": "src/errors.rs", "rank": 55, "score": 16.62853883725341 }, { "content": "impl Format {\n\n pub fn start_char(&self) -> char {\n\n match self {\n\n Format::Fasta => '>',\n\n Format::Fastq => '@',\n\n }\n\n }\n\n}\n\n\n\n/// Whether it uses \\r\\n 
or only \\n\n\n#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]\n\npub enum LineEnding {\n\n Windows,\n\n Unix,\n\n}\n\n\n\nimpl LineEnding {\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n match self {\n\n LineEnding::Windows => vec![b'\\r', b'\\n'],\n\n LineEnding::Unix => vec![b'\\n'],\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parser/utils.rs", "rank": 56, "score": 16.49175280750454 }, { "content": " #[inline]\n\n pub(crate) fn num_bases<'a>(&self, buffer: &'a [u8]) -> usize {\n\n let seq = self.raw_seq(buffer);\n\n let num_lines = bytecount::count(seq, b'\\n');\n\n let windows_num_lines = bytecount::count(seq, b'\\r');\n\n seq.len() - num_lines - windows_num_lines\n\n }\n\n}\n\n\n\n/// Parser for FASTA files.\n\n/// Only use this directly if you know your file is FASTA and that it is not compressed as\n\n/// it does not handle decompression.\n\n/// If you are unsure, it's better to use [parse_fastx_file](fn.parse_fastx_file.html).\n\npub struct Reader<R: io::Read> {\n\n buf_reader: buf_redux::BufReader<R>,\n\n buf_pos: BufferPosition,\n\n search_pos: usize,\n\n position: Position,\n\n finished: bool,\n\n line_ending: Option<LineEnding>,\n", "file_path": "src/parser/fasta.rs", "rank": 59, "score": 15.747102572479148 }, { "content": "}\n\n\n\n/// The only error type that needletail returns\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ParseError {\n\n /// A description of what went wrong\n\n pub msg: String,\n\n /// The type of error that occurred\n\n pub kind: ParseErrorKind,\n\n /// Position within file\n\n pub position: ErrorPosition,\n\n /// The format of the file we were parsing\n\n pub format: Option<Format>,\n\n}\n\n\n\nimpl ParseError {\n\n pub fn new_invalid_start(byte_found: u8, position: ErrorPosition, format: Format) -> Self {\n\n let msg = format!(\n\n \"Expected '{}' but found '{}\",\n\n format.start_char(),\n", "file_path": "src/errors.rs", "rank": 60, "score": 15.710665350313555 }, { "content": " return false;\n\n }\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for CanonicalKmers<'a> {\n\n type Item = (usize, &'a [u8], bool);\n\n\n\n fn next(&mut self) -> Option<(usize, &'a [u8], bool)> {\n\n if !self.update_position(false) {\n\n return None;\n\n }\n\n let pos = self.start_pos;\n\n self.start_pos += 1;\n\n\n\n let result = &self.buffer[pos..pos + self.k as usize];\n\n let rc_buffer = self.rc_buffer;\n", "file_path": "src/kmer.rs", "rank": 61, "score": 15.157602179422767 }, { "content": " /// use needletail::kmer::CanonicalKmers;\n\n ///\n\n /// let seq = b\"ACGT\";\n\n /// let rc = seq.reverse_complement();\n\n /// let c_iter = CanonicalKmers::new(seq, &rc, 3);\n\n /// for (pos, kmer, canonical) in c_iter {\n\n /// // process data in here\n\n /// }\n\n ///\n\n /// ```\n\n pub fn new(buffer: &'a [u8], rc_buffer: &'a [u8], k: u8) -> Self {\n\n let mut nucl_kmers = CanonicalKmers {\n\n k,\n\n start_pos: 0,\n\n buffer,\n\n rc_buffer,\n\n };\n\n nucl_kmers.update_position(true);\n\n nucl_kmers\n\n }\n", "file_path": "src/kmer.rs", "rank": 63, "score": 14.933056348639166 }, { "content": " k,\n\n start_pos: 0,\n\n buffer,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Kmers<'a> {\n\n type Item = &'a [u8];\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.start_pos + self.k as usize > self.buffer.len() {\n\n return None;\n\n }\n\n let pos = self.start_pos;\n\n self.start_pos += 1;\n\n Some(&self.buffer[pos..pos + self.k as usize])\n\n }\n\n}\n\n\n", "file_path": "src/kmer.rs", "rank": 64, "score": 14.799684526412749 }, { "content": "/// A kmer-izer for 
a nucleotide acid sequences to return canonical kmers.\n\n///\n\n/// Iterator returns the position of the kmer, a slice to the original data,\n\n/// and an boolean indicating if the kmer returned is the original or the\n\n/// reverse complement.\n\npub struct CanonicalKmers<'a> {\n\n k: u8,\n\n start_pos: usize,\n\n buffer: &'a [u8],\n\n rc_buffer: &'a [u8],\n\n}\n\n\n\nimpl<'a> CanonicalKmers<'a> {\n\n /// Creates a new iterator.\n\n ///\n\n /// It's generally more useful to use this directly from a sequences (e.g.\n\n /// `seq.canonical_kmers`. Requires a reference to the reverse complement\n\n /// of the sequence it's created on, e.g.\n\n /// ```\n\n /// use needletail::Sequence;\n", "file_path": "src/kmer.rs", "rank": 65, "score": 14.784903009805632 }, { "content": "//! The vast majority of the code is taken from https://github.com/markschl/seq_io/blob/master/src/fasta.rs\n\n\n\nuse crate::errors::{ErrorPosition, ParseError};\n\nuse crate::parser::record::SequenceRecord;\n\nuse crate::parser::utils::{\n\n fill_buf, find_line_ending, grow_to, trim_cr, FastxReader, Format, LineEnding, Position,\n\n BUFSIZE,\n\n};\n\nuse memchr::{memchr2, Memchr};\n\nuse std::borrow::Cow;\n\nuse std::fs::File;\n\nuse std::io;\n\nuse std::io::BufRead;\n\nuse std::path::Path;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct BufferPosition {\n\n /// index of '>'\n\n pub(crate) start: usize,\n\n /// Indicate line start, but actually it is one byte before (start - 1), which is usually\n", "file_path": "src/parser/fasta.rs", "rank": 66, "score": 14.50901778516927 }, { "content": " find_line_ending(self.all(buffer))\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn all<'a>(&self, buffer: &'a [u8]) -> &'a [u8] {\n\n &buffer[self.start..*self.seq_pos.last().unwrap()]\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn id<'a>(&self, buffer: &'a [u8]) -> &'a [u8] {\n\n trim_cr(&buffer[self.start + 1..*self.seq_pos.first().unwrap()])\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn raw_seq<'a>(&self, buffer: &'a [u8]) -> &'a [u8] {\n\n if self.seq_pos.len() > 1 {\n\n let start = *self.seq_pos.first().unwrap() + 1;\n\n let end = *self.seq_pos.last().unwrap();\n\n trim_cr(&buffer[start..end])\n\n } else {\n", "file_path": "src/parser/fasta.rs", "rank": 67, "score": 14.488540037252497 }, { "content": " /// the line terminator of the header (if there is one). 
The last index in the Vec is always\n\n /// the last byte of the last sequence line (including line terminator if present).\n\n /// Therefore, the length of this Vec should never be 0.\n\n pub(crate) seq_pos: Vec<usize>,\n\n}\n\n\n\nimpl BufferPosition {\n\n #[inline]\n\n fn is_new(&self) -> bool {\n\n self.seq_pos.is_empty()\n\n }\n\n\n\n #[inline]\n\n fn reset(&mut self, start: usize) {\n\n self.seq_pos.clear();\n\n self.start = start;\n\n }\n\n\n\n #[inline]\n\n fn find_line_ending(&self, buffer: &[u8]) -> Option<LineEnding> {\n", "file_path": "src/parser/fasta.rs", "rank": 68, "score": 14.427692709169529 }, { "content": " }\n\n if self.search_pos >= SearchPosition::Quality {\n\n self.buf_pos.qual -= consumed;\n\n }\n\n }\n\n}\n\n\n\n/// A FASTQ record that borrows data from a buffer\n\n#[derive(Debug, Clone)]\n\npub struct FastqRecord<'a> {\n\n buffer: &'a [u8],\n\n buf_pos: &'a BufferPosition,\n\n}\n\n\n\nimpl<R: io::Read + Send> FastxReader for Reader<R> {\n\n fn next(&mut self) -> Option<Result<SequenceRecord, ParseError>> {\n\n // No more records to read\n\n if self.finished {\n\n return None;\n\n }\n", "file_path": "src/parser/fastq.rs", "rank": 69, "score": 14.354808981586288 }, { "content": " b\"\"\n\n }\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn seq<'a>(&self, buffer: &'a [u8]) -> Cow<'a, [u8]> {\n\n // TODO: make that DRY\n\n let seq = if self.seq_pos.len() > 1 {\n\n let start = *self.seq_pos.first().unwrap() + 1;\n\n let end = *self.seq_pos.last().unwrap();\n\n trim_cr(&buffer[start..end])\n\n } else {\n\n b\"\"\n\n };\n\n\n\n // first part is a fast check to see if we need to do any allocations\n\n let mut i;\n\n match memchr2(b'\\r', b'\\n', &seq) {\n\n Some(break_loc) => i = break_loc,\n\n None => return seq.into(),\n", "file_path": "src/parser/fasta.rs", "rank": 70, "score": 14.325455340605997 }, { "content": " Reader::with_capacity(reader, BUFSIZE)\n\n }\n\n\n\n /// Creates a new reader with a given buffer capacity. The minimum allowed\n\n /// capacity is 3.\n\n #[inline]\n\n pub fn with_capacity(reader: R, capacity: usize) -> Reader<R> {\n\n assert!(capacity >= 3);\n\n Reader {\n\n buf_reader: buf_redux::BufReader::with_capacity(capacity, reader),\n\n buf_pos: BufferPosition {\n\n start: 0,\n\n seq_pos: Vec::with_capacity(1),\n\n },\n\n position: Position::new(0, 0),\n\n search_pos: 0,\n\n finished: false,\n\n line_ending: None,\n\n }\n\n }\n", "file_path": "src/parser/fasta.rs", "rank": 71, "score": 13.697527982496826 }, { "content": "//! The errors needletail can return; only when parsing FASTA/FASTQ files\n\n\n\nuse crate::parser::Format;\n\nuse std::error::Error as StdError;\n\nuse std::fmt;\n\nuse std::io;\n\n\n\n/// Represents where we were in a file when an error occurred.\n\n#[derive(Debug, Clone, PartialEq, Eq, Default)]\n\npub struct ErrorPosition {\n\n /// Line number where the error occurred (starting with 1)\n\n pub line: u64,\n\n /// ID of record if available\n\n pub id: Option<String>,\n\n}\n\n\n\nimpl fmt::Display for ErrorPosition {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Some(id) = self.id.as_ref() {\n\n write!(f, \"record '{}' at \", id)?;\n", "file_path": "src/errors.rs", "rank": 72, "score": 12.157588728093208 }, { "content": " /// Creates a new reader with a given buffer capacity. 
The minimum allowed\n\n /// capacity is 3.\n\n pub fn with_capacity(reader: R, capacity: usize) -> Reader<R> {\n\n assert!(capacity >= 3);\n\n Reader {\n\n buf_reader: buf_redux::BufReader::with_capacity(capacity, reader),\n\n buf_pos: BufferPosition::default(),\n\n search_pos: SearchPosition::Id,\n\n position: Position::new(1, 0),\n\n finished: false,\n\n line_ending: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Reader<File> {\n\n /// Creates a reader from a file path.\n\n ///\n\n /// # Example:\n\n ///\n", "file_path": "src/parser/fastq.rs", "rank": 73, "score": 11.868399177632124 }, { "content": "impl<'a> BitNuclKmer<'a> {\n\n pub fn new(slice: &'a [u8], k: u8, canonical: bool) -> BitNuclKmer<'a> {\n\n let mut kmer = (0u64, k);\n\n let mut start_pos = 0;\n\n update_position(&mut start_pos, &mut kmer, &slice, true);\n\n\n\n BitNuclKmer {\n\n start_pos,\n\n cur_kmer: kmer,\n\n buffer: slice,\n\n canonical,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for BitNuclKmer<'a> {\n\n type Item = (usize, BitKmer, bool);\n\n\n\n fn next(&mut self) -> Option<(usize, BitKmer, bool)> {\n\n if !update_position(&mut self.start_pos, &mut self.cur_kmer, &self.buffer, false) {\n", "file_path": "src/bitkmer.rs", "rank": 74, "score": 11.807450667184778 }, { "content": " (byte_found as char).escape_default()\n\n );\n\n ParseError {\n\n kind: ParseErrorKind::InvalidStart,\n\n msg,\n\n position,\n\n format: Some(format),\n\n }\n\n }\n\n\n\n pub fn new_invalid_separator(byte_found: u8, position: ErrorPosition) -> Self {\n\n let msg = format!(\n\n \"Expected '+' separator but found '{}\",\n\n (byte_found as char).escape_default()\n\n );\n\n ParseError {\n\n kind: ParseErrorKind::InvalidSeparator,\n\n msg,\n\n position,\n\n format: Some(Format::Fastq),\n", "file_path": "src/errors.rs", "rank": 75, "score": 11.483666455381204 }, { "content": " let got_record = match self.next_complete() {\n\n Ok(f) => f,\n\n Err(e) => {\n\n return Some(Err(e));\n\n }\n\n };\n\n\n\n if !got_record {\n\n return None;\n\n }\n\n }\n\n\n\n if self.buf_pos.seq_pos.is_empty() {\n\n return Some(Err(ParseError::new_unexpected_end(\n\n ErrorPosition {\n\n line: self.position.line,\n\n id: None,\n\n },\n\n Format::Fasta,\n\n )));\n", "file_path": "src/parser/fasta.rs", "rank": 76, "score": 11.407151698488908 }, { "content": " );\n\n ParseError {\n\n kind: ParseErrorKind::UnequalLengths,\n\n msg,\n\n position,\n\n format: Some(Format::Fastq),\n\n }\n\n }\n\n\n\n pub fn new_unexpected_end(position: ErrorPosition, format: Format) -> Self {\n\n ParseError {\n\n msg: String::new(),\n\n kind: ParseErrorKind::UnexpectedEnd,\n\n position,\n\n format: Some(format),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for ParseError {\n", "file_path": "src/errors.rs", "rank": 77, "score": 11.135578807464224 }, { "content": "impl<R> Reader<R>\n\nwhere\n\n R: io::Read,\n\n{\n\n #[inline]\n\n fn get_buf(&self) -> &[u8] {\n\n self.buf_reader.buffer()\n\n }\n\n\n\n #[inline]\n\n fn next_pos(&mut self) {\n\n self.position.line += self.buf_pos.seq_pos.len() as u64;\n\n self.position.byte += (self.search_pos - self.buf_pos.start) as u64;\n\n self.buf_pos.reset(self.search_pos);\n\n }\n\n\n\n /// Finds the position of the next record\n\n /// and returns true if found; false if end of buffer reached.\n\n #[inline]\n\n fn find(&mut self) -> Result<bool, ParseError> {\n", "file_path": "src/parser/fasta.rs", "rank": 78, "score": 11.090346005918704 }, { "content": " kmer_len += 1;\n\n } else {\n\n kmer_len = 0;\n\n *cur_kmer = (0u64, cur_kmer.1);\n\n *start_pos += kmer_len + 1;\n\n if 
*start_pos + cur_kmer.1 as usize > buffer.len() {\n\n return false;\n\n }\n\n }\n\n }\n\n true\n\n}\n\n\n\npub struct BitNuclKmer<'a> {\n\n start_pos: usize,\n\n cur_kmer: BitKmer,\n\n buffer: &'a [u8],\n\n canonical: bool,\n\n}\n\n\n", "file_path": "src/bitkmer.rs", "rank": 79, "score": 10.7975606536275 }, { "content": "//! Compact binary representations of nucleic acid kmers\n\npub type BitKmerSeq = u64;\n\npub type BitKmer = (BitKmerSeq, u8);\n\n\n\nstatic NUC2BIT_LOOKUP: [Option<u8>; 256] = [\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n", "file_path": "src/bitkmer.rs", "rank": 80, "score": 10.790556246102417 }, { "content": " match reader.read_into_buf() {\n\n Ok(0) => break,\n\n Ok(n) => num_read += n,\n\n Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n Ok(num_read)\n\n}\n\n\n\n/// Holds line number and byte offset of our current state in a parser\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Position {\n\n pub(crate) line: u64,\n\n pub(crate) byte: u64,\n\n}\n\n\n\nimpl Position {\n\n pub fn new(line: u64, byte: u64) -> Position {\n\n Position { line, byte }\n", "file_path": "src/parser/utils.rs", "rank": 82, "score": 10.521395515283878 }, { "content": " let rc_result = &rc_buffer[rc_buffer.len() - pos - self.k as usize..rc_buffer.len() - pos];\n\n if result < rc_result {\n\n Some((pos, result, false))\n\n } else {\n\n Some((pos, rc_result, true))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::sequence::Sequence;\n\n\n\n #[test]\n\n fn can_kmerize() {\n\n let k_iter = Kmers::new(b\"AGCT\", 1);\n\n // test general function\n\n for (i, k) in k_iter.enumerate() {\n\n match i {\n", "file_path": "src/kmer.rs", "rank": 83, "score": 10.419325337196693 }, { "content": " /// ```no_run\n\n /// use needletail::parser::{FastxReader, FastqReader};\n\n ///\n\n /// let mut reader = FastqReader::from_path(\"seqs.fastq\").unwrap();\n\n ///\n\n /// // (... 
do something with the reader)\n\n /// ```\n\n pub fn from_path<P: AsRef<Path>>(path: P) -> io::Result<Reader<File>> {\n\n File::open(path).map(Reader::new)\n\n }\n\n}\n\n\n\nimpl<R> Reader<R>\n\nwhere\n\n R: io::Read,\n\n{\n\n #[inline]\n\n fn get_buf(&self) -> &[u8] {\n\n self.buf_reader.buffer()\n\n }\n", "file_path": "src/parser/fastq.rs", "rank": 84, "score": 9.959987432133794 }, { "content": " self.buf_pos.sep = match self.find_line(self.buf_pos.seq) {\n\n Some(p) => p,\n\n None => {\n\n self.search_pos = SearchPosition::Sequence;\n\n return Ok(false);\n\n }\n\n };\n\n }\n\n\n\n if self.search_pos <= SearchPosition::Separator {\n\n self.buf_pos.qual = match self.find_line(self.buf_pos.sep) {\n\n Some(p) => p,\n\n None => {\n\n self.search_pos = SearchPosition::Separator;\n\n return Ok(false);\n\n }\n\n };\n\n }\n\n\n\n if self.search_pos <= SearchPosition::Quality {\n", "file_path": "src/parser/fastq.rs", "rank": 85, "score": 9.597972295079284 }, { "content": " return Ok(false);\n\n }\n\n };\n\n\n\n self.buf_pos.qual = match self.find_line(self.buf_pos.sep) {\n\n Some(p) => p,\n\n None => {\n\n self.search_pos = SearchPosition::Separator;\n\n return Ok(false);\n\n }\n\n };\n\n\n\n self.buf_pos.end = match self.find_line(self.buf_pos.qual) {\n\n Some(p) => p - 1,\n\n None => {\n\n self.search_pos = SearchPosition::Quality;\n\n return Ok(false);\n\n }\n\n };\n\n\n", "file_path": "src/parser/fastq.rs", "rank": 86, "score": 9.468273837303576 }, { "content": "\n\n // TODO: avoid duplication with find_incomplete.\n\n // TODO: having a single fn and adding branches introduce a noticeable slowdown\n\n /// Reads the current record and returns true if found.\n\n /// Returns false if incomplete because end of buffer reached,\n\n /// meaning that the last record may be incomplete.\n\n /// Updates self.search_pos.\n\n fn find(&mut self) -> Result<bool, ParseError> {\n\n self.buf_pos.seq = match self.find_line(self.buf_pos.start) {\n\n Some(p) => p,\n\n None => {\n\n self.search_pos = SearchPosition::Id;\n\n return Ok(false);\n\n }\n\n };\n\n\n\n self.buf_pos.sep = match self.find_line(self.buf_pos.seq) {\n\n Some(p) => p,\n\n None => {\n\n self.search_pos = SearchPosition::Sequence;\n", "file_path": "src/parser/fastq.rs", "rank": 87, "score": 9.175815052059873 }, { "content": "\n\n fn update_position(&mut self, initial: bool) -> bool {\n\n // check if we have enough \"physical\" space for one more kmer\n\n if self.start_pos + self.k as usize > self.buffer.len() {\n\n return false;\n\n }\n\n\n\n let (mut kmer_len, stop_len) = if initial {\n\n (0, (self.k - 1) as usize)\n\n } else {\n\n ((self.k - 1) as usize, self.k as usize)\n\n };\n\n\n\n while kmer_len < stop_len {\n\n if is_good_base(self.buffer[self.start_pos + kmer_len]) {\n\n kmer_len += 1;\n\n } else {\n\n kmer_len = 0;\n\n self.start_pos += kmer_len + 1;\n\n if self.start_pos + self.k as usize > self.buffer.len() {\n", "file_path": "src/kmer.rs", "rank": 88, "score": 9.111949531117096 }, { "content": " }\n\n\n\n /// Line number (starting with 1)\n\n pub fn line(&self) -> u64 {\n\n self.line\n\n }\n\n\n\n /// Byte offset within the file\n\n pub fn byte(&self) -> u64 {\n\n self.byte\n\n }\n\n}\n\n\n\n/// FASTA or FASTQ?\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\npub enum Format {\n\n Fasta,\n\n Fastq,\n\n}\n\n\n", "file_path": "src/parser/utils.rs", "rank": 89, "score": 8.908660778187707 }, { "content": " self.position.line += 4;\n\n self.buf_pos.start = self.buf_pos.end + 1;\n\n }\n\n\n\n // Can we identify all the positions of each 
element of the next record?\n\n let complete = match self.find() {\n\n Ok(f) => f,\n\n Err(e) => {\n\n return Some(Err(e));\n\n }\n\n };\n\n\n\n // If it's not complete, try to fetch more from the buffer until we have it in full\n\n if !complete {\n\n // Did we get a record?\n\n let got_record = match self.next_complete() {\n\n Ok(f) => f,\n\n Err(e) => {\n\n return Some(Err(e));\n\n }\n", "file_path": "src/parser/fastq.rs", "rank": 90, "score": 8.694838086818606 }, { "content": "\n\n // Empty buffer, let's fill it\n\n if self.get_buf().is_empty() {\n\n // If we get an ParseError when reading or get back 0 bytes, we're done\n\n match fill_buf(&mut self.buf_reader) {\n\n Ok(n) => {\n\n if n == 0 {\n\n self.finished = true;\n\n return None;\n\n }\n\n }\n\n Err(e) => {\n\n return Some(Err(e.into()));\n\n }\n\n };\n\n }\n\n\n\n // If we already did look at a record, let's setup for the next one\n\n if !self.buf_pos.is_new() {\n\n self.position.byte += self.buf_pos.len();\n", "file_path": "src/parser/fastq.rs", "rank": 91, "score": 8.60900948276136 }, { "content": "/// doubles until it reaches 8 MiB. Above, it will\n\n/// increase in steps of 8 MiB. Buffer size is not limited,\n\n/// it could theoretically grow indefinitely.\n\npub(crate) fn grow_to(current_size: usize) -> usize {\n\n if current_size < 1 << 23 {\n\n current_size * 2\n\n } else {\n\n current_size + (1 << 23)\n\n }\n\n}\n\n\n\n/// Makes sure the buffer is full after this call (unless EOF reached)\n\n/// code adapted from `io::Read::read_exact`\n\npub(crate) fn fill_buf<R>(reader: &mut buf_redux::BufReader<R>) -> io::Result<usize>\n\nwhere\n\n R: io::Read,\n\n{\n\n let initial_size = reader.buffer().len();\n\n let mut num_read = 0;\n\n while initial_size + num_read < reader.capacity() {\n", "file_path": "src/parser/utils.rs", "rank": 92, "score": 8.312895425502568 }, { "content": " // no line ending at end of last record\n\n self.buf_pos.end = self.get_buf().len();\n\n self.validate()?;\n\n return Ok(true);\n\n }\n\n\n\n // It allows some blank lines at the end of the file\n\n let rest = &self.get_buf()[self.buf_pos.start..];\n\n if rest.split(|c| *c == b'\\n').all(|l| trim_cr(l).is_empty()) {\n\n return Ok(false);\n\n }\n\n\n\n Err(ParseError::new_unexpected_end(\n\n self.get_error_pos(self.search_pos as u64, self.search_pos > SearchPosition::Id),\n\n Format::Fastq,\n\n ))\n\n }\n\n\n\n // Grow the internal buffer. 
Used if the original buffer is not big\n\n // enough for a record\n", "file_path": "src/parser/fastq.rs", "rank": 93, "score": 8.215865836564753 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io::Cursor;\n\n\n\n use super::*;\n\n use crate::errors::ParseErrorKind;\n\n\n\n fn seq(s: &[u8]) -> Cursor<&[u8]> {\n\n Cursor::new(&s[..])\n\n }\n\n\n\n #[test]\n\n fn test_basic() {\n\n let mut reader = Reader::new(seq(b\">test\\nACGT\\n>test2\\nTGCA\\n\"));\n\n assert!(reader.line_ending().is_none());\n\n let rec = reader.next().unwrap();\n\n assert!(rec.is_ok());\n\n let r = rec.unwrap();\n", "file_path": "src/parser/fasta.rs", "rank": 94, "score": 8.191401440470644 }, { "content": " line: self.position.line() + line_offset,\n\n id,\n\n }\n\n }\n\n\n\n #[inline]\n\n fn find_line(&self, search_start: usize) -> Option<usize> {\n\n memchr(b'\\n', &self.get_buf()[search_start..]).map(|pos| search_start + pos + 1)\n\n }\n\n\n\n /// Called when we couldn't find a complete record.\n\n /// We might be at EOF, buffer might be too small or we need to refill it\n\n fn next_complete(&mut self) -> Result<bool, ParseError> {\n\n loop {\n\n if self.get_buf().len() < self.buf_reader.capacity() {\n\n // EOF reached, there will be no next record\n\n return self.check_end();\n\n }\n\n\n\n if self.buf_pos.start == 0 {\n", "file_path": "src/parser/fastq.rs", "rank": 95, "score": 8.098733843501881 }, { "content": " self.buf_reader.make_room();\n\n self.buf_pos.start = 0;\n\n self.search_pos -= consumed;\n\n for s in &mut self.buf_pos.seq_pos {\n\n *s -= consumed;\n\n }\n\n }\n\n}\n\n\n\nimpl<R: io::Read + Send> FastxReader for Reader<R> {\n\n fn next(&mut self) -> Option<Result<SequenceRecord, ParseError>> {\n\n if self.finished {\n\n return None;\n\n }\n\n\n\n // Load some data in the buffer to start\n\n if self.position.line == 0 {\n\n match fill_buf(&mut self.buf_reader) {\n\n Ok(n) => {\n\n if n == 0 {\n", "file_path": "src/parser/fasta.rs", "rank": 96, "score": 8.086536670405387 }, { "content": "}\n\n\n\nimpl<R> Reader<R>\n\nwhere\n\n R: io::Read,\n\n{\n\n /// Creates a new reader with the default buffer size of 64 KiB\n\n ///\n\n /// # Example:\n\n ///\n\n /// ```\n\n /// use needletail::parser::{FastaReader, FastxReader};\n\n /// let fasta = b\">id\\nSEQUENCE\";\n\n ///\n\n /// let mut reader = FastaReader::new(&fasta[..]);\n\n /// let record = reader.next().unwrap().unwrap();\n\n /// assert_eq!(record.id(), b\"id\")\n\n /// ```\n\n #[inline]\n\n pub fn new(reader: R) -> Reader<R> {\n", "file_path": "src/parser/fasta.rs", "rank": 97, "score": 7.9650851199103965 }, { "content": "mod fastq;\n\n\n\npub use crate::parser::utils::FastxReader;\n\n\n\n// Magic bytes for each compression format\n\n#[cfg(feature = \"compression\")]\n\nconst GZ_MAGIC: [u8; 2] = [0x1F, 0x8B];\n\n#[cfg(feature = \"compression\")]\n\nconst BZ_MAGIC: [u8; 2] = [0x42, 0x5A];\n\n#[cfg(feature = \"compression\")]\n\nconst XZ_MAGIC: [u8; 2] = [0xFD, 0x37];\n\n\n", "file_path": "src/parser/mod.rs", "rank": 98, "score": 7.9194709547742 }, { "content": " }\n\n // we found a newline; create a new buffer and stripping out newlines\n\n // and writing into it\n\n let mut new_buf = Vec::with_capacity(seq.len() - 1);\n\n new_buf.extend_from_slice(&seq[..i]);\n\n while i < seq.len() {\n\n match memchr2(b'\\r', b'\\n', &seq[i..]) {\n\n None => {\n\n new_buf.extend_from_slice(&seq[i..]);\n\n break;\n\n }\n\n Some(match_pos) => {\n\n new_buf.extend_from_slice(&seq[i..i + match_pos]);\n\n i += match_pos + 1;\n\n }\n\n }\n\n }\n\n 
new_buf.into()\n\n }\n\n\n", "file_path": "src/parser/fasta.rs", "rank": 99, "score": 7.861187731629163 } ]
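The needletail snippets quoted above revolve around two things: slice-based k-mer iteration (the `Kmers` and `CanonicalKmers` types from `src/kmer.rs`) and the FASTA/FASTQ parsing entry points such as `parse_fastx_file`. As a quick orientation, here is a minimal sketch that strings the k-mer pieces together the same way the quoted `CanonicalKmers::new` doctest does; it assumes only the needletail API exactly as shown in those snippets, and the sequence literal is made up for illustration.

```rust
use needletail::Sequence;             // supplies reverse_complement(), as in the doctest above
use needletail::kmer::CanonicalKmers; // iterator defined in the quoted src/kmer.rs

fn main() {
    let seq = b"ACGTACGT";             // illustrative input
    let rc = seq.reverse_complement(); // buffer the iterator draws complemented k-mers from
    // Each item is (position, canonical k-mer slice, flag that is true when the
    // k-mer was taken from the reverse-complement buffer), per the Iterator impl above.
    for (pos, kmer, from_rc) in CanonicalKmers::new(seq, &rc, 3) {
        println!("{}\t{}\t{}", pos, String::from_utf8_lossy(kmer), from_rc);
    }
}
```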
Rust
client/src/utils/hd.rs
huhn511/stronghold.rs
85920e55eb05e50520795a67c533ec52f08fd10b
use crypto::{ed25519::SecretKey, macs::hmac::HMAC_SHA512}; use std::convert::TryFrom; #[derive(Debug)] pub enum Error { NotSupported, InvalidLength(usize), CryptoError(crypto::Error), } pub struct Seed(Vec<u8>); impl Seed { pub fn from_bytes(bs: &[u8]) -> Self { Self(bs.to_vec()) } pub fn to_master_key(&self) -> Key { let mut I = [0; 64]; HMAC_SHA512(&self.0, b"ed25519 seed", &mut I); Key(I) } pub fn derive(&self, chain: &Chain) -> Result<Key, Error> { self.to_master_key().derive(chain) } } type ChainCode = [u8; 32]; #[derive(Copy, Clone, Debug)] pub struct Key([u8; 64]); impl Key { pub fn secret_key(&self) -> Result<SecretKey, Error> { let mut I_l = [0; 32]; I_l.copy_from_slice(&self.0[..32]); SecretKey::from_le_bytes(I_l).map_err(Error::CryptoError) } pub fn chain_code(&self) -> ChainCode { let mut I_r = [0; 32]; I_r.copy_from_slice(&self.0[32..]); I_r } pub fn child_key(&self, segment: &Segment) -> Result<Key, Error> { if !segment.hardened { return Err(Error::NotSupported); } let mut data = [0u8; 1 + 32 + 4]; data[1..1 + 32].copy_from_slice(&self.0[..32]); data[1 + 32..1 + 32 + 4].copy_from_slice(&segment.bs); let mut I = [0; 64]; HMAC_SHA512(&data, &self.0[32..], &mut I); Ok(Self(I)) } pub fn derive(&self, chain: &Chain) -> Result<Key, Error> { let mut k = *self; for c in &chain.0 { k = k.child_key(c)?; } Ok(k) } } impl TryFrom<&[u8]> for Key { type Error = Error; fn try_from(bs: &[u8]) -> Result<Self, Self::Error> { if bs.len() != 64 { return Err(Error::InvalidLength(bs.len())); } let mut ds = [0; 64]; ds.copy_from_slice(bs); Ok(Self(ds)) } } #[derive(Debug, Clone)] pub struct Segment { hardened: bool, bs: [u8; 4], } impl Segment { pub fn from_u32(i: u32) -> Self { Self { hardened: i >= Self::HARDEN_MASK, bs: i.to_be_bytes(), } } pub const HARDEN_MASK: u32 = 1 << 31; } #[derive(Default, Debug, Clone)] pub struct Chain(Vec<Segment>); impl Chain { pub fn empty() -> Self { Self(vec![]) } pub fn from_u32<I: IntoIterator<Item = u32>>(is: I) -> Self { Self(is.into_iter().map(Segment::from_u32).collect()) } pub fn from_u32_hardened<I: IntoIterator<Item = u32>>(is: I) -> Self { Self::from_u32(is.into_iter().map(|i| Segment::HARDEN_MASK | i)) } } impl Into<Vec<u8>> for Key { fn into(self) -> Vec<u8> { self.0.to_vec() } } #[cfg(test)] mod tests { use super::*; struct TestChain { chain: Chain, chain_code: &'static str, private_key: &'static str, } struct TestVector { seed: &'static str, master_chain_code: &'static str, master_private_key: &'static str, chains: Vec<TestChain>, } #[test] fn ed25519_test_vectors() -> Result<(), Error> { let tvs = [ TestVector { seed: "000102030405060708090a0b0c0d0e0f", master_chain_code: "90046a93de5380a72b5e45010748567d5ea02bbf6522f979e05c0d8d8ca9fffb", master_private_key: "2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7", chains: vec![ TestChain { chain: Chain::empty(), chain_code: "90046a93de5380a72b5e45010748567d5ea02bbf6522f979e05c0d8d8ca9fffb", private_key: "2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7", }, TestChain { chain: Chain::from_u32_hardened(vec![0]), chain_code: "8b59aa11380b624e81507a27fedda59fea6d0b779a778918a2fd3590e16e9c69", private_key: "68e0fe46dfb67e368c75379acec591dad19df3cde26e63b93a8e704f1dade7a3", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1]), chain_code: "a320425f77d1b5c2505a6b1b27382b37368ee640e3557c315416801243552f14", private_key: "b1d0bad404bf35da785a64ca1ac54b2617211d2777696fbffaf208f746ae84f2", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2]), chain_code: 
"2e69929e00b5ab250f49c3fb1c12f252de4fed2c1db88387094a0f8c4c9ccd6c", private_key: "92a5b23c0b8a99e37d07df3fb9966917f5d06e02ddbd909c7e184371463e9fc9", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2]), chain_code: "8f6d87f93d750e0efccda017d662a1b31a266e4a6f5993b15f5c1f07f74dd5cc", private_key: "30d1dc7e5fc04c31219ab25a27ae00b50f6fd66622f6e9c913253d6511d1e662", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2]), chain_code: "8f6d87f93d750e0efccda017d662a1b31a266e4a6f5993b15f5c1f07f74dd5cc", private_key: "30d1dc7e5fc04c31219ab25a27ae00b50f6fd66622f6e9c913253d6511d1e662", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2, 1000000000]), chain_code: "68789923a0cac2cd5a29172a475fe9e0fb14cd6adb5ad98a3fa70333e7afa230", private_key: "8f94d394a8e8fd6b1bc2f3f49f5c47e385281d5c17e65324b0f62483e37e8793", }, ], }, TestVector { seed: "fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542", master_chain_code: "ef70a74db9c3a5af931b5fe73ed8e1a53464133654fd55e7a66f8570b8e33c3b", master_private_key: "171cb88b1b3c1db25add599712e36245d75bc65a1a5c9e18d76f9f2b1eab4012", chains: vec![ TestChain { chain: Chain::from_u32_hardened(vec![0]), chain_code: "0b78a3226f915c082bf118f83618a618ab6dec793752624cbeb622acb562862d", private_key: "1559eb2bbec5790b0c65d8693e4d0875b1747f4970ae8b650486ed7470845635", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647]), chain_code: "138f0b2551bcafeca6ff2aa88ba8ed0ed8de070841f0c4ef0165df8181eaad7f", private_key: "ea4f5bfe8694d8bb74b7b59404632fd5968b774ed545e810de9c32a4fb4192f4", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1]), chain_code: "73bd9fff1cfbde33a1b846c27085f711c0fe2d66fd32e139d3ebc28e5a4a6b90", private_key: "3757c7577170179c7868353ada796c839135b3d30554bbb74a4b1e4a5a58505c", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1, 2147483646]), chain_code: "0902fe8a29f9140480a00ef244bd183e8a13288e4412d8389d140aac1794825a", private_key: "5837736c89570de861ebc173b1086da4f505d4adb387c6a1b1342d5e4ac9ec72", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1, 2147483646, 2]), chain_code: "5d70af781f3a37b829f0d060924d5e960bdc02e85423494afc0b1a41bbe196d4", private_key: "551d333177df541ad876a60ea71f00447931c0a9da16f227c11ea080d7391b8d", }, ], } ]; for tv in &tvs { let seed = Seed::from_bytes(&hex::decode(tv.seed).unwrap()); let m = seed.to_master_key(); let mut expected_master_chain_code = [0u8; 32]; hex::decode_to_slice(&tv.master_chain_code, &mut expected_master_chain_code as &mut [u8]).unwrap(); assert_eq!(expected_master_chain_code, m.chain_code()); let mut expected_master_private_key = [0u8; 32]; hex::decode_to_slice(&tv.master_private_key, &mut expected_master_private_key as &mut [u8]).unwrap(); assert_eq!(expected_master_private_key, m.secret_key()?.to_le_bytes()); for c in tv.chains.iter() { let ck = seed.derive(&c.chain)?; let mut expected_chain_code = [0u8; 32]; hex::decode_to_slice(&c.chain_code, &mut expected_chain_code as &mut [u8]).unwrap(); assert_eq!(expected_chain_code, ck.chain_code()); let mut expected_private_key = [0u8; 32]; hex::decode_to_slice(&c.private_key, &mut expected_private_key as &mut [u8]).unwrap(); assert_eq!(expected_private_key, ck.secret_key()?.to_le_bytes()); } } Ok(()) } }
use crypto::{ed25519::SecretKey, macs::hmac::HMAC_SHA512}; use std::convert::TryFrom; #[derive(Debug)] pub enum Error { NotSupported, InvalidLength(usize), CryptoError(crypto::Error), } pub struct Seed(Vec<u8>); impl Seed { pub fn from_bytes(bs: &[u8]) -> Self { Self(bs.to_vec()) } pub fn to_master_key(&self) -> Key { let mut I = [0; 64]; HMAC_SHA512(&self.0, b"ed25519 seed", &mut I); Key(I) } pub fn derive(&self, chain: &Chain) -> Result<Key, Error> { self.to_master_key().derive(chain) } } type ChainCode = [u8; 32]; #[derive(Copy, Clone, Debug)] pub struct Key([u8; 64]); impl Key { pub fn secret_key(&self) -> Result<SecretKey, Error> { let mut I_l = [0; 32]; I_l.copy_from_slice(&self.0[..32]); SecretKey::from_le_bytes(I_l).map_err(Error::CryptoError) } pub fn chain_code(&self) -> ChainCode { let mut I_r = [0; 32]; I_r.copy_from_slice(&self.0[32..]); I_r } pub fn child_key(&self, segment: &Segment) -> Result<Key, Error> { if !segment.hardened { return Err(Error::NotSupported); } let mut data = [0u8; 1 + 32 + 4]; data[1..1 + 32].copy_from_slice(&self.0[..32]); data[1 + 32..1 + 32 + 4].copy_from_slice(&segment.bs); let mut I = [0; 64]; HMAC_SHA512(&data, &self.0[32..], &mut I); Ok(Self(I)) } pub fn derive(&self, chain: &Chain) -> Result<Key, Error> { let mut k = *self; for c in &chain.0 { k = k.child_key(c)?; } Ok(k) } } impl TryFrom<&[u8]> for Key { type Error = Error; fn try_from(bs: &[u8]) -> Result<Self, Self::Error> { if bs.len() != 64 { return Err(Error::InvalidLength(bs.len())); } let mut ds = [0; 64]; ds.copy_from_slice(bs); Ok(Self(ds)) } } #[derive(Debug, Clone)] pub struct Segment { hardened: bool, bs: [u8; 4], } impl Segment { pub fn from_u32(i: u32) -> Self { Self { hardened: i >= Self::HARDEN_MASK, bs: i.to_be_bytes(), } } pub const HARDEN_MASK: u32 = 1 << 31; } #[derive(Default, Debug, Clone)] pub struct Chain(Vec<Segment>); impl Chain { pub fn empty() -> Self { Self(vec![]) } pub fn from_u32<I: IntoIterator<Item = u32>>(is: I) -> Self { Self(is.into_iter().map(Segment::from_u32).collect()) } pub fn from_u32_hardened<I: IntoIterator<Item = u32>>(is: I) -> Self { Self::from_u32(is.into_iter().map(|i| Segment::HARDEN_MASK | i)) } } impl Into<Vec<u8>> for Key { fn into(self) -> Vec<u8> { self.0.to_vec() } } #[cfg(test)] mod tests { use super::*; struct TestChain { chain: Chain, chain_code: &'static str, private_key: &'static str, } struct TestVector { seed: &'static str, master_chain_code: &'static str, master_private_key: &'static str, chains: Vec<TestChain>, } #[test] fn ed25519_test_vectors() -> Result<(), Error> { let tvs = [ TestVector { seed: "000102030405060708090a0b0c0d0e0f", master_chain_code: "90046a93de5380a72b5e45010748567d5ea02bbf6522f979e05c0d8d8ca9fffb", master_private_key: "2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7", chains: vec![ TestChain { chain: Chain::empty(), chain_code: "90046a93de5380a72b5e45010748567d5ea02bbf6522f979e05c0d8d8ca9fffb", private_key: "2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7", }, TestChain { chain: Chain::from_u32_hardened(vec![0]), chain_code: "8b59aa11380b624e81507a27fedda59fea6d0b779a778918a2fd3590e16e9c69", private_key: "68e0fe46dfb67e368c75379acec591dad19df3cde26e63b93a8e704f1dade7a3", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1]), chain_code: "a320425f77d1b5c2505a6b1b27382b37368ee640e3557c315416801243552f14",
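The `hd.rs` code above (shown in full and again as the prefix that ends here) implements hardened-only ed25519 key derivation in the SLIP-0010 style: HMAC-SHA512 over the seed with the `b"ed25519 seed"` key yields the master `Key`, and each hardened `Segment` derives a child keyed on the parent chain code. A minimal caller sketch, assumed to live in the same module as those types (the function name and seed bytes are made up for illustration):

```rust
// Hypothetical caller, assumed to sit next to the Seed/Chain/Key/Error items defined above.
fn derive_child_sketch() -> Result<(), Error> {
    // Any byte string can act as a seed; a real caller would pass in a wallet seed.
    let seed = Seed::from_bytes(&[7u8; 32]);

    // from_u32_hardened ORs HARDEN_MASK into every index, which child_key requires.
    let chain = Chain::from_u32_hardened(vec![0, 1, 2]);

    let key: Key = seed.derive(&chain)?;
    let _secret = key.secret_key()?; // crypto::ed25519::SecretKey
    let _cc = key.chain_code();      // 32-byte chain code for further derivation
    Ok(())
}
```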
}
private_key: "b1d0bad404bf35da785a64ca1ac54b2617211d2777696fbffaf208f746ae84f2", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2]), chain_code: "2e69929e00b5ab250f49c3fb1c12f252de4fed2c1db88387094a0f8c4c9ccd6c", private_key: "92a5b23c0b8a99e37d07df3fb9966917f5d06e02ddbd909c7e184371463e9fc9", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2]), chain_code: "8f6d87f93d750e0efccda017d662a1b31a266e4a6f5993b15f5c1f07f74dd5cc", private_key: "30d1dc7e5fc04c31219ab25a27ae00b50f6fd66622f6e9c913253d6511d1e662", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2]), chain_code: "8f6d87f93d750e0efccda017d662a1b31a266e4a6f5993b15f5c1f07f74dd5cc", private_key: "30d1dc7e5fc04c31219ab25a27ae00b50f6fd66622f6e9c913253d6511d1e662", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 1, 2, 2, 1000000000]), chain_code: "68789923a0cac2cd5a29172a475fe9e0fb14cd6adb5ad98a3fa70333e7afa230", private_key: "8f94d394a8e8fd6b1bc2f3f49f5c47e385281d5c17e65324b0f62483e37e8793", }, ], }, TestVector { seed: "fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542", master_chain_code: "ef70a74db9c3a5af931b5fe73ed8e1a53464133654fd55e7a66f8570b8e33c3b", master_private_key: "171cb88b1b3c1db25add599712e36245d75bc65a1a5c9e18d76f9f2b1eab4012", chains: vec![ TestChain { chain: Chain::from_u32_hardened(vec![0]), chain_code: "0b78a3226f915c082bf118f83618a618ab6dec793752624cbeb622acb562862d", private_key: "1559eb2bbec5790b0c65d8693e4d0875b1747f4970ae8b650486ed7470845635", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647]), chain_code: "138f0b2551bcafeca6ff2aa88ba8ed0ed8de070841f0c4ef0165df8181eaad7f", private_key: "ea4f5bfe8694d8bb74b7b59404632fd5968b774ed545e810de9c32a4fb4192f4", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1]), chain_code: "73bd9fff1cfbde33a1b846c27085f711c0fe2d66fd32e139d3ebc28e5a4a6b90", private_key: "3757c7577170179c7868353ada796c839135b3d30554bbb74a4b1e4a5a58505c", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1, 2147483646]), chain_code: "0902fe8a29f9140480a00ef244bd183e8a13288e4412d8389d140aac1794825a", private_key: "5837736c89570de861ebc173b1086da4f505d4adb387c6a1b1342d5e4ac9ec72", }, TestChain { chain: Chain::from_u32_hardened(vec![0, 2147483647, 1, 2147483646, 2]), chain_code: "5d70af781f3a37b829f0d060924d5e960bdc02e85423494afc0b1a41bbe196d4", private_key: "551d333177df541ad876a60ea71f00447931c0a9da16f227c11ea080d7391b8d", }, ], } ]; for tv in &tvs { let seed = Seed::from_bytes(&hex::decode(tv.seed).unwrap()); let m = seed.to_master_key(); let mut expected_master_chain_code = [0u8; 32]; hex::decode_to_slice(&tv.master_chain_code, &mut expected_master_chain_code as &mut [u8]).unwrap(); assert_eq!(expected_master_chain_code, m.chain_code()); let mut expected_master_private_key = [0u8; 32]; hex::decode_to_slice(&tv.master_private_key, &mut expected_master_private_key as &mut [u8]).unwrap(); assert_eq!(expected_master_private_key, m.secret_key()?.to_le_bytes()); for c in tv.chains.iter() { let ck = seed.derive(&c.chain)?; let mut expected_chain_code = [0u8; 32]; hex::decode_to_slice(&c.chain_code, &mut expected_chain_code as &mut [u8]).unwrap(); assert_eq!(expected_chain_code, ck.chain_code()); let mut expected_private_key = [0u8; 32]; hex::decode_to_slice(&c.private_key, &mut expected_private_key as &mut [u8]).unwrap(); assert_eq!(expected_private_key, ck.secret_key()?.to_le_bytes()); } } Ok(()) }
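One behaviour of the module that the test vectors above never hit is the hardened-only restriction: `Segment::from_u32` marks a segment as hardened only when the index has the `HARDEN_MASK` bit set, and `Key::child_key` rejects anything else with `Error::NotSupported`. A small hedged sketch of that check, again assumed to live alongside the types above (the function name and indices are illustrative):

```rust
// Demonstrates the hardened-only rule enforced by Key::child_key above.
fn hardened_only_sketch() {
    let master = Seed::from_bytes(&[1u8; 32]).to_master_key();

    // An index below HARDEN_MASK (1 << 31) produces a non-hardened Segment ...
    let soft = Segment::from_u32(42);
    // ... which child_key refuses.
    assert!(matches!(master.child_key(&soft), Err(Error::NotSupported)));

    // Setting the high bit (what Chain::from_u32_hardened does) makes it derivable.
    let hard = Segment::from_u32(Segment::HARDEN_MASK | 42);
    assert!(master.child_key(&hard).is_ok());
}
```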
function_block-function_prefix_line
[ { "content": "/// Read ciphertext from the input, decrypts it using the specified key and the associated data\n\n/// specified during encryption and returns the plaintext\n\npub fn read<I: Read>(input: &mut I, key: &Key, associated_data: &[u8]) -> crate::Result<Vec<u8>> {\n\n // check the header\n\n check_header(input)?;\n\n\n\n let mut nonce = [0; xchacha20poly1305::XCHACHA20POLY1305_NONCE_SIZE];\n\n input.read_exact(&mut nonce)?;\n\n\n\n let mut tag = [0; xchacha20poly1305::XCHACHA20POLY1305_TAG_SIZE];\n\n input.read_exact(&mut tag)?;\n\n\n\n let mut ct = Vec::new();\n\n input.read_to_end(&mut ct)?;\n\n\n\n let mut pt = vec![0; ct.len()];\n\n xchacha20poly1305::decrypt(&mut pt, &ct, key, &tag, &nonce, associated_data)?;\n\n\n\n Ok(pt)\n\n}\n\n\n", "file_path": "engine/snapshot/src/logic.rs", "rank": 0, "score": 430694.41574337694 }, { "content": "/// updates the value a with any data using the key and the multipliers\n\n/// pads any incomplete block with 0 bytes.\n\npub fn poly1305_update(a: &mut [u32], r: &[u32], mu: &[u32], mut data: &[u8], is_last: bool) {\n\n let mut buf = vec![0; 16];\n\n let mut w = vec![0; 5];\n\n\n\n // process data\n\n while !data.is_empty() {\n\n // put data into buffer and append 0x01 byte as padding as needed\n\n let buf_len = min(data.len(), buf.len());\n\n if buf_len < 16 {\n\n buf.copy_from_slice(&[0; 16]);\n\n if is_last {\n\n buf[buf_len] = 0x01\n\n }\n\n }\n\n buf[..buf_len].copy_from_slice(&data[..buf_len]);\n\n\n\n // decode next block into an accumulator. Apply high bit if needed.\n\n a[0] = add!(\n\n a[0],\n\n and!(shift_right!(read32_little_endian!(&buf[0..]), 0), 0x03FFFFFF)\n", "file_path": "engine/crypto/src/internal/poly.rs", "rank": 1, "score": 414764.29121239344 }, { "content": "/// [`read`](fn.read.html) and decrypt the ciphertext from the specified path\n\npub fn read_from(path: &Path, key: &Key, associated_data: &[u8]) -> crate::Result<Vec<u8>> {\n\n let mut f: File = OpenOptions::new().read(true).open(path)?;\n\n check_min_file_len(&mut f)?;\n\n let pt = read(&mut f, key, associated_data)?;\n\n\n\n decompress(&pt)\n\n}\n\n\n", "file_path": "engine/snapshot/src/logic.rs", "rank": 2, "score": 396441.7125323608 }, { "content": "pub fn naive_kdf(password: &[u8], salt: &[u8; 32], key: &mut [u8; 32]) -> crate::Result<()> {\n\n crypto::macs::hmac::HMAC_SHA256(password, salt, key);\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/snapshot/src/kdf.rs", "rank": 3, "score": 381282.26474269223 }, { "content": "/// Loads a key into r and s and computes the key multipliers\n\npub fn poly1305_init(r: &mut [u32], s: &mut [u32], mu: &mut [u32], key: &[u8]) {\n\n // load key\n\n r[0] = and!(shift_right!(read32_little_endian!(&key[0..]), 0), 0x03FFFFFF);\n\n r[1] = and!(shift_right!(read32_little_endian!(&key[3..]), 2), 0x03FFFF03);\n\n r[2] = and!(shift_right!(read32_little_endian!(&key[6..]), 4), 0x03FFC0FF);\n\n r[3] = and!(shift_right!(read32_little_endian!(&key[9..]), 6), 0x03F03FFF);\n\n r[4] = and!(shift_right!(read32_little_endian!(&key[12..]), 8), 0x000FFFFF);\n\n\n\n s[0] = read32_little_endian!(&key[16..]);\n\n s[1] = read32_little_endian!(&key[20..]);\n\n s[2] = read32_little_endian!(&key[24..]);\n\n s[3] = read32_little_endian!(&key[28..]);\n\n\n\n // compute multipliers\n\n mu[0] = 0;\n\n mu[1] = mult!(r[1], 5);\n\n mu[2] = mult!(r[2], 5);\n\n mu[3] = mult!(r[3], 5);\n\n mu[4] = mult!(r[4], 5);\n\n}\n\n\n", "file_path": "engine/crypto/src/internal/poly.rs", "rank": 4, "score": 380315.11927980924 }, { "content": "pub fn corrupt(bs: &mut [u8]) 
{\n\n if bs.is_empty() {\n\n return;\n\n }\n\n loop {\n\n let i = rand::random::<usize>() % bs.len();\n\n let b = bs[i];\n\n bs[i] = rand::random();\n\n if b != bs[i] && rand::random() {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "test_utils/src/lib.rs", "rank": 5, "score": 372029.7069037477 }, { "content": "/// Encrypt the opaque plaintext bytestring using the specified key and optional associated data\n\n/// and writes the ciphertext to the specifed output\n\npub fn write<O: Write>(plain: &[u8], output: &mut O, key: &Key, associated_data: &[u8]) -> crate::Result<()> {\n\n output.write_all(&MAGIC)?;\n\n output.write_all(&VERSION)?;\n\n\n\n let mut nonce = [0; xchacha20poly1305::XCHACHA20POLY1305_NONCE_SIZE];\n\n crypto::rand::fill(&mut nonce)?;\n\n output.write_all(&nonce)?;\n\n\n\n let mut tag = [0; xchacha20poly1305::XCHACHA20POLY1305_TAG_SIZE];\n\n\n\n let mut ct = vec![0; plain.len()];\n\n xchacha20poly1305::encrypt(&mut ct, &mut tag, &plain, key, &nonce, associated_data)?;\n\n\n\n output.write_all(&tag)?;\n\n output.write_all(&ct)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/snapshot/src/logic.rs", "rank": 6, "score": 370788.8402625411 }, { "content": "/// encrypts data in place\n\npub fn chachapoly_seal(data: &mut [u8], tag: &mut [u8], ad: &[u8], key: &[u8], nonce: &[u8]) {\n\n // encrypt data\n\n ChaCha20Ietf::xor(key, nonce, 1, data);\n\n\n\n // create footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // compute poly key and auth tag\n\n let mut pkey = vec![0; 32];\n\n ChaCha20Ietf::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(tag, ad, data, &foot, &pkey);\n\n}\n\n\n", "file_path": "engine/crypto/src/chachapoly_ietf.rs", "rank": 7, "score": 366219.96538320364 }, { "content": "pub fn decompress_into(input: &[u8], output: &mut Vec<u8>) -> crate::Result<()> {\n\n LZ4Decoder {\n\n input,\n\n output,\n\n token: 0,\n\n }\n\n .complete()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/snapshot/src/compression/decoder.rs", "rank": 8, "score": 358679.54581672384 }, { "content": "/// calculates the nth ChaCha20-IETF block into a buffer\n\npub fn chacha20_ietf_block(key: &[u8], nonce: &[u8], n: u32, buf: &mut [u8]) {\n\n // create buffer\n\n let mut state = vec![0u32; 32];\n\n let (init, mixed) = state.split_at_mut(16);\n\n\n\n // initialize buffer\n\n (0..4).for_each(|i| init[i] = BASIS[i]);\n\n (4..12).for_each(|i| init[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n init[12] = n;\n\n (13..16).for_each(|i| init[i] = read32_little_endian!(&nonce[(i - 13) * 4..]));\n\n\n\n // mix the buffer\n\n mixed.copy_from_slice(init);\n\n chacha20_rounds(mixed);\n\n\n\n // write the mixed state into the buffer\n\n (0..16).for_each(|i| mixed[i] = add!(mixed[i], init[i]));\n\n (0..16).for_each(|i| write32_little_endian!(mixed[i] => &mut buf[i * 4..]));\n\n}\n\n\n", "file_path": "engine/crypto/src/internal/chacha.rs", "rank": 9, "score": 357375.2974871336 }, { "content": "pub fn non_empty_bytestring() -> Vec<u8> {\n\n let s = (rand::random::<usize>() % 4096) + 1;\n\n let mut bs = Vec::with_capacity(s);\n\n for _ in 1..s {\n\n bs.push(rand::random());\n\n }\n\n bs\n\n}\n\n\n", "file_path": "test_utils/src/fresh.rs", "rank": 10, "score": 344424.39208211267 }, { "content": "/// Atomically encrypt and [`write`](fn.write.html) the specified plaintext to the specified path\n\n///\n\n/// This is achieved by creating a temporary file in the same 
directory as the specified path (same\n\n/// filename with a salted suffix). This is currently known to be problematic if the path is a\n\n/// symlink and/or if the target path resides in a directory without user write permission.\n\npub fn write_to(plain: &[u8], path: &Path, key: &Key, associated_data: &[u8]) -> crate::Result<()> {\n\n // TODO: if path exists and is a symlink, resolve it and then append the salt\n\n // TODO: if the sibling tempfile isn't writeable (e.g. directory permissions), write to\n\n // env::temp_dir()\n\n\n\n let compressed_plain = compress(plain);\n\n\n\n let mut salt = [0u8; 6];\n\n crypto::rand::fill(&mut salt)?;\n\n\n\n let mut s = path.as_os_str().to_os_string();\n\n s.push(\".\");\n\n s.push(hex::encode(salt));\n\n let tmp = Path::new(&s);\n\n\n\n let mut f = OpenOptions::new().write(true).create_new(true).open(tmp)?;\n\n write(&compressed_plain, &mut f, key, associated_data)?;\n\n f.sync_all()?;\n\n\n\n rename(tmp, path)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/snapshot/src/logic.rs", "rank": 11, "score": 341956.09561106644 }, { "content": "/// derive a key from password and salt using the currently recommended key derivation function\n\n/// and parameters\n\npub fn recommended_kdf(_password: &[u8], _salt: &[u8], _key: &mut [u8]) -> crate::Result<()> {\n\n todo!(\"argon2 with 'sensitive'/offline settings\")\n\n}\n", "file_path": "engine/snapshot/src/kdf.rs", "rank": 12, "score": 338676.2513047954 }, { "content": "/// finishes authentication\n\npub fn poly1305_finish(tag: &mut [u8], a: &mut [u32], s: &[u32]) {\n\n // modular reduction\n\n let mut c;\n\n c = shift_right!(a[1], 26);\n\n a[1] = and!(a[1], 0x3ffffff);\n\n a[2] = add!(a[2], c);\n\n c = shift_right!(a[2], 26);\n\n a[2] = and!(a[2], 0x3ffffff);\n\n a[3] = add!(a[3], c);\n\n c = shift_right!(a[3], 26);\n\n a[3] = and!(a[3], 0x3ffffff);\n\n a[4] = add!(a[4], c);\n\n c = shift_right!(a[4], 26);\n\n a[4] = and!(a[4], 0x3ffffff);\n\n a[0] = add!(a[0], mult!(c, 5));\n\n c = shift_right!(a[0], 26);\n\n a[0] = and!(a[0], 0x3ffffff);\n\n a[1] = add!(a[1], c);\n\n\n\n // reduce if values is in the range (2^130-5, 2^130]\n", "file_path": "engine/crypto/src/internal/poly.rs", "rank": 13, "score": 325880.1996369039 }, { "content": "/// encrypts data in-place and authenticates it\n\nfn xchachapoly_seal(data: &mut [u8], tag: &mut [u8], ad: &[u8], key: &[u8], nonce: &[u8]) {\n\n // xor and encrypt the data.\n\n XChaCha20::xor(key, nonce, 1, data);\n\n\n\n // build a footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // compute Poly1305 key and auth tag\n\n let mut pkey = vec![0; 32];\n\n XChaCha20::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(tag, ad, data, &foot, &pkey);\n\n}\n\n\n", "file_path": "engine/crypto/src/xchachapoly.rs", "rank": 14, "score": 324759.6976372159 }, { "content": "pub fn decompress(input: &[u8]) -> crate::Result<Vec<u8>> {\n\n let mut vec = Vec::with_capacity(4096);\n\n\n\n decompress_into(input, &mut vec)?;\n\n\n\n Ok(vec)\n\n}\n\n\n", "file_path": "engine/snapshot/src/compression/decoder.rs", "rank": 15, "score": 318399.51005635934 }, { "content": "pub fn compress_into(input: &[u8], output: &mut Vec<u8>) {\n\n LZ4Encoder {\n\n input,\n\n output,\n\n cursor: 0,\n\n dict: [!0; DICT_SIZE],\n\n }\n\n .complete();\n\n}\n\n\n", "file_path": "engine/snapshot/src/compression/encoder.rs", "rank": 16, "score": 317797.553374668 }, { "content": 
"/// HChaCha20 implementation\n\npub fn h_chacha20_hash(key: &[u8], nonce: &[u8], buf: &mut [u8]) {\n\n // initialize state\n\n let mut state = vec![0u32; 16];\n\n (0..4).for_each(|i| state[i] = BASIS[i]);\n\n (4..12).for_each(|i| state[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n (12..16).for_each(|i| state[i] = read32_little_endian!(&nonce[(i - 12) * 4..]));\n\n\n\n // run the rounds\n\n chacha20_rounds(&mut state);\n\n\n\n // write to the output\n\n let (buf_a, buf_b) = buf.split_at_mut(16);\n\n (0..4).for_each(|i| write32_little_endian!(state[i] => &mut buf_a[i* 4..]));\n\n (12..16).for_each(|i| write32_little_endian!(state[i] => &mut buf_b[(i - 12) * 4..]));\n\n}\n\n\n", "file_path": "engine/crypto/src/internal/chacha.rs", "rank": 17, "score": 316559.098984363 }, { "content": "pub fn bytestring() -> Vec<u8> {\n\n let s = if rand::random::<u8>() % 4 == 0 {\n\n 0\n\n } else {\n\n rand::random::<usize>() % 4096\n\n };\n\n\n\n let mut bs = Vec::with_capacity(s);\n\n for _ in 1..s {\n\n bs.push(rand::random());\n\n }\n\n bs\n\n}\n\n\n", "file_path": "test_utils/src/fresh.rs", "rank": 18, "score": 314305.4485742863 }, { "content": "/// calculates the nth ChaCha20 block into a buffer\n\npub fn chacha20_block(key: &[u8], nonce: &[u8], n: u64, buf: &mut [u8]) {\n\n // create buffer\n\n let mut state = vec![0u32; 32];\n\n let (init, mixed) = state.split_at_mut(16);\n\n\n\n // initialize buffer\n\n (0..4).for_each(|i| init[i] = BASIS[i]);\n\n (4..12).for_each(|i| init[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n split64_little_endian!(n => &mut init[12..]);\n\n (14..16).for_each(|i| init[i] = read32_little_endian!(&nonce[(i - 14) * 4..]));\n\n\n\n // mix the buffer\n\n mixed.copy_from_slice(init);\n\n chacha20_rounds(mixed);\n\n\n\n // write the mixed state into the buffer\n\n (0..16).for_each(|i| mixed[i] = add!(mixed[i], init[i]));\n\n (0..16).for_each(|i| write32_little_endian!(mixed[i] => &mut buf[i * 4..]));\n\n}\n", "file_path": "engine/crypto/src/internal/chacha.rs", "rank": 19, "score": 304679.6840133453 }, { "content": "pub fn compress(input: &[u8]) -> Vec<u8> {\n\n let mut vec = Vec::with_capacity(input.len());\n\n\n\n compress_into(input, &mut vec);\n\n\n\n vec\n\n}\n\n\n", "file_path": "engine/snapshot/src/compression/encoder.rs", "rank": 20, "score": 272420.46159890824 }, { "content": "fn mmap(n: usize) -> crate::Result<*mut u8> {\n\n let x = unsafe {\n\n libc::mmap(\n\n ptr::null_mut::<u8>() as *mut libc::c_void,\n\n n,\n\n libc::PROT_NONE,\n\n libc::MAP_PRIVATE | libc::MAP_ANONYMOUS,\n\n -1,\n\n 0,\n\n )\n\n };\n\n if x == libc::MAP_FAILED {\n\n return Err(crate::Error::os(\"mmap\"));\n\n }\n\n Ok(x as *mut u8)\n\n}\n\n\n", "file_path": "runtime/src/mem.rs", "rank": 21, "score": 255241.55898772192 }, { "content": "fn init_read_snap(stronghold: Stronghold, key_data: Vec<u8>) -> Stronghold {\n\n let mut stronghold = init_read_vault(stronghold);\n\n\n\n block_on(stronghold.write_all_to_snapshot(key_data, Some(\"bench_read\".into()), None));\n\n\n\n stronghold\n\n}\n\n\n", "file_path": "client/benches/benchmark.rs", "rank": 22, "score": 253776.09141066024 }, { "content": "pub fn coinflip() -> bool {\n\n rand::random()\n\n}\n", "file_path": "test_utils/src/fresh.rs", "rank": 23, "score": 253760.5637479644 }, { "content": "fn munmap(p: *mut u8, n: usize) -> crate::Result<()> {\n\n match unsafe { libc::munmap(p as *mut libc::c_void, n) } {\n\n 0 => Ok(()),\n\n _ => Err(crate::Error::os(\"munmap\")),\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct 
GuardedAllocation {\n\n base: *mut u8,\n\n data_region_start: *mut u8,\n\n data_region_size: usize,\n\n data_aligned: *mut u8,\n\n mmapped_size: usize, // size of the memory mapping (including guard pages)\n\n}\n\n\n\nimpl GuardedAllocation {\n\n pub fn unaligned(n: usize) -> crate::Result<Self> {\n\n Self::aligned(Layout::from_size_align(n, 1).map_err(Error::Layout)?)\n\n }\n", "file_path": "runtime/src/mem.rs", "rank": 24, "score": 250870.00663330732 }, { "content": "/// Trait for decryptable data\n\npub trait Decrypt<E, T: TryFrom<Vec<u8>, Error = E>>: AsRef<[u8]> {\n\n /// decrypts raw data and creates a new type T from the plaintext\n\n fn decrypt<B: BoxProvider, AD: AsRef<[u8]>>(&self, key: &Key<B>, ad: AD) -> crate::Result<T> {\n\n let opened = B::box_open(key, ad.as_ref(), self.as_ref())?;\n\n Ok(T::try_from(opened).map_err(|_| crate::Error::DatabaseError(String::from(\"Invalid Entry\")))?)\n\n }\n\n}\n", "file_path": "engine/vault/src/crypto_box.rs", "rank": 25, "score": 243399.81862261787 }, { "content": "type Store = Cache<Vec<u8>, Vec<u8>>;\n\n\n\n/// A `Bucket` cache of the Data for stronghold. Contains a `HashMap<Key<P>, Option<DBView<P>>>` pairing the vault\n\n/// `Key<P>` and the vault `DBView<P>` together. Also contains a `HashMap<Key<P>, Vec<ReadResult>>` which pairs the\n\n/// backing data with the associated `Key<P>`.\n\npub struct Bucket<P: BoxProvider + Send + Sync + Clone + 'static> {\n\n vaults: HashMap<Key<P>, Option<DBView<P>>>,\n\n cache: HashMap<Key<P>, Vec<ReadResult>>,\n\n}\n\n\n\nimpl<P: BoxProvider + Send + Sync + Clone + Ord + PartialOrd + PartialEq + Eq + 'static> Bucket<P> {\n\n /// Creates a new `Bucket`.\n\n pub fn new() -> Self {\n\n let cache = HashMap::new();\n\n let vaults = HashMap::new();\n\n\n\n Self { cache, vaults }\n\n }\n\n\n\n #[allow(dead_code)]\n", "file_path": "client/src/bucket.rs", "rank": 26, "score": 239091.3063837017 }, { "content": "type Store = Cache<Vec<u8>, Vec<u8>>;\n\n\n\npub enum ReadWrite {\n\n Read,\n\n Write,\n\n}\n\n\n\n/// A `Client` Cache Actor which routes external messages to the rest of the Stronghold system.\n\n#[actor(SHResults, SHRequest, InternalResults)]\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Client {\n\n pub client_id: ClientId,\n\n // Contains the vault ids and the record ids with their associated indexes.\n\n vaults: HashMap<VaultId, (usize, Vec<RecordId>)>,\n\n // Contains the Record Ids for the most recent Record in each vault.\n\n heads: Vec<RecordId>,\n\n counters: Vec<usize>,\n\n store: Store,\n\n}\n\n\n", "file_path": "client/src/client.rs", "rank": 27, "score": 239091.3063837017 }, { "content": "#[test]\n\nfn test_empty() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v = DBView::load(k, empty::<ReadResult>())?;\n\n\n\n assert_eq!(v.all().len(), 0);\n\n assert_eq!(v.absolute_balance(), (0, 0));\n\n assert_eq!(v.chain_ctrs(), HashMap::new());\n\n assert_eq!(v.gc().len(), 0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 28, "score": 236490.1289466984 }, { "content": "pub fn corrupt_file(f: &mut File) {\n\n seek_to_beginning(f);\n\n let mut bs = Vec::new();\n\n f.read_to_end(&mut bs).unwrap();\n\n\n\n corrupt(&mut bs);\n\n\n\n seek_to_beginning(f);\n\n f.write_all(&bs).unwrap();\n\n\n\n seek_to_beginning(f);\n\n}\n\n\n", "file_path": "test_utils/src/lib.rs", "rank": 29, "score": 235519.22471152717 }, { "content": "/// Construct the path to a snapshot file with the specifed name (defaults to `main`) under\n\n/// the directory 
specified by the (`snapshot_dir`)[fn.snapshot_dir.html] function.\n\npub fn get_path(name: Option<&str>) -> crate::Result<PathBuf> {\n\n snapshot_dir().map(|p| p.join(format!(\"{}.stronghold\", name.unwrap_or(\"main\"))))\n\n}\n", "file_path": "engine/snapshot/src/files.rs", "rank": 30, "score": 235283.36772362512 }, { "content": "/// Trait for the generic Request and Response types\n\npub trait MessageEvent: Serialize + DeserializeOwned + Debug + Send + Clone + Sync + 'static {}\n\nimpl<T: Serialize + DeserializeOwned + Debug + Send + Clone + Sync + 'static> MessageEvent for T {}\n\n\n\n/// Custom protocol that extends libp2ps RequestResponseProtocol\n\n#[derive(Debug, Clone)]\n\npub struct MessageProtocol();\n\n\n\nimpl ProtocolName for MessageProtocol {\n\n fn protocol_name(&self) -> &[u8] {\n\n b\"/stronghold-communication/1.0.0\"\n\n }\n\n}\n\n\n\n/// Describes how messages are read from and written to the io Socket by implementing the RequestResponseCodec\n\n#[derive(Clone)]\n\npub struct MessageCodec<T, U> {\n\n p: PhantomData<T>,\n\n q: PhantomData<U>,\n\n}\n\n\n", "file_path": "communication/src/behaviour/protocol.rs", "rank": 31, "score": 234750.4485137008 }, { "content": "pub fn hd_path() -> (String, hd::Chain) {\n\n let mut s = \"m\".to_string();\n\n let mut is = vec![];\n\n while coinflip() {\n\n let i = rand::random::<u32>() & 0x7fffff;\n\n s.push_str(&format!(\"/{}'\", i.to_string()));\n\n is.push(i);\n\n }\n\n (s, hd::Chain::from_u32_hardened(is))\n\n}\n", "file_path": "client/src/tests/fresh.rs", "rank": 32, "score": 233531.44821545767 }, { "content": "#[test]\n\nfn test_storage_returns_stale_blob() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n let mut w = v0.writer(id);\n\n writes.push(w.truncate()?);\n\n let hint = fresh_record_hint();\n\n\n\n let (bid, blob) = match w.write(&fresh::bytestring(), hint)?.as_slice() {\n\n [w0, w1] => {\n\n assert_eq!(w0.kind(), Kind::Transaction);\n\n assert_eq!(w1.kind(), Kind::Blob);\n\n (w1.id().to_vec(), w1.data().to_vec())\n\n }\n\n ws => panic!(\"{} unexpected writes\", ws.len()),\n\n };\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 33, "score": 226044.5063204646 }, { "content": "// Purge a record from the chain: revoke and then garbage collect.\n\nfn purge_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"purge\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n\n\n", "file_path": "client/examples/cli.rs", "rank": 35, "score": 218995.66546916927 }, { "content": "// create a record with a revoke transaction. 
Data isn't actually deleted until it is garbage collected.\n\nfn revoke_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"revoke\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n\n\n", "file_path": "client/examples/cli.rs", "rank": 36, "score": 218995.31811430264 }, { "content": "// handle the snapshot command.\n\nfn snapshot_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"snapshot\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref path) = matches.value_of(\"path\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let path = Path::new(path);\n\n\n\n let input = path.to_path_buf();\n\n\n\n let output = path.parent().expect(line_error!());\n\n let mut out = PathBuf::new();\n\n out.push(output);\n\n out.push(Path::new(\"recompute.stronghold\"));\n\n\n\n if input.exists() {\n\n let status = block_on(stronghold.read_snapshot(client_path, None, key.to_vec(), None, Some(input)));\n\n\n", "file_path": "client/examples/cli.rs", "rank": 37, "score": 218990.75264492573 }, { "content": "// handle the list command.\n\nfn list_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"list\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n\n\n\n let (list, status) = block_on(stronghold.list_hints_and_ids(b\"test\".to_vec()));\n", "file_path": "client/examples/cli.rs", "rank": 38, "score": 218990.75264492573 }, { "content": "fn encrypt_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"encrypt\") {\n\n if let Some(pass) = matches.value_of(\"password\") {\n\n if let Some(plain) = matches.value_of(\"plain\") {\n\n if let Some(rid) = matches.value_of(\"rpath\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n", "file_path": "client/examples/cli.rs", 
"rank": 39, "score": 218990.75264492573 }, { "content": "// Purge a record from the chain: revoke and then garbage collect.\n\nfn purge_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"purge\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n\n\n", "file_path": "products/commandline/src/main.rs", "rank": 40, "score": 216473.3149678799 }, { "content": "// create a record with a revoke transaction. Data isn't actually deleted until it is garbage collected.\n\nfn revoke_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"revoke\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n\n\n", "file_path": "products/commandline/src/main.rs", "rank": 41, "score": 216472.96761301332 }, { "content": "fn encrypt_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"encrypt\") {\n\n if let Some(pass) = matches.value_of(\"password\") {\n\n if let Some(plain) = matches.value_of(\"plain\") {\n\n if let Some(rid) = matches.value_of(\"rpath\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n", "file_path": "products/commandline/src/main.rs", "rank": 42, "score": 216468.40214363643 }, { "content": "// handle the encryption command.\n\nfn write_to_store_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"write\") {\n\n if let Some(pass) = matches.value_of(\"password\") {\n\n if let Some(plain) = matches.value_of(\"plain\") {\n\n if let Some(rid) = matches.value_of(\"rpath\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n 
None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n", "file_path": "client/examples/cli.rs", "rank": 43, "score": 216468.40214363643 }, { "content": "// handle the snapshot command.\n\nfn snapshot_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"snapshot\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref path) = matches.value_of(\"path\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let path = Path::new(path);\n\n\n\n let input = path.to_path_buf();\n\n\n\n let output = path.parent().expect(line_error!());\n\n let mut out = PathBuf::new();\n\n out.push(output);\n\n out.push(Path::new(\"recompute.stronghold\"));\n\n\n\n if input.exists() {\n\n let status = block_on(stronghold.read_snapshot(client_path, None, key.to_vec(), None, Some(input)));\n\n\n", "file_path": "products/commandline/src/main.rs", "rank": 44, "score": 216468.40214363643 }, { "content": "// handle the read command.\n\nfn read_from_store_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"read\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref rpath) = matches.value_of(\"rpath\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n\n\n", "file_path": "client/examples/cli.rs", "rank": 45, "score": 216468.40214363643 }, { "content": "// handle the list command.\n\nfn list_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"list\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n\n\n\n let (list, status) = block_on(stronghold.list_hints_and_ids(b\"test\".to_vec()));\n", "file_path": "products/commandline/src/main.rs", "rank": 46, "score": 216468.40214363643 }, { "content": "/// a mutable view over raw data.\n\npub trait AsViewMut<T: Sized>: AsMut<[u8]> {\n\n /// creates a mutable view over `self`.\n\n fn view_mut(&mut self) -> &mut T {\n\n // get bytes\n\n let bytes = self.as_mut();\n\n // validate bytes\n\n assert!(mem::size_of::<T>() <= bytes.len(), \"Can't create view over this memory\");\n\n // get mute pointer\n\n let bytes = bytes.as_mut_ptr();\n\n // validate alignment\n\n assert_eq!(\n\n bytes.align_offset(mem::align_of::<T>()),\n\n 0,\n\n \"View's offset is incorrect\"\n\n );\n\n\n\n // cast mutable pointer\n\n unsafe { bytes.cast::<T>().as_mut() }.unwrap()\n\n }\n\n}\n", "file_path": 
"engine/vault/src/types.rs", "rank": 47, "score": 215048.04951783878 }, { "content": "// handle the encryption command.\n\nfn write_to_store_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"write\") {\n\n if let Some(pass) = matches.value_of(\"password\") {\n\n if let Some(plain) = matches.value_of(\"plain\") {\n\n if let Some(rid) = matches.value_of(\"rpath\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n", "file_path": "products/commandline/src/main.rs", "rank": 48, "score": 214039.35362822836 }, { "content": "// handle the read command.\n\nfn read_from_store_command(matches: &ArgMatches, stronghold: &mut iota_stronghold::Stronghold, client_path: Vec<u8>) {\n\n if let Some(matches) = matches.subcommand_matches(\"read\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref rpath) = matches.value_of(\"rpath\") {\n\n let mut key = [0u8; 32];\n\n let salt = [0u8; 32];\n\n naive_kdf(pass.as_bytes(), &salt, &mut key).expect(line_error!());\n\n\n\n let home_dir = home_dir().expect(line_error!());\n\n let snapshot = home_dir.join(\"snapshots\").join(\"commandline.stronghold\");\n\n\n\n if snapshot.exists() {\n\n block_on(stronghold.read_snapshot(\n\n client_path,\n\n None,\n\n key.to_vec(),\n\n Some(\"commandline\".to_string()),\n\n None,\n\n ));\n\n\n", "file_path": "products/commandline/src/main.rs", "rank": 49, "score": 214039.35362822836 }, { "content": "type Result<T, E = Error> = core::result::Result<T, E>;\n", "file_path": "runtime/src/lib.rs", "rank": 50, "score": 213576.74889381859 }, { "content": "#[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n\nfn macos_secrandom() -> Option<&'static str> {\n\n println!(\"cargo:rustc-link-lib=framework=Security\");\n\n Some(\"USE_SECRANDOM\")\n\n}\n\n\n\n// checks if the current version of glibc supports the getrandom function\n", "file_path": "engine/random/build.rs", "rank": 51, "score": 207126.0497039024 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn linux_check_getrandom() -> Option<&'static str> {\n\n use std::{ffi::CStr, os::raw::c_char, str::FromStr};\n\n extern \"C\" {\n\n fn gnu_get_libc_version() -> *const c_char;\n\n }\n\n\n\n let v: Vec<u8> = unsafe { CStr::from_ptr(gnu_get_libc_version()) }\n\n .to_str()\n\n .unwrap()\n\n .split('.')\n\n .map(|s| u8::from_str(s).unwrap())\n\n .collect();\n\n\n\n match (v[0], v[1]) {\n\n (2..=255, 25..=255) => Some(\"USE_GETRANDOM\"),\n\n _ => Some(\"USE_DEV_RANDOM\"),\n\n }\n\n}\n\n\n", "file_path": "engine/random/build.rs", "rank": 52, "score": 204366.46866855118 }, { "content": "/// trait for encryptable data\n\npub trait Encrypt<T: From<Vec<u8>>>: AsRef<[u8]> {\n\n /// encrypts a raw data and creates a type T from the ciphertext\n\n fn encrypt<B: BoxProvider, AD: AsRef<[u8]>>(&self, key: &Key<B>, ad: AD) -> crate::Result<T> {\n\n let sealed = B::box_seal(key, ad.as_ref(), self.as_ref())?;\n\n Ok(T::from(sealed))\n\n }\n\n}\n\n\n", "file_path": "engine/vault/src/crypto_box.rs", "rank": 53, "score": 203744.79688836308 }, { "content": "/// Does ChaCha20 Rounds over 
the state\n\nfn chacha20_rounds(state: &mut [u32]) {\n\n for _ in 0..10 {\n\n // macro for a quater round\n\n macro_rules! quarter_round {\n\n ($a:expr, $b:expr, $c:expr, $d:expr) => {{\n\n state[$a] = add!(state[$a], state[$b]);\n\n state[$d] = xor!(state[$d], state[$a]);\n\n state[$d] = or!(shift_left!(state[$d], 16), shift_right!(state[$d], 16));\n\n state[$c] = add!(state[$c], state[$d]);\n\n state[$b] = xor!(state[$b], state[$c]);\n\n state[$b] = or!(shift_left!(state[$b], 12), shift_right!(state[$b], 20));\n\n state[$a] = add!(state[$a], state[$b]);\n\n state[$d] = xor!(state[$d], state[$a]);\n\n state[$d] = or!(shift_left!(state[$d], 8), shift_right!(state[$d], 24));\n\n state[$c] = add!(state[$c], state[$d]);\n\n state[$b] = xor!(state[$b], state[$c]);\n\n state[$b] = or!(shift_left!(state[$b], 7), shift_right!(state[$b], 25));\n\n }};\n\n }\n\n\n", "file_path": "engine/crypto/src/internal/chacha.rs", "rank": 54, "score": 200277.92470415717 }, { "content": "#[cfg(unix)]\n\nfn strerror(errno: libc::c_int) -> &'static str {\n\n #[allow(clippy::unnecessary_cast)]\n\n static mut BUF: [libc::c_char; 1024] = [0 as libc::c_char; 1024];\n\n unsafe {\n\n let res = libc::strerror_r(errno, BUF.as_mut_ptr(), BUF.len());\n\n assert_eq!(res, 0);\n\n\n\n let len = BUF.iter().position(|c| *c == 0).unwrap_or(BUF.len());\n\n core::str::from_utf8_unchecked(core::slice::from_raw_parts(BUF.as_ptr() as *const u8, len))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n #[cfg(unix)]\n\n Self::OsError { syscall, errno } => f\n\n .debug_struct(\"OsError\")\n\n .field(\"syscall\", syscall)\n\n .field(\"errno\", errno)\n\n .field(\"strerror\", &strerror(*errno))\n\n .finish(),\n\n #[cfg(unix)]\n\n Self::MemError(me) => me.fmt(f),\n\n Self::ZoneError(ze) => ze.fmt(f),\n\n Self::Unreachable(msg) => f.write_fmt(format_args!(\"unreachable state: {}\", msg)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "runtime/src/lib.rs", "rank": 55, "score": 195906.95914857765 }, { "content": "#[test]\n\nfn test_truncate() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n writes.push(v0.writer(id).truncate()?);\n\n\n\n let v1 = DBView::load(k, writes.iter().map(write_to_read))?;\n\n\n\n assert_eq!(v1.all().len(), 1);\n\n assert_eq!(v1.absolute_balance(), (1, 1));\n\n assert_eq!(v1.chain_ctrs(), vec![(id, 0u64)].into_iter().collect());\n\n assert_eq!(v1.gc().len(), 0);\n\n\n\n assert_eq!(v1.reader().prepare_read(&id)?, PreparedRead::RecordIsEmpty);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 56, "score": 194636.15336332054 }, { "content": "#[test]\n\nfn test_revoke() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n writes.push(v0.writer(id).truncate()?);\n\n\n\n let v1 = DBView::load(k.clone(), writes.iter().map(write_to_read))?;\n\n assert_eq!(v1.reader().prepare_read(&id)?, PreparedRead::RecordIsEmpty);\n\n writes.push(v1.writer(id).revoke()?);\n\n\n\n let v2 = DBView::load(k, writes.iter().map(write_to_read))?;\n\n assert_eq!(v2.reader().prepare_read(&id)?, PreparedRead::NoSuchRecord);\n\n\n\n assert_eq!(v2.all().len(), 1);\n\n assert_eq!(v2.records().count(), 0);\n\n assert_eq!(v2.absolute_balance(), (0, 
2));\n\n assert_eq!(v2.chain_ctrs(), vec![(id, 1u64)].into_iter().collect());\n\n assert_eq!(v2.gc().len(), 2);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 57, "score": 194636.15336332054 }, { "content": "/// Get the preferred snapshot directory\n\n///\n\n/// Defaults to the `snapshots` subdirectory under the preferred Stronghold home directory as\n\n/// returned by [`home_dir`](fn.home_dir.html).\n\npub fn snapshot_dir() -> crate::Result<PathBuf> {\n\n let home_dir = home_dir()?;\n\n let snapshot_dir = home_dir.join(\"snapshots\");\n\n\n\n verify_or_create(&snapshot_dir)?;\n\n\n\n Ok(snapshot_dir)\n\n}\n\n\n", "file_path": "engine/snapshot/src/files.rs", "rank": 58, "score": 192058.18180091935 }, { "content": "/// Get the preferred Stronghold home directory\n\n///\n\n/// Defaults to a sub-directory named `.stronghold` under the user's home directory (see\n\n/// [`dirs_next::home_dir`](../dirs_next/fn.home_dir.html), but can be overridden by the `STRONGHOLD` environment\n\n/// variable.\n\npub fn home_dir() -> crate::Result<PathBuf> {\n\n let home = match std::env::var(\"STRONGHOLD\") {\n\n Ok(h) => h.into(),\n\n Err(_) => dirs_next::home_dir().unwrap(),\n\n };\n\n let home_dir = home.join(\".stronghold\");\n\n\n\n verify_or_create(&home_dir)?;\n\n\n\n Ok(home_dir)\n\n}\n\n\n", "file_path": "engine/snapshot/src/files.rs", "rank": 59, "score": 192053.40049227804 }, { "content": "#[test]\n\nfn test_rekove_then_write() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n let mut w = v0.writer(id);\n\n writes.push(w.truncate()?);\n\n writes.push(w.revoke()?);\n\n let data = fresh::bytestring();\n\n let hint = fresh_record_hint();\n\n writes.append(&mut w.write(&data, hint)?);\n\n\n\n let v1 = DBView::load(k, writes.iter().map(write_to_read))?;\n\n assert_eq!(v1.reader().prepare_read(&id)?, PreparedRead::CacheHit(data));\n\n\n\n assert_eq!(v1.all().len(), 1);\n\n assert_eq!(v1.records().count(), 1);\n\n assert_eq!(v1.absolute_balance(), (2, 3));\n\n assert_eq!(v1.chain_ctrs(), vec![(id, 2u64)].into_iter().collect());\n\n assert_eq!(v1.gc().len(), 1);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 60, "score": 191976.84886178857 }, { "content": "#[test]\n\nfn test_write_twice() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n let mut w = v0.writer(id);\n\n writes.push(w.truncate()?);\n\n let data0 = fresh::bytestring();\n\n let data1 = fresh::bytestring();\n\n let hint = fresh_record_hint();\n\n writes.append(&mut w.write(&data0, hint)?);\n\n writes.append(&mut w.write(&data1, hint)?);\n\n\n\n let v1 = DBView::load(k, writes.iter().map(write_to_read))?;\n\n\n\n assert_eq!(v1.all().len(), 1);\n\n assert_eq!(v1.records().count(), 1);\n\n assert_eq!(v1.absolute_balance(), (2, 3));\n\n assert_eq!(v1.chain_ctrs(), vec![(id, 2u64)].into_iter().collect());\n\n assert_eq!(v1.gc().len(), 1);\n\n\n\n assert_eq!(v1.reader().prepare_read(&id)?, PreparedRead::CacheHit(data1));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 61, "score": 191976.84886178857 }, { "content": "#[test]\n\n#[ignore = \"not yet implemented: we need some kind of checksum in the data transaction to protect against this case: 
when the users key is compromised\"]\n\nfn test_ensure_authenticty_of_blob() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n let mut w = v0.writer(id);\n\n writes.push(w.truncate()?);\n\n let hint = fresh_record_hint();\n\n let bid = match w.write(&fresh::bytestring(), hint)?.as_slice() {\n\n [w0, w1] => {\n\n assert_eq!(w0.kind(), Kind::Transaction);\n\n writes.push(w0.clone());\n\n\n\n assert_eq!(w1.kind(), Kind::Blob);\n\n w1.id().to_vec()\n\n }\n\n ws => panic!(\"{} unexpected writes\", ws.len()),\n\n };\n", "file_path": "engine/vault/tests/vault.rs", "rank": 62, "score": 189434.049408456 }, { "content": "#[test]\n\nfn test_write_cache_miss() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n let mut w = v0.writer(id);\n\n writes.push(w.truncate()?);\n\n let data = fresh::bytestring();\n\n let hint = fresh_record_hint();\n\n let (bid, blob) = match w.write(&data, hint)?.as_slice() {\n\n [w0, w1] => {\n\n assert_eq!(w0.kind(), Kind::Transaction);\n\n writes.push(w0.clone());\n\n\n\n assert_eq!(w1.kind(), Kind::Blob);\n\n (w1.id().to_vec(), w1.data().to_vec())\n\n }\n\n ws => panic!(\"{} unexpected writes\", ws.len()),\n", "file_path": "engine/vault/tests/vault.rs", "rank": 63, "score": 189425.88129545512 }, { "content": "#[test]\n\nfn test_write_cache_hit() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n let mut w = v0.writer(id);\n\n writes.push(w.truncate()?);\n\n let data = fresh::bytestring();\n\n let hint = fresh_record_hint();\n\n writes.append(&mut w.write(&data, hint)?);\n\n\n\n let v1 = DBView::load(k, writes.iter().map(write_to_read))?;\n\n\n\n assert_eq!(v1.all().len(), 1);\n\n assert_eq!(v1.records().count(), 1);\n\n assert_eq!(v1.absolute_balance(), (2, 2));\n\n assert_eq!(v1.chain_ctrs(), vec![(id, 1u64)].into_iter().collect());\n\n assert_eq!(v1.gc().len(), 0);\n\n\n\n assert_eq!(v1.reader().prepare_read(&id)?, PreparedRead::CacheHit(data));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 64, "score": 189425.88129545512 }, { "content": "#[test]\n\nfn test_rekove_without_reload() -> Result<()> {\n\n let k: Key<Provider> = Key::random()?;\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>())?;\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n let mut w = v0.writer(id);\n\n writes.push(w.truncate()?);\n\n writes.push(w.revoke()?);\n\n\n\n let v1 = DBView::load(k, writes.iter().map(write_to_read))?;\n\n assert_eq!(v1.reader().prepare_read(&id)?, PreparedRead::NoSuchRecord);\n\n\n\n assert_eq!(v1.all().len(), 1);\n\n assert_eq!(v1.records().count(), 0);\n\n assert_eq!(v1.absolute_balance(), (0, 2));\n\n assert_eq!(v1.chain_ctrs(), vec![(id, 1u64)].into_iter().collect());\n\n assert_eq!(v1.gc().len(), 2);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 65, "score": 189425.88129545512 }, { "content": "pub trait CloneSecret: Clone + Zeroize {}\n\n\n", "file_path": "client/src/utils/types.rs", "rank": 66, "score": 189183.34614341514 }, { "content": "#[test]\n\nfn test_read_non_existent_record() -> Result<()> 
{\n\n let k: Key<Provider> = Key::random()?;\n\n let v = DBView::load(k, empty::<ReadResult>())?;\n\n\n\n let id = RecordId::random::<Provider>()?;\n\n assert_eq!(v.reader().prepare_read(&id)?, PreparedRead::NoSuchRecord);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/vault/tests/vault.rs", "rank": 67, "score": 186976.55311950136 }, { "content": "fn compression(s: &str) {\n\n let compressed = compress(s.as_bytes());\n\n // println!(\"Compressed {} into {:?}\", s, compressed);\n\n let decompressed = decompress(&compressed).unwrap();\n\n // println!(\"Decompressed into {:?}\", str::from_utf8(&decompressed).unwrap());\n\n assert_eq!(decompressed, s.as_bytes());\n\n println!(\n\n \"original size: {}, compressed size: {}\",\n\n decompressed.len(),\n\n compressed.len()\n\n );\n\n assert_eq!(decompressed.len(), s.as_bytes().len());\n\n}\n\n\n", "file_path": "engine/snapshot/tests/compression.rs", "rank": 68, "score": 184760.04440053966 }, { "content": "fn check_min_file_len(input: &mut File) -> crate::Result<()> {\n\n let min = MAGIC.len()\n\n + VERSION.len()\n\n + xchacha20poly1305::XCHACHA20POLY1305_NONCE_SIZE\n\n + xchacha20poly1305::XCHACHA20POLY1305_TAG_SIZE;\n\n if input.metadata()?.len() >= min as u64 {\n\n Ok(())\n\n } else {\n\n Err(crate::Error::SnapshotError(\"Snapshot is too short to be valid\".into()))\n\n }\n\n}\n\n\n", "file_path": "engine/snapshot/src/logic.rs", "rank": 69, "score": 184001.84976641028 }, { "content": "fn check_header<I: Read>(input: &mut I) -> crate::Result<()> {\n\n // check the magic bytes\n\n let mut magic = [0u8; 5];\n\n input.read_exact(&mut magic)?;\n\n if magic != MAGIC {\n\n return Err(crate::Error::SnapshotError(\n\n \"magic bytes mismatch, is this really a snapshot file?\".into(),\n\n ));\n\n }\n\n\n\n // check the version\n\n let mut version = [0u8; 2];\n\n input.read_exact(&mut version)?;\n\n if version != VERSION {\n\n return Err(crate::Error::SnapshotError(\"snapshot version is incorrect\".into()));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/snapshot/src/logic.rs", "rank": 70, "score": 181924.27660294826 }, { "content": "pub fn location() -> Location {\n\n Location::generic(bytestring(), bytestring())\n\n}\n\n\n", "file_path": "client/src/tests/fresh.rs", "rank": 71, "score": 179586.63944935324 }, { "content": "pub fn string() -> String {\n\n let l = if rand::random::<u8>() % 4 == 0 {\n\n 0\n\n } else {\n\n rand::random::<usize>() % 4096\n\n };\n\n\n\n let mut s = String::with_capacity(l);\n\n for _ in 0..l {\n\n s.push(rand::random())\n\n }\n\n s\n\n}\n\n\n", "file_path": "test_utils/src/fresh.rs", "rank": 72, "score": 179586.63944935324 }, { "content": "pub fn fork<F, T>(f: F) -> crate::Result<T>\n\nwhere\n\n F: FnOnce() -> T,\n\n{\n\n unsafe {\n\n #[allow(clippy::unnecessary_cast)]\n\n let mut fds: [libc::c_int; 2] = [-1 as libc::c_int; 2];\n\n let r = libc::pipe(fds.as_mut_ptr());\n\n if r != 0 {\n\n return Err(crate::Error::os(\"pipe\"));\n\n }\n\n\n\n let pid = libc::fork();\n\n if pid < 0 {\n\n return Err(crate::Error::os(\"fork\"));\n\n }\n\n if pid == 0 {\n\n let r = libc::close(0);\n\n if r != 0 {\n\n libc::_exit(1)\n", "file_path": "runtime/src/zone_posix.rs", "rank": 73, "score": 176057.71304746025 }, { "content": "fn seek_to_beginning(f: &mut File) {\n\n f.seek(SeekFrom::Start(0)).unwrap();\n\n}\n\n\n", "file_path": "test_utils/src/lib.rs", "rank": 74, "score": 174254.7835277011 }, { "content": "pub fn record_hint() -> RecordHint {\n\n let mut bs = [0; 24];\n\n rand::thread_rng().fill(&mut bs);\n\n bs.into()\n\n}\n\n\n", 
"file_path": "client/src/tests/fresh.rs", "rank": 75, "score": 173996.87330262584 }, { "content": "pub fn corrupt_file_at(p: &Path) {\n\n let mut f: File = OpenOptions::new().write(true).read(true).open(p).unwrap();\n\n corrupt_file(&mut f)\n\n}\n", "file_path": "test_utils/src/lib.rs", "rank": 76, "score": 173629.83873458122 }, { "content": "pub fn passphrase() -> Option<String> {\n\n if coinflip() {\n\n Some(string())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "client/src/tests/fresh.rs", "rank": 77, "score": 173629.83873458122 }, { "content": "#[derive(Debug)]\n\nstruct ErrorTestVector {\n\n id: String,\n\n key: Vec<u8>,\n\n nonce: Vec<u8>,\n\n ad: Vec<u8>,\n\n cipher: Vec<u8>,\n\n}\n\n\n\nimpl ErrorTestVector {\n\n // load json\n\n pub fn load() -> Vec<Self> {\n\n let json = json::parse(VECTORS).unwrap();\n\n let mut vecs = Vec::new();\n\n for vec in json[\"error\"].check_array_iter() {\n\n vecs.push(Self {\n\n id: vec[\"id\"].check_string(),\n\n key: vec[\"key\"].check_bytes(),\n\n nonce: vec[\"nonce\"].check_bytes(),\n\n ad: vec[\"ad\"].check_bytes(),\n\n cipher: vec[\"cipher\"].check_bytes(),\n", "file_path": "engine/crypto/tests/xchachapoly.rs", "rank": 78, "score": 170565.16923622251 }, { "content": "/// a view over raw data.\n\npub trait AsView<T: Sized>: AsRef<[u8]> {\n\n /// creates a view over `self`.\n\n fn view(&self) -> &T {\n\n // get the bytes\n\n let bytes = self.as_ref();\n\n // validate the bytes\n\n assert!(mem::size_of::<T>() <= bytes.len(), \"Can't create view over this memory\");\n\n // get the pointer\n\n let bytes = bytes.as_ptr();\n\n // validate alignment\n\n assert_eq!(\n\n bytes.align_offset(mem::align_of::<T>()),\n\n 0,\n\n \"View's offset is incorrect\"\n\n );\n\n // cast the pointer\n\n unsafe { bytes.cast::<T>().as_ref() }.unwrap()\n\n }\n\n}\n\n\n", "file_path": "engine/vault/src/types.rs", "rank": 79, "score": 168686.4572472939 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_expired_key() {\n\n let mut cache = Cache::new();\n\n\n\n let key: Vec<u8> = b\"key\".to_vec();\n\n\n\n cache.insert(key.clone(), b\"value\".to_vec(), Some(Duration::default()));\n\n\n\n assert!(cache.contains_key(&key));\n\n}\n\n\n", "file_path": "engine/store/tests/test.rs", "rank": 80, "score": 167493.650802659 }, { "content": "#[derive(Debug)]\n\nstruct TestVector {\n\n id: String,\n\n key: Vec<u8>,\n\n nonce: Vec<u8>,\n\n cipher: Vec<u8>,\n\n}\n\n\n\nimpl TestVector {\n\n // load the json vectors\n\n pub fn load() -> Vec<Self> {\n\n let json = json::parse(VECTORS).unwrap();\n\n let mut vecs = Vec::new();\n\n\n\n for vec in json[\"crypto\"].check_array_iter() {\n\n vecs.push(Self {\n\n id: vec[\"id\"].check_string(),\n\n key: vec[\"key\"].check_bytes(),\n\n nonce: vec[\"nonce\"].check_bytes(),\n\n cipher: vec[\"cipher\"].check_bytes(),\n\n });\n", "file_path": "engine/crypto/tests/xchacha.rs", "rank": 81, "score": 166998.24839444703 }, { "content": "#[derive(Debug)]\n\nstruct TestVector {\n\n id: String,\n\n key: Vec<u8>,\n\n nonce: Vec<u8>,\n\n ad: Vec<u8>,\n\n plain: Vec<u8>,\n\n cipher: Vec<u8>,\n\n}\n\n\n\nimpl TestVector {\n\n // load the json vectors\n\n pub fn load() -> Vec<Self> {\n\n let json = json::parse(VECTORS).unwrap();\n\n let mut vecs = Vec::new();\n\n\n\n for vec in json[\"crypto\"].check_array_iter() {\n\n vecs.push(Self {\n\n id: vec[\"id\"].check_string(),\n\n key: vec[\"key\"].check_bytes(),\n\n nonce: vec[\"nonce\"].check_bytes(),\n", "file_path": "engine/crypto/tests/xchachapoly.rs", "rank": 82, "score": 166998.24839444703 }, { "content": 
"#[derive(Debug)]\n\nstruct TestVector {\n\n id: String,\n\n key: Vec<u8>,\n\n data: Vec<u8>,\n\n mac: Vec<u8>,\n\n}\n\n\n\nimpl TestVector {\n\n // load json vectors\n\n pub fn load() -> Vec<Self> {\n\n let json = json::parse(VECTORS).unwrap();\n\n let mut vecs = Vec::new();\n\n for vec in json[\"crypto\"].check_array_iter() {\n\n vecs.push(Self {\n\n id: vec[\"id\"].check_string(),\n\n key: vec[\"key\"].check_bytes(),\n\n data: vec[\"data\"].check_bytes(),\n\n mac: vec[\"mac\"].check_bytes(),\n\n });\n\n }\n", "file_path": "engine/crypto/tests/poly.rs", "rank": 83, "score": 166998.24839444703 }, { "content": "#[test]\n\nfn test_error() {\n\n for vec in ErrorTestVector::load() {\n\n vec.test_decryption();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n ad_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n", "file_path": "engine/crypto/tests/xchachapoly.rs", "rank": 84, "score": 165847.95306520473 }, { "content": "#[derive(Debug)]\n\nstruct TestVector {\n\n id: String,\n\n key: Vec<u8>,\n\n nonce: Vec<u8>,\n\n plain: Vec<u8>,\n\n ad: Vec<u8>,\n\n cipher: Vec<u8>,\n\n}\n\n\n\nimpl TestVector {\n\n // load json data\n\n pub fn load() -> Vec<Self> {\n\n let json = json::parse(VECTORS).unwrap();\n\n let mut vecs = Vec::new();\n\n\n\n for vec in json[\"crypto\"].check_array_iter() {\n\n vecs.push(Self {\n\n id: vec[\"id\"].check_string(),\n\n key: vec[\"key\"].check_bytes(),\n\n nonce: vec[\"nonce\"].check_bytes(),\n", "file_path": "engine/crypto/tests/chachapoly_ietf.rs", "rank": 85, "score": 165215.7062871817 }, { "content": "#[derive(Debug)]\n\nstruct TestVector {\n\n id: String,\n\n key: Vec<u8>,\n\n nonce: Vec<u8>,\n\n plain: Vec<u8>,\n\n cipher: Vec<u8>,\n\n}\n\n\n\nimpl TestVector {\n\n // load the json vectors\n\n pub fn load() -> Vec<Self> {\n\n let json = json::parse(VECTORS).unwrap();\n\n let mut vecs = Vec::new();\n\n\n\n for vec in json[\"crypto\"].check_array_iter() {\n\n vecs.push(Self {\n\n id: vec[\"id\"].check_string(),\n\n key: vec[\"key\"].check_bytes(),\n\n nonce: vec[\"nonce\"].check_bytes(),\n\n plain: vec[\"plain\"].check_bytes(),\n", "file_path": "engine/crypto/tests/chacha_ietf.rs", "rank": 86, "score": 165215.7062871817 }, { "content": "#[test]\n\nfn test_insert_return_old() {\n\n let mut cache = Cache::new();\n\n let key: &'static str = \"key\";\n\n\n\n let res_a = cache.insert(key, 1, Some(Duration::default()));\n\n let res_b = cache.insert(key, 2, None);\n\n let res_c = cache.insert(key, 3, None);\n\n\n\n assert_eq!(res_a, None);\n\n assert_eq!(res_b, None);\n\n assert_eq!(res_c, Some(2));\n\n}\n\n\n", "file_path": "engine/store/tests/test.rs", "rank": 87, "score": 165140.8729546104 }, { "content": "// Parse the user input line and handle the commands\n\nfn handle_input_line(swarm: &mut Swarm<P2PNetworkBehaviour<Request, Response>>, line: &str) {\n\n let target_regex = \"(?:\\\\s+\\\"(?P<target>[^\\\"]+)\\\")?\";\n\n let msg_regex = \"(?:\\\\s+\\\"(?P<msg>[^\\\"]+)\\\")?\";\n\n let regex = \"(?P<type>LIST|DIAL|PING|MSG)\".to_string() + target_regex + msg_regex;\n\n if let Some(captures) = Regex::new(&regex).unwrap().captures(&line) {\n\n match captures.name(\"type\").unwrap().as_str() {\n\n \"LIST\" => {\n\n println!(\"Known peers:\");\n\n let known_peers = swarm.get_all_peers();\n\n for (peer, addr) in known_peers {\n\n println!(\"{:?}: 
{:?}\", peer, addr);\n\n }\n\n }\n\n \"DIAL\" => {\n\n // Dial a multiaddress to establish a connection, if this was successful the identify protocol will\n\n // cause the two peers to send `P2PEvent::Identify` events to each other.\n\n if let Some(peer_addr) = captures\n\n .name(\"target\")\n\n .and_then(|peer_match| Multiaddr::from_str(peer_match.as_str()).ok())\n\n {\n", "file_path": "communication/examples/local-echo.rs", "rank": 88, "score": 164586.4004046985 }, { "content": "fn bench_decompress(c: &mut Criterion) {\n\n let compressed = compress(LOREM_STR.as_bytes());\n\n\n\n c.bench_function(\"decompress data\", |b| {\n\n b.iter(|| decompress(&compressed).unwrap());\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench_compression, bench_compress, bench_decompress);\n\ncriterion_main!(benches);\n", "file_path": "engine/snapshot/benches/benchmark.rs", "rank": 89, "score": 164579.35682127564 }, { "content": "fn bench_write_snapshot(c: &mut Criterion) {\n\n let stronghold = init_stronghold();\n\n let mut stronghold = init_read_vault(stronghold);\n\n\n\n let key_data = b\"abcdefghijklmnopqrstuvwxyz012345\".to_vec();\n\n\n\n c.bench_function(\"Write to snapshot\", |b| {\n\n b.iter(|| {\n\n block_on(stronghold.write_all_to_snapshot(key_data.clone(), Some(\"bench\".into()), None));\n\n });\n\n });\n\n}\n\n\n", "file_path": "client/benches/benchmark.rs", "rank": 90, "score": 164579.35682127564 }, { "content": "fn bench_read_from_snapshot(c: &mut Criterion) {\n\n let stronghold = init_stronghold();\n\n let key_data = b\"abcdefghijklmnopqrstuvwxyz012345\".to_vec();\n\n let mut stronghold = init_read_snap(stronghold, key_data.clone());\n\n\n\n c.bench_function(\"Read from snapshot\", |b| {\n\n b.iter(|| {\n\n block_on(stronghold.read_snapshot(\n\n b\"path\".to_vec(),\n\n None,\n\n key_data.clone(),\n\n Some(\"bench_read\".into()),\n\n None,\n\n ));\n\n });\n\n });\n\n}\n\n\n", "file_path": "client/benches/benchmark.rs", "rank": 91, "score": 164579.35682127564 }, { "content": "fn bench_write_store(c: &mut Criterion) {\n\n let stronghold = init_stronghold();\n\n\n\n c.bench_function(\"Bench write to store\", |b| {\n\n b.iter(|| block_on(stronghold.write_to_store(Location::generic(\"test\", \"some_key\"), b\"test\".to_vec(), None)));\n\n });\n\n}\n\n\n", "file_path": "client/benches/benchmark.rs", "rank": 92, "score": 164579.35682127564 }, { "content": "fn bench_write(c: &mut Criterion) {\n\n c.bench_function(\"write to engine\", |b| {\n\n b.iter(|| {\n\n let k: Key<Provider> = Key::random().unwrap();\n\n let v0 = DBView::load(k.clone(), empty::<ReadResult>()).unwrap();\n\n\n\n let mut writes = vec![];\n\n\n\n let id = RecordId::random::<Provider>().unwrap();\n\n let mut w = v0.writer(id);\n\n writes.push(w.truncate().unwrap());\n\n\n\n writes.append(\n\n &mut w\n\n .write(\n\n black_box(b\"abcdefghijklmnopqrstuvwxyz1234567890\"),\n\n RecordHint::new(b\"test\").unwrap(),\n\n )\n\n .unwrap(),\n\n );\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench_write);\n\ncriterion_main!(benches);\n", "file_path": "engine/vault/benches/benchmark.rs", "rank": 93, "score": 164579.35682127564 }, { "content": "fn bench_compression(c: &mut Criterion) {\n\n c.bench_function(\"compress and decompress data\", |b| {\n\n b.iter(|| invert(LOREM_STR));\n\n });\n\n}\n\n\n", "file_path": "engine/snapshot/benches/benchmark.rs", "rank": 94, "score": 164579.35682127564 }, { "content": "fn bench_compression(c: &mut Criterion) {\n\n let mut cache = Cache::new();\n\n\n\n c.bench_function(\"Write to cache\", |b| {\n\n 
b.iter(|| {\n\n cache.insert(b\"test\", b\"values\", None);\n\n });\n\n });\n\n}\n\n\n", "file_path": "engine/store/benches/benchmark.rs", "rank": 95, "score": 164579.35682127564 }, { "content": "fn bench_read_store(c: &mut Criterion) {\n\n let stronghold = init_stronghold();\n\n block_on(stronghold.write_to_store(Location::generic(\"test\", \"some_key\"), b\"test\".to_vec(), None));\n\n\n\n c.bench_function(\"Bench read from store\", |b| {\n\n b.iter(|| block_on(stronghold.read_from_store(Location::generic(\"test\", \"some_key\"))));\n\n });\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n bench_stronghold_write_create,\n\n bench_stronghold_write_init,\n\n bench_write_snapshot,\n\n bench_read_from_snapshot,\n\n bench_write_store,\n\n bench_read_store\n\n);\n\ncriterion_main!(benches);\n", "file_path": "client/benches/benchmark.rs", "rank": 96, "score": 164579.35682127564 }, { "content": "fn bench_compress(c: &mut Criterion) {\n\n c.bench_function(\"compress data\", |b| {\n\n b.iter(|| compress(LOREM_STR.as_bytes()));\n\n });\n\n}\n\n\n", "file_path": "engine/snapshot/benches/benchmark.rs", "rank": 97, "score": 164579.35682127564 }, { "content": "fn bench_compress(c: &mut Criterion) {\n\n let mut cache = Cache::new();\n\n\n\n cache.insert(b\"test\".to_vec(), b\"values\".to_vec(), None);\n\n\n\n c.bench_function(\"Read from cache\", |b| {\n\n b.iter(|| {\n\n cache.get(&b\"test\".to_vec());\n\n });\n\n });\n\n}\n\n\n", "file_path": "engine/store/benches/benchmark.rs", "rank": 98, "score": 164579.35682127564 }, { "content": "fn bench_decompress(c: &mut Criterion) {\n\n cache! {\n\n fn fib(n: u32) -> u32 => {\n\n match n {\n\n 0 => 1,\n\n 1 => 1,\n\n _ => fib(n - 1) + fib(n - 2),\n\n }\n\n }\n\n }\n\n\n\n fib(20);\n\n\n\n c.bench_function(\"Read from Cached function\", |b| {\n\n b.iter(|| {\n\n FIB_CACHE.lock().unwrap().get(&20);\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench_compression, bench_compress, bench_decompress);\n\ncriterion_main!(benches);\n", "file_path": "engine/store/benches/benchmark.rs", "rank": 99, "score": 164579.35682127564 } ]
Rust
src/binary_heap.rs
acodercat/rust-algorithms
06db2526fa8709886a2baf2467f720c62076720d
use std::fmt::Debug;
use std::cmp::PartialOrd;

#[derive(Debug)]
pub struct BinaryHeap<T> {
    container: Vec<T>,
}

impl<T: Debug + PartialOrd> BinaryHeap<T> {
    pub fn new() -> Self {
        return BinaryHeap { container: Vec::new() };
    }

    pub fn from(vec: Vec<T>) -> BinaryHeap<T> {
        let mut heap = BinaryHeap { container: vec };
        for i in (0..=heap.calculate_parent_index_of_tail()).rev() {
            heap.shift_down(i);
        }
        return heap;
    }

    pub fn push(&mut self, element: T) {
        self.container.push(element);
        self.shift_up(self.tail_index());
    }

    pub fn len(&self) -> usize {
        return self.container.len();
    }

    pub fn tail_index(&self) -> usize {
        return self.len() - 1;
    }

    pub fn capacity(self) -> usize {
        return self.container.capacity();
    }

    pub fn is_empty(self) -> bool {
        return self.container.is_empty();
    }

    pub fn peek(&self) -> Option<&T> {
        return self.container.first();
    }

    pub fn extract(&mut self) -> Option<T> {
        let tail_index = self.tail_index();
        let head_index = 0;
        self.container.swap(head_index, tail_index);
        let root = self.container.pop();
        self.shift_down(head_index);
        return root;
    }

    fn shift_down(&mut self, mut current_index: usize) {
        let mut found_child_index;
        let mut left_child_index_of_current = Self::calculate_left_child_index(current_index);
        while left_child_index_of_current < self.len() {
            let right_child_index_of_current = Self::calculate_right_child_index(current_index);
            if right_child_index_of_current < self.len()
                && self.container.get(right_child_index_of_current)
                    > self.container.get(left_child_index_of_current)
            {
                found_child_index = right_child_index_of_current;
            } else {
                found_child_index = left_child_index_of_current;
            }
            if self.container.get(current_index) >= self.container.get(found_child_index) {
                break;
            }
            self.container.swap(current_index, found_child_index);
            current_index = found_child_index;
            left_child_index_of_current = Self::calculate_left_child_index(current_index);
        }
    }

    fn shift_up(&mut self, mut current_index: usize) {
        let mut parent_index_of_current = Self::calculate_parent_index(current_index);
        while (current_index > 0)
            && (self.container.get(current_index) > self.container.get(parent_index_of_current))
        {
            self.container.swap(current_index, parent_index_of_current);
            current_index = parent_index_of_current;
            parent_index_of_current = Self::calculate_parent_index(current_index);
        }
    }

    fn calculate_left_child_index(index: usize) -> usize {
        return index * 2 + 1;
    }

    fn calculate_right_child_index(index: usize) -> usize {
        return Self::calculate_left_child_index(index) + 1;
    }

    fn calculate_parent_index(index: usize) -> usize {
        let parent_index = ((index as f32 - 1.0) / 2.0).floor() as usize;
        if parent_index <= 0 {
            return 0;
        }
        return parent_index;
    }

    fn calculate_parent_index_of_tail(&self) -> usize {
        let tail_index = self.len() - 1;
        return Self::calculate_parent_index(tail_index);
    }
}

#[test]
fn test_binary_heap() {
    let mut heap1: BinaryHeap<i32> = BinaryHeap::from(vec![1, 3, 4, 5]);
    heap1.push(-21);
    heap1.push(1);
    heap1.push(3);
    heap1.push(190);
    assert_eq!(heap1.peek(), Some(&190));
    assert_eq!(heap1.extract(), Some(190));
    assert_eq!(heap1.peek(), Some(&5));
    let mut heap2: BinaryHeap<i32> = BinaryHeap::new();
    heap2.push(-21);
    heap2.push(1);
    heap2.push(3);
    heap2.push(190);
    assert_eq!(heap2.peek(), Some(&190));
    assert_eq!(heap2.extract(), Some(190));
    assert_eq!(heap2.peek(), Some(&3));
}
use std::fmt::Debug; use std::cmp::PartialOrd; #[derive(Debug)] pub struct BinaryHeap <T> { container: Vec<T>, } impl <T: Debug + PartialOrd> BinaryHeap <T> { pub fn new() -> Self { return BinaryHeap { container: Vec::new() }; } pub fn from(vec: Vec<T>) -> BinaryHeap<T> { let mut heap = BinaryHeap { container: vec }; for i in (0 ..= heap.calculate_parent_index_of_tail()).rev() { heap.shift_down(i); } return heap; } pub fn push(&mut self, element: T) { self.container.push(element); self.shift_up(self.tail_index()); } pub fn len(&self)-> usize { return self.container.len(); } pub fn tail_index(&self)-> usize { return self.len() - 1; } pub fn capacity(self) -> usize { return self.container.capacity(); } pub fn is_empty(self) -> bool { return self.container.is_empty(); } pub fn peek(&self) -> Option<&T> { return self.container.first(); } pub fn extract(&mut self) -> Option<T> { let tail_index = self.tail_index(); let head_index = 0; self.container.swap(head_index, tail_index); let root= self.container.pop(); self.shift_down(head_index); return root; } fn shift_down(&mut self, mut current_index: usize) { let mut found_child_index; let mut left_child_index_of_current = Self::calculate_left_child_index(current_index); while left_child_index_of_current < self.len() { let right_child_index_of_current = Self::calculate_right_child_index(current_index); if right_child_index_of_current < self.len() && self.container.get(right_child_index_of_current) > self.container.get(left_child_index_of_current) { found_child_index = right_child_index_of_current; } else { found_child_index = left_child_index_of_current; } if self.container.get(current_index) >= self.container.get(found_child_index) { break; } self.container.swap(current_index, found_child_index); current_index = found_child_index; left_child_index_of_current = Self::calculate_left_child_index(current_index); } } fn shift_up(&mut self, mut current_index: usize) { let mut parent_index_of_current = Self::calculate_parent_index(current_index); while (current_index > 0) && (self.container.get(current_index) > self.container.get(parent_index_of_current)) { self.container.swap(current_index, parent_index_of_current); current_index = parent_index_of_current; parent_index_of_current = Self::calculate_parent_index(current_index); } } fn calculate_left_child_index(index: usize) -> usize { return index * 2 + 1; } fn calculate_right_child_index(index: usize) -> usize { return Self::calculate_left_child_index(index) + 1; } fn calculate_parent_index(index: usize) -> usize { let parent_index = ((index as f32 - 1.0) / 2.0).floor() as usize; if parent_index <= 0 { return 0; } return parent_index; } fn calculate_parent_index_of_tail(&self) -> usize { let tail_index = self.len() - 1; return Self::calculate_parent_index(tail_index); } } #[test] fn test_binary_heap() { let mut heap1:BinaryHeap<i32> = BinaryHeap::from(vec![1,3,4,5]); heap1.push(-21); heap1.push(1); heap1.push(3); heap1.push(190); assert_eq!(heap1.peek(), Some(&190)); assert_eq!(heap1.extract(), Some(190)); assert_eq!(heap1.peek(), Some(&5)); let mut heap2:BinaryHeap<i32> = BinaryHeap::new(); heap2.push(-21);
heap2.push(1); heap2.push(3); heap2.push(190); assert_eq!(heap2.peek(), Some(&190)); assert_eq!(heap2.extract(), Some(190)); assert_eq!(heap2.peek(), Some(&3)); }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let mut heap:BinaryHeap<i32> = BinaryHeap::from(vec![1, 2, 3, 4]);\n\n heap.push(-21);\n\n heap.push(1);\n\n heap.push(3);\n\n heap.push(190);\n\n heap.push(4);\n\n heap.push(90);\n\n heap.extract();\n\n heap.extract();\n\n println!(\"{}\", heap.peek().unwrap());\n\n println!(\"{}\", heap.len());\n\n}\n", "file_path": "examples/binary_heap.rs", "rank": 1, "score": 32457.218975164462 }, { "content": "#[test]\n\nfn test_union_find() {\n\n let mut union_find1: UnionFind = UnionFind::new();\n\n union_find1.push(2);\n\n union_find1.push(21);\n\n union_find1.push(3);\n\n assert_eq!(union_find1.connected_component(), 3);\n\n assert_eq!(union_find1.is_connected(2, 3), false);\n\n assert_eq!(union_find1.union(3,2), true);\n\n assert_eq!(union_find1.union(3,2), false);\n\n assert_eq!(union_find1.is_connected(2, 3), true);\n\n assert_eq!(union_find1.connected_component(), 2);\n\n\n\n let mut union_find2: UnionFind = UnionFind::from(vec![1, 2, 3, 4]);\n\n assert_eq!(union_find2.connected_component(), 4);\n\n union_find2.push(11);\n\n assert_eq!(union_find2.connected_component(), 5);\n\n assert_eq!(union_find2.is_connected(1, 2), false);\n\n assert_eq!(union_find2.union(3,2), true);\n\n assert_eq!(union_find2.union(3,2), false);\n\n assert_eq!(union_find2.is_connected(2, 3), true);\n\n assert_eq!(union_find2.connected_component(), 4);\n\n}", "file_path": "src/union_find.rs", "rank": 2, "score": 31110.045523419936 }, { "content": "fn main() {\n\n\n\n let mut union_find1: UnionFind = UnionFind::new();\n\n union_find1.push(2);\n\n union_find1.push(21);\n\n union_find1.push(3);\n\n assert_eq!(union_find1.connected_component(), 3);\n\n assert_eq!(union_find1.is_connected(2, 3), false);\n\n assert_eq!(union_find1.union(3,2), true);\n\n assert_eq!(union_find1.union(3,2), false);\n\n assert_eq!(union_find1.is_connected(2, 3), true);\n\n assert_eq!(union_find1.connected_component(), 2);\n\n\n\n let mut union_find2: UnionFind = UnionFind::from(vec![1, 2, 3, 4]);\n\n assert_eq!(union_find2.connected_component(), 4);\n\n union_find2.push(11);\n\n assert_eq!(union_find2.connected_component(), 5);\n\n assert_eq!(union_find2.is_connected(1, 2), false);\n\n assert_eq!(union_find2.union(3,2), true);\n\n assert_eq!(union_find2.union(3,2), false);\n\n assert_eq!(union_find2.is_connected(2, 3), true);\n\n assert_eq!(union_find2.connected_component(), 4);\n\n\n\n}\n\n\n\n\n", "file_path": "examples/union_find.rs", "rank": 3, "score": 19366.96392580316 }, { "content": "fn main() {\n\n\n\n}\n", "file_path": "examples/binary_search_tree.rs", "rank": 4, "score": 18534.897771531316 }, { "content": "use algorithms::binary_heap::BinaryHeap;\n\n\n", "file_path": "examples/binary_heap.rs", "rank": 11, "score": 13960.250652223946 }, { "content": " connected_component: 0\n\n };\n\n }\n\n\n\n pub fn from(vec: Vec<usize>) -> Self {\n\n let mut parents = HashMap::<usize, usize>::new();\n\n let mut ranks = HashMap::<usize, usize>::new();\n\n let len = vec.len();\n\n for element in vec {\n\n ranks.insert(element, 1);\n\n parents.insert(element, element);\n\n }\n\n\n\n return UnionFind {\n\n parents,\n\n ranks,\n\n len,\n\n connected_component: len\n\n };\n\n }\n", "file_path": "src/union_find.rs", "rank": 12, "score": 8.89295005616103 }, { "content": "use std::fmt::Debug;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug)]\n\npub struct UnionFind {\n\n parents: HashMap<usize, usize>,\n\n ranks: HashMap<usize, usize>,\n\n len: usize,\n\n connected_component: usize\n\n}\n\n\n\nimpl UnionFind {\n\n\n\n 
pub fn new() -> Self {\n\n let parents = HashMap::<usize, usize>::new();\n\n let ranks = HashMap::<usize, usize>::new();\n\n return UnionFind {\n\n parents,\n\n ranks,\n\n len: 0,\n", "file_path": "src/union_find.rs", "rank": 13, "score": 7.641467025863852 }, { "content": "use std::fmt::Debug;\n\n\n\n#[derive(Debug)]\n\npub struct BinarySearchTree <T> {\n\n container: Vec<T>,\n\n}", "file_path": "src/binary_search_tree.rs", "rank": 14, "score": 6.979237583231621 }, { "content": "\n\n pub fn find(&self, element: usize) -> Option<usize> {\n\n let mut current_element= element;\n\n while let Some(parent) = self.parents.get(&current_element) {\n\n if let Some(parent_of_parent) = self.parents.get(parent) {\n\n if parent == parent_of_parent {\n\n return Some(*parent_of_parent);\n\n }\n\n current_element = *parent_of_parent;\n\n };\n\n };\n\n None\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n return self.len;\n\n }\n\n\n\n pub fn push(&mut self, element: usize) {\n\n if !self.parents.contains_key(&element) {\n", "file_path": "src/union_find.rs", "rank": 15, "score": 6.567646733142012 }, { "content": " self.parents.insert(element, element);\n\n self.ranks.insert(element, 1);\n\n self.connected_component += 1;\n\n }\n\n }\n\n\n\n pub fn connected_component(&self) -> usize {\n\n return self.connected_component;\n\n }\n\n\n\n pub fn is_connected(&self, q: usize, p: usize) -> bool {\n\n if let (Some(p_parent), Some(q_parent)) = (self.find(p), self.find(q)) {\n\n if p_parent == q_parent {\n\n return true;\n\n }\n\n }\n\n return false;\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/union_find.rs", "rank": 16, "score": 6.380450750125281 }, { "content": "pub mod binary_heap;\n\npub mod union_find;\n\npub mod binary_search_tree;", "file_path": "src/lib.rs", "rank": 17, "score": 3.1655693666662303 }, { "content": "\n\n pub fn union(&mut self, p: usize, q: usize) -> bool {\n\n if let (Some(p_parent), Some(q_parent)) = (self.find(p), self.find(q)) {\n\n if p_parent != q_parent {\n\n if let (Some(p_rank), Some(q_rank)) = (self.ranks.get(&p_parent), self.ranks.get(&q_parent)) {\n\n if p_rank > q_rank {\n\n self.parents.insert(q_parent, p_parent);\n\n } else if p_rank < q_rank {\n\n self.parents.insert(p_parent, q_parent);\n\n } else {\n\n self.ranks.insert( q_parent, self.ranks.get(&(q_parent)).unwrap() + 1);\n\n self.parents.insert(p_parent, q_parent);\n\n }\n\n self.connected_component -= 1;\n\n return true;\n\n }\n\n }\n\n };\n\n return false;\n\n }\n", "file_path": "src/union_find.rs", "rank": 18, "score": 2.99048221077421 }, { "content": "# Rust Algorithms\n\n[![Build Status](https://travis-ci.org/acodercat/rust-algorithms.svg?branch=master)](https://travis-ci.org/acodercat/rust-algorithms)\n\n[![MIT](https://img.shields.io/badge/License-MIT-green.svg)](https://github.com/acodercat/rust-algorithms/blob/master/LICENSE)\n\n\n\nCommon data structures and algorithms are implemented using Rust.\n\n\n\nSee the [examples](https://github.com/acodercat/rust-algorithms/blob/master/examples) directory for more algorithm examples.\n\n\n\n## Algorithms\n\n\n\n* [BinaryHeap](https://github.com/acodercat/rust-algorithms/blob/master/src/binary_heap.rs)\n\n* [UnionFind](https://github.com/acodercat/rust-algorithms/blob/master/src/union_find.rs)\n\n* [BinarySearchTree](https://github.com/acodercat/rust-algorithms/blob/master/src/binary_search_tree.rs)\n\n\n\n## Run Example\n\n\n\n```bash\n\n$ cargo run --example exampleName\n\n```\n\nJust like this:\n\n```bash\n\n$ cargo run --example binary_heap\n\n```\n\n\n\n## 
Test\n\n\n\n```bash\n\n$ cargo test\n\n```\n\n\n\n## License\n\n\n\n[MIT](LICENSE)\n", "file_path": "README.md", "rank": 19, "score": 1.6955309195195367 }, { "content": "use algorithms::union_find::UnionFind;\n\n\n", "file_path": "examples/union_find.rs", "rank": 20, "score": 1.6186579177799123 }, { "content": "use algorithms::binary_search_tree::BinarySearchTree;\n\n\n", "file_path": "examples/binary_search_tree.rs", "rank": 21, "score": 1.5155586873480706 } ]
Rust
tests/issuer.rs
evannetwork/vade-evan
7d37225a4756c7595b03a4901a5e3805654f2f7d
/*
  Copyright (c) 2018-present evan GmbH.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
*/

extern crate env_logger;
extern crate log;
extern crate vade_evan;

mod test_data;

use std::{collections::HashMap, error::Error};
use test_data::{
    accounts::local::{ISSUER_ADDRESS, ISSUER_DID, ISSUER_PRIVATE_KEY},
    did::{EXAMPLE_DID_1, EXAMPLE_DID_DOCUMENT_1},
    vc_zkp::EXAMPLE_CREDENTIAL_SCHEMA,
};
use vade_evan::{
    application::{
        datatypes::{CredentialSchema, SchemaProperty},
        issuer::Issuer,
    },
    crypto::crypto_utils::check_assertion_proof,
    signing::{LocalSigner, Signer},
};

#[tokio::test]
async fn can_create_schema() -> Result<(), Box<dyn Error>> {
    match env_logger::try_init() {
        Ok(_) | Err(_) => (),
    };
    let did_document = serde_json::to_value(&EXAMPLE_DID_DOCUMENT_1)?;
    let mut required_properties: Vec<String> = Vec::new();
    let mut test_properties: HashMap<String, SchemaProperty> = HashMap::new();
    test_properties.insert(
        "test_property_string".to_owned(),
        SchemaProperty {
            r#type: "string".to_owned(),
            format: None,
            items: None,
        },
    );
    required_properties.push("test_property_string".to_owned());

    let signer: Box<dyn Signer> = Box::new(LocalSigner::new());
    let schema: CredentialSchema = Issuer::create_credential_schema(
        EXAMPLE_DID_1,
        ISSUER_DID,
        "test_schema",
        "Test description",
        test_properties,
        required_properties,
        false,
        &did_document["publicKey"][0]["id"].to_string(),
        &ISSUER_PRIVATE_KEY,
        &signer,
    )
    .await?;

    assert_eq!(&schema.author, &ISSUER_DID);
    assert_eq!(schema.additional_properties, false);
    let result_property: &SchemaProperty = &schema.properties.get("test_property_string").unwrap();
    let expected: SchemaProperty = SchemaProperty {
        r#type: "string".to_owned(),
        format: None,
        items: None,
    };
    assert_eq!(
        serde_json::to_string(&result_property).unwrap(),
        serde_json::to_string(&expected).unwrap(),
    );

    let serialized = serde_json::to_string(&schema).unwrap();
    assert!(match check_assertion_proof(&serialized, ISSUER_ADDRESS) {
        Ok(()) => true,
        Err(e) => panic!("assertion check failed with: {}", e),
    });

    Ok(())
}

#[tokio::test]
async fn can_create_credential_definition() -> Result<(), Box<dyn Error>> {
    let schema: CredentialSchema = serde_json::from_str(&EXAMPLE_CREDENTIAL_SCHEMA).unwrap();
    let signer: Box<dyn Signer> = Box::new(LocalSigner::new());
    let (definition, _) = Issuer::create_credential_definition(
        &EXAMPLE_DID_1,
        &ISSUER_DID,
        &schema,
        "did:evan:testcore:0x0f737d1478ea29df0856169f25ca9129035d6fd1#key-1",
        &ISSUER_PRIVATE_KEY,
        &signer,
        None,
        None,
    )
    .await?;

    assert_eq!(
        serde_json::to_string(&definition.issuer).unwrap(),
        serde_json::to_string(&ISSUER_DID).unwrap(),
    );
    assert_eq!(
        serde_json::to_string(&definition.schema).unwrap(),
        serde_json::to_string(&schema.id).unwrap()
    );
    assert_eq!(&definition.id, EXAMPLE_DID_1);

    let serialized = serde_json::to_string(&definition).unwrap();
    assert!(match check_assertion_proof(&serialized, ISSUER_ADDRESS) {
        Ok(()) => true,
        Err(e) => panic!("assertion check failed with: {}", e),
    });

    Ok(())
}
/* Copyright (c) 2018-present evan GmbH. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ extern crate env_logger; extern crate log; extern crate vade_evan; mod test_data; use std::{collections::HashMap, error::Error}; use test_data::{ accounts::local::{ISSUER_ADDRESS, ISSUER_DID, ISSUER_PRIVATE_KEY}, did::{EXAMPLE_DID_1, EXAMPLE_DID_DOCUMENT_1}, vc_zkp::EXAMPLE_CREDENTIAL_SCHEMA, }; use vade_evan::{ application::{ datatypes::{CredentialSchema, SchemaProperty}, issuer::Issuer, }, crypto::crypto_utils::check_assertion_proof, signing::{LocalSigner, Signer}, }; #[tokio::test] async fn can_create_schema() -> Result<(), Box<dyn Error>> { match env_logger::try_init() { Ok(_) | Err(_) => (), }; let did_document = serde_json::to_value(&EXAMPLE_DID_DOCUMENT_1)?; let mut required_properties: Vec<String> = Vec::new(); let mut test_properties: HashMap<String, SchemaProperty> = HashMap::new(); test_properties.insert( "test_property_string".to_owned(), SchemaProperty { r#type: "string".to_owned(), format: None,
ocalSigner::new()); let schema: CredentialSchema = Issuer::create_credential_schema( EXAMPLE_DID_1, ISSUER_DID, "test_schema", "Test description", test_properties, required_properties, false, &did_document["publicKey"][0]["id"].to_string(), &ISSUER_PRIVATE_KEY, &signer, ) .await?; assert_eq!(&schema.author, &ISSUER_DID); assert_eq!(schema.additional_properties, false); let result_property: &SchemaProperty = &schema.properties.get("test_property_string").unwrap(); let expected: SchemaProperty = SchemaProperty { r#type: "string".to_owned(), format: None, items: None, }; assert_eq!( serde_json::to_string(&result_property).unwrap(), serde_json::to_string(&expected).unwrap(), ); let serialized = serde_json::to_string(&schema).unwrap(); assert!(match check_assertion_proof(&serialized, ISSUER_ADDRESS) { Ok(()) => true, Err(e) => panic!("assertion check failed with: {}", e), }); Ok(()) } #[tokio::test] async fn can_create_credential_definition() -> Result<(), Box<dyn Error>> { let schema: CredentialSchema = serde_json::from_str(&EXAMPLE_CREDENTIAL_SCHEMA).unwrap(); let signer: Box<dyn Signer> = Box::new(LocalSigner::new()); let (definition, _) = Issuer::create_credential_definition( &EXAMPLE_DID_1, &ISSUER_DID, &schema, "did:evan:testcore:0x0f737d1478ea29df0856169f25ca9129035d6fd1#key-1", &ISSUER_PRIVATE_KEY, &signer, None, None, ) .await?; assert_eq!( serde_json::to_string(&definition.issuer).unwrap(), serde_json::to_string(&ISSUER_DID).unwrap(), ); assert_eq!( serde_json::to_string(&definition.schema).unwrap(), serde_json::to_string(&schema.id).unwrap() ); assert_eq!(&definition.id, EXAMPLE_DID_1); let serialized = serde_json::to_string(&definition).unwrap(); assert!(match check_assertion_proof(&serialized, ISSUER_ADDRESS) { Ok(()) => true, Err(e) => panic!("assertion check failed with: {}", e), }); Ok(()) }
items: None, }, ); required_properties.push("test_property_string".to_owned()); let signer: Box<dyn Signer> = Box::new(L
function_block-random_span
[ { "content": "fn get_vade_evan(matches: &ArgMatches) -> Result<VadeEvan> {\n\n let target = get_argument_value(&matches, \"target\", Some(DEFAULT_TARGET));\n\n let signer = get_argument_value(&matches, \"signer\", Some(DEFAULT_SIGNER));\n\n return Ok(VadeEvan::new(VadeEvanConfig { target, signer })?);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 0, "score": 156609.9577208173 }, { "content": "fn get_first_result(results: Vec<Option<String>>) -> Result<String, VadeEvanError> {\n\n if results.is_empty() {\n\n return Err(VadeEvanError::NoResults);\n\n }\n\n let empty_result = String::new();\n\n let result = results[0].as_ref().unwrap_or(&empty_result);\n\n\n\n Ok(result.to_string())\n\n}\n\n\n\npub struct VadeEvanConfig<'a> {\n\n pub target: &'a str,\n\n pub signer: &'a str,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n pub request_id: *const c_void,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n pub request_function_callback: ResolveHttpRequest,\n\n}\n\n\n\n/// A [`VadeEvan`] instance is your single point of contact for interacting with DIDs and VCs.\n", "file_path": "src/api/vade_evan_api.rs", "rank": 1, "score": 138231.30162689352 }, { "content": "#[allow(unused_variables)] // allow possibly unused variables due to feature mix\n\nfn get_vade_evan(config: Option<&JsValue>) -> Result<VadeEvan, Box<dyn Error>> {\n\n let config_values =\n\n get_config_values(config, vec![\"signer\".to_string(), \"target\".to_string()])?;\n\n let (signer_config, target) = match config_values.as_slice() {\n\n [signer_config, target, ..] => (signer_config, target),\n\n _ => {\n\n return Err(Box::from(\"invalid vade config\"));\n\n }\n\n };\n\n\n\n return VadeEvan::new(VadeEvanConfig {\n\n target,\n\n signer: signer_config,\n\n })\n\n .map_err(|err| Box::from(format!(\"could not create VadeEvan instance; {}\", &err)));\n\n}\n\n\n", "file_path": "src/wasm_lib.rs", "rank": 2, "score": 132714.46789343457 }, { "content": "fn get_argument_matches<'a>() -> Result<ArgMatches<'a>> {\n\n Ok(get_app()?.get_matches())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 129937.74716698032 }, { "content": "fn stringify_vade_evan_error(err: VadeEvanError) -> String {\n\n format!(\"{}\", err)\n\n}\n\n\n", "file_path": "src/c_lib.rs", "rank": 4, "score": 116255.14784894847 }, { "content": "fn parse<T>(payload: &str) -> Result<T, serde_json::Error>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n serde_json::from_str(payload)\n\n}\n\n\n", "file_path": "src/wasm_lib.rs", "rank": 5, "score": 112207.01437295887 }, { "content": "fn jsify_vade_evan_error(err: VadeEvanError) -> JsValue {\n\n JsValue::from(format!(\"{}\", err))\n\n}\n\n\n\n#[derive(Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Response {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub error: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub response: Option<String>,\n\n}\n\n\n\n#[allow(unused_variables)] // allow possibly unused variables due to feature mix\n\n#[wasm_bindgen]\n\npub async fn execute_vade(\n\n func_name: String,\n\n did_or_method: String,\n\n options: String,\n\n payload: String,\n", "file_path": "src/wasm_lib.rs", "rank": 6, "score": 111417.62552598762 }, { "content": "#[cfg(any(feature = \"vc-zkp-bbs\", feature = \"jwt-vc\", feature = \"did-substrate\"))]\n\nfn get_signer(signer: &str) -> Box<dyn Signer> {\n\n if signer.starts_with(\"remote\") {\n\n Box::new(RemoteSigner::new(\n\n signer.trim_start_matches(\"remote|\").to_string(),\n\n 
))\n\n } else if signer.starts_with(\"local\") {\n\n Box::new(LocalSigner::new())\n\n } else {\n\n panic!(\"invalid signer config: {}\", &signer)\n\n }\n\n}\n\n\n\n// variables might be unused depending on feature combination\n", "file_path": "src/api/vade_bundle.rs", "rank": 7, "score": 104483.96837890786 }, { "content": "/// Checks if input is a DID and returns a `CredentialError::NotADid` if not.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `to_check` - input value to check\n\n/// * `name` - name of the input to check, used for log message\n\n///\n\n/// # Returns\n\n/// `()` or `CredentialError::NotADid`\n\npub fn fail_if_not_a_did(to_check: &str, name: &str) -> Result<(), CredentialError> {\n\n if !is_did(to_check) {\n\n return Err(CredentialError::NotADid(\n\n to_check.to_owned(),\n\n name.to_owned(),\n\n ));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/helpers/credential.rs", "rank": 8, "score": 101291.34854616862 }, { "content": "/// Checks if input is a DID and returns a `PresentationError::NotADid` if not.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `to_check` - input value to check\n\n/// * `name` - name of the input to check, used for log message\n\n///\n\n/// # Returns\n\n/// `()` or `PresentationError::NotADid`\n\npub fn fail_if_not_a_did(to_check: &str, name: &str) -> Result<(), PresentationError> {\n\n if !is_did(to_check) {\n\n return Err(PresentationError::NotADid(\n\n to_check.to_owned(),\n\n name.to_owned(),\n\n ));\n\n }\n\n Ok(())\n\n}\n\n\n\npub struct Presentation<'a> {\n\n vade_evan: &'a mut VadeEvan,\n\n}\n\n\n\nimpl<'a> Presentation<'a> {\n\n pub fn new(vade_evan: &'a mut VadeEvan) -> Result<Self, PresentationError> {\n\n Ok(Self { vade_evan })\n\n }\n\n\n\n /// Proposes to issue a proof for a credential.\n", "file_path": "src/helpers/presentation.rs", "rank": 9, "score": 101291.34854616862 }, { "content": "fn get_parsing_error_message(error: &serde_json::Error, payload: &str) -> JsValue {\n\n return JsValue::from(format!(\n\n r#\"got error \\{}\" when parsing payload: {}\"#,\n\n &error, &payload\n\n ));\n\n}\n\n\n", "file_path": "src/wasm_lib.rs", "rank": 10, "score": 95772.59252290713 }, { "content": "fn stringify_generic_error(err: Box<dyn Error>) -> String {\n\n format!(\"{}\", err)\n\n}\n\n\n", "file_path": "src/c_lib.rs", "rank": 11, "score": 92803.0703233269 }, { "content": "fn get_app<'a>() -> Result<App<'a, 'a>> {\n\n // variable might be needlessly mutable due to the following feature listing not matching\n\n #[allow(unused_mut)]\n\n let mut app = App::new(\"vade_evan_cli\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(env!(\"CARGO_PKG_AUTHORS\"))\n\n .about(\"Allows to work with DIDs and zero knowledge proof VCs\")\n\n .setting(AppSettings::DeriveDisplayOrder)\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(\n\n SubCommand::with_name(\"build_version\")\n\n .about(\"shows version of vade_evan_cli build and its vade dependencies\")\n\n .setting(AppSettings::DeriveDisplayOrder),\n\n );\n\n\n\n cfg_if::cfg_if! 
{\n\n if #[cfg(any(feature = \"did-read\", feature = \"did-write\"))] {\n\n app = add_subcommand_did(app)?;\n\n } else {}\n\n }\n", "file_path": "src/main.rs", "rank": 12, "score": 90855.7796631732 }, { "content": "fn jsify_generic_error(err: Box<dyn Error>) -> JsValue {\n\n JsValue::from(format!(\"{}\", err))\n\n}\n\n\n", "file_path": "src/wasm_lib.rs", "rank": 13, "score": 89301.00181344026 }, { "content": "fn get_clap_argument(arg_name: &str) -> Result<Arg> {\n\n Ok(match arg_name {\n\n \"did\" => Arg::with_name(\"did\")\n\n .long(\"did\")\n\n .short(\"d\")\n\n .value_name(\"did\")\n\n .required(true)\n\n .help(\"a DID to work with, e.g. 'did:evan:testcore:0x0d87204c3957d73b68ae28d0af961d3c72403906'\")\n\n .takes_value(true),\n\n \"method\" => Arg::with_name(\"method\")\n\n .long(\"method\")\n\n .short(\"m\")\n\n .value_name(\"method\")\n\n .required(true)\n\n .help(\"method to use, e.g. 'did:evan'\")\n\n .takes_value(true),\n\n \"options\" => Arg::with_name(\"options\")\n\n .long(\"options\")\n\n .short(\"o\")\n\n .value_name(\"options\")\n", "file_path": "src/main.rs", "rank": 14, "score": 84657.07375848517 }, { "content": "#[allow(unused_variables)] // allow possibly unused variables due to feature mix\n\npub fn get_vade_evan(\n\n config: Option<&String>,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))] request_id: *const c_void,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_function_callback: ResolveHttpRequest,\n\n) -> Result<VadeEvan, Box<dyn Error>> {\n\n let config_values =\n\n get_config_values(config, vec![\"signer\".to_string(), \"target\".to_string()])?;\n\n let (signer_config, target) = match config_values.as_slice() {\n\n [signer_config, target, ..] => (signer_config, target),\n\n _ => {\n\n return Err(Box::from(\"invalid vade config\"));\n\n }\n\n };\n\n\n\n return VadeEvan::new(VadeEvanConfig {\n\n target,\n\n signer: signer_config,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_id,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_function_callback,\n\n })\n\n .map_err(|err| Box::from(format!(\"could not create VadeEvan instance; {}\", &err)));\n\n}\n\n\n", "file_path": "src/c_lib.rs", "rank": 15, "score": 82973.16520324997 }, { "content": "#[wasm_bindgen]\n\npub fn set_log_level(log_level: String) {\n\n let _ = match log_level.as_str() {\n\n \"trace\" => console_log::init_with_level(log::Level::Trace),\n\n \"debug\" => console_log::init_with_level(log::Level::Debug),\n\n \"info\" => console_log::init_with_level(log::Level::Info),\n\n \"error\" => console_log::init_with_level(log::Level::Error),\n\n _ => console_log::init_with_level(log::Level::Error),\n\n };\n\n}\n\n\n", "file_path": "src/wasm_lib.rs", "rank": 16, "score": 82810.63315975698 }, { "content": "#[cfg(feature = \"did-substrate\")]\n\nfn get_vade_evan_substrate(\n\n target: &str,\n\n signer: &str,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))] _request_id: *const c_void,\n\n) -> Result<VadeEvanSubstrate, Box<dyn Error>> {\n\n Ok(VadeEvanSubstrate::new(ResolverConfig {\n\n signer: get_signer(signer),\n\n target: target.to_string(),\n\n }))\n\n}\n\n\n", "file_path": "src/api/vade_bundle.rs", "rank": 17, "score": 81949.26200820191 }, { "content": "#[cfg(feature = \"vc-zkp-bbs\")]\n\nfn get_vade_evan_bbs(\n\n signer: &str,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))] _request_id: *const c_void,\n\n) -> Result<VadeEvanBbs, Box<dyn Error>> {\n\n let signer: Box<dyn Signer> = 
get_signer(signer);\n\n Ok(VadeEvanBbs::new(signer))\n\n}\n\n\n", "file_path": "src/api/vade_bundle.rs", "rank": 18, "score": 81949.26200820191 }, { "content": "const copyVadeFiles = `rm -rf dist/${VADE_WASM_FOLDER} && cp -r ${VADE_WASM_FOLDER} dist/${VADE_WASM_FOLDER}`;\n", "file_path": "builds/wasm/package-scripts.js", "rank": 19, "score": 81094.96042363244 }, { "content": "#[allow(dead_code)]\n\nfn add_subcommand_did<'a>(app: App<'a, 'a>) -> Result<App<'a, 'a>> {\n\n // variable might be needlessly mutable due to the following feature listing not matching\n\n #[allow(unused_mut)]\n\n let mut subcommand = SubCommand::with_name(\"did\")\n\n .about(\"Work with DIDs\")\n\n .setting(AppSettings::DeriveDisplayOrder)\n\n .setting(AppSettings::SubcommandRequiredElseHelp);\n\n\n\n cfg_if::cfg_if! {\n\n if #[cfg(feature = \"did-read\")] {\n\n subcommand = subcommand.subcommand(\n\n SubCommand::with_name(\"resolve\")\n\n .about(\"Fetch data about a DID, which returns this DID's DID document.\")\n\n .arg(get_clap_argument(\"did\")?)\n\n .arg(get_clap_argument(\"target\")?)\n\n .arg(get_clap_argument(\"signer\")?),\n\n );\n\n } else {}\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 20, "score": 79249.49988766253 }, { "content": "#[allow(dead_code)]\n\nfn add_subcommand_didcomm<'a>(app: App<'a, 'a>) -> Result<App<'a, 'a>> {\n\n let app = app.subcommand(\n\n SubCommand::with_name(\"didcomm\")\n\n .about(\"Process DIDComm message\")\n\n .setting(AppSettings::DeriveDisplayOrder)\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(\n\n SubCommand::with_name(\"send\")\n\n .about(r###\"Prepare a plain DIDComm json message to be sent, including encryption and protocol specific message enhancement. The DIDComm options can include a shared secret to encrypt the message with a specific key. If no key was given and the message should be encrypted (depends on protocol implementation), the DIDComm keypair from a db provider will be used.\"###)\n\n .arg(get_clap_argument(\"options\")?)\n\n .arg(get_clap_argument(\"payload\")?),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"receive\")\n\n .about(r###\"Receive a plain DIDComm json message, including decryption and protocol specific message parsing. The DIDComm options can include a shared secret to encrypt the message with a specific key. If no key was given and the message is encrypted the DIDComm keypair from a db will be used.\"###)\n\n .arg(get_clap_argument(\"options\")?)\n\n .arg(get_clap_argument(\"payload\")?),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"create_keys\")\n", "file_path": "src/main.rs", "rank": 21, "score": 77133.14759176824 }, { "content": "#[allow(dead_code)]\n\nfn add_subcommand_helper<'a>(app: App<'a, 'a>) -> Result<App<'a, 'a>> {\n\n // variable might be needlessly mutable due to the following feature listing not matching\n\n #[allow(unused_mut)]\n\n let mut subcommand = SubCommand::with_name(\"helper\")\n\n .about(\"streamlined and updated VADE API that will replace some of the current functions\")\n\n .setting(AppSettings::DeriveDisplayOrder)\n\n .setting(AppSettings::SubcommandRequiredElseHelp);\n\n\n\n cfg_if::cfg_if! {\n\n if #[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))] {\n\n subcommand = subcommand.subcommand(\n\n SubCommand::with_name(\"create_credential_offer\")\n\n .about(\"Creates a `CredentialOffer` message. A `CredentialOffer` is sent by an issuer and is the response to a `CredentialProposal`. 
The `CredentialOffer` specifies which schema the issuer is capable and willing to use for credential issuance.\")\n\n .arg(get_clap_argument(\"schema_did\")?)\n\n .arg(get_clap_argument(\"use_valid_until\")?)\n\n .arg(get_clap_argument(\"issuer_did\")?)\n\n .arg(get_clap_argument(\"include_credential_status\")?)\n\n .arg(get_clap_argument(\"required_reveal_statements\")?)\n\n );\n\n } else {}\n", "file_path": "src/main.rs", "rank": 22, "score": 77133.14759176824 }, { "content": "#[allow(dead_code)]\n\nfn add_subcommand_vc_zkp<'a>(app: App<'a, 'a>) -> Result<App<'a, 'a>> {\n\n // variable might be needlessly mutable due to the following feature listing not matching\n\n #[allow(unused_mut)]\n\n let mut subcommand = SubCommand::with_name(\"vc_zkp\")\n\n .about(\"Work with zero knowledge proof VCs\")\n\n .setting(AppSettings::DeriveDisplayOrder)\n\n .setting(AppSettings::SubcommandRequiredElseHelp);\n\n\n\n cfg_if::cfg_if! {\n\n if #[cfg(feature = \"vc-zkp-bbs\")] {\n\n subcommand = subcommand.subcommand(\n\n SubCommand::with_name(\"create_master_secret\")\n\n .about(\"Creates a new master secret.\")\n\n .arg(get_clap_argument(\"options\")?)\n\n );\n\n } else {}\n\n }\n\n\n\n cfg_if::cfg_if! {\n\n if #[cfg(feature = \"vc-zkp-bbs\")] {\n", "file_path": "src/main.rs", "rank": 23, "score": 75158.69936766209 }, { "content": "fn none_to_empty_string(optional: Option<String>) -> String {\n\n optional.unwrap_or_else(|| \"\".to_string())\n\n}\n\n\n\ncfg_if::cfg_if! {\n\n if #[cfg(feature = \"did-read\")] {\n\n create_function!(did_resolve, did_or_method, config);\n\n } else {\n\n }\n\n}\n\n\n\ncfg_if::cfg_if! {\n\n if #[cfg(feature = \"did-write\")] {\n\n create_function!(did_create, did_or_method, options, payload, config);\n\n create_function!(did_update, did_or_method, options, payload, config);\n\n } else {\n\n }\n\n}\n\n\n\ncfg_if::cfg_if! 
{\n", "file_path": "src/wasm_lib.rs", "rank": 24, "score": 70611.4925061309 }, { "content": "#[cfg(any(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\nfn get_optional_argument_value<'a>(matches: &'a ArgMatches, arg_name: &'a str) -> Option<&'a str> {\n\n matches.value_of(arg_name)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 25, "score": 69332.78802774404 }, { "content": "#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\nuse crate::helpers::CredentialError;\n\n#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\nuse crate::helpers::PresentationError;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum VadeEvanError {\n\n #[error(\"initialization failed with {source_message}\")]\n\n InitializationFailed { source_message: String },\n\n #[error(\"vade call failed with: {source_message}\")]\n\n InternalError { source_message: String },\n\n #[error(\"vade call returned no results\")]\n\n NoResults,\n\n #[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\n #[error(transparent)]\n\n CredentialError(#[from] CredentialError),\n\n #[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\n #[error(transparent)]\n\n PresentationError(#[from] PresentationError),\n", "file_path": "src/api/vade_evan_error.rs", "rank": 26, "score": 66115.80196246393 }, { "content": "}\n\n\n\nimpl From<Box<dyn std::error::Error>> for VadeEvanError {\n\n fn from(vade_error: Box<dyn std::error::Error>) -> VadeEvanError {\n\n VadeEvanError::InternalError {\n\n source_message: vade_error.to_string(),\n\n }\n\n }\n\n}\n", "file_path": "src/api/vade_evan_error.rs", "rank": 27, "score": 66108.68597247722 }, { "content": "fn main() {\n\n let lock_path = Path::new(&env::var(\"CARGO_MANIFEST_DIR\").unwrap()).join(\"Cargo.lock\");\n\n let lock_content = std::fs::read_to_string(lock_path).unwrap();\n\n let lock_object: LockFile = toml::from_str(&lock_content).unwrap();\n\n let relevant_packages: Vec<Package> = lock_object\n\n .package\n\n .into_iter()\n\n .filter(|package| package.name.starts_with(\"vade-\"))\n\n .collect();\n\n let filtered_lock_file = LockFile {\n\n package: relevant_packages,\n\n };\n\n println!(\"{}\", &toml::to_string(&filtered_lock_file).unwrap());\n\n\n\n let dest_path = Path::new(&env::var(\"OUT_DIR\").unwrap()).join(\"build_info.txt\");\n\n let mut f = BufWriter::new(File::create(&dest_path).unwrap());\n\n\n\n // variable might be needlessly mutable due to the following feature listing not matching\n\n #[allow(unused_mut)]\n\n let mut output = toml::to_string(&filtered_lock_file).unwrap();\n", "file_path": "build.rs", "rank": 28, "score": 61983.3390763257 }, { "content": "fn get_config_values(\n\n config: Option<&String>,\n\n keys: Vec<String>,\n\n) -> Result<Vec<String>, Box<dyn Error>> {\n\n let mut vec = Vec::new();\n\n let mut config_undefined = true;\n\n\n\n let config_hash_map: HashMap<String, String>;\n\n match config {\n\n Some(value) => {\n\n if !value.is_empty() {\n\n config_hash_map = serde_json::from_str(&value)?;\n\n config_undefined = false;\n\n } else {\n\n config_hash_map = HashMap::<String, String>::new();\n\n }\n\n }\n\n None => {\n\n config_hash_map = HashMap::<String, String>::new();\n\n }\n", "file_path": "src/c_lib.rs", "rank": 29, "score": 57302.8616104621 }, { "content": "fn get_config_values(\n\n config: Option<&JsValue>,\n\n keys: Vec<String>,\n\n) -> Result<Vec<String>, Box<dyn Error>> {\n\n let mut vec = Vec::new();\n\n let mut config_undefined = true;\n\n\n\n let config_hash_map: 
HashMap<String, String>;\n\n match config {\n\n Some(value) => {\n\n if !value.is_undefined() {\n\n config_hash_map = serde_wasm_bindgen::from_value(value.clone())?;\n\n config_undefined = false;\n\n } else {\n\n config_hash_map = HashMap::<String, String>::new();\n\n }\n\n }\n\n None => {\n\n config_hash_map = HashMap::<String, String>::new();\n\n }\n", "file_path": "src/wasm_lib.rs", "rank": 30, "score": 55984.32603242101 }, { "content": "#[cfg(feature = \"did-sidetree\")]\n\nfn get_vade_sidetree(\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))] request_id: *const c_void,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_function_callback: ResolveHttpRequest,\n\n) -> Result<VadeSidetree, Box<dyn Error>> {\n\n Ok(VadeSidetree::new(\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_id,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_function_callback,\n\n std::env::var(\"SIDETREE_API_URL\").ok(),\n\n ))\n\n}\n", "file_path": "src/api/vade_bundle.rs", "rank": 31, "score": 54764.086194860145 }, { "content": "fn get_public_key_generator(\n\n public_key: &str,\n\n message_count: usize,\n\n) -> Result<PublicKey, CredentialError> {\n\n let public_key: DeterministicPublicKey =\n\n DeterministicPublicKey::from(base64::decode(public_key)?.into_boxed_slice());\n\n let public_key_generator = public_key.to_public_key(message_count).map_err(|e| {\n\n CredentialError::PublicKeyParsingError(format!(\n\n \"public key invalid, generate public key generator; {}\",\n\n e\n\n ))\n\n })?;\n\n\n\n Ok(public_key_generator)\n\n}\n\n\n", "file_path": "src/helpers/credential.rs", "rank": 32, "score": 54764.086194860145 }, { "content": "const ISSUER_DID = 'did:evan:testcore:0x6240cedfc840579b7fdcd686bdc65a9a8c42dea6';\n", "file_path": "builds/wasm/src/vade/vade-api-bbs.spec.ts", "rank": 33, "score": 54104.215074866086 }, { "content": "pub fn is_revoked(\n\n credential_status: &CredentialStatus,\n\n revocation_list: &RevocationListCredential,\n\n) -> Result<bool, CredentialError> {\n\n let encoded_list = base64::decode_config(\n\n revocation_list.credential_subject.encoded_list.to_string(),\n\n base64::URL_SAFE,\n\n )?;\n\n let mut decoder = GzDecoder::new(&encoded_list[..]);\n\n let mut decoded_list = Vec::new();\n\n decoder\n\n .read_to_end(&mut decoded_list)\n\n .map_err(|e| CredentialError::RevocationListInvalid(e.to_string()))?;\n\n\n\n let revocation_list_index_number = credential_status\n\n .revocation_list_index\n\n .parse::<usize>()\n\n .map_err(|e| {\n\n CredentialError::RevocationListInvalid(format!(\n\n \"Error parsing revocation_list_id: {}\",\n", "file_path": "src/helpers/credential.rs", "rank": 34, "score": 53781.18597885645 }, { "content": "fn get_argument_value<'a>(\n\n matches: &'a ArgMatches,\n\n arg_name: &'a str,\n\n fallback: Option<&'a str>,\n\n) -> &'a str {\n\n match matches.value_of(arg_name) {\n\n Some(value) => value,\n\n None => match fallback {\n\n Some(value) => value,\n\n None => {\n\n panic!(\"no value for {} given\", arg_name);\n\n }\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 35, "score": 53781.18597885645 }, { "content": "#[cfg(feature = \"did-universal-resolver\")]\n\nfn get_vade_universal_resolver(\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))] request_id: *const c_void,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_function_callback: ResolveHttpRequest,\n\n) -> Result<VadeUniversalResolver, Box<dyn Error>> {\n\n 
Ok(VadeUniversalResolver::new(\n\n std::env::var(\"RESOLVER_URL\").ok(),\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_id,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_function_callback,\n\n ))\n\n}\n\n\n", "file_path": "src/api/vade_bundle.rs", "rank": 36, "score": 53631.5452897878 }, { "content": "#[cfg(feature = \"jwt-vc\")]\n\nfn get_vade_jwt_vc(\n\n signer: &str,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))] _request_id: *const c_void,\n\n) -> Result<VadeJwtVC, Box<dyn Error>> {\n\n Ok(VadeJwtVC::new(get_signer(signer)))\n\n}\n\n\n", "file_path": "src/api/vade_bundle.rs", "rank": 37, "score": 53631.5452897878 }, { "content": "#[wasm_bindgen]\n\npub fn set_panic_hook() {\n\n console_error_panic_hook::set_once();\n\n}\n\n\n", "file_path": "src/wasm_lib.rs", "rank": 38, "score": 51242.410563254496 }, { "content": "#[allow(unused_variables)]\n\npub fn get_vade(\n\n target: &str,\n\n signer: &str,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))] request_id: *const c_void,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_function_callback: ResolveHttpRequest,\n\n) -> Result<Vade, Box<dyn Error>> {\n\n let mut vade = Vade::new();\n\n\n\n #[cfg(feature = \"did-substrate\")]\n\n vade.register_plugin(Box::from(get_vade_evan_substrate(\n\n target,\n\n signer,\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_id,\n\n )?));\n\n #[cfg(feature = \"did-universal-resolver\")]\n\n vade.register_plugin(Box::from(get_vade_universal_resolver(\n\n #[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\n request_id,\n", "file_path": "src/api/vade_bundle.rs", "rank": 39, "score": 51242.410563254496 }, { "content": "pub fn create_draft_credential_from_schema(\n\n use_valid_until: bool,\n\n schema: &CredentialSchema,\n\n) -> UnsignedBbsCredential {\n\n let credential = UnsignedBbsCredential {\n\n context: vec![\n\n \"https://www.w3.org/2018/credentials/v1\".to_string(),\n\n \"https://schema.org/\".to_string(),\n\n \"https://w3id.org/vc-revocation-list-2020/v1\".to_string(),\n\n ],\n\n id: \"uuid:834ca9da-9f09-4359-8264-c890de13cdc8\".to_string(),\n\n r#type: vec![\"VerifiableCredential\".to_string()],\n\n issuer: \"did:evan:testcore:placeholder_issuer\".to_string(),\n\n valid_until: if use_valid_until {\n\n Some(\"2031-01-01T00:00:00.000Z\".to_string())\n\n } else {\n\n None\n\n },\n\n issuance_date: \"2021-01-01T00:00:00.000Z\".to_string(),\n\n credential_subject: CredentialSubject {\n", "file_path": "src/helpers/shared.rs", "rank": 40, "score": 50109.86965818215 }, { "content": "### Fixes\n\n\n\n- add `payload` argument to did_create in CLI\n\n- align key format for master secrets and public keys (no extra double quotes)\n\n- fix optional params for did_create\n\n- fix wasm release and `target-c-sdk` build options\n\n- update dependencies for critical vulnerabilities\n\n- fix revocation credential size increase with every revocation\n\n- fix cli output for commands\n\n\n\n### Deprecation\n\n\n\n## v0.4.1\n\n\n\n### Features\n\n\n\n- migrate `Vade` instance creation and plugin bundling to new `VadeEvan` API layer\n\n- migration C-lib, CLI and WASM wrapper to use `VadeEvan` instead of `Vade`\n\n- add `get_version_info` helper function\n\n- update vade-sidetree\n\n\n\n## v0.4.0\n\n\n\n### Features\n\n\n\n- add sdk feature for in3 integration and request_list usage to resolve http requests\n\n- create javascript wrapper for vade-evan\n\n- setup pipeline 
for different targets\n\n- use signing logic from `vade-signer` instead of `vade-evan-substrate`\n\n\n\n## v0.3.0\n\n\n\n### Features\n\n\n\n- bump dependency versions\n\n\n\n## v0.2.0\n\n\n\n### Features\n\n\n\n- add helper script for updating git based dependencies\n\n- add create_keys subcommand to didcomm in cli\n\n- add java jni wrapper code\n\n- add query_didcomm_message subcommand to didcomm command in cli\n\n- disable vade-evan-cl as dependency\n\n\n\n## Version 0.1.2\n\n\n\n### Fixes\n\n\n\n- fix Linux and WASM build\n\n- increase version for vade-didcomm vade-sidetree and added vade-jwt-vc features\n\n\n\n## Version 0.1.1\n\n\n\n### Fixes\n\n\n\n- add git urls as dependencies\n\n\n\n## Version 0.1.0\n\n\n\n### Features\n\n\n\n- add support for `didcomm_send`, `didcomm_receive`, `vc_zkp_finish_credential`\n\n- add WASM complied project with sample javascript library\n\n- made changes to pass external signer to vade-jwt-vc plugin\n\n\n\n### Deprecations\n\n\n\n- split off substrate logic from original `vade-evan` project into separate projects\n\n - did related components and signing went to `vade-evan-substrate`\n\n - cl vc related components went to `vade-evan-cl`\n\n\n\n## Version 0.0.8\n\n\n\n### Fixes\n\n\n\n- fix links in documentation\n\n- remove path from default `vade` and `ursa` dependencies\n\n\n\n## Version 0.0.7\n\n\n\n### Fixes\n\n\n", "file_path": "VERSIONS.md", "rank": 41, "score": 50071.88806761921 }, { "content": "# vade-evan\n\n\n\n## Next Version\n\n\n\n### Features\n\n\n\n- add support for `vc_zkp_propose_proof` function in `vade-evan-bbs` plugin\n\n- add checks to ensure inputs that are supposed to be DIDs are really DIDs\n\n\n\n### Fixes\n\n\n\n- fix timestamp generation for `vade-didcomm` in `wasm` build\n\n- update didcomm dependency for pthid in `get_did_exchange_message`\n\n- update `vade-didcomm` dependency for `comment` fix in `did-exchange`\n\n\n\n### Deprecation\n\n\n\n## Release candidates\n\n\n\n## 0.6.0-rc.6\n\n\n\n### Fixes\n\n\n\n- fix timestamp generation for `vade-didcomm` in `wasm` build\n\n\n\n## 0.6.0-rc.5\n\n\n\n### Features\n\n\n\n- add support for `vc_zkp_propose_proof` function in `vade-evan-bbs` plugin\n\n- add checks to ensure inputs that are supposed to be DIDs are really DIDs\n\n\n\n## v0.5.0\n\n\n\n### Features\n\n\n\n- migrate `Vade` instance creation and plugin bundling to new `VadeEvan` API layer\n\n- migration C-lib, CLI and WASM wrapper to use `VadeEvan` instead of `Vade`\n\n- add `get_version_info` helper function\n\n- add `create_credential_request` helper function\n\n- add `create_credential_offer` helper function\n\n- add `helper_verify_credential` helper function\n\n- add `helper_did_create` and `helper_did_update` functions\n\n- add `helper_revoke_credential` function\n\n- pass sdk feature to vade-sidetree plugin\n\n- update release ci to build and upload artifacts for android, ios, wasm, linux, macos and windows targets\n\n- add `helper_create_self_issued_credential` helper function\n\n- add `helper_create_proof_request`\n\n- add optional params `update_key` and `recovery_key` to `did_create`\n\n- add `helper_create_presentation`\n\n- adjust `credential_status` property in `BbsCredential` to be optional\n\n- refactor features to use target specific(c-lib, c-sdk, wasm, cli, java) builds\n\n- adjust functions to remove `credential_subject.id` from `BbsCredential` and other types\n\n- add `helper_verify_presentation`\n\n- update `vade-evan-bbs` dependency for revocation fix\n\n\n", "file_path": "VERSIONS.md", "rank": 42, 
"score": 50066.946337352005 }, { "content": "const DEFAULT_SIGNER = 'remote|http://localhost:7070/key/sign';\n", "file_path": "builds/wasm/src/vade/vade-api-shared.ts", "rank": 43, "score": 50066.28672715221 }, { "content": "- fix badges in readme\n\n\n\n## Version 0.0.6\n\n\n\n- initial version after project renaming\n", "file_path": "VERSIONS.md", "rank": 44, "score": 50062.894243048504 }, { "content": "function checkRequiredProperties(\n\n toCheck: unknown,\n\n requiredProperties: string[],\n\n type: string,\n\n): boolean {\n\n for (let i = 0; i < requiredProperties.length; i += 1) {\n\n if (!toCheck[requiredProperties[i]]) {\n\n throw new Error(`Parameter ${requiredProperties[i]} is required in ${type}!`);\n\n }\n\n }\n\n\n\n return true;\n", "file_path": "builds/wasm/src/vade/vade-api-shared.ts", "rank": 45, "score": 48611.94231045118 }, { "content": "const SIGNER_1_ADDRESS = '0x03c174bfc6d05f2f520e6ada156d0a5120aebdee';\n", "file_path": "builds/wasm/src/vade/vade-api-bbs.spec.ts", "rank": 46, "score": 48586.25859464488 }, { "content": "export interface RequestCredentialResultBbs {\n\n credentialRequest: CredentialRequestBbs;\n\n signatureBlinding?: string;\n", "file_path": "builds/wasm/src/vade/typings/index.ts", "rank": 47, "score": 48489.242714750966 }, { "content": "export interface IssueCredentialResultBbs {\n\n credential: UnfinishedCredentialBbs;\n", "file_path": "builds/wasm/src/vade/typings/index.ts", "rank": 48, "score": 48489.242714750966 }, { "content": "/// Checks if given value is a DID\n\n///\n\n/// # Arguments\n\n///\n\n/// * `to_check` - input value to check\n\n///\n\n/// # Returns\n\n/// `true` if DID, otherwise\n\npub fn is_did(to_check: &str) -> bool {\n\n to_check.starts_with(\"did:\")\n\n}\n", "file_path": "src/helpers/shared.rs", "rank": 49, "score": 45078.85863095101 }, { "content": "mod vade_bundle;\n\nmod vade_evan_api;\n\nmod vade_evan_error;\n\n\n\npub use vade_evan_api::{VadeEvan, VadeEvanConfig, DEFAULT_SIGNER, DEFAULT_TARGET};\n\npub use vade_evan_error::VadeEvanError;\n", "file_path": "src/api/mod.rs", "rank": 50, "score": 37498.7790432817 }, { "content": "#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\nmod credential;\n\nmod datatypes;\n\n#[cfg(feature = \"did-sidetree\")]\n\nmod did;\n\n#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\nmod presentation;\n\n#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\nmod shared;\n\nmod version_info;\n\n\n\n#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\npub(crate) use credential::{Credential, CredentialError};\n\n#[cfg(feature = \"did-sidetree\")]\n\npub(crate) use did::Did;\n\n#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\npub(crate) use presentation::{Presentation, PresentationError};\n\npub(crate) use version_info::VersionInfo;\n", "file_path": "src/helpers/mod.rs", "rank": 51, "score": 37498.06486148333 }, { "content": "#[derive(Deserialize, Serialize)]\n\nstruct LockFile {\n\n package: Vec<Package>,\n\n}\n\n\n", "file_path": "build.rs", "rank": 52, "score": 37490.561517880305 }, { "content": "pub fn check_for_optional_empty_params(param: Option<&str>) -> Option<&str> {\n\n match param {\n\n Some(val) => {\n\n if val.is_empty() {\n\n None\n\n } else {\n\n Some(val)\n\n }\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/helpers/shared.rs", "rank": 53, "score": 36980.48718817529 }, { "content": "package com.vade.evan;\n", "file_path": "builds/java/vade/src/main/java/com/vade/evan/Vade.java", "rank": 
54, "score": 36791.71099286335 }, { "content": "package com.vade.evan;\n", "file_path": "builds/java/vade/src/test/java/com/vade/evan/VadeTest.java", "rank": 55, "score": 36227.90098380989 }, { "content": "mod did_types;\n\n\n\npub use did_types::*;\n", "file_path": "src/helpers/datatypes/mod.rs", "rank": 56, "score": 35913.795548662 }, { "content": "static BUILD_INFO: &'static str = include_str!(concat!(env!(\"OUT_DIR\"), \"/build_info.txt\"));\n\n\n\npub struct VersionInfo {}\n\n\n\nimpl VersionInfo {\n\n pub fn get_version_info() -> String {\n\n BUILD_INFO.to_string()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::VersionInfo;\n\n\n\n #[test]\n\n fn can_get_version_info() {\n\n let version_info = VersionInfo::get_version_info();\n\n\n\n assert!(version_info.contains(\"vade-evan\"));\n\n }\n\n}\n", "file_path": "src/helpers/version_info.rs", "rank": 57, "score": 35906.572340424296 }, { "content": "cfg_if::cfg_if! {\n\n if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n use anyhow::Result;\n\n use vade_evan::{VadeEvan, VadeEvanConfig};\n\n\n\n #[test]\n\n #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))]\n\n fn can_get_version_info() -> Result<()> {\n\n let vade_evan = VadeEvan::new(VadeEvanConfig {\n\n target: \"test\",\n\n signer: \"remote|http://127.0.0.1:7070/key/sign\",\n\n })?;\n\n let version_info = vade_evan.get_version_info();\n\n\n\n assert!(version_info.contains(\"vade-evan\"));\n\n\n\n Ok(())\n\n }\n\n } else {\n\n // currently no example for target-c-sdk and c-lib/target-java-lib\n\n }\n\n}\n", "file_path": "tests/vade_evan_api.rs", "rank": 58, "score": 33088.87674329079 }, { "content": " private Vade() {\n", "file_path": "builds/java/vade/src/main/java/com/vade/evan/Vade.java", "rank": 59, "score": 32467.881298884782 }, { "content": "/*\n\n Copyright (c) 2018-present evan GmbH.\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n*/\n\n\n\n#[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\nuse std::os::raw::c_void;\n\nuse vade::Vade;\n\n\n", "file_path": "src/api/vade_evan_api.rs", "rank": 60, "score": 31819.437405735036 }, { "content": " /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.did_update(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"did successfully updated: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn did_update(\n\n &mut self,\n\n did: &str,\n\n options: &str,\n\n payload: &str,\n\n ) -> Result<String, VadeEvanError> {\n", "file_path": "src/api/vade_evan_api.rs", "rank": 61, "score": 31763.982910591123 }, { "content": " /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, 
DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_update_revocation_registry(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"updated revocation registry: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_update_revocation_registry(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n\n payload: &str,\n\n ) -> Result<String, VadeEvanError> {\n", "file_path": "src/api/vade_evan_api.rs", "rank": 62, "score": 31763.014536972398 }, { "content": " }\n\n\n\n /// Creates a new DID. May also persist a DID document for it, depending on plugin implementation.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `did_method` - did method to cater to, usually also used by plugins to decide if a plugins will process the request\n\n /// * `options` - JSON string with additional information supporting the request (e.g. authentication data)\n\n /// * `payload` - JSON string with information for the request (e.g. actual data to write)\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n", "file_path": "src/api/vade_evan_api.rs", "rank": 63, "score": 31762.201501358373 }, { "content": " /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_create_revocation_registry_definition(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"created a revocation registry definition: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_create_revocation_registry_definition(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n\n payload: &str,\n\n ) -> Result<String, VadeEvanError> {\n\n get_first_result(\n", "file_path": "src/api/vade_evan_api.rs", "rank": 64, "score": 31761.76154272774 }, { "content": " /// println!(\"prepared DIDComm message: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn didcomm_send(\n\n &mut self,\n\n options: &str,\n\n payload: &str,\n\n ) -> Result<String, VadeEvanError> {\n\n get_first_result(self.vade.didcomm_send(options, payload).await?)\n\n }\n\n\n\n /// Gets information about version of `vade_evan` and dependencies prefixed with `evan-`.\n\n ///\n\n /// This can be useful to determine which versions of plugins are used to resolve vade calls.\n\n ///\n", "file_path": "src/api/vade_evan_api.rs", "rank": 65, "score": 31761.621815267707 }, { "content": " /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! 
{\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.get_version_info();\n\n /// println!(\"vade_evan version info: \\n{}\", &result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub fn get_version_info(&self) -> String {\n", "file_path": "src/api/vade_evan_api.rs", "rank": 66, "score": 31761.204225511636 }, { "content": " /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_verify_proof(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"verified proof: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_verify_proof(\n\n &mut self,\n\n method: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 67, "score": 31760.90085030709 }, { "content": " /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_revoke_credential(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"revoked credential: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_revoke_credential(\n\n &mut self,\n\n method: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 68, "score": 31760.90085030709 }, { "content": " /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_issue_credential(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"issued credential: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_issue_credential(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n\n payload: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 69, "score": 31760.73605662276 }, { "content": " /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> 
Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_finish_credential(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"issued credential: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_finish_credential(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n\n payload: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 70, "score": 31760.73605662276 }, { "content": " /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_propose_proof(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"created proof proposal: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_propose_proof(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n\n payload: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 71, "score": 31760.654680594405 }, { "content": " /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_present_proof(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"created a proof presentation: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_present_proof(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 72, "score": 31760.654680594405 }, { "content": " /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_request_proof(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"created proof request: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_request_proof(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 73, "score": 31760.654680594405 }, { "content": " /// cfg_if::cfg_if! 
{\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_request_credential(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"created credential request: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_request_credential(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 74, "score": 31760.654680594405 }, { "content": " /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let bbs_public_key = \"LwDjc3acetrEsbccFI4zSy1+AFqUbkEUf6Sm0OxIdhU=\";\n\n /// let signing_key = None;\n\n /// let service_url = \"www.example.service\";\n\n ///\n\n /// let create_response = vade_evan\n\n /// .helper_did_create(\n\n /// Some(bbs_public_key),\n\n /// signing_key,\n\n /// Some(service_url),\n\n /// None,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 75, "score": 31760.61630183758 }, { "content": " /// Processes a DIDComm message and prepares it for sending.\n\n ///\n\n /// It **may** be sent, depending on the configuration and implementation of underlying plugins.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `options` - JSON string with additional information supporting the request (e.g. authentication data)\n\n /// * `payload` - JSON string with information for the request (usually a raw DIDComm message)\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.didcomm_send(\"\", \"\").await?;\n", "file_path": "src/api/vade_evan_api.rs", "rank": 76, "score": 31760.301224007555 }, { "content": " ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.didcomm_receive(\"\", \"\").await?;\n\n /// println!(\"received DIDComm message: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn didcomm_receive(\n\n &mut self,\n\n options: &str,\n\n payload: &str,\n\n ) -> Result<String, VadeEvanError> {\n\n get_first_result(self.vade.didcomm_receive(options, payload).await?)\n\n }\n\n\n", "file_path": "src/api/vade_evan_api.rs", "rank": 77, "score": 31760.20015855896 }, { "content": " /// # Arguments\n\n ///\n\n /// * `method` - method to call a function for (e.g. 
\"did:example\")\n\n /// * `function` - function to call (e.g. \"test connection\")\n\n /// * `options` - JSON string with additional information supporting the request (e.g. authentication data)\n\n /// * `payload` - JSON string with information for the request (e.g. actual data to write)\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.run_custom_function(\"did:example\", \"test connection\", \"\", \"\").await?;\n\n /// println!(\"connection status is: {}\", result);\n\n /// Ok(())\n", "file_path": "src/api/vade_evan_api.rs", "rank": 78, "score": 31760.098058711344 }, { "content": " }\n\n\n\n /// Creates a new zero-knowledge proof credential offer. This message is the response to a credential proposal.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `method` - method to create a credential offer for (e.g. \"did:example\")\n\n /// * `options` - JSON string with additional information supporting the request (e.g. authentication data)\n\n /// * `payload` - JSON string with information for the request (e.g. actual data to write)\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n", "file_path": "src/api/vade_evan_api.rs", "rank": 79, "score": 31759.805506527096 }, { "content": " /// # Arguments\n\n ///\n\n /// * `did` - did to fetch data for\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! 
{\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.did_resolve(\"did:example:123\").await?;\n\n /// println!(\"got did: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n", "file_path": "src/api/vade_evan_api.rs", "rank": 80, "score": 31759.737609662145 }, { "content": " /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_create_credential_schema(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"created a credential schema: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_create_credential_schema(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n\n payload: &str,\n\n ) -> Result<String, VadeEvanError> {\n\n get_first_result(\n\n self.vade\n\n .vc_zkp_create_credential_schema(method, options, payload)\n", "file_path": "src/api/vade_evan_api.rs", "rank": 81, "score": 31759.142880764695 }, { "content": " ///\n\n /// * `credential` - credential to be revoked as serialized JSON\n\n /// * `update_key_jwk` - update key in jwk format as serialized JSON\n\n /// * `private_key` - private key for local signer to be used for signing\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let credential = r###\"{\n\n /// \"id\": \"uuid:70b7ec4e-f035-493e-93d3-2cf5be4c7f88\",\n\n /// \"type\": [\n\n /// \"VerifiableCredential\"\n\n /// ],\n", "file_path": "src/api/vade_evan_api.rs", "rank": 82, "score": 31758.967143290476 }, { "content": "#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\nuse crate::helpers::Credential;\n\n#[cfg(feature = \"did-sidetree\")]\n\nuse crate::helpers::Did;\n\n#[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\nuse crate::helpers::Presentation;\n\n#[cfg(all(feature = \"c-lib\", feature = \"target-c-sdk\"))]\n\nuse crate::in3_request_list::ResolveHttpRequest;\n\nuse crate::{\n\n api::{vade_bundle::get_vade, vade_evan_error::VadeEvanError},\n\n helpers::VersionInfo,\n\n};\n\n\n\npub const DEFAULT_TARGET: &str = \"substrate-dev.trust-trace.com\";\n\npub const DEFAULT_SIGNER: &str = \"local\";\n\n\n", "file_path": "src/api/vade_evan_api.rs", "rank": 83, "score": 31758.882392980107 }, { "content": " /// Creates a new zero-knowledge proof credential definition. A credential definition holds cryptographic key material\n\n /// and is needed by an issuer to issue a credential, thus needs to be created before issuance. 
A credential definition\n\n /// is always bound to one credential schema.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `method` - method to create a credential definition for (e.g. \"did:example\")\n\n /// * `options` - JSON string with additional information supporting the request (e.g. authentication data)\n\n /// * `payload` - JSON string with information for the request (e.g. actual data to write)\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n", "file_path": "src/api/vade_evan_api.rs", "rank": 84, "score": 31758.30262057603 }, { "content": " /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let result = vade_evan.vc_zkp_create_credential_proposal(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"created a credential proposal: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn vc_zkp_create_credential_proposal(\n\n &mut self,\n\n method: &str,\n\n options: &str,\n\n payload: &str,\n\n ) -> Result<String, VadeEvanError> {\n\n get_first_result(\n\n self.vade\n\n .vc_zkp_create_credential_proposal(method, options, payload)\n\n .await?,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 85, "score": 31758.09965294936 }, { "content": " ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n ///\n\n /// let proposal = r###\"{\n\n /// \"verifier\": \"verifier\",\n\n /// \"createdAt\": \"createdAt\",\n\n /// \"nonce\": \"nonce\",\n\n /// \"type\": \"BBS\",\n\n /// \"subProofRequests\": [{\n\n /// \"schema\": \"did:evan:EiBrPL8Yif5NWHOzbKvyh1PX1wKVlWvIa6nTG1v8PXytvg\",\n\n /// \"revealedAttributes\": [13, 15]\n\n /// }]\n\n /// }\"###;\n", "file_path": "src/api/vade_evan_api.rs", "rank": 86, "score": 31758.099972884236 }, { "content": " /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n #[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\n pub async fn helper_create_credential_offer(\n\n &mut self,\n\n schema_did: &str,\n\n use_valid_until: bool,\n\n issuer_did: &str,\n\n is_credential_status_included: bool,\n\n required_reveal_statements: &str,\n\n ) -> Result<String, VadeEvanError> {\n\n let mut credential = Credential::new(self)?;\n\n credential\n\n .create_credential_offer(\n\n schema_did,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 87, "score": 31758.017162250162 }, { "content": " /// cfg_if::cfg_if! 
{\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// const ISSUER_DID: &str = \"did:evan:testcore:0x6240cedfc840579b7fdcd686bdc65a9a8c42dea6\";\n\n /// const SCHEMA_DID: &str = \"did:evan:EiACv4q04NPkNRXQzQHOEMa3r1p_uINgX75VYP2gaK5ADw\";\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let offer_str = vade_evan\n\n /// .helper_create_credential_offer(\n\n /// SCHEMA_DID,\n\n /// false,\n\n /// ISSUER_DID,\n\n /// true,\n\n /// \"[1]\",\n\n /// )\n\n /// .await?;\n\n ///\n", "file_path": "src/api/vade_evan_api.rs", "rank": 88, "score": 31757.953412269857 }, { "content": " /// }\n\n /// }\n\n /// ```\n\n pub async fn did_resolve(&mut self, did: &str) -> Result<String, VadeEvanError> {\n\n get_first_result(self.vade.did_resolve(did).await?)\n\n }\n\n\n\n /// Updates data related to a DID. May also persist a DID document for it, depending on plugin implementation.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `did` - DID to update data for\n\n /// * `options` - JSON string with additional information supporting the request (e.g. authentication data)\n\n /// * `payload` - JSON string with information for the request (e.g. actual data to write)\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! {\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n", "file_path": "src/api/vade_evan_api.rs", "rank": 89, "score": 31757.734141288878 }, { "content": " /// Verifies a given credential by checking if given master secret was incorporated\n\n /// into proof and if proof was signed with issuers public key.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `credential` - credential to verify as serialized JSON\n\n /// * `master_secret` - master secret incorporated as a blinded value into the proof of the credential\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! 
{\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let credential = r###\"{\n\n /// \"id\": \"uuid:70b7ec4e-f035-493e-93d3-2cf5be4c7f88\",\n", "file_path": "src/api/vade_evan_api.rs", "rank": 90, "score": 31757.557377029905 }, { "content": " /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let schema_did = \"did:evan:EiBrPL8Yif5NWHOzbKvyh1PX1wKVlWvIa6nTG1v8PXytvg\";\n\n /// let revealed_attributes = Some(r#\"[\"zip\", \"country\"]\"#);\n\n ///\n\n /// vade_evan\n\n /// .helper_create_proof_request(schema_did, revealed_attributes)\n\n /// .await?;\n\n ///\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n #[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\n pub async fn helper_create_proof_request(\n\n &mut self,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 91, "score": 31757.189133162046 }, { "content": " }\n\n\n\n /// Creates a new zero-knowledge proof credential proposal. This message is the first in the\n\n /// credential issuance flow.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `method` - method to create a credential proposal for (e.g. \"did:example\")\n\n /// * `options` - JSON string with additional information supporting the request (e.g. authentication data)\n\n /// * `payload` - JSON string with information for the request (e.g. actual data to write)\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// cfg_if::cfg_if! 
{\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n\n ///\n\n /// async fn example() -> Result<()> {\n", "file_path": "src/api/vade_evan_api.rs", "rank": 92, "score": 31756.735736569965 }, { "content": " /// const BBS_SECRET: &str = \"GRsdzRB0pf/8MKP/ZBOM2BEV1A8DIDfmLh8T3b1hPKc=\";\n\n /// const BBS_PRIVATE_KEY: &str = \"WWTZW8pkz35UnvsUCEsof2CJmNHaJQ/X+B5xjWcHr/I=\";\n\n /// const SUBJECT_DID: &str = \"did:evan:EiAee4ixDnSP0eWyp0YFV7Wt9yrZ3w841FNuv9NSLFSCVA\";\n\n ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let offer_str = vade_evan\n\n /// .helper_create_self_issued_credential(\n\n /// SCHEMA_DID,\n\n /// CREDENTIAL_SUBJECT_STR,\n\n /// BBS_SECRET,\n\n /// BBS_PRIVATE_KEY,\n\n /// Some(\"did:revoc:12345\"),\n\n /// Some(\"1\"),\n\n /// None,\n\n /// SUBJECT_DID,\n\n /// \"[1]\",\n\n /// )\n\n /// .await?;\n\n ///\n", "file_path": "src/api/vade_evan_api.rs", "rank": 93, "score": 31756.390926671836 }, { "content": " get_first_result(self.vade.did_update(did, options, payload).await?)\n\n }\n\n\n\n /// Processes a DIDComm message as received, this may prepare a matching response for it\n\n /// if the DIDComm message can be interpreted and answered by a plugin's implementation.\n\n ///\n\n /// This response **may** be sent, depending on the configuration and implementation of\n\n /// underlying plugins, but it is usually also returned as response to this request.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `options` - JSON string with additional information supporting the request (e.g. authentication data)\n\n /// * `payload` - JSON string with information for the request (usually a raw DIDComm message)\n\n ///\n\n /// # Example\n\n /// ```\n\n /// cfg_if::cfg_if! 
{\n\n /// if #[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))] {\n\n /// use anyhow::Result;\n\n /// use vade_evan::{VadeEvan, VadeEvanConfig, DEFAULT_TARGET, DEFAULT_SIGNER};\n", "file_path": "src/api/vade_evan_api.rs", "rank": 94, "score": 31756.388922568764 }, { "content": "\n\n#[cfg(not(all(feature = \"c-lib\", feature = \"target-c-sdk\")))]\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{VadeEvan, VadeEvanConfig};\n\n\n\n #[test]\n\n fn can_be_created() {\n\n let vade_evan = VadeEvan::new(VadeEvanConfig {\n\n target: \"test\",\n\n signer: \"remote|http://127.0.0.1:7070/key/sign\",\n\n });\n\n\n\n assert!(vade_evan.is_ok());\n\n }\n\n}\n", "file_path": "src/api/vade_evan_api.rs", "rank": 95, "score": 31755.661974086113 }, { "content": " /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let revealed_attributes = Some(r#\"[\"zip\", \"country\"]\"#);\n\n /// let proof_request_str = vade_evan\n\n /// .helper_create_proof_request(SCHEMA_DID, revealed_attributes)\n\n /// .await?;\n\n ///\n\n /// let presentation_str = vade_evan\n\n /// .helper_create_presentation(\n\n /// &proof_request_str,\n\n /// CREDENTIAL,\n\n /// MASTER_SECRET,\n\n /// SIGNER_PRIVATE_KEY,\n\n /// PROVER_DID,\n\n /// None,\n\n /// )\n\n /// .await?;\n\n /// let verify_result = vade_evan.helper_verify_presentation(&presentation_str, &proof_request_str).await;\n\n /// assert!(verify_result.is_ok());\n\n /// Ok(())\n\n /// }\n", "file_path": "src/api/vade_evan_api.rs", "rank": 96, "score": 31755.034597035105 }, { "content": " /// let result = vade_evan.did_create(\"did:example\", \"\", \"\").await?;\n\n /// println!(\"created new did: {}\", result);\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n /// ```\n\n pub async fn did_create(\n\n &mut self,\n\n did_method: &str,\n\n options: &str,\n\n payload: &str,\n\n ) -> Result<String, VadeEvanError> {\n\n get_first_result(self.vade.did_create(did_method, options, payload).await?)\n\n }\n\n\n\n /// Fetch data about a DID. 
This usually returns a DID document.\n\n ///\n", "file_path": "src/api/vade_evan_api.rs", "rank": 97, "score": 31754.891589100716 }, { "content": " ///\n\n /// async fn example() -> Result<()> {\n\n /// let mut vade_evan = VadeEvan::new(VadeEvanConfig { target: DEFAULT_TARGET, signer: DEFAULT_SIGNER })?;\n\n /// let schema_did = \"did:evan:EiBrPL8Yif5NWHOzbKvyh1PX1wKVlWvIa6nTG1v8PXytvg\";\n\n /// let revealed_attributes = Some(r#\"[\"zip\", \"country\"]\"#);\n\n ///\n\n /// vade_evan\n\n /// .helper_create_proof_proposal(schema_did, revealed_attributes)\n\n /// .await?;\n\n ///\n\n /// Ok(())\n\n /// }\n\n /// } else {\n\n /// // currently no example for target-c-sdk and c-lib/target-java-lib\n\n /// }\n\n /// }\n\n #[cfg(all(feature = \"vc-zkp-bbs\", feature = \"did-sidetree\"))]\n\n pub async fn helper_create_proof_proposal(\n\n &mut self,\n\n schema_did: &str,\n", "file_path": "src/api/vade_evan_api.rs", "rank": 98, "score": 31754.817394389942 }, { "content": "/*\n\n Copyright (c) 2018-present evan GmbH.\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n*/\n\n\n\nextern crate clap;\n\n\n\nuse anyhow::{bail, Result};\n\nuse clap::{App, AppSettings, Arg, ArgMatches, SubCommand};\n", "file_path": "src/main.rs", "rank": 99, "score": 87.74674971081811 } ]
Rust
src/identbimap/mod.rs
sozysozbot/wenyan-to-rust
2129edbb87116b899875b14fee2e0785f8a7d7b3
use crate::parse;
use big_s::S;
use bimap_plus_map::BiMapPlusMap;
use std::collections::HashMap;

type Table = HashMap<String, String>;

fn to_pinyin(ident: parse::Identifier, conversion_table: &Table) -> String {
    let parse::Identifier(i) = ident;
    let vec = i
        .chars()
        .map(|c| match conversion_table.get(&format!("{:X}", c as u32)) {
            None => S("_"),
            Some(a) => a.split(' ').collect::<Vec<_>>()[0].to_string(),
        })
        .collect::<Vec<_>>();
    vec.join("")
}

type Hanzi = parse::Identifier;
type Ascii = String;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Type {
    Mutable,
}

pub struct IdentBiMap(BiMapPlusMap<Hanzi, Ascii, Option<Type>>);

impl IdentBiMap {
    pub fn translate_from_hanzi(&self, id: &parse::Identifier) -> Ascii {
        self.0.bimap_get_by_left(id).unwrap().to_string()
    }
    pub fn is_mutable(&self, id: &parse::Identifier) -> bool {
        let typ = self.0.hashmap_get_by_left(id).unwrap();
        *typ == Some(Type::Mutable)
    }
    pub fn new(parsed: &[parse::Statement], conversion_table: &Table) -> Self {
        let mut ans = IdentBiMap(BiMapPlusMap::new());
        for st in parsed {
            ans.insert_stmt(&st, &conversion_table);
        }
        eprintln!("{:?}", ans.0);
        ans
    }
    fn insert_ident(&mut self, ident: &parse::Identifier, conversion_table: &Table) {
        if self.0.bimap_get_by_left(&ident).is_some() {
            return;
        }
        let mut candidate: Ascii = to_pinyin(ident.clone(), &conversion_table);
        loop {
            if self.0.bimap_get_by_right(&candidate).is_some() {
                candidate.push('_');
            } else {
                self.0.insert(ident.clone(), candidate, None);
                break;
            }
        }
    }
    fn insert_stmts(&mut self, statements: &[parse::Statement], conversion_table: &Table) {
        for s in statements {
            self.insert_stmt(&s, &conversion_table)
        }
    }
    fn insert_dat(&mut self, dat: &parse::Data, conversion_table: &Table) {
        if let parse::Data::Identifier(id) = dat {
            self.insert_ident(&id, &conversion_table)
        }
    }
    fn insert_data_or_qi2(&mut self, dat: &parse::OrQi2<parse::Data>, conversion_table: &Table) {
        if let parse::OrQi2::NotQi2(d1) = dat {
            self.insert_dat(d1, &conversion_table);
        }
    }
    fn insert_rvaluenoqi2(&mut self, val: &parse::Value<parse::Data>, conversion_table: &Table) {
        match val {
            parse::Value::Index(data, _)
            | parse::Value::Simple(data)
            | parse::Value::Length(data) => self.insert_dat(data, &conversion_table),
            parse::Value::IndexByIdent(data, ident) => {
                self.insert_dat(data, &conversion_table);
                self.insert_ident(ident, &conversion_table)
            }
        }
    }
    fn insert_unaryifexpr(&mut self, unary: &parse::UnaryIfExpr, conversion_table: &Table) {
        match unary {
            parse::UnaryIfExpr::Simple(data) => self.insert_data_or_qi2(data, &conversion_table),
            parse::UnaryIfExpr::Complex(val) => self.insert_rvaluenoqi2(val, &conversion_table),
        }
    }
    fn insert_ifexpr(&mut self, ifexpr: &parse::IfCond, conversion_table: &Table) {
        match ifexpr {
            parse::IfCond::Binary(data1, _, data2) => {
                self.insert_unaryifexpr(data1, &conversion_table);
                self.insert_unaryifexpr(data2, &conversion_table);
            }
            parse::IfCond::Unary(data) => {
                self.insert_unaryifexpr(data, &conversion_table);
            }
            parse::IfCond::NotQi2 => {}
        }
    }
    fn insert_math(&mut self, math: &parse::MathKind, conversion_table: &Table) {
        match math {
            parse::MathKind::ArithUnaryMath(data) => {
                self.insert_data_or_qi2(data, &conversion_table)
            }
            parse::MathKind::ArithBinaryMath(_, data1, _, data2)
            | parse::MathKind::ModMath(_, data1, _, data2) => {
                self.insert_data_or_qi2(data1, &conversion_table);
                self.insert_data_or_qi2(data2, &conversion_table);
            }
            parse::MathKind::BooleanAlgebra(ident1, ident2, _) => {
                self.insert_ident(&ident1, &conversion_table);
                self.insert_ident(&ident2, &conversion_table);
            }
        }
    }
    fn insert_rvalue(
        &mut self,
        rv: &parse::Value<parse::OrQi2<parse::Data>>,
        conversion_table: &Table,
    ) {
        match rv {
            parse::Value::Index(data, _)
            | parse::Value::Length(data)
            | parse::Value::Simple(data) => self.insert_data_or_qi2(data, &conversion_table),
            parse::Value::IndexByIdent(data, ident) => {
                self.insert_data_or_qi2(data, &conversion_table);
                self.insert_ident(ident, &conversion_table)
            }
        }
    }
    fn insert_idents(&mut self, idents: &[parse::Identifier], conversion_table: &Table) {
        for ident in idents {
            self.insert_ident(&ident, &conversion_table)
        }
    }
    fn insert_dats(&mut self, data_arr: &[parse::Data], conversion_table: &Table) {
        for dat in data_arr {
            self.insert_dat(dat, &conversion_table);
        }
    }
    fn insert_stmt(&mut self, st: &parse::Statement, conversion_table: &Table) {
        use parse::Statement::*;
        match st {
            ReferenceWhatIsLeft { data } => {
                self.insert_dat(&data, &conversion_table);
            }
            ForArr { list, elem, stmts } => {
                self.insert_ident(&list, &conversion_table);
                self.insert_ident(&elem, &conversion_table);
                self.insert_stmts(&stmts, &conversion_table)
            }
            ArrayCat { append_to, elems } => {
                self.insert_data_or_qi2(&parse::OrQi2::from(append_to), &conversion_table);
                self.insert_idents(&elems, &conversion_table)
            }
            ArrayFill {
                what_to_fill,
                elems,
            } => {
                self.insert_data_or_qi2(&parse::OrQi2::from(what_to_fill), &conversion_table);
                if let parse::OrQi2::NotQi2(ident) = what_to_fill {
                    let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone();
                    self.0.insert(ident.clone(), ascii, Some(Type::Mutable));
                }
                self.insert_dats(&elems, &conversion_table);
            }
            If {
                ifcase: (ifexpr, ifcase),
                elseifcases,
                elsecase,
            } => {
                self.insert_ifexpr(ifexpr, &conversion_table);
                self.insert_stmts(&ifcase, &conversion_table);
                for (elseifexpr, elseifcase) in elseifcases {
                    self.insert_ifexpr(elseifexpr, &conversion_table);
                    self.insert_stmts(&elseifcase, &conversion_table)
                }
                self.insert_stmts(&elsecase, &conversion_table)
            }
            Reference { rvalue } => self.insert_rvaluenoqi2(rvalue, &conversion_table),
            NameMulti { idents } => self.insert_idents(&idents, &conversion_table),
            Math { math } => self.insert_math(math, &conversion_table),
            Assignment {
                lvalue: parse::Lvalue::Simple(ident),
                rvalue,
            }
            | Assignment {
                lvalue: parse::Lvalue::Index(ident, _),
                rvalue,
            } => {
                self.insert_ident(&ident, &conversion_table);
                let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone();
                self.0.insert(ident.clone(), ascii, Some(Type::Mutable));
                self.insert_rvalue(rvalue, &conversion_table)
            }
            Assignment {
                lvalue: parse::Lvalue::IndexByIdent(ident, index),
                rvalue,
            } => {
                self.insert_ident(&ident, &conversion_table);
                let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone();
                self.0.insert(ident.clone(), ascii, Some(Type::Mutable));
                self.insert_ident(&index, &conversion_table);
                self.insert_rvalue(rvalue, &conversion_table)
            }
            Print | Flush | Break | Continue => {}
            ForEnum { statements, num: _ } | Loop { statements } => {
                self.insert_stmts(&statements, &conversion_table)
            }
            Declare(parse::DeclareStatement {
                how_many_variables: _,
                type_: _,
                data_arr,
            }) => self.insert_dats(data_arr, &conversion_table),
            InitDefine {
                name,
                type_: _,
                data: dat,
            } => {
                self.insert_dat(dat, &conversion_table);
                self.insert_ident(&name, &conversion_table)
            }
            ForEnumIdent { ident, statements } => {
                if let parse::OrQi2::NotQi2(i) = ident {
                    self.insert_ident(&i, &conversion_table);
                }
                self.insert_stmts(&statements, &conversion_table)
            }
            Define {
                idents,
                decl: parse::DeclareStatement {
                    how_many_variables: _,
                    type_: _,
                    data_arr,
                },
            } => {
                self.insert_dats(data_arr, &conversion_table);
                self.insert_idents(&idents, &conversion_table)
            }
        }
    }
}
use crate::parse;
use big_s::S;
use bimap_plus_map::BiMapPlusMap;
use std::collections::HashMap;

type Table = HashMap<String, String>;

fn to_pinyin(ident: parse::Identifier, conversion_table: &Table) -> String {
type Hanzi = parse::Identifier;
type Ascii = String;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Type {
    Mutable,
}

pub struct IdentBiMap(BiMapPlusMap<Hanzi, Ascii, Option<Type>>);

impl IdentBiMap {
    pub fn translate_from_hanzi(&self, id: &parse::Identifier) -> Ascii {
        self.0.bimap_get_by_left(id).unwrap().to_string()
    }
    pub fn is_mutable(&self, id: &parse::Identifier) -> bool {
        let typ = self.0.hashmap_get_by_left(id).unwrap();
        *typ == Some(Type::Mutable)
    }
    pub fn new(parsed: &[parse::Statement], conversion_table: &Table) -> Self {
        let mut ans = IdentBiMap(BiMapPlusMap::new());
        for st in parsed {
            ans.insert_stmt(&st, &conversion_table);
        }
        eprintln!("{:?}", ans.0);
        ans
    }
    fn insert_ident(&mut self, ident: &parse::Identifier, conversion_table: &Table) {
        if self.0.bimap_get_by_left(&ident).is_some() {
            return;
        }
        let mut candidate: Ascii = to_pinyin(ident.clone(), &conversion_table);
        loop {
            if self.0.bimap_get_by_right(&candidate).is_some() {
                candidate.push('_');
            } else {
                self.0.insert(ident.clone(), candidate, None);
                break;
            }
        }
    }
    fn insert_stmts(&mut self, statements: &[parse::Statement], conversion_table: &Table) {
        for s in statements {
            self.insert_stmt(&s, &conversion_table)
        }
    }
    fn insert_dat(&mut self, dat: &parse::Data, conversion_table: &Table) {
        if let parse::Data::Identifier(id) = dat {
            self.insert_ident(&id, &conversion_table)
        }
    }
    fn insert_data_or_qi2(&mut self, dat: &parse::OrQi2<parse::Data>, conversion_table: &Table) {
        if let parse::OrQi2::NotQi2(d1) = dat {
            self.insert_dat(d1, &conversion_table);
        }
    }
    fn insert_rvaluenoqi2(&mut self, val: &parse::Value<parse::Data>, conversion_table: &Table) {
        match val {
            parse::Value::Index(data, _)
            | parse::Value::Simple(data)
            | parse::Value::Length(data) => self.insert_dat(data, &conversion_table),
            parse::Value::IndexByIdent(data, ident) => {
                self.insert_dat(data, &conversion_table);
                self.insert_ident(ident, &conversion_table)
            }
        }
    }
    fn insert_unaryifexpr(&mut self, unary: &parse::UnaryIfExpr, conversion_table: &Table) {
        match unary {
            parse::UnaryIfExpr::Simple(data) => self.insert_data_or_qi2(data, &conversion_table),
            parse::UnaryIfExpr::Complex(val) => self.insert_rvaluenoqi2(val, &conversion_table),
        }
    }
    fn insert_ifexpr(&mut self, ifexpr: &parse::IfCond, conversion_table: &Table) {
        match ifexpr {
            parse::IfCond::Binary(data1, _, data2) => {
                self.insert_unaryifexpr(data1, &conversion_table);
                self.insert_unaryifexpr(data2, &conversion_table);
            }
            parse::IfCond::Unary(data) => {
                self.insert_unaryifexpr(data, &conversion_table);
            }
            parse::IfCond::NotQi2 => {}
        }
    }
    fn insert_math(&mut self, math: &parse::MathKind, conversion_table: &Table) {
        match math {
            parse::MathKind::ArithUnaryMath(data) => {
                self.insert_data_or_qi2(data, &conversion_table)
            }
            parse::MathKind::ArithBinaryMath(_, data1, _, data2)
            | parse::MathKind::ModMath(_, data1, _, data2) => {
                self.insert_data_or_qi2(data1, &conversion_table);
                self.insert_data_or_qi2(data2, &conversion_table);
            }
            parse::MathKind::BooleanAlgebra(ident1, ident2, _) => {
                self.insert_ident(&ident1, &conversion_table);
                self.insert_ident(&ident2, &conversion_table);
            }
        }
    }
    fn insert_rvalue(
        &mut self,
        rv: &parse::Value<parse::OrQi2<parse::Data>>,
        conversion_table: &Table,
    ) {
        match rv {
            parse::Value::Index(data, _)
            | parse::Value::Length(data)
            | parse::Value::Simple(data) => self.insert_data_or_qi2(data, &conversion_table),
            parse::Value::IndexByIdent(data, ident) => {
                self.insert_data_or_qi2(data, &conversion_table);
                self.insert_ident(ident, &conversion_table)
            }
        }
    }
    fn insert_idents(&mut self, idents: &[parse::Identifier], conversion_table: &Table) {
        for ident in idents {
            self.insert_ident(&ident, &conversion_table)
        }
    }
    fn insert_dats(&mut self, data_arr: &[parse::Data], conversion_table: &Table) {
        for dat in data_arr {
            self.insert_dat(dat, &conversion_table);
        }
    }
    fn insert_stmt(&mut self, st: &parse::Statement, conversion_table: &Table) {
        use parse::Statement::*;
        match st {
            ReferenceWhatIsLeft { data } => {
                self.insert_dat(&data, &conversion_table);
            }
            ForArr { list, elem, stmts } => {
                self.insert_ident(&list, &conversion_table);
                self.insert_ident(&elem, &conversion_table);
                self.insert_stmts(&stmts, &conversion_table)
            }
            ArrayCat { append_to, elems } => {
                self.insert_data_or_qi2(&parse::OrQi2::from(append_to), &conversion_table);
                self.insert_idents(&elems, &conversion_table)
            }
            ArrayFill {
                what_to_fill,
                elems,
            } => {
                self.insert_data_or_qi2(&parse::OrQi2::from(what_to_fill), &conversion_table);
                if let parse::OrQi2::NotQi2(ident) = what_to_fill {
                    let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone();
                    self.0.insert(ident.clone(), ascii, Some(Type::Mutable));
                }
                self.insert_dats(&elems, &conversion_table);
            }
            If {
                ifcase: (ifexpr, ifcase),
                elseifcases,
                elsecase,
            } => {
                self.insert_ifexpr(ifexpr, &conversion_table);
                self.insert_stmts(&ifcase, &conversion_table);
                for (elseifexpr, elseifcase) in elseifcases {
                    self.insert_ifexpr(elseifexpr, &conversion_table);
                    self.insert_stmts(&elseifcase, &conversion_table)
                }
                self.insert_stmts(&elsecase, &conversion_table)
            }
            Reference { rvalue } => self.insert_rvaluenoqi2(rvalue, &conversion_table),
            NameMulti { idents } => self.insert_idents(&idents, &conversion_table),
            Math { math } => self.insert_math(math, &conversion_table),
            Assignment {
                lvalue: parse::Lvalue::Simple(ident),
                rvalue,
            }
            | Assignment {
                lvalue: parse::Lvalue::Index(ident, _),
                rvalue,
            } => {
                self.insert_ident(&ident, &conversion_table);
                let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone();
                self.0.insert(ident.clone(), ascii, Some(Type::Mutable));
                self.insert_rvalue(rvalue, &conversion_table)
            }
            Assignment {
                lvalue: parse::Lvalue::IndexByIdent(ident, index),
                rvalue,
            } => {
                self.insert_ident(&ident, &conversion_table);
                let ascii = self.0.bimap_get_by_left(&ident).unwrap().clone();
                self.0.insert(ident.clone(), ascii, Some(Type::Mutable));
                self.insert_ident(&index, &conversion_table);
                self.insert_rvalue(rvalue, &conversion_table)
            }
            Print | Flush | Break | Continue => {}
            ForEnum { statements, num: _ } | Loop { statements } => {
                self.insert_stmts(&statements, &conversion_table)
            }
            Declare(parse::DeclareStatement {
                how_many_variables: _,
                type_: _,
                data_arr,
            }) => self.insert_dats(data_arr, &conversion_table),
            InitDefine {
                name,
                type_: _,
                data: dat,
            } => {
                self.insert_dat(dat, &conversion_table);
                self.insert_ident(&name, &conversion_table)
            }
            ForEnumIdent { ident, statements } => {
                if let parse::OrQi2::NotQi2(i) = ident {
                    self.insert_ident(&i, &conversion_table);
                }
                self.insert_stmts(&statements, &conversion_table)
            }
            Define {
                idents,
                decl: parse::DeclareStatement {
                    how_many_variables: _,
                    type_: _,
                    data_arr,
                },
            } => {
                self.insert_dats(data_arr, &conversion_table);
                self.insert_idents(&idents, &conversion_table)
            }
        }
    }
}
    let parse::Identifier(i) = ident;
    let vec = i
        .chars()
        .map(|c| match conversion_table.get(&format!("{:X}", c as u32)) {
            None => S("_"),
            Some(a) => a.split(' ').collect::<Vec<_>>()[0].to_string(),
        })
        .collect::<Vec<_>>();
    vec.join("")
}
function_block-function_prefix_line
[ { "content": "pub fn compile(parsed: &[parse::Statement], conversion_table: &HashMap<String, String>) -> String {\n\n let mut ans = vec![(0, S(\"fn main() {\"))];\n\n let mut env = Env {\n\n ans_counter: 0,\n\n rand_counter: 0,\n\n indent_level: 1,\n\n variables_not_yet_named: vec![],\n\n ident_map: identbimap::IdentBiMap::new(&parsed, &conversion_table),\n\n };\n\n\n\n for st in parsed {\n\n ans.append(&mut compile_statement(&mut env, &st));\n\n }\n\n\n\n ans.push((0, S(\"}\")));\n\n\n\n ans.iter()\n\n .map(|(indent, src)| format!(\"{}{}\\n\", \" \".repeat(*indent), src))\n\n .collect::<Vec<_>>()\n\n .join(\"\")\n\n}\n", "file_path": "src/compile/mod.rs", "rank": 2, "score": 123760.59770283167 }, { "content": "type Line = (usize, String);\n\n\n", "file_path": "src/compile/mod.rs", "rank": 3, "score": 119952.6222585197 }, { "content": "fn compile_literal(env: &Env, v: &parse::Data) -> String {\n\n match v.clone() {\n\n parse::Data::BoolValue(true) => S(\"true\"),\n\n parse::Data::BoolValue(false) => S(\"false\"),\n\n parse::Data::Identifier(ident) => env.ident_map.translate_from_hanzi(&ident),\n\n parse::Data::IntNum(intnum) => format!(\"{}.0\", intnum),\n\n parse::Data::StringLiteral(strlit) => format!(\"\\\"{}\\\"\", strlit), // FIXME properly escape\n\n }\n\n}\n\n\n\n/// It is possible to have three conflicting information on the number of variables declared.\n\n/// Let's say we have `吾有三數。曰三。曰九。名之曰「庚」。曰「辛」。曰「壬」。曰「癸」。書之。`\n\n/// Then `how_many_variables` is `3`, `type_` is `Type::Shu4`, `data_arr` is `vec![3, 9]` and `idents` are the idents.\n\n/// This compiles to\n\n/// ```\n\n/// var 庚 = 3;\n\n/// var 辛 = 9;\n\n/// var 壬 = 0;\n\n/// console.log();\n\n/// ```\n", "file_path": "src/compile/mod.rs", "rank": 5, "score": 83584.47240067 }, { "content": "fn compile_lvalue(env: &Env, lvalue: &parse::Lvalue) -> String {\n\n match lvalue {\n\n parse::Lvalue::Index(ident, index) => format!(\n\n \"{}[{} - 1]\",\n\n env.ident_map.translate_from_hanzi(&ident),\n\n index\n\n ),\n\n parse::Lvalue::Simple(ident) => env.ident_map.translate_from_hanzi(&ident),\n\n parse::Lvalue::IndexByIdent(ident, index) => format!(\n\n \"{}[({} as usize) - 1]\",\n\n env.ident_map.translate_from_hanzi(&ident),\n\n env.ident_map.translate_from_hanzi(&index),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/compile/mod.rs", "rank": 6, "score": 81956.26411566767 }, { "content": "fn compile_unaryifexpr(mut env: &mut Env, unary: &parse::UnaryIfExpr) -> String {\n\n match unary {\n\n parse::UnaryIfExpr::Simple(data1) => compile_dataorqi2(&mut env, data1),\n\n parse::UnaryIfExpr::Complex(rv) => compile_rvalue_noqi2(&mut env, &rv, true),\n\n }\n\n}\n\n\n", "file_path": "src/compile/mod.rs", "rank": 7, "score": 73888.00468033238 }, { "content": "fn compile_dataorqi2(env: &mut Env, a: &parse::OrQi2<parse::Data>) -> String {\n\n match a {\n\n parse::OrQi2::Qi2 => {\n\n let qi = env\n\n .variables_not_yet_named\n\n .last()\n\n .unwrap_or(&S(\"f64::NAN\"))\n\n .to_string();\n\n\n\n //《文言陰符》曰『言「其」者。取至近之魚而棄其餘。』\n\n env.variables_not_yet_named = vec![];\n\n qi\n\n }\n\n parse::OrQi2::NotQi2(data) => compile_literal(&env, &data),\n\n }\n\n}\n\n\n\n/// 吾有三數。曰三曰五曰二名之曰「甲」。加其以五。\n\n/// is to be translated as\n\n/// ```\n", "file_path": "src/compile/mod.rs", "rank": 8, "score": 73609.70088921575 }, { "content": "fn compile_rvalue(mut env: &mut Env, rvalue: &parse::Value<parse::OrQi2<parse::Data>>) -> String {\n\n match rvalue {\n\n parse::Value::Index(data, index) => {\n\n format!(\"{}[{} - 1]\", compile_dataorqi2(&mut env, data), index)\n\n 
}\n\n parse::Value::Simple(data) => compile_dataorqi2(&mut env, data),\n\n parse::Value::Length(data) => {\n\n format!(\"({}.len() as f64)\", compile_dataorqi2(&mut env, data))\n\n }\n\n parse::Value::IndexByIdent(data, index) => format!(\n\n \"{}[({} as usize) - 1]\",\n\n compile_dataorqi2(&mut env, data),\n\n env.ident_map.translate_from_hanzi(&index),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/compile/mod.rs", "rank": 9, "score": 64731.21738308403 }, { "content": "fn main() {\n\n let _ans1 = 2.0 + 3.0;\n\n let _ans2 = 1.0 + 3.0;\n\n let _ans3 = 3.0 + 3.0;\n\n let JIA3 = _ans3;\n\n let YI3 = _ans2;\n\n println!(\"{}\", _ans1);\n\n let _ans4 = 2.0 + 3.0;\n\n let _ans5 = 1.0 + 3.0;\n\n let _ans6 = 3.0 + 3.0;\n\n let BING3 = _ans5;\n\n let DING1 = _ans6;\n\n println!(\"{}\", _ans4);\n\n}\n", "file_path": "test035.rs", "rank": 11, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 2.0 + 3.0;\n\n let _ans2 = 1.0 + 3.0;\n\n let _ans3 = 3.0 + 3.0;\n\n let BING3 = _ans2;\n\n let DING1 = _ans3;\n\n println!(\"{}\", _ans1);\n\n let _ans4 = 2.0 + 3.0;\n\n let _ans5 = 1.0 + 3.0;\n\n let _ans6 = 3.0 + 3.0;\n\n let WU4 = _ans4;\n\n let JI3 = _ans5;\n\n let GENG1 = _ans6;\n\n println!(\"\");\n\n}\n", "file_path": "test031.rs", "rank": 12, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 2.0 % 3.0;\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "test042.rs", "rank": 13, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let mut _rand1 = 0.0;\n\n while _rand1 < JIA3 {\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n _rand1 += 1.0;\n\n }\n\n}\n", "file_path": "test006.rs", "rank": 14, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = false;\n\n let YI3 = true;\n\n let _ans1 = JIA3 || YI3;\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "test043.rs", "rank": 15, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "test002.rs", "rank": 16, "score": 50775.791374769564 }, { "content": "fn main() {\n\n for _ in 0..3 {\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n }\n\n}\n", "file_path": "test003.rs", "rank": 17, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 98765.0 * 3456.0;\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "test040.rs", "rank": 18, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut ZHU3 = 49.0;\n\n if ZHU3 > 50.0 {\n\n let _ans1 = ZHU3 + 1.0;\n\n ZHU3 = _ans1;\n\n }\n\n}\n", "file_path": "test048.rs", "rank": 19, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n}\n", "file_path": "test067.rs", "rank": 20, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let _ans1 = JIA3.len() as f64;\n\n println!(\"{}\", _ans1);\n\n let mut YI3 = 1.0;\n\n loop {\n\n if (JIA3.len() as f64) < YI3 {\n\n break;\n\n }\n\n let _ans2 = YI3 + 1.0;\n\n YI3 = _ans2;\n\n }\n\n}\n", "file_path": "test078.rs", "rank": 21, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let LU3REN2YE2 = true;\n\n let WEI4REN2YE2 = false;\n\n let DE2XING2KE1YE2 = false;\n\n let ZHENG4SHI4KE1YE2 = true;\n\n let FU4XING4YE2 = false;\n\n let WEI4SHI4YE2 = false;\n\n let ZAO3SI3YE2 = false;\n\n let BING4LI4YE2 = false;\n\n let ZHAN4SI3YE2 = true;\n\n let _ans1 = LU3REN2YE2;\n\n if _ans1 {\n\n let _ans2 = 
DE2XING2KE1YE2;\n\n if _ans2 {\n\n let _ans3 = WEI4SHI4YE2;\n\n if _ans3 {\n\n let _ans4 = ZAO3SI3YE2;\n\n if _ans4 {\n\n let _ans5 = \"賢哉。回也。人不堪其憂。回也不改其樂。\";\n\n println!(\"{}\", _ans5);\n", "file_path": "test057.rs", "rank": 22, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 4.0;\n\n let _ans2 = 7.0 - _ans1;\n\n let ZUO4_ = 2.0;\n\n println!(\"{}\", _ans2);\n\n}\n", "file_path": "test021.rs", "rank": 23, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.append(&mut vec![1.0, 2.0, 3.0]);\n\n let mut YI3 = JIA3;\n\n YI3[1 - 1] = 4.0;\n\n let _ans1 = JIA3;\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "fail000.rs", "rank": 24, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut LIE4 = vec![];\n\n LIE4.append(&mut vec![1.0, 2.0, 3.0]);\n\n}\n", "file_path": "test061.rs", "rank": 25, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = \"問天地好在。\";\n\n let _ans2 = \"\";\n\n println!(\"{} {}\", _ans1, _ans2);\n\n let _ans3 = \"天地\";\n\n println!(\"{}\", _ans3);\n\n let _ans4 = \"宇宙\";\n\n let _ans5 = \"洪荒\";\n\n let _ans6 = \"\";\n\n let _ans7 = \"\";\n\n println!(\"{} {} {} {}\", _ans4, _ans5, _ans6, _ans7);\n\n let _ans8 = 3.0;\n\n let _ans9 = 0.0;\n\n println!(\"{} {}\", _ans8, _ans9);\n\n}\n", "file_path": "test000.rs", "rank": 26, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = 3.0;\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n JIA3 = 4.0;\n\n}\n", "file_path": "test014.rs", "rank": 27, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "test012.rs", "rank": 28, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 3.0;\n\n}\n", "file_path": "test016.rs", "rank": 29, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut YI3 = vec![];\n\n YI3.append(&mut vec![2.0, 9.0, 4.0, 22.0]);\n\n}\n", "file_path": "test068.rs", "rank": 30, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n YI3.append(&mut vec![2.0, 9.0, 4.0, 22.0]);\n\n let _ans1 = [&JIA3[..], &YI3[..]].concat();\n\n let BING3 = _ans1;\n\n JIA3[1 - 1] = 5.0;\n\n}\n", "file_path": "test075.rs", "rank": 31, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut LIE4 = vec![];\n\n LIE4.append(&mut vec![1.0, 2.0, 3.0]);\n\n for YUAN2 in LIE4 {\n\n let _ans1 = YUAN2;\n\n let _ans2 = \"者。亦列中之物也。\";\n\n println!(\"{} {}\", _ans1, _ans2);\n\n }\n\n}\n", "file_path": "test062.rs", "rank": 32, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = 0.0;\n\n let mut JIA3 = 0.0;\n\n let mut JIA3 = 0.0;\n\n JIA3 = 4.0;\n\n YI3 = JIA3;\n\n}\n", "file_path": "test026.rs", "rank": 33, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 2.0 + 3.0;\n\n let _ans2 = 2.0 + 3.0;\n\n let _ans3 = 1.0 + 3.0;\n\n let _ans4 = 3.0 + 3.0;\n\n let BING3 = _ans3;\n\n let DING1 = _ans4;\n\n println!(\"{} {}\", _ans1, _ans2);\n\n let WU4 : (); // undefined\n\n let _ans5 = 2.0 + 3.0;\n\n let _ans6 = 1.0 + 3.0;\n\n let _ans7 = 3.0 + 3.0;\n\n let WU4 = _ans5;\n\n let JI3 = _ans6;\n\n let GENG1 = _ans7;\n\n println!(\"\");\n\n}\n", "file_path": "invalid036.rs", "rank": 34, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut XING2YI1 = vec![];\n\n XING2YI1.append(&mut vec![4.0, 9.0, 2.0]);\n\n let mut XING2ER4 = 
vec![];\n\n XING2ER4.append(&mut vec![3.0, 5.0, 7.0]);\n\n let mut XING2SAN1 = vec![];\n\n XING2SAN1.append(&mut vec![8.0, 1.0, 6.0]);\n\n let mut JIU3GONG1 = vec![];\n\n JIU3GONG1.append(&mut vec![XING2YI1, XING2ER4, XING2SAN1]);\n\n for XING2 in JIU3GONG1 {\n\n let _ans1 = XING2;\n\n println!(\"{}\", _ans1);\n\n }\n\n let GUANG3 = 9.0;\n\n let mut ZONG4HENG2TU2 = vec![];\n\n let mut _rand1 = 0.0;\n\n while _rand1 < GUANG3 {\n\n let mut XING2 = vec![];\n\n let mut _rand2 = 0.0;\n\n while _rand2 < GUANG3 {\n", "file_path": "fail003.rs", "rank": 35, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 2.0 + 3.0;\n\n let _ans2 = 1.0 + 3.0;\n\n let _ans3 = 3.0 + 3.0;\n\n let JIA3 = _ans2;\n\n let YI3 = _ans3;\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "test030.rs", "rank": 36, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 1.0 + 2.0;\n\n println!(\"{}\", _ans1);\n\n let _ans2 = 2.0 - 1.0;\n\n println!(\"{}\", _ans2);\n\n let _ans3 = 2.0 * 3.0;\n\n println!(\"{}\", _ans3);\n\n let _ans4 = 8.0 / 4.0;\n\n println!(\"{}\", _ans4);\n\n let _ans5 = 9.0 % 4.0;\n\n println!(\"{}\", _ans5);\n\n}\n", "file_path": "test038.rs", "rank": 37, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 75.0 + 2.0;\n\n let _ans2 = _ans1 * 9.0;\n\n let _ans3 = _ans2 - 36.0;\n\n let _ans4 = _ans3 / 2.0;\n\n let _ans5 = _ans4 + 500.0;\n\n println!(\"{}\", _ans5);\n\n}\n", "file_path": "test041.rs", "rank": 38, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let WU4 : (); // undefined\n\n let JIA3 : (); // undefined\n\n}\n", "file_path": "invalid037.rs", "rank": 39, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n YI3.append(&mut vec![2.0, 9.0, 4.0, 22.0]);\n\n let _ans1 = JIA3[1 - 1];\n\n println!(\"{}\", _ans1);\n\n let _ans2 = JIA3[2 - 1];\n\n println!(\"{}\", _ans2);\n\n let _ans3 = YI3[4 - 1];\n\n let _ans4 = _ans3 + 45.0;\n\n println!(\"{}\", _ans4);\n\n}\n", "file_path": "test073.rs", "rank": 40, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n println!(\"\");\n\n let YI3 = 5.0;\n\n let _ans1 = YI3;\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "test005.rs", "rank": 41, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = true;\n\n let YI3 = true;\n\n let _ans1 = JIA3 && YI3;\n\n let BING3 = _ans1;\n\n if BING3 {\n\n let _ans2 = \"古之人誠不我欺。\";\n\n println!(\"{}\", _ans2);\n\n }\n\n let _ans3 = JIA3 && YI3;\n\n if _ans3 {\n\n let _ans4 = \"古之人誠不我欺。\";\n\n println!(\"{}\", _ans4);\n\n }\n\n let _ans5 = JIA3 && YI3;\n\n if _ans5 {\n\n let _ans6 = \"古之人誠不我欺。\";\n\n println!(\"{}\", _ans6);\n\n }\n\n}\n", "file_path": "test054.rs", "rank": 42, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let mut _rand1 = 0.0;\n\n while _rand1 < JIA3 {\n\n let _ans1 = \"問天地好在\";\n\n println!(\"{}\", _ans1);\n\n _rand1 += 1.0;\n\n }\n\n}\n", "file_path": "test013.rs", "rank": 43, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut QI2SUO3 = vec![];\n\n let mut SUO3NAN2 = vec![];\n\n let mut QI2FANG1 = vec![];\n\n let mut SUO3ZHAN1 = vec![];\n\n let mut SUO3ZENG4 = vec![];\n\n let mut SUO3BAO4 = vec![];\n\n let mut SUO3GAN3 = vec![];\n\n let mut SUO3SHANG1 = vec![];\n\n QI2SUO3.append(&mut vec![\"太山\", \"桂林\", \"漢陽\", \"雁門\"]);\n\n SUO3NAN2.append(&mut vec![\"樑父艱\", \"湘水深\", \"隴阪長\", \"雪雰雰\"]);\n\n QI2FANG1.append(&mut vec![\"東\", \"南\", 
\"西\", \"北\"]);\n\n SUO3ZHAN1.append(&mut vec![\"翰\", \"襟\", \"裳\", \"巾\"]);\n\n SUO3ZENG4.append(&mut vec![\"金錯刀\", \"琴琅玕\", \"貂襜褕\", \"錦繡段\"]);\n\n SUO3BAO4.append(&mut vec![\"英瓊瑤\", \"雙玉盤\", \"明月珠\", \"青玉案\"]);\n\n SUO3GAN3.append(&mut vec![\"逍遙\", \"惆悵\", \"踟躕\", \"增嘆\"]);\n\n SUO3SHANG1.append(&mut vec![\"勞\", \"傷\", \"紆\", \"惋\"]);\n\n}\n", "file_path": "test082.rs", "rank": 44, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n YI3.append(&mut vec![2.0, 9.0, 4.0, 22.0]);\n\n let _ans1 = [&JIA3[..], &YI3[..]].concat();\n\n let BING3 = _ans1;\n\n}\n", "file_path": "test071.rs", "rank": 45, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n for YUAN2 in JIA3 {\n\n YI3.push(YUAN2);\n\n }\n\n}\n", "file_path": "test085.rs", "rank": 46, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let TOU2 = 35.0;\n\n let ZU2 = 94.0;\n\n let _ans1 = ZU2 / 2.0;\n\n let _ans2 = _ans1 - TOU2;\n\n let TU4 = _ans2;\n\n let _ans3 = TOU2 - TU4;\n\n let ZHI4 = _ans3;\n\n let _ans4 = ZHI4;\n\n let _ans5 = TU4;\n\n println!(\"{} {}\", _ans4, _ans5);\n\n}\n", "file_path": "test044.rs", "rank": 47, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 3.0;\n\n let _ans2 = _ans1 + 5.0;\n\n let _ans3 = _ans2 - 7.0;\n\n let _ans4 = 3.0;\n\n let _ans5 = 2.0 + _ans4;\n\n let _ans6 = 8.0 - _ans5;\n\n let _ans7 = 3.0;\n\n let _ans8 = 0.0;\n\n let _ans9 = _ans8 - 7.0;\n\n let ZUO4_ = 3.0;\n\n println!(\"{}\", _ans9);\n\n let ZUO4__ = 5.0;\n\n println!(\"\");\n\n}\n", "file_path": "test022.rs", "rank": 48, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let _ans1 = JIA3.len() as f64;\n\n println!(\"{}\", _ans1);\n\n let mut YI3 = 1.0;\n\n loop {\n\n if YI3 > (JIA3.len() as f64) {\n\n break;\n\n }\n\n let _ans2 = YI3 + 1.0;\n\n YI3 = _ans2;\n\n }\n\n}\n", "file_path": "test077.rs", "rank": 49, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n YI3.append(&mut vec![2.0, 9.0, 4.0, 22.0]);\n\n let mut DING1 = vec![];\n\n DING1.push(9.0);\n\n let _ans1 = [&JIA3[..], &YI3[..]].concat();\n\n let BING3 = _ans1;\n\n let _ans2 = [&JIA3[..], &YI3[..], &BING3[..], &DING1[..]].concat();\n\n let WU4 = _ans2;\n\n}\n", "file_path": "test069.rs", "rank": 50, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let YI3 = false;\n\n let BING3 = \"噫吁戲\";\n\n}\n", "file_path": "test015.rs", "rank": 51, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = vec![];\n\n let _ans1 = vec![];\n\n let YI3 = _ans1;\n\n}\n", "file_path": "test060.rs", "rank": 52, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = 0.0;\n\n let mut YI3 = 0.0;\n\n for _ in 0..100 {\n\n let _ans1 = JIA3 + 1.0;\n\n JIA3 = _ans1;\n\n let _ans2 = JIA3 % 2.0;\n\n if _ans2 == 1.0 {\n\n continue;\n\n }\n\n let _ans3 = YI3 + JIA3;\n\n YI3 = _ans3;\n\n }\n\n let _ans4 = YI3;\n\n println!(\"{}\", _ans4);\n\n}\n", "file_path": "test066.rs", "rank": 53, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = \"天地。\";\n\n let _ans2 = \"\";\n\n for _ in 0..3 {\n\n println!(\"{} {}\", _ans1, _ans2);\n\n let _ans3 = \"問天地好在。\";\n\n println!(\"{}\", _ans3);\n\n }\n\n}\n", "file_path": 
"test004.rs", "rank": 54, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let TOU2 = 340.0;\n\n let BI4 = 32.0;\n\n let ZU2 = 216.0;\n\n let WEI3 = 417.0;\n\n let _ans1 = BI4 / 2.0;\n\n let XING2TIAN1 = _ans1;\n\n let _ans2 = XING2TIAN1 * 2.0;\n\n let _ans3 = ZU2 - _ans2;\n\n let _ans4 = _ans3 / 4.0;\n\n let XU3HU2HE2 = _ans4;\n\n let _ans5 = XU3HU2HE2 * 10.0;\n\n let SHI2BEI4XU3HU2HE2 = _ans5;\n\n let _ans6 = TOU2 + WEI3;\n\n let _ans7 = _ans6 - SHI2BEI4XU3HU2HE2;\n\n let _ans8 = _ans7 / 3.0;\n\n let LIANG3TOU2SHE2 = _ans8;\n\n let _ans9 = WEI3 - LIANG3TOU2SHE2;\n\n let _ans10 = _ans9 - XU3HU2HE2;\n\n let _ans11 = _ans10 / 8.0;\n\n let JIU3WEI3HU2 = _ans11;\n\n let _ans12 = XU3HU2HE2 - JIU3WEI3HU2;\n\n let XU3DE2LA1 = _ans12;\n\n let _ans13 = XU3DE2LA1;\n\n let _ans14 = JIU3WEI3HU2;\n\n let _ans15 = XING2TIAN1;\n\n let _ans16 = LIANG3TOU2SHE2;\n\n println!(\"{} {} {} {}\", _ans13, _ans14, _ans15, _ans16);\n\n}\n", "file_path": "test047.rs", "rank": 55, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let ZUO4_ = 3.0;\n\n println!(\"\");\n\n let ZUO4__ = 5.0;\n\n println!(\"\");\n\n}\n", "file_path": "test020.rs", "rank": 56, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 3.0;\n\n let _ans2 = _ans1 + 5.0;\n\n let _ans3 = _ans2 - 7.0;\n\n let _ans4 = 3.0;\n\n let _ans5 = 2.0 + _ans4;\n\n let _ans6 = 8.0 - _ans5;\n\n let _ans7 = 3.0;\n\n let _ans8 = 0.0;\n\n let _ans9 = _ans8 - 7.0;\n\n}\n", "file_path": "test019.rs", "rank": 57, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 2.0 + 3.0;\n\n let _ans2 = 1.0 + 3.0;\n\n let _ans3 = 3.0 + 3.0;\n\n let JIA3 = _ans3;\n\n let YI3 = _ans2;\n\n println!(\"{}\", _ans1);\n\n let _ans4 = 2.0 + 3.0;\n\n let _ans5 = 1.0 + 3.0;\n\n let _ans6 = 3.0 + 3.0;\n\n let BING3 = _ans5;\n\n let DING1 = _ans6;\n\n println!(\"{}\", _ans4);\n\n let _ans7 = 2.0 + 3.0;\n\n let _ans8 = 1.0 + 3.0;\n\n let _ans9 = 3.0 + 3.0;\n\n let WU4 = _ans7;\n\n let JI3 = _ans8;\n\n let GENG1 = _ans9;\n\n println!(\"\");\n\n}\n", "file_path": "test046.rs", "rank": 58, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let YI3 = 1.0;\n\n if YI3 < JIA3[(YI3 as usize) - 1] {\n\n let _ans1 = YI3;\n\n println!(\"{}\", _ans1);\n\n }\n\n}\n", "file_path": "test086.rs", "rank": 59, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 1.0 + 3.0;\n\n let _ans2 = 2.0 + 3.0;\n\n let _ans3 = _ans2 - f64::NAN;\n\n let _ans4 = 1.0 + 3.0;\n\n let _ans5 = 2.0 + 3.0;\n\n let _ans6 = _ans5 - f64::NAN;\n\n}\n", "file_path": "invalid027.rs", "rank": 60, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let YI3 = 5.0;\n\n let mut _rand1 = 0.0;\n\n while _rand1 < JIA3 {\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n _rand1 += 1.0;\n\n }\n\n}\n", "file_path": "test008.rs", "rank": 61, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let _ans1 = JIA3.len() as f64;\n\n println!(\"{}\", _ans1);\n\n let mut YI3 = 1.0;\n\n loop {\n\n if YI3 > (JIA3.len() as f64) {\n\n break;\n\n }\n\n let _ans2 = JIA3[(YI3 as usize) - 1];\n\n println!(\"{}\", _ans2);\n\n let _ans3 = YI3 + 1.0;\n\n YI3 = _ans3;\n\n }\n\n}\n", "file_path": "test079.rs", "rank": 62, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let YI3 = 9.0;\n\n let BING3 = 27.0;\n\n}\n", "file_path": "test023.rs", "rank": 63, "score": 
50775.791374769564 }, { "content": "fn main() {\n\n let SHI3 = 999.0;\n\n let mut WU4 = SHI3;\n\n let mut _rand1 = 0.0;\n\n while _rand1 < SHI3 {\n\n let mut XU1 = WU4;\n\n let mut _rand2 = 0.0;\n\n while _rand2 < WU4 {\n\n let _ans1 = WU4 * XU1;\n\n let JIA3 = _ans1;\n\n if JIA3 < 10.0 {\n\n let _ans2 = XU1;\n\n let _ans3 = WU4;\n\n let _ans4 = \"如\";\n\n let _ans5 = JIA3;\n\n println!(\"{} {} {} {}\", _ans2, _ans3, _ans4, _ans5);\n\n } else {\n\n let _ans6 = XU1;\n\n let _ans7 = WU4;\n\n let _ans8 = JIA3;\n", "file_path": "test064.rs", "rank": 64, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = 91.0;\n\n let mut YI3 = 49.0;\n\n loop {\n\n if JIA3 == YI3 {\n\n break;\n\n } else if JIA3 > YI3 {\n\n let _ans1 = JIA3 - YI3;\n\n JIA3 = _ans1;\n\n } else {\n\n let _ans2 = YI3 - JIA3;\n\n YI3 = _ans2;\n\n }\n\n }\n\n let _ans3 = JIA3;\n\n println!(\"{}\", _ans3);\n\n}\n", "file_path": "test065.rs", "rank": 65, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = false;\n\n let YI3 = false;\n\n let _ans1 = 1.0 + 3.0;\n\n let _ans2 = JIA3 || YI3;\n\n let BING3 = _ans2;\n\n let _ans3 = !BING3;\n\n let DING1 = _ans3;\n\n if DING1 {\n\n let _ans4 = \"古之人誠不我欺。\";\n\n }\n\n println!(\"{} {}\", _ans1, _ans4);\n\n let _ans5 = 1.0 + 3.0;\n\n let _ans6 = JIA3 || YI3;\n\n if !_ans6 {\n\n let _ans7 = \"古之人誠不我欺。\";\n\n }\n\n println!(\"{}\", _ans7);\n\n}\n", "file_path": "test056.rs", "rank": 66, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let _ans1 = 9.0;\n\n let _ans2 = 7.0;\n\n println!(\"{} {}\", _ans1, _ans2);\n\n let YI3 = 3.0;\n\n let BING3 = 9.0;\n\n let _ans3 = 7.0;\n\n println!(\"{}\", _ans3);\n\n let DING1 = 3.0;\n\n let WU4 = 9.0;\n\n let JI3 = 7.0;\n\n println!(\"\");\n\n let GENG1 = 3.0;\n\n let XIN1 = 9.0;\n\n let REN2 = 0.0;\n\n println!(\"\");\n\n}\n", "file_path": "test007.rs", "rank": 67, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let _ans1 = &JIA3[1..].to_vec();\n\n let YI3 = _ans1;\n\n}\n", "file_path": "test080.rs", "rank": 68, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = true;\n\n if JIA3 {\n\n let _ans1 = \"陽者。歲之主也。\";\n\n println!(\"{}\", _ans1);\n\n } else {\n\n let _ans2 = \"陰者。陽之助也。\";\n\n println!(\"{}\", _ans2);\n\n }\n\n}\n", "file_path": "test051.rs", "rank": 69, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let QI4LIANG4 = 10.0;\n\n let mut JIU3LIANG4 = 9.0;\n\n if JIU3LIANG4 > QI4LIANG4 {\n\n JIU3LIANG4 = QI4LIANG4;\n\n }\n\n let _ans1 = JIU3LIANG4;\n\n println!(\"{}\", _ans1);\n\n}\n", "file_path": "test049.rs", "rank": 70, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 1.0 + 3.0;\n\n let _ans2 = 6.0 + 9.0;\n\n let JIA3 = _ans1;\n\n let YI3 = _ans2;\n\n}\n", "file_path": "test029.rs", "rank": 71, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 1.0 + 3.0;\n\n let BING3 = _ans1;\n\n}\n", "file_path": "test028.rs", "rank": 72, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n for YUAN2 in JIA3 {\n\n YI3.push(YUAN2);\n\n }\n\n}\n", "file_path": "test081.rs", "rank": 73, "score": 50775.791374769564 }, { "content": "fn main() {\n\n for _ in 0..5 {\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n }\n\n for _ in 0..1000 {\n\n let _ans2 = \"問天地好在。\";\n\n println!(\"{}\", _ans2);\n\n }\n\n}\n", "file_path": 
"test011.rs", "rank": 74, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let WU4 : (); // undefined\n\n}\n", "file_path": "invalid034.rs", "rank": 75, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 0.0 - 4.0;\n\n let FU4SI4 = _ans1;\n\n let _ans2 = 0.0 - 9.0;\n\n let FU4JIU3 = _ans2;\n\n let _ans3 = 9.0 % FU4SI4;\n\n println!(\"{}\", _ans3);\n\n let _ans4 = FU4JIU3 % 4.0;\n\n println!(\"{}\", _ans4);\n\n let _ans5 = FU4JIU3 % FU4SI4;\n\n println!(\"{}\", _ans5);\n\n}\n", "file_path": "test039.rs", "rank": 76, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 0.0;\n\n let JIA3 = 0.0;\n\n let JIA3 = 0.0;\n\n}\n", "file_path": "test025.rs", "rank": 77, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 2.0 + 3.0;\n\n let _ans2 = 2.0 + 3.0;\n\n let _ans3 = 1.0 + 3.0;\n\n let _ans4 = 3.0 + 3.0;\n\n println!(\"\");\n\n}\n", "file_path": "test032.rs", "rank": 78, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 2.0 + 3.0;\n\n let _ans2 = 1.0 + 3.0;\n\n let _ans3 = 3.0 + 3.0;\n\n let WU4 : (); // undefined\n\n let JI3 = _ans1;\n\n let GENG1 = _ans2;\n\n let XIN1 = _ans3;\n\n println!(\"\");\n\n}\n", "file_path": "invalid033.rs", "rank": 79, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = false;\n\n let YI3 = true;\n\n let _ans1 = JIA3 && YI3;\n\n let BING3 = _ans1;\n\n let _ans2 = JIA3 || YI3;\n\n let BING3 = _ans2;\n\n let _ans3 = !JIA3;\n\n let BING3 = _ans3;\n\n}\n", "file_path": "test053.rs", "rank": 80, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let JIA3 = 3.0;\n\n}\n", "file_path": "test024.rs", "rank": 81, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let SHI3 = 9.0;\n\n let mut WU4 = SHI3;\n\n let mut _rand1 = 0.0;\n\n while _rand1 < SHI3 {\n\n let mut XU1 = WU4;\n\n let mut _rand2 = 0.0;\n\n while _rand2 < WU4 {\n\n let _ans1 = WU4 * XU1;\n\n let JIA3 = _ans1;\n\n if JIA3 < 10.0 {\n\n let _ans2 = XU1;\n\n let _ans3 = WU4;\n\n let _ans4 = \"如\";\n\n let _ans5 = JIA3;\n\n println!(\"{} {} {} {}\", _ans2, _ans3, _ans4, _ans5);\n\n } else {\n\n let _ans6 = XU1;\n\n let _ans7 = WU4;\n\n let _ans8 = JIA3;\n", "file_path": "test063.rs", "rank": 82, "score": 50775.791374769564 }, { "content": "fn main() {\n\n for _ in 0..5 {\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n }\n\n for _ in 0..10 {\n\n let _ans2 = \"問天地好在。\";\n\n println!(\"{}\", _ans2);\n\n }\n\n}\n", "file_path": "test010.rs", "rank": 83, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let mut _rand1 = 0.0;\n\n while _rand1 < JIA3 {\n\n let _ans1 = \"問天地好在。\";\n\n println!(\"{}\", _ans1);\n\n _rand1 += 1.0;\n\n }\n\n}\n", "file_path": "test009.rs", "rank": 84, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let TOU2 = 579.0;\n\n let ZU2 = 2284.0;\n\n let _ans1 = ZU2 / 2.0;\n\n let _ans2 = _ans1 - TOU2;\n\n let TU4 = _ans2;\n\n let _ans3 = TOU2 - TU4;\n\n let ZHI4 = _ans3;\n\n let _ans4 = ZHI4;\n\n let _ans5 = TU4;\n\n println!(\"{} {}\", _ans4, _ans5);\n\n}\n", "file_path": "test045.rs", "rank": 85, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n YI3.append(&mut vec![2.0, 9.0, 4.0, 22.0]);\n\n let _ans1 = 1.0 + 3.0;\n\n let _ans2 = [&JIA3[..], &YI3[..]].concat();\n\n let BING3 = _ans1;\n\n let CHOU3 = _ans2;\n\n}\n", "file_path": "test070.rs", "rank": 86, "score": 50775.791374769564 }, { 
"content": "fn main() {\n\n let mut XING2YI1 = vec![];\n\n XING2YI1.append(&mut vec![4.0, 9.0, 2.0]);\n\n let mut XING2ER4 = vec![];\n\n XING2ER4.append(&mut vec![3.0, 5.0, 7.0]);\n\n let mut XING2SAN1 = vec![];\n\n XING2SAN1.append(&mut vec![8.0, 1.0, 6.0]);\n\n let mut JIU3GONG1 = vec![];\n\n JIU3GONG1.append(&mut vec![XING2YI1, XING2ER4, XING2SAN1]);\n\n for XING2 in JIU3GONG1 {\n\n let _ans1 = XING2;\n\n println!(\"{}\", _ans1);\n\n }\n\n let GUANG3 = 9.0;\n\n let mut ZONG4HENG2TU2 = vec![];\n\n let mut _rand1 = 0.0;\n\n while _rand1 < GUANG3 {\n\n let mut XING2 = vec![];\n\n let mut _rand2 = 0.0;\n\n while _rand2 < GUANG3 {\n\n XING2.push(0.0);\n\n _rand2 += 1.0;\n\n }\n\n ZONG4HENG2TU2.push(XING2);\n\n _rand1 += 1.0;\n\n }\n\n}\n", "file_path": "fail002.rs", "rank": 87, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = 3.0;\n\n let _ans1 = 5.0;\n\n let _ans2 = 2.0;\n\n let _ans3 = _ans2 + 5.0;\n\n}\n", "file_path": "test017.rs", "rank": 88, "score": 50775.791374769564 }, { "content": "fn main() {\n\n loop {\n\n let _ans1 = \"天地長不沒。山川無改時。\";\n\n println!(\"{}\", _ans1);\n\n }\n\n}\n", "file_path": "test058.rs", "rank": 89, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut ZHU3 = 49.0;\n\n if ZHU3 > 50.0 {\n\n let _ans1 = ZHU3 + 1.0;\n\n ZHU3 = _ans1;\n\n } else {\n\n let _ans2 = ZHU3 - 1.0;\n\n ZHU3 = _ans2;\n\n }\n\n}\n", "file_path": "test050.rs", "rank": 90, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = \"問天地好在。\";\n\n let _ans2 = \"\";\n\n println!(\"{} {}\", _ans1, _ans2);\n\n let _ans3 = \"天地\";\n\n println!(\"{}\", _ans3);\n\n let _ans4 = \"宇宙\";\n\n let _ans5 = \"洪荒\";\n\n let _ans6 = \"\";\n\n let _ans7 = \"\";\n\n println!(\"{} {} {} {}\", _ans4, _ans5, _ans6, _ans7);\n\n let _ans8 = 3.0;\n\n let _ans9 = 0.0;\n\n println!(\"{} {}\", _ans8, _ans9);\n\n}\n", "file_path": "test001.rs", "rank": 91, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n YI3.append(&mut vec![2.0, 9.0, 4.0, 22.0]);\n\n let mut DING1 = vec![];\n\n DING1.push(9.0);\n\n let _ans1 = [&JIA3[..], &YI3[..]].concat();\n\n let BING3 = _ans1;\n\n let _ans2 = [&JIA3[..], &YI3[..], &BING3[..], &DING1[..]].concat();\n\n let WU4 = _ans2;\n\n}\n", "file_path": "test072.rs", "rank": 92, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = JIA3;\n\n JIA3.append(&mut vec![1.0, 2.0]);\n\n let _ans2 = 1.0 + 3.0;\n\n let _ans3 = JIA3[1 - 1];\n\n let YI3 = _ans2;\n\n let BING3 = _ans3;\n\n let _ans4 = 2.0 + 4.0;\n\n let _ans5 = BING3;\n\n let WU4 = _ans4;\n\n let JI3 = _ans5;\n\n}\n", "file_path": "test074.rs", "rank": 93, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let JIA3 = false;\n\n let YI3 = false;\n\n let _ans1 = JIA3 || YI3;\n\n let BING3 = _ans1;\n\n let _ans2 = !BING3;\n\n let DING1 = _ans2;\n\n if DING1 {\n\n let _ans3 = \"古之人誠不我欺。\";\n\n println!(\"{}\", _ans3);\n\n }\n\n let _ans4 = JIA3 || YI3;\n\n if !_ans4 {\n\n let _ans5 = \"古之人誠不我欺。\";\n\n println!(\"{}\", _ans5);\n\n }\n\n}\n", "file_path": "test055.rs", "rank": 94, "score": 50775.791374769564 }, { "content": "fn main() {\n\n for _ in 0..100 {\n\n let _ans1 = \"讀書百遍。其義自見。\";\n\n println!(\"{}\", _ans1);\n\n }\n\n}\n", "file_path": "test059.rs", "rank": 95, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let _ans1 = 3.0;\n\n let _ans2 = _ans1 + 5.0;\n\n let _ans3 = _ans2 - 2.0;\n\n println!(\"{}\", _ans3);\n\n}\n", "file_path": 
"test018.rs", "rank": 96, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut WU4 = \"人\";\n\n if WU4 == \"禽獸\" {\n\n let _ans1 = \"爾芻狗也。\";\n\n println!(\"{}\", _ans1);\n\n } else if WU4 == \"草木\" {\n\n let _ans2 = \"爾亦芻狗也。\";\n\n println!(\"{}\", _ans2);\n\n } else if WU4 == \"人\" {\n\n let _ans3 = \"爾雖人。於我實芻狗也。\";\n\n println!(\"{}\", _ans3);\n\n } else if WU4 == \"芻狗\" {\n\n let _ans4 = \"更不待言。\";\n\n println!(\"{}\", _ans4);\n\n } else {\n\n let _ans5 = \"吾不知爾何物。然爾之為芻狗明也。\";\n\n println!(\"{}\", _ans5);\n\n }\n\n WU4 = \"芻狗\";\n\n}\n", "file_path": "test052.rs", "rank": 97, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut JIA3 = vec![];\n\n JIA3.push(3.0);\n\n JIA3.push(5.0);\n\n let mut YI3 = vec![];\n\n YI3.append(&mut vec![2.0, 9.0, 4.0, 22.0]);\n\n let _ans1 = [&JIA3[..], &YI3[..]].concat();\n\n let BING3 = _ans1;\n\n JIA3[1 - 1] = 5.0;\n\n YI3[3 - 1] = BING3[4 - 1];\n\n}\n", "file_path": "test076.rs", "rank": 98, "score": 50775.791374769564 }, { "content": "fn main() {\n\n let mut XING2YI1 = vec![];\n\n XING2YI1.append(&mut vec![4.0, 9.0, 2.0]);\n\n let mut XING2ER4 = vec![];\n\n XING2ER4.append(&mut vec![3.0, 5.0, 7.0]);\n\n let mut XING2SAN1 = vec![];\n\n XING2SAN1.append(&mut vec![8.0, 1.0, 6.0]);\n\n let mut JIU3GONG1 = vec![];\n\n JIU3GONG1.append(&mut vec![XING2YI1, XING2ER4, XING2SAN1]);\n\n for XING2 in JIU3GONG1 {\n\n let _ans1 = XING2;\n\n println!(\"{}\", _ans1);\n\n }\n\n}\n", "file_path": "fail001.rs", "rank": 99, "score": 50775.791374769564 } ]
Rust
rust/leetcode/src/hash_table/valid_sudoku.rs
zayfen/LeetCode
8efdb380b79355e463f5a8b01105275ac699e08c
struct Solution (); impl Solution { pub fn is_valid_cell_in_row (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); for num in &board[row] { if *num == '.' { continue; } let number = num.to_digit(10).unwrap(); if numbers[(number-1) as usize] == '.' { return false; } else { numbers[(number-1) as usize] = '.'; } } return true; } pub fn is_valid_cell_in_column (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); for r in 0..9 { let n = board[r][col]; if n == '.' { continue; } let number = n.to_digit(10).unwrap(); if numbers[(number - 1) as usize] == '.' { return false; } else { numbers[(number - 1) as usize] = '.' } } return true; } pub fn is_valid_cell_in_block (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); let x: usize = (row as f32 / 3f32).floor() as usize; let y: usize = (col as f32 / 3f32).floor() as usize; let (cell_index_x, cell_index_y) = (x * 3, y * 3); for i in 0..3 { for j in 0..3 { if board[cell_index_x + i][cell_index_y + j] == '.' { continue; } let number = board[cell_index_x + i][cell_index_y + j].to_digit(10).unwrap(); if numbers[(number-1) as usize] == '.' { return false; } else { numbers[(number-1) as usize] = '.'; } } } return true; } pub fn is_valid_sudoku (board: Vec<Vec<char>>) -> bool { for row in 0..9 { for col in 0..9 { if board[row][col] != '.' { let pass = Solution::is_valid_cell_in_row(&board, row, col) && Solution::is_valid_cell_in_column(&board, row, col) && Solution::is_valid_cell_in_block(&board, row, col); if !pass { return false; } } } } return true; } pub fn is_valid_sudoku_v2 (board: Vec<Vec<char>>) -> bool { let mut hash_map = std::collections::HashMap::new(); for row in 0..9 { for col in 0..9 { let ch = board[row][col]; if ch != '.' { let x: usize = (row as f32 / 3f32).floor() as usize; let y: usize = (col as f32 / 3f32).floor() as usize; let (cell_index_x, cell_index_y) = (x * 3, y * 3); let row_key = format!("{}r{}", ch, row); let col_key = format!("{}c{}", ch, col); let block_key = format!("{}b{:?}", ch, (cell_index_x, cell_index_y)); if hash_map.insert(row_key, "") == None && hash_map.insert(col_key, "") == None && hash_map.insert(block_key, "") == None { continue; } return false; } } } return true; } } #[cfg(test)] mod hash_table { use super::*; #[test] fn test_is_valid_sudoku () { let board = vec!( vec!('5','3','.','.','7','.','.','.','.'), vec!('6','.','.','1','9','5','.','.','.'), vec!('.','9','8','.','.','.','.','6','.'), vec!('8','.','.','.','6','.','.','.','3'), vec!('4','.','.','8','.','3','.','.','1'), vec!('7','.','.','.','2','.','.','.','6'), vec!('.','6','.','.','.','.','2','8','.'), vec!('.','.','.','4','1','9','.','.','5'), vec!('.','.','.','.','8','.','.','7','9') ); assert_eq!(Solution::is_valid_sudoku_v2(board), true); } }
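The `is_valid_sudoku_v2` variant in the file above replaces the three per-cell scans with a single pass that records one composite string key per row, column and 3×3 block constraint, failing as soon as a key repeats. A minimal standalone sketch of that composite-key idea (the `no_duplicates` / `seen` names and the `main` harness are illustrative additions, not part of the dataset row):

// Illustration only: same bookkeeping as is_valid_sudoku_v2, but with a
// HashSet of composite keys instead of a HashMap with empty values.
use std::collections::HashSet;

fn no_duplicates(board: &[Vec<char>]) -> bool {
    let mut seen: HashSet<String> = HashSet::new();
    for row in 0..9 {
        for col in 0..9 {
            let ch = board[row][col];
            if ch == '.' {
                continue; // empty cell, no constraint
            }
            // One key per constraint; HashSet::insert returns false on a repeat,
            // which means the same digit already appeared in that row/column/block.
            let block = (row / 3, col / 3);
            if !seen.insert(format!("{}r{}", ch, row))
                || !seen.insert(format!("{}c{}", ch, col))
                || !seen.insert(format!("{}b{:?}", ch, block))
            {
                return false;
            }
        }
    }
    true
}

fn main() {
    // An all-empty board has no constraints and is trivially valid.
    let board: Vec<Vec<char>> = vec![vec!['.'; 9]; 9];
    assert!(no_duplicates(&board));
}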
struct Solution (); impl Solution { pub fn is_valid_cell_in_row (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); for num in &board[row] { if *num == '.' { continue; } let number = num.to_digit(10).unwrap(); if numbers[(number-1) as usize] == '.' { return false; } else { numbers[(number-1) as usize] = '.'; } } return true; } pub fn is_valid_cell_in_column (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); for r in 0..9 { let n = board[r][col]; if n == '.' { continue; } let number = n.to_digit(10).unwrap(); if numbers[(number - 1) as usize] == '.' { return false; } else { numbers[(number - 1) as usize] = '.' } } return true; } pub fn is_valid_cell_in_block (board: &Vec<Vec<char>>, row: usize, col: usize) -> bool { let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9'); let x: usize = (row as f32 / 3f32).floor() as usize; let y: usize = (col as f32 / 3f32).floor() as usize; let (cell_index_x, cell_index_y) = (x * 3, y * 3); for i in 0..3 { for j in 0..3 { if board[cell_index_x + i][cell_index_y + j] == '.' { continue; } let number = board[cell_index_x + i][cell_index_y + j].to_digit(10).unwrap(); if numbers[(number-1) as usize] == '.' { return false; } else { numbers[(number-1) as usize] = '.'; } } } return true; } pub fn is_valid_sudoku (board: Vec<Vec<char>>) -> bool { for row in 0..9 { for col in 0..9 { if board[row][col] != '.' { let pass = Solution::is_valid_cell_in_row(&board, row, col) && Solution::is_valid_cell_in_column(&board, row, col) && Solution::is_valid_cell_in_block(&board, row, col); if !pass { return false; } } } } return true; } pub fn is_valid_sudoku_v2 (board: Vec<Vec<char>>) -> bool { let mut hash_map = std::collections::HashMap::new(); for row in 0..9 { for col in 0..9 { let ch = board[row][col]; if ch != '.' { let x: usize = (row as f32 / 3f32).floor() as usize; let y: usize = (col as f32 / 3f32).floor() as usize; let (cell_index_x, cell_index_y) = (x * 3, y * 3); let row_key = format!("{}r{}", ch, row); let col_key = format!("{}c{}", ch, col); let block_key = format!("{}b{:?}", ch, (cell_index_x, cell_index_y)); if hash_map.insert(row_key, "") == None && hash_map.insert(col_key, "") == None && hash_map.insert(block_key, "") == None { continue; } return false; } } } return true; } } #[cfg(test)] mod hash_table { use super::*; #[test]
}
fn test_is_valid_sudoku () { let board = vec!( vec!('5','3','.','.','7','.','.','.','.'), vec!('6','.','.','1','9','5','.','.','.'), vec!('.','9','8','.','.','.','.','6','.'), vec!('8','.','.','.','6','.','.','.','3'), vec!('4','.','.','8','.','3','.','.','1'), vec!('7','.','.','.','2','.','.','.','6'), vec!('.','6','.','.','.','.','2','8','.'), vec!('.','.','.','4','1','9','.','.','5'), vec!('.','.','.','.','8','.','.','7','9') ); assert_eq!(Solution::is_valid_sudoku_v2(board), true); }
function_block-full_function
[ { "content": "pub fn switch (nums: &mut Vec<i32>, left: usize, right: usize) {\n\n let tmp = nums[left];\n\n nums[left] = nums[right];\n\n nums[right] = tmp;\n\n}\n\n\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 0, "score": 166054.35345015902 }, { "content": "pub fn heapify (nums: &mut Vec<i32>, heap_size: usize, i: usize) {\n\n let mut largest: usize = i;\n\n\n\n let left = left_child(i);\n\n if left < heap_size && nums[left] > nums[largest] {\n\n largest = left;\n\n }\n\n\n\n let right = right_child(i);\n\n if right < heap_size && nums[right] > nums[largest] {\n\n largest = right;\n\n }\n\n\n\n if largest != i {\n\n // switch i and largest\n\n switch(nums, largest, i);\n\n heapify(nums, heap_size, largest);\n\n }\n\n}\n\n\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 1, "score": 166054.35345015902 }, { "content": "pub fn build_max_heap (nums: &mut Vec<i32>, heap_size: usize) -> Vec<i32> {\n\n let mut result: Vec<i32> = vec![];\n\n if nums.is_empty() {\n\n return result;\n\n }\n\n\n\n let the_last_notleaf_child: usize = parent(heap_size - 1);\n\n println!(\"the_last_notleaf_child: {}\", the_last_notleaf_child);\n\n for i in 0..=the_last_notleaf_child {\n\n heapify(nums, heap_size, the_last_notleaf_child - i);\n\n }\n\n \n\n result\n\n}\n\n\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 2, "score": 149877.46018980516 }, { "content": "// assume nums is sorted\n\npub fn n_sum (nums: &[i32], target: i32, n: i32, mut result: Vec<i32>, mut store: &mut Vec<Vec<i32>>) {\n\n println!(\"n: {:?} ; result: {:?}\", n, result);\n\n if n == 0 { // print result\n\n \n\n if target == 0 {\n\n println!(\"{:?}\", result);\n\n\n\n if store.len() > 0 {\n\n let mut existed = false;\n\n for item in store.iter() {\n\n if item.iter().zip(&result).all(|(a, b)| *a == *b) {\n\n existed = true;\n\n }\n\n }\n\n \n\n if !existed {\n\n store.push(result);\n\n }\n\n // let last_result = &store[store.len() - 1];\n\n // if !(last_result.iter().zip(&result).all(|(a, b)| *a == *b)) {\n", "file_path": "rust/leetcode/src/hash_table/four_sum.rs", "rank": 3, "score": 146320.06757981225 }, { "content": "pub fn n_sum_v2 (nums: &[i32], target: i32, n: i32, mut result: Vec<i32>, mut store: &mut Vec<Vec<i32>>) {\n\n let len_nums = nums.len();\n\n // early return\n\n if len_nums < 2 || n < 2 || nums[0] * n > target || target > nums.last().unwrap() * n {\n\n return ()\n\n }\n\n \n\n if n == 2 {\n\n let mut left = 0;\n\n let mut right = len_nums - 1;\n\n while left < right {\n\n let sum = nums[left] + nums[right];\n\n let mut result = result.clone();\n\n println!(\"nums: {:?}\", nums);\n\n println!(\"n_sum_v2 left: {:?} ;right: {:?} ;sum: {:?} ;target: {:?}\", left, right, sum, target);\n\n if sum == target {\n\n result.push(nums[left]);\n\n result.push(nums[right]);\n\n store.push(result);\n\n left += 1;\n", "file_path": "rust/leetcode/src/hash_table/four_sum.rs", "rank": 4, "score": 144793.28056695047 }, { "content": "fn dfs (dp: &mut Vec<usize>, pindex: usize, s: &String, p: &String) -> bool {\n\n if p.len() == 0 {\n\n return s.len() == 0;\n\n }\n\n\n\n if pindex >= p.len() {\n\n return dp[p.len()-1] == s.len();\n\n }\n\n \n\n let mut matched = false;\n\n let current_pattern_char = p.chars().nth(pindex).unwrap();\n\n match current_pattern_char {\n\n '*' => { // 因为 ‘×’ 可以匹配空字符串\n\n if s.is_empty() {\n\n return p.len() == 1;\n\n }\n\n\n\n let mut start_index = 0;\n\n if pindex > 0 {\n\n start_index = dp[pindex - 
1];\n", "file_path": "rust/leetcode/src/dp/p44_wildcard_matching.rs", "rank": 5, "score": 142632.59753089113 }, { "content": "pub fn parent (i: usize) -> usize {\n\n if i == 0 {\n\n return i;\n\n }\n\n (i - 1) / 2\n\n}\n\n\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 6, "score": 124030.5584072038 }, { "content": "pub fn right_child (i: usize) -> usize {\n\n i * 2 + 2\n\n}\n\n\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 7, "score": 122166.94625684945 }, { "content": "pub fn left_child (i: usize) -> usize {\n\n i * 2 + 1\n\n}\n\n\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 8, "score": 122166.94625684945 }, { "content": "pub fn max_sub_array(nums: Vec<i32>) -> i32 {\n\n // if curr_num + prev_num > curr_num ,then make nums[curr] = curr_nums + prev_nums else nums[curr] = nums[curr]\n\n let len_nums = nums.len();\n\n let mut result: Vec<i32> = Vec::new();\n\n result.push(*nums.get(0).unwrap());\n\n\n\n for i in 1..len_nums {\n\n let curr_num = *nums.get(i).unwrap();\n\n let prev_num = result.get(result.len() - 1).unwrap();\n\n\n\n if curr_num + prev_num >= curr_num {\n\n result.push(curr_num + prev_num);\n\n } else {\n\n result.push(curr_num);\n\n }\n\n }\n\n println!(\"{:?}\", nums);\n\n println!(\"{:?}\", result);\n\n *result.iter().max().unwrap()\n\n}\n\n\n", "file_path": "rust/leetcode/src/dp/maximum_subarray.rs", "rank": 9, "score": 109136.44792633384 }, { "content": "pub fn max_sub_array_v2(nums: Vec<i32>) -> i32 {\n\n let len_nums = nums.len();\n\n\n\n let mut result = nums;\n\n\n\n for i in 1..len_nums {\n\n let curr_num = *result.get(i).unwrap();\n\n let prev_num = *result.get(i - 1).unwrap();\n\n\n\n if curr_num + prev_num >= curr_num {\n\n *result.get_mut(i).unwrap() = curr_num + prev_num;\n\n }\n\n }\n\n\n\n println!(\"{:?}\", result);\n\n\n\n *result.iter().max().unwrap()\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "rust/leetcode/src/dp/maximum_subarray.rs", "rank": 10, "score": 107383.71273955618 }, { "content": "pub fn search (nums: Vec<i32>, target: i32) -> i32 {\n\n let length = nums.len();\n\n if length == 0 {\n\n return -1;\n\n }\n\n let (length, first_num, last_num) = (length, nums[0], nums[length - 1]);\n\n let (mut left, mut right) = (0, length - 1);\n\n\n\n if first_num == target {\n\n return 0 as i32;\n\n }\n\n\n\n if target == last_num {\n\n return (length - 1) as i32;\n\n }\n\n\n\n while left <= right {\n\n let mid = left + (((right - left) as f32 / 2f32).ceil() as usize);\n\n let mid_num = nums[mid];\n\n println!(\"nums: {:?}, (left, right): {:?}, mid: {}, mid_num: {}\", nums, (left, right), mid, mid_num);\n", "file_path": "rust/leetcode/src/array/search_in_rotated_sorted_array.rs", "rank": 11, "score": 102099.00832980595 }, { "content": "pub fn unique_paths (m: i32, n: i32) -> i32 {\n\n\n\n let mut state:[[i32; 100]; 100] = [[0; 100]; 100];\n\n for i in 0..100 {\n\n state[i as usize][0] = 1;\n\n state[0][i as usize] = 1;\n\n }\n\n \n\n for x in 1..m {\n\n for y in 1..n {\n\n let x: usize = x as usize;\n\n let y: usize = y as usize;\n\n state[x][y] = state[x-1][y] + state[x][y-1];\n\n }\n\n }\n\n\n\n let m: usize = m as usize;\n\n let n: usize = n as usize;\n\n state[m-1][n-1]\n\n}\n\n\n", "file_path": "rust/leetcode/src/dp/unique_paths.rs", "rank": 12, "score": 101318.98368212869 }, { "content": "pub fn unique_paths_v2 (m: i32, n: i32) -> i32 {\n\n\n\n if m.le(&0) || n.le(&0) {\n\n return 0;\n\n }\n\n \n\n if m.eq(&0) || n.eq(&0) {\n\n return 
1;\n\n }\n\n\n\n return unique_paths_v2(m-1, n) + unique_paths_v2(m, n-1);\n\n}\n\n\n", "file_path": "rust/leetcode/src/dp/unique_paths.rs", "rank": 13, "score": 100035.78613037625 }, { "content": "pub fn unique_paths_v3 (m: i32, n: i32) -> i32 {\n\n match (m, n) {\n\n (-1, _) | (_, -1) => 0,\n\n (0, _) | (_, 0) => 1,\n\n _ => unique_paths_v3(m-1, n) + unique_paths_v3(m, n-1)\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod dp_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_unique_paths () {\n\n assert_eq!(unique_paths(3, 2), 3);\n\n assert_eq!(unique_paths(7, 3), 28);\n\n assert_eq!(unique_paths(1, 1), 1);\n\n }\n\n\n", "file_path": "rust/leetcode/src/dp/unique_paths.rs", "rank": 14, "score": 100035.78613037625 }, { "content": "pub fn four_sum(nums: Vec<i32>, target: i32) -> Vec<Vec<i32>> {\n\n let mut store: Vec<Vec<i32>> = Vec::new();\n\n let mut result: Vec<i32> = Vec::new();\n\n let mut nums = nums.clone();\n\n nums.sort();\n\n let nums = nums.clone();\n\n\n\n n_sum(nums.as_slice(), target, 4, result, &mut store);\n\n store\n\n}\n\n\n\n\n\n\n", "file_path": "rust/leetcode/src/hash_table/four_sum.rs", "rank": 15, "score": 96061.61898531321 }, { "content": "struct Solution ();\n\nimpl Solution {\n\n pub fn reverse_up_to_down (matrix: &mut Vec<Vec<i32>>) {\n\n let rows = matrix.len();\n\n for row in 0..(rows / 2) {\n\n let another_row = rows - row - 1;\n\n let (left_rows, right_rows) = matrix.split_at_mut(row + 1);\n\n mem::swap(&mut left_rows[row], &mut right_rows[another_row - row - 1]);\n\n }\n\n }\n\n\n\n // first reverse up to down\n\n // second swap (x, y) to (y, x)\n\n pub fn rotate (matrix: &mut Vec<Vec<i32>>) {\n\n let n = matrix.len();\n\n Solution::reverse_up_to_down(matrix);\n\n for x in 0..n {\n\n for y in (x + 1)..n {\n\n let tmp = *(&matrix[x][y]);\n\n matrix[x][y] = matrix[y][x];\n", "file_path": "rust/leetcode/src/array/rotate_image.rs", "rank": 16, "score": 84996.74318375147 }, { "content": "struct Solution ();\n\n\n\nimpl Solution {\n\n\n\n pub fn next_permutation(nums: &mut Vec<i32>) {\n\n println!(\"{:?}\", nums);\n\n\n\n let nums_len = nums.len();\n\n\n\n let (mut left, mut right) = (0, 0);\n\n let mut i = nums_len - 1;\n\n while i >= 1 {\n\n if nums[i-1] < nums[i] {\n\n left = i-1;\n\n right = i;\n\n break;\n\n }\n\n i -= 1;\n\n }\n\n\n", "file_path": "rust/leetcode/src/array/next_permutation.rs", "rank": 17, "score": 84996.74318375147 }, { "content": "/// p39_combination_sum\n\nstruct Solution();\n\n\n\nimpl Solution {\n\n\n\n pub fn combination_sum (candidates: Vec<i32>, target: i32) -> Vec<Vec<i32>> {\n\n let mut result: Vec<Vec<i32>> = [].to_vec();\n\n let mut candidates = candidates;\n\n candidates.sort_unstable();\n\n let len = candidates.len();\n\n if len == 0 {\n\n return result;\n\n }\n\n\n\n let mut path: Vec<i32> = [].to_vec();\n\n Solution::combination_sum_helper(&candidates, target, 0, &mut path, &mut result);\n\n return result;\n\n }\n\n\n\n\n\n pub fn combination_sum_helper (candidates: &Vec<i32>, target: i32, begin: usize, path: &mut Vec<i32>, result: &mut Vec<Vec<i32>>) {\n", "file_path": "rust/leetcode/src/array/p39_combination_sum.rs", "rank": 18, "score": 83323.58042435971 }, { "content": "struct Solution();\n\n\n\nimpl Solution {\n\n pub fn combination_sum2 (candidates: Vec<i32>, target: i32) -> Vec<Vec<i32>> {\n\n let mut candidates = candidates;\n\n if candidates.len() == 0 {\n\n return vec![];\n\n }\n\n candidates.sort_unstable();\n\n let mut result: Vec<Vec<i32>> = vec![];\n\n let mut path: Vec<i32> = vec![];\n\n \n\n 
Solution::combination_sum2_helper(&candidates, target, 0, &mut path, &mut result);\n\n\n\n return result;\n\n }\n\n\n\n pub fn combination_sum2_helper (candidates: &Vec<i32>, target: i32, begin: usize, path: &mut Vec<i32>, result: &mut Vec<Vec<i32>>) {\n\n if target == 0 {\n\n result.push(path.clone());\n", "file_path": "rust/leetcode/src/array/p40_combination_sum2.rs", "rank": 19, "score": 83323.58042435971 }, { "content": "struct Solution ();\n\n\n\nimpl Solution {\n\n\n\n pub fn search_insert (nums: Vec<i32>, target: i32) -> i32 {\n\n let len = nums.len();\n\n // if len == 0 {\n\n // return 0;\n\n // }\n\n let mut left = 0;\n\n let mut right = len;\n\n while left < right {\n\n let mid = left + (right - left) / 2;\n\n if nums[mid] < target {\n\n left = mid + 1;\n\n } else {\n\n right = mid;\n\n }\n\n }\n\n return left as i32;\n", "file_path": "rust/leetcode/src/array/p35_search_insert.rs", "rank": 20, "score": 83323.58042435971 }, { "content": "struct Solution ();\n\n\n\nimpl Solution {\n\n pub fn valid_numbers (board: &Vec<Vec<char>>, row: usize, column: usize) -> Vec<char> {\n\n let mut numbers = vec!('1', '2', '3', '4', '5', '6', '7', '8', '9');\n\n \n\n // mark row\n\n for num in &board[row] {\n\n if *num == '.' {\n\n continue;\n\n }\n\n \n\n let number = num.to_digit(10).unwrap();\n\n numbers[(number-1) as usize] = '.';\n\n }\n\n \n\n // mark column\n\n for r in 0..9 {\n\n let n = board[r][column];\n\n if n == '.' {\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 22, "score": 83323.58042435971 }, { "content": "struct Solution ();\n\nimpl Solution {\n\n pub fn longest_valid_parentheses (s: String) -> i32 {\n\n let mut stack = vec!();\n\n let mut chars_iter = s.char_indices();\n\n\n\n let mut longest = 0;\n\n let mut longest_recorder = vec!();\n\n\n\n while let Some((index, ch)) = chars_iter.next() {\n\n if ch == '(' {\n\n stack.push((index, ch));\n\n continue;\n\n }\n\n\n\n // ch is ')', pop top '('\n\n if stack.len() > 0 {\n\n let paired_parent = stack.pop();\n\n longest_recorder.push(paired_parent.unwrap());\n\n continue;\n", "file_path": "rust/leetcode/src/string/longest_valid_parentheses.rs", "rank": 23, "score": 83323.58042435971 }, { "content": "// @lc code=start\n\nstruct Solution();\n\n\n", "file_path": "rust/leetcode/src/dp/p44_wildcard_matching.rs", "rank": 24, "score": 83323.58042435971 }, { "content": "struct Solution ();\n\n\n\nimpl Solution {\n\n\n\n #[allow(unused)]\n\n pub fn first_missing_positive (nums: Vec<i32>) -> i32 {\n\n let mut nums = nums;\n\n let len = nums.len();\n\n let mut result :usize = 1;\n\n // first checkout that 1 included in nums\n\n // all numbers less than 1 be 1\n\n let mut _1_contained = nums.contains(&1);\n\n if !_1_contained {\n\n return result as i32;\n\n }\n\n\n\n let mut nums: Vec<i32> = nums.into_iter().map(|num| {\n\n if num < 1 || num as usize > len {\n\n 1\n\n } else {\n", "file_path": "rust/leetcode/src/array/p41_first_missing_positive.rs", "rank": 25, "score": 81740.46094767001 }, { "content": "struct Solution ();\n\n\n\nimpl Solution {\n\n pub fn find_kth_largest (nums: Vec<i32>, k: i32) -> i32 {\n\n let mut nums = nums;\n\n let len = nums.len();\n\n build_max_heap(&mut nums, len);\n\n for i in 1..k {\n\n let heap_size: usize = len - i as usize;\n\n switch(&mut nums, 0, heap_size); \n\n heapify(&mut nums, heap_size, 0);\n\n }\n\n\n\n nums[0]\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\npub mod heap_tests {\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 26, "score": 
81740.46094767001 }, { "content": "struct Solution ();\n\n\n\nimpl Solution {\n\n pub fn longest_valid_parentheses(s: String) -> i32 {\n\n let mut dp: Vec<usize> = vec![0; s.chars().count()];\n\n let mut max: usize = 0;\n\n\n\n for i in 1..s.chars().count() {\n\n if s.get(i..i+1) == Some(\")\") {\n\n if s.get(i-1..i) == Some(\"(\") {\n\n // dp[i] = dp[i-2] + 2\n\n dp[i] = match i {\n\n idx if idx < 2 => 2,\n\n _ => dp[i-2] + 2\n\n }\n\n\n\n } else {\n\n // if dp[i-dp[i-1]-1] == \"(\", then dp[i] = dp[i-dp[i-1]-2] + dp[i-1] + 2\n\n if s.get((i-dp[i-1]-1)..(i-dp[i-1])) == Some(\"(\") {\n\n dp[i] = match i {\n", "file_path": "rust/leetcode/src/dp/p32_longest_valid_parentheses.rs", "rank": 27, "score": 81740.46094767001 }, { "content": "struct Solution ();\n\n\n\n\n\nimpl Solution {\n\n pub fn longest_palindrome (s: String) -> String {\n\n // manacher algorithm\n\n let mut result: String = String::from(\"\");\n\n\n\n if s.is_empty() {\n\n return result;\n\n }\n\n\n\n let iter = s.split(\"\");\n\n let mut vec: Vec<&str> = vec![];\n\n vec.extend(iter);\n\n let s = vec.join(\"#\");\n\n \n\n // d1[]\n\n let len = s.len();\n\n let mut mem: Vec<usize> = vec![];\n", "file_path": "rust/leetcode/src/string/p5_longest_palindromic_substrig.rs", "rank": 28, "score": 81740.46094767001 }, { "content": "struct Solution ();\n\nimpl Solution {\n\n pub fn trap (height: Vec<i32>) -> i32 {\n\n let mut result = 0;\n\n let len = height.len();\n\n if len == 0 {\n\n return result;\n\n }\n\n \n\n let (mut left, mut right) = (0usize, len - 1);\n\n let (mut left_max_height, mut right_max_height) = (0i32, 0i32);\n\n \n\n while left < right {\n\n if height[left] < height[right] {\n\n if height[left] > left_max_height {\n\n left_max_height = height[left];\n\n } else {\n\n result += left_max_height - height[left];\n\n }\n\n left += 1;\n", "file_path": "rust/leetcode/src/array/p42_trapping_rain_water.rs", "rank": 29, "score": 81740.46094767001 }, { "content": "struct Solution ();\n\n\n\nimpl Solution {\n\n\n\n pub fn search_range (nums: Vec<i32>, target: i32) -> Vec<i32> {\n\n let (mut first_pos, mut last_pos) = (-1, -1);\n\n let (mut left, mut right) = (0, nums.len() - 1);\n\n\n\n if nums.len() == 0 {\n\n return vec!(first_pos, last_pos);\n\n }\n\n\n\n // closures\n\n let get_mid_index = |left: usize, right: usize| left + ((right - left) / 2);\n\n\n\n // find first_position\n\n while left < right {\n\n let mid = get_mid_index(left, right);\n\n let mid_num = *nums.get(mid).unwrap();\n\n if target > mid_num {\n", "file_path": "rust/leetcode/src/array/find_first_last_position_in_sorted_array.rs", "rank": 30, "score": 78816.76189805841 }, { "content": "pub fn unique_paths_with_obstacles (obstacle_grid: Vec<Vec<i32>>) -> i32 {\n\n let mut state:[[i32; 100]; 100] = [[-1; 100]; 100];\n\n\n\n if let 1 = obstacle_grid[0][0] {\n\n state[0][0] = 0;\n\n } else {\n\n state[0][0] = 1;\n\n }\n\n\n\n let (len_rows, len_cols) = (obstacle_grid.len(), obstacle_grid[0].len());\n\n for row in 0..len_rows {\n\n for col in 0..len_cols {\n\n match obstacle_grid[row][col] {\n\n 1 => { // hava obstacle\n\n state[row][col] = 0;\n\n },\n\n _ => { // no obstacle\n\n match (row as i32, col as i32) {\n\n (0, 0) => continue,\n\n (0, _) => state[row][col] = state[row][col-1],\n", "file_path": "rust/leetcode/src/dp/unique_paths_with_obstacles.rs", "rank": 31, "score": 78546.81064108608 }, { "content": "pub fn unique_patghs_with_obstacles_v2 (obstacle_grid: Vec<Vec<i32>>) -> i32 {\n\n 0\n\n}\n\n\n\n#[cfg(test)]\n\nmod dp_tests {\n\n use super::*;\n\n\n\n 
#[test]\n\n fn test_unique_paths_with_obstacles () {\n\n let vec2d = vec!(vec!(0, 0, 0), vec!(0, 1, 0), vec!(0, 0, 0));\n\n assert_eq!(unique_paths_with_obstacles(vec2d), 2);\n\n }\n\n\n\n #[test]\n\n fn test_unique_paths_with_obstacles_v2 () {\n\n let vec2d = vec!(vec!(0, 0, 0), vec!(0, 1, 0), vec!(0, 0, 0));\n\n assert_eq!(unique_paths_with_obstacles(vec2d), 2);\n\n }\n\n}\n", "file_path": "rust/leetcode/src/dp/unique_paths_with_obstacles.rs", "rank": 32, "score": 77529.27155432443 }, { "content": "fn main() {\n\n println!(\n\n \"{}\",\n\n dp::maximum_subarray::max_sub_array(vec!(-2, 1, -3, 4, -1, 2, 1, -5, 4))\n\n );\n\n\n\n println!(\"==========================\");\n\n let mut result: Vec<i32> = Vec::new();\n\n // let nums = &[-2, -1, 0, 0, 1, 2]; //\n\n // let nums = &[1, 0, -1, 0, -2, 2];\n\n // let nums = &[-3, -2, -1, 0, 0, 1, 2, 3];\n\n let nums = &[0, 0];\n\n let mut nums = nums.clone();\n\n nums.sort();\n\n let nums = nums.clone();\n\n let mut store: Vec<Vec<i32>> = Vec::new();\n\n\n\n println!(\">>>>>>>>>>>>>>>>>> 0000000 >>>>>>>>>\");\n\n println!(\"sorted numbers: {:?}\", nums);\n\n hash_table::four_sum::n_sum_v2(&nums, 0, 4, result, &mut store);\n", "file_path": "rust/leetcode/src/main.rs", "rank": 33, "score": 55711.51280530699 }, { "content": "fn simplify_pattern (p: String) -> String {\n\n let mut result: String = \"\".to_owned();\n\n let mut p = p;\n\n while p != result {\n\n result = p.clone();\n\n p = p.replace(\"**\", \"*\");\n\n }\n\n result\n\n}\n\n\n\nimpl Solution {\n\n\n\n\n\n pub fn is_match(s: String, p: String) -> bool {\n\n // p 替换连续的*成一个\n\n let p = simplify_pattern(p);\n\n let mut dp: Vec<usize> = vec![0; p.len()];\n\n return dfs(&mut dp, 0, &s, &p);\n\n }\n\n}\n", "file_path": "rust/leetcode/src/dp/p44_wildcard_matching.rs", "rank": 34, "score": 45914.32048108068 }, { "content": "#ifndef ATOITEST_H\n\n#define ATOITEST_H\n\n\n\n#include \"../src/AtoI.hpp\"\n\n#include \"external/single_include/catch.hpp\"\n\n\n\nTEST_CASE(\"AtoITest\", \"[myAtoi]\") {\n\n string s(\"123\");\n\n REQUIRE(myAtoi(s) == 123);\n\n string s1(\"1\");\n\n REQUIRE(myAtoi(s1) == 1);\n\n string s_1(\"-1\");\n\n REQUIRE(myAtoi(s_1) == -1);\n\n string s2(\"+1\");\n\n REQUIRE(myAtoi(s2) == 1);\n\n string s3(\"-123\");\n\n REQUIRE(myAtoi(s3) == -123);\n\n string s4(\"0\");\n\n REQUIRE(myAtoi(s4) == 0);\n\n string s5(\"-00\");\n", "file_path": "cpp/test/AtoITest.cpp", "rank": 35, "score": 40904.65717412056 }, { "content": " REQUIRE(myAtoi(s5) == 0);\n\n string s6(\"00001\");\n\n REQUIRE(myAtoi(s6) == 1);\n\n\n\n string s7(\" 123\");\n\n REQUIRE(myAtoi(s7) == 123);\n\n string s8(\"2147483648\");\n\n REQUIRE(myAtoi(s8) == 2147483647);\n\n string s9(\"+-2\");\n\n REQUIRE(myAtoi(s9) == 0);\n\n string s10(\" -0012a42\");\n\n REQUIRE(myAtoi(s10) == -12);\n\n string s11(\"9223372036854775809\");\n\n REQUIRE(myAtoi(s11) == 2147483647);\n\n string s12(\"-2147483648\");\n\n REQUIRE(myAtoi(s12) == -2147483648);\n\n}\n\n\n\n#endif /* MYATOITEST_H */\n", "file_path": "cpp/test/AtoITest.cpp", "rank": 36, "score": 40902.11616548286 }, { "content": "#define CATCH_CONFIG_MAIN\n\n#include <iostream>\n\n#include \"external/single_include/catch.hpp\"\n\n#include \"external/include/reporters/catch_reporter_teamcity.hpp\"\n\n\n\n// Some example tag aliases\n\nCATCH_REGISTER_TAG_ALIAS( \"[@nhf]\", \"[failing]~[.]\" )\n\nCATCH_REGISTER_TAG_ALIAS( \"[@tricky]\", \"[tricky]~[.]\" )\n\n\n\n\n\n#ifdef __clang__\n\n# pragma clang diagnostic ignored \"-Wpadded\"\n\n# pragma clang diagnostic ignored 
\"-Wweak-vtables\"\n\n# pragma clang diagnostic ignored \"-Wc++98-compat\"\n\n#endif\n", "file_path": "cpp/test/testMain.cpp", "rank": 37, "score": 40902.11616548286 }, { "content": "#include \"../src/RegularMatching.hpp\"\n\n#include \"external/single_include/catch.hpp\"\n\n\n\n\n\nTEST_CASE(\"RegularMatching\",\"[isMatch]\") {\n\n REQUIRE(isMatch(\"123\", \"123\") == true);\n\n //REQUIRE(isMatch(\"123\", \"*\") == true); p[0] can't be '*'\n\n REQUIRE(isMatch(\"\", \"\") == true);\n\n //REQUIRE(isMatch(\"abcd\", \"d*\") == true);\n\n REQUIRE(isMatch(\"abcd\", \"d*\") == false);\n\n}\n", "file_path": "cpp/test/RegularMatchingTest.cpp", "rank": 38, "score": 40140.38633755245 }, { "content": "#include \"../src/IsPalindromeInteger.hpp\"\n\n#include \"external/single_include/catch.hpp\"\n\n\n\nTEST_CASE(\"IsPalindromeInteger\", \"[isPalindromeInteger]\") {\n\n REQUIRE(isPalindromeInteger(11) == true);\n\n REQUIRE(isPalindromeInteger(1) == true);\n\n REQUIRE(isPalindromeInteger(12321) == true);\n\n REQUIRE(isPalindromeInteger(131) == true);\n\n REQUIRE(isPalindromeInteger(-11) == false);\n\n REQUIRE(isPalindromeInteger(12) == false);\n\n REQUIRE(isPalindromeInteger(-2147447412) == false);\n\n}\n", "file_path": "cpp/test/PalindromeIntegerTest.cpp", "rank": 39, "score": 40139.883658902305 }, { "content": "#include \"../src/ContainMostWater.hpp\"\n\n#include \"external/single_include/catch.hpp\"\n\n\n\nTEST_CASE(\"ContainMostWater\", \"[getMostWater]\") {\n\n std::vector<int> v(3, 0);\n\n v.push_back(1);\n\n v.push_back(3);\n\n v.push_back(2);\n\n REQUIRE(getMostWater(v) == 2);\n\n}\n", "file_path": "cpp/test/ContainerMostWaterTest.cpp", "rank": 40, "score": 40136.78708598364 }, { "content": "#include \"../src/ReverseInteger.hpp\"\n\n\n\n#include \"external/single_include/catch.hpp\"\n\n\n\n\n\nTEST_CASE(\"ReverseInteger\", \"[reverseInteger]\") {\n\n REQUIRE(reverseInteger(0) == 0);\n\n REQUIRE(reverseInteger(-1) == -1);\n\n REQUIRE(reverseInteger(1) == 1);\n\n REQUIRE(reverseInteger(10) == 1);\n\n REQUIRE(reverseInteger(123) == 321);\n\n REQUIRE(reverseInteger(-123) == -321);\n\n REQUIRE(reverseInteger(1534236469) == 0);\n\n}\n", "file_path": "cpp/test/ReverseIntegerTest.cpp", "rank": 41, "score": 40136.5286338732 }, { "content": "#include \"../src/LongestPalindromeSubString.hpp\"\n\n\n\n#include \"external/single_include/catch.hpp\"\n\n\n\n\n\nTEST_CASE(\"LongestPalindromeSubString\", \"[getLongestPalindromeSubString]\") {\n\n LongestPalindromeSubString obj;\n\n REQUIRE(obj.getLongestPalindromeSubString(\"hello\") == \"ll\");\n\n REQUIRE(obj.getLongestPalindromeSubString(\"world\") == \"w\");\n\n REQUIRE(obj.getLongestPalindromeSubString(\"abccba\") == \"abccba\");\n\n REQUIRE(obj.getLongestPalindromeSubString(\"abccbaabccb\") == \"bccbaabccb\");\n\n REQUIRE(obj.getLongestPalindromeSubString(\"a\") == \"a\");\n\n REQUIRE(obj.getLongestPalindromeSubString(\"aaaaa\") == \"aaaaa\");\n\n REQUIRE(obj.getLongestPalindromeSubString(\"abcda\") == \"a\");\n\n REQUIRE(obj.getLongestPalindromeSubString(\"aaabaaaa\") == \"aaabaaa\");\n\n 
REQUIRE(obj.getLongestPalindromeSubString(\"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\") == \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\");\n\n}\n", "file_path": "cpp/test/LongestPalindromeSubStringTest.cpp", "rank": 42, "score": 38683.808739059365 }, { "content": "pub struct Solution ();\n", "file_path": "rust/leetcode/src/solution.rs", "rank": 43, "score": 33066.94708897103 }, { "content": "\n\npub mod next_permutation;\n\n\n\npub mod search_in_rotated_sorted_array;\n\n\n\npub mod find_first_last_position_in_sorted_array;\n\n\n\npub mod rotate_image;\n\n\n\npub mod p35_search_insert;\n\n\n\npub mod p39_combination_sum;\n\n\n\npub mod p40_combination_sum2;\n\n\n\npub mod p41_first_missing_positive;\n\n\n\npub mod p42_trapping_rain_water;\n\n\n\n\n", "file_path": "rust/leetcode/src/array/mod.rs", "rank": 44, "score": 32343.161931129853 }, { "content": "pub mod maximum_subarray;\n\n\n\npub mod unique_paths;\n\n\n\npub mod unique_paths_with_obstacles;\n\n\n\npub mod p32_longest_valid_parentheses;\n\n\n\npub mod p44_wildcard_matching;\n\n\n\n\n", "file_path": "rust/leetcode/src/dp/mod.rs", "rank": 45, "score": 32343.122459353428 }, { "content": "pub mod p307_range_sum_query;\n\npub mod p218_the_skyline_problem;\n", "file_path": "rust/leetcode/src/tree/mod.rs", "rank": 46, "score": 32342.43253705471 }, { "content": "pub mod longest_valid_parentheses;\n\n\n\npub mod p5_longest_palindromic_substrig;\n\n\n", "file_path": "rust/leetcode/src/string/mod.rs", "rank": 47, "score": 32342.43253705471 }, { "content": "\n\n\n\npub mod p215_largest_element_in_an_array;\n", "file_path": "rust/leetcode/src/heap/mod.rs", "rank": 
48, "score": 32341.604487387864 }, { "content": "pub mod p137_single_number_II;\n", "file_path": "rust/leetcode/src/bit_operations/mod.rs", "rank": 49, "score": 31398.464115245504 }, { "content": "pub mod four_sum;\n\npub mod sudoku_solver;\n\npub mod valid_sudoku;\n", "file_path": "rust/leetcode/src/hash_table/mod.rs", "rank": 50, "score": 31396.55161113355 }, { "content": "class NumArray;\n\n\n\nint main() {\n\n std::vector<int> nums = {1, 2, 3, 4, 5, 6, 7, 8};\n\n NumArray arr(nums);\n\n arr.printSegmentTree();\n\n std::cout << arr.sumRange(1, 6) << std::endl; // 3 + 5 + 7 + 8 + 9 + 10 = 42\n\n assert(arr.sumRange(1, 6) == 27);\n\n arr.update(1, 3);\n\n arr.printSegmentTree();\n\n assert(arr.sumRange(1, 6) == 28);\n\n\n\n\n\n // nums: [1, 3, 5], sumRange: 0-2\n\n std::vector<int> nums2 = {1, 3, 5};\n\n NumArray arr2(nums2);\n\n arr2.printSegmentTree();\n\n\n\n assert(arr2.sumRange(0, 2) == 9);\n\n\n\n}\n", "file_path": "cpp/src/tree/main.cpp", "rank": 51, "score": 31272.358840387802 }, { "content": "class NumArray {\n\npublic:\n\n NumArray (std::vector<int>& nums): size(nums.size()) {\n\n this->segmentTree.resize(2 * size); // 等比求和公式\n\n this->buildSegmentTree(nums);\n\n }\n\n\n\n void update (int i, int val) {\n\n for (this->segmentTree[i += size] = val; i > 1; i >>= 1) {\n\n segmentTree[i >> 1] = segmentTree[i] + segmentTree[i^1];\n\n }\n\n return ;\n\n }\n\n\n\n int sumRange (int i, int j) {\n\n int sum = 0;\n\n for (i += size, j+= size; i < j; i >>= 1, j >>= 1) {\n\n if (i & 1) { // 奇数\n\n sum += segmentTree[i++];\n\n }\n", "file_path": "cpp/src/tree/range_sum_query.hpp", "rank": 52, "score": 29542.861429736786 }, { "content": "mod solution;\n\nuse solution::Solution;\n\n\n\nimpl Solution {\n\n pub fn single_number (nums: Vec<i32>) -> i32 {\n\n let mut ans = 0;\n\n for i in 0..32 {\n\n let mut count = 0; \n\n for j in &nums {\n\n if j & (1 << i) == (1 << i) {\n\n count = count + 1;\n\n }\n\n }\n\n if count % 3 == 0 {\n\n continue;\n\n }\n\n ans = ans | (1 << i);\n\n }\n\n ans\n\n }\n", "file_path": "rust/leetcode/src/bit_operations/p137_single_number_II.rs", "rank": 53, "score": 28984.835732425407 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod bit_operations_tests {\n\n use super::Solution;\n\n\n\n #[test]\n\n fn test_single_number () {\n\n assert_eq!(1, Solution::single_number(vec![1]));\n\n assert_eq!(3, Solution::single_number(vec![2,2,3,2]));\n\n assert_eq!(99, Solution::single_number(vec![0,1,0,1,0,1,99]));\n\n }\n\n}\n", "file_path": "rust/leetcode/src/bit_operations/p137_single_number_II.rs", "rank": 54, "score": 28978.34274471453 }, { "content": "//! https://leetcode-cn.com/problems/single-number-ii/\n\n//! \n\n//! \n\n//! Given a non-empty array of integers, every element appears three times except for one, which appears exactly once. Find that single one.\n\n\n\n//! Note:\n\n\n\n//! Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?\n\n\n\n//! Example 1:\n\n\n\n//! Input: [2,2,3,2]\n\n//! Output: 3\n\n//! Example 2:\n\n\n\n//! Input: [0,1,0,1,0,1,99]\n\n//! Output: 99\n\n//! 
\n\n\n\n#[path = \"../solution.rs\"] \n", "file_path": "rust/leetcode/src/bit_operations/p137_single_number_II.rs", "rank": 55, "score": 28966.2054651153 }, { "content": " const char features[] = {\"\\n\"\n\n\"C_FEATURE:\"\n\n#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 304\n\n\"1\"\n\n#else\n\n\"0\"\n\n#endif\n\n\"c_function_prototypes\\n\"\n\n\"C_FEATURE:\"\n\n#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 304 && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L\n\n\"1\"\n\n#else\n\n\"0\"\n\n#endif\n\n\"c_restrict\\n\"\n\n\"C_FEATURE:\"\n\n#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 406 && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201000L\n\n\"1\"\n\n#else\n\n\"0\"\n\n#endif\n\n\"c_static_assert\\n\"\n\n\"C_FEATURE:\"\n\n#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 304 && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L\n\n\"1\"\n\n#else\n\n\"0\"\n\n#endif\n\n\"c_variadic_macros\\n\"\n\n\n", "file_path": "cpp/CMake/CMakeFiles/feature_tests.c", "rank": 56, "score": 26625.44580330602 }, { "content": "int main(int argc, char** argv) { (void)argv; return features[argc]; }\n", "file_path": "cpp/CMake/CMakeFiles/feature_tests.c", "rank": 57, "score": 26625.44580330602 }, { "content": " numbers.retain(|&c| c != '.');\n\n numbers\n\n }\n\n \n\n pub fn solve_sudoku_helper (board: &mut Vec<Vec<char>>, row: usize, col: usize, solution: &mut Vec<Vec<char>>) {\n\n\n\n let (mut next_row, mut next_col) = (row, col + 1);\n\n if next_col >= 9 {\n\n next_col = 0;\n\n next_row += 1;\n\n }\n\n\n\n if row > 8 || (row == 8 && col == 8 && board[row][col] != '.') { // reach the end\n\n Solution::print_sudoku(board);\n\n for r in 0..9 {\n\n for c in 0..9 {\n\n solution[r][c] = board[r][c];\n\n }\n\n }\n\n return\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 60, "score": 24.734142358159463 }, { "content": " board[row][col] = '.'; // restore \n\n }\n\n \n\n } else {\n\n Solution::solve_sudoku_helper(board, next_row, next_col, solution);\n\n }\n\n }\n\n\n\n \n\n\n\n pub fn solve_sudoku(board: &mut Vec<Vec<char>>) {\n\n let (mut row, mut col) = (100, 100);\n\n // find the first dot (empty cell)\n\n for x in 0..9 {\n\n for y in 0..9 {\n\n if board[x][y] == '.' {\n\n row = x;\n\n col = y;\n\n break;\n\n }\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 63, "score": 19.49244946109994 }, { "content": " println!(\"=============================\\n\");\n\n for row in board {\n\n println!(\"{:?}\\n\", row);\n\n }\n\n println!(\"\");\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod hash_table {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_solve_sudoku () {\n\n let mut board = vec!(\n\n vec!('5', '3', '.', '.', '7', '.', '.', '.', '.'),\n\n vec!('6', '.', '.', '1', '9', '5', '.', '.', '.'),\n\n vec!('.', '9', '8', '.', '.', '.', '.', '6', '.'),\n\n vec!('8', '.', '.', '.', '6', '.', '.', '.', '3'),\n\n vec!('4', '.', '.', '8', '.', '3', '.', '.', '1'),\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 64, "score": 19.400898513903638 }, { "content": " continue;\n\n }\n\n let n = n.to_digit(10).unwrap();\n\n numbers[(n-1) as usize] = '.'\n\n }\n\n\n\n // 3 x 3 round\n\n let x: usize = (row as f32 / 3f32).floor() as usize;\n\n let y: usize = (column as f32 / 3f32).floor() as usize;\n\n let (cell_index_x, cell_index_y) = (x * 3, y * 3);\n\n for i in 0..3 {\n\n for j in 0..3 {\n\n if board[cell_index_x + i][cell_index_y + j] == '.' 
{\n\n continue;\n\n }\n\n let number = board[cell_index_x + i][cell_index_y + j].to_digit(10).unwrap();\n\n numbers[(number-1) as usize] = '.';\n\n }\n\n }\n\n\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 65, "score": 18.876875180331478 }, { "content": " }\n\n \n\n if row != 100 { // break outer for loop\n\n break;\n\n }\n\n }\n\n\n\n if row == 100 { // sudoku is finished already\n\n Solution::print_sudoku(board);\n\n return;\n\n }\n\n\n\n let mut solution = board.clone();\n\n Solution::solve_sudoku_helper(board, row, col, &mut solution);\n\n \n\n *board = solution;\n\n }\n\n \n\n\n\n pub fn print_sudoku(board: &Vec<Vec<char>>) {\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 66, "score": 18.09938439098051 }, { "content": " let mut step = 1;\n\n for i in right..nums_len {\n\n let (l, r) = (i, nums_len - step);\n\n if l >= r {\n\n return;\n\n }\n\n\n\n let tmp = nums[l];\n\n nums[l] = nums[r];\n\n nums[r] = tmp;\n\n step += 1;\n\n }\n\n\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod array_test {\n\n use super::*;\n", "file_path": "rust/leetcode/src/array/next_permutation.rs", "rank": 68, "score": 17.296667606299398 }, { "content": " matrix[y][x] = tmp;\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod array_tests {\n\n use super::*;\n\n\n\n #[test]\n\n pub fn test_rotate () {\n\n let mut matrix = vec!(\n\n vec!(1, 2, 3),\n\n vec!(4, 5, 6),\n\n vec!(7, 8, 9)\n\n );\n\n\n\n let expected = vec!(\n\n vec!(7, 4, 1),\n\n vec!(8, 5, 2),\n\n vec!(9, 6, 3)\n\n );\n\n Solution::rotate(&mut matrix);\n\n assert_eq!(matrix, expected);\n\n }\n\n}\n", "file_path": "rust/leetcode/src/array/rotate_image.rs", "rank": 69, "score": 17.167296436469265 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod array_test {\n\n use super::*;\n\n\n\n #[test]\n\n pub fn test_search_in_rotated_sorted_array() {\n\n let nums = vec![4, 5, 6, 7, 0, 1, 2];\n\n let target = 0;\n\n assert_eq!(search(nums, target), 4);\n\n \n\n let nums = vec![4, 5, 6, 7, 0, 1, 2];\n\n assert_eq!(search(nums, 3), -1);\n\n }\n\n}\n", "file_path": "rust/leetcode/src/array/search_in_rotated_sorted_array.rs", "rank": 70, "score": 16.55183088995696 }, { "content": " }\n\n\n\n if board[row][col] == '.' 
{\n\n let valid_chars = Solution::valid_numbers(board, row, col);\n\n \n\n if row == 8 && col == 8 {\n\n println!(\"valid_chars {:?}\", valid_chars);\n\n }\n\n \n\n let no_valid_chars = valid_chars.len() == 0;\n\n\n\n if no_valid_chars { // 回溯\n\n return;\n\n\n\n } else {\n\n \n\n for c in valid_chars {\n\n board[row][col] = c;\n\n Solution::solve_sudoku_helper(board, next_row, next_col, solution);\n\n }\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 71, "score": 16.37547565801175 }, { "content": " }\n\n if *nums.get(right).unwrap() == target {\n\n last_pos = right as i32;\n\n }\n\n\n\n vec!(first_pos, last_pos)\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod array_test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_search_range () {\n\n let nums = vec!(5, 7, 7, 8, 8, 10);\n\n assert_eq!(Solution::search_range(nums, 8), vec!(3, 4));\n\n\n\n let nums = vec!(5, 7, 7, 8, 8, 10);\n\n assert_eq!(Solution::search_range(nums, 6), vec!(-1, -1));\n\n }\n\n}\n", "file_path": "rust/leetcode/src/array/find_first_last_position_in_sorted_array.rs", "rank": 72, "score": 15.310177856049147 }, { "content": " }\n\n \n\n i+=1;\n\n }\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod hash_table_tests {\n\n use super::*;\n\n \n\n #[test]\n\n fn test_four_sum () {\n\n let mut result: Vec<i32> = Vec::new();\n\n }\n\n}\n", "file_path": "rust/leetcode/src/hash_table/four_sum.rs", "rank": 73, "score": 14.473372498618783 }, { "content": " vec!('9','7','5','8','6','3','1','2','4'),\n\n vec!('8','3','2','4','9','1','7','5','6'),\n\n vec!('6','4','1','2','7','5','9','8','3')\n\n );\n\n Solution::solve_sudoku(& mut board2);\n\n assert_eq!(board2, expected_board2);\n\n }\n\n\n\n #[test]\n\n fn test_valid_number () {\n\n let mut board = vec!(\n\n vec!('5', '3', '.', '.', '7', '.', '.', '.', '.'),\n\n vec!('6', '.', '.', '1', '9', '5', '.', '.', '.'),\n\n vec!('.', '9', '8', '.', '.', '.', '.', '6', '.'),\n\n vec!('8', '.', '.', '.', '6', '.', '.', '.', '3'),\n\n vec!('4', '.', '.', '8', '.', '3', '.', '.', '1'),\n\n vec!('7', '.', '.', '.', '2', '.', '.', '.', '6'),\n\n vec!('.', '6', '.', '.', '.', '.', '2', '8', '.'),\n\n vec!('.', '.', '.', '4', '1', '9', '.', '.', '5'),\n\n vec!('.', '.', '.', '.', '8', '.', '.', '7', '9')\n\n );\n\n\n\n assert_eq!(Solution::valid_numbers(&board, 0, 2), vec!('1', '2', '4'));\n\n }\n\n}\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 74, "score": 14.252628006682166 }, { "content": "mod string_tests {\n\n use super::*;\n\n\n\n #[test]\n\n pub fn test_longest_valid_parentheses () {\n\n let s1 = String::from(\"(()\");\n\n assert_eq!(Solution::longest_valid_parentheses(s1), 2);\n\n\n\n let s2 = String::from(\")()())\");\n\n assert_eq!(Solution::longest_valid_parentheses(s2), 4);\n\n\n\n let s3 = String::from(\"()(()\");\n\n assert_eq!(Solution::longest_valid_parentheses(s3), 2);\n\n\n\n let s4 = String::from(\"()(()(()\");\n\n assert_eq!(Solution::longest_valid_parentheses(s4), 2);\n\n }\n\n}\n", "file_path": "rust/leetcode/src/string/longest_valid_parentheses.rs", "rank": 75, "score": 13.422570491344654 }, { "content": "\n\n#[cfg(test)]\n\nmod test_array {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_combination_sum () {\n\n let res = Solution::combination_sum(vec![2,3,6,7], 7);\n\n println!(\"{:?}\", res);\n\n assert_eq!(res, vec![vec![2, 2, 3], vec![7]]);\n\n }\n\n}\n\n\n", "file_path": "rust/leetcode/src/array/p39_combination_sum.rs", "rank": 76, "score": 13.343526862048641 }, { "content": " }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod array_test {\n\n 
use super::*;\n\n\n\n #[test]\n\n fn test_search_insert () {\n\n assert_eq!(Solution::search_insert(vec![1, 3, 5, 6], 5), 2);\n\n assert_eq!(Solution::search_insert(vec![1, 3, 5, 6], 2), 1);\n\n assert_eq!(Solution::search_insert(vec![], 0), 0); // or -1\n\n assert_eq!(Solution::search_insert(vec![1, 3, 5, 6], 7), 4);\n\n }\n\n}\n", "file_path": "rust/leetcode/src/array/p35_search_insert.rs", "rank": 77, "score": 13.292437262750749 }, { "content": "\n\n#[cfg(test)]\n\nmod range_sum_query_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_num_array () {\n\n let arr = NumArray::new(vec![1, 2, 3, 4, 5]);\n\n\n\n let ref_arr = &arr;\n\n dbg!(ref_arr);\n\n\n\n let sum = arr.sum_range(0, 4);\n\n dbg!(sum);\n\n\n\n assert_eq!(10, sum);\n\n\n\n let mut arr = arr;\n\n arr.update(0, 2);\n\n let sum = arr.sum_range(0, 4);\n\n assert_eq!(11, sum);\n\n }\n\n}\n", "file_path": "rust/leetcode/src/tree/p307_range_sum_query.rs", "rank": 78, "score": 13.201657999616142 }, { "content": "#[derive(Debug, Clone)]\n\npub(crate) struct NumArray {\n\n tree: Vec<i32>\n\n}\n\n\n\npub(crate) fn left_child (i: usize) -> usize {\n\n i << 1\n\n}\n\n\n\npub(crate) fn right_child (i: usize) -> usize {\n\n (i << 1) | 0b1\n\n}\n\n\n\npub(crate) fn parent (i: usize) -> usize {\n\n i >> 1 as usize\n\n}\n\n\n\n\n\nimpl NumArray {\n\n fn new (nums: Vec<i32>) -> Self {\n", "file_path": "rust/leetcode/src/tree/p307_range_sum_query.rs", "rank": 79, "score": 13.11630471192304 }, { "content": "\n\n } else {\n\n if height[right] > right_max_height {\n\n right_max_height = height[right];\n\n } else {\n\n result += right_max_height - height[right];\n\n }\n\n right -= 1;\n\n }\n\n }\n\n\n\n result\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_array {\n\n use super::Solution;\n\n\n\n #[test]\n", "file_path": "rust/leetcode/src/array/p42_trapping_rain_water.rs", "rank": 80, "score": 12.582675065108235 }, { "content": " if index == len - 1 {\n\n result = len + 1;\n\n }\n\n }\n\n\n\n result as i32\n\n }\n\n\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_array {\n\n use super::Solution;\n\n\n\n #[test]\n\n fn test_first_missing_positive () {\n\n \n\n assert_eq!(Solution::first_missing_positive(vec![-1, -1, 2, 0, 4, -2, 1]), 3);\n\n assert_eq!(Solution::first_missing_positive(vec![]), 1);\n\n assert_eq!(Solution::first_missing_positive(vec![1, 2, 3]), 4);\n\n assert_eq!(Solution::first_missing_positive(vec![-1, -2, -3]), 1);\n\n }\n\n}\n\n\n\n\n\n\n\n\n\n\n", "file_path": "rust/leetcode/src/array/p41_first_missing_positive.rs", "rank": 81, "score": 12.199122024278537 }, { "content": "\n\n let _r: String = s.get(l..r+1).unwrap().to_owned().split(\"#\").collect();\n\n if _r.len() > result.len() {\n\n result = _r\n\n }\n\n }\n\n }\n\n println!(\"{:?}\", result);\n\n result\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_dp {\n\n use super::Solution;\n\n\n\n #[test]\n\n fn test_longest_palindrome () {\n\n assert_eq!(\"bab\", Solution::longest_palindrome(\"babad\".to_owned()));\n\n assert_eq!(\"bb\", Solution::longest_palindrome(\"cbbd\".to_owned()));\n\n assert_eq!(\"a\", Solution::longest_palindrome(\"a\".to_owned()));\n\n }\n\n}\n", "file_path": "rust/leetcode/src/string/p5_longest_palindromic_substrig.rs", "rank": 82, "score": 11.975385599438724 }, { "content": "// @lc code=end\n\n\n\n\n\n#[cfg(test)]\n\nmod dp_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_is_match () {\n\n\n\n assert_eq!(true, Solution::is_match(\"\".to_owned(), \"\".to_owned()));\n\n assert_eq!(true, Solution::is_match(\"\".to_owned(), \"*\".to_owned()));\n\n assert_eq!(false, 
Solution::is_match(\"\".to_owned(), \"?\".to_owned()));\n\n assert_eq!(false, Solution::is_match(\"\".to_owned(), \"*a\".to_owned()));\n\n assert_eq!(false, Solution::is_match(\"\".to_owned(), \"?a\".to_owned()));\n\n assert_eq!(false, Solution::is_match(\"a\".to_owned(), \"?a\".to_owned()));\n\n assert_eq!(true, Solution::is_match(\"a\".to_owned(), \"*a\".to_owned()));\n\n assert_eq!(false, Solution::is_match(\"aa\".to_owned(), \"a\".to_owned()));\n\n assert_eq!(true, Solution::is_match(\"aa\".to_owned(), \"a*\".to_owned()));\n\n assert_eq!(true, Solution::is_match(\"aa\".to_owned(), \"aa\".to_owned()));\n\n assert_eq!(true, Solution::is_match(\"adceb\".to_owned(), \"*a*b\".to_owned()));\n\n assert_eq!(false, Solution::is_match(\"acdcb\".to_owned(), \"a*c?b\".to_owned()));\n\n assert_eq!(false, Solution::is_match(\"aaabbbaabaaaaababaabaaabbabbbbbbbbaabababbabbbaaaaba\".to_owned(), \"a*******b\".to_owned()));\n\n assert_eq!(false, Solution::is_match(\"babbbbaabababaabbababaababaabbaabababbaaababbababaaaaaabbabaaaabababbabbababbbaaaababbbabbbbbbbbbbaabbb\".to_owned(), \"b**bb**a**bba*b**a*bbb**aba***babbb*aa****aabb*bbb***a\".to_owned()));\n\n }\n\n}\n", "file_path": "rust/leetcode/src/dp/p44_wildcard_matching.rs", "rank": 83, "score": 11.729863950564935 }, { "content": " Solution::combination_sum2_helper(candidates, target - value, index + 1, path, result);\n\n path.pop();\n\n \n\n index += 1;\n\n }\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod test_array {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_combination_sum2 () {\n\n let candidates = vec![10,1,2,7,6,1,5];\n\n // sorted: 1 1 2 5 6 7 10\n\n let target = 8;\n\n let result = Solution::combination_sum2(candidates, target);\n\n assert_eq!(result, vec![ vec![1,1,6], vec![1,2,5], vec![1,7], vec![2,6] ]);\n\n }\n\n}\n", "file_path": "rust/leetcode/src/array/p40_combination_sum2.rs", "rank": 84, "score": 11.7044502738468 }, { "content": " while left < right && nums[left] == nums[left-1] {\n\n left += 1;\n\n }\n\n } else if sum < target {\n\n left += 1;\n\n } else {\n\n right -= 1;\n\n }\n\n }\n\n } else {\n\n let i32_len_nums = len_nums as i32;\n\n let range_index = i32_len_nums - n;\n\n let mut i = 0;\n\n while i <= range_index {\n\n let i32_i = i as usize;\n\n if i == 0 || (i > 0 && nums[i32_i-1] != nums[i32_i]) {\n\n let number = nums[i32_i];\n\n let mut result = result.clone();\n\n result.push(number);\n\n n_sum_v2(&nums[(i32_i+1)..len_nums], target-number, n-1, result, store);\n", "file_path": "rust/leetcode/src/hash_table/four_sum.rs", "rank": 85, "score": 11.363419596302657 }, { "content": "#[path = \"../solution.rs\"]\n\nmod solution;\n\n\n\nuse solution::Solution;\n\nuse std::cmp::Ordering;\n\n\n", "file_path": "rust/leetcode/src/tree/p218_the_skyline_problem.rs", "rank": 86, "score": 10.734811277949873 }, { "content": "mod dp_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_max_sub_array() {\n\n assert_eq!(6, max_sub_array(vec!(-2, 1, -3, 4, -1, 2, 1, -5, 4)));\n\n }\n\n\n\n #[test]\n\n fn test_max_sub_array_v2() {\n\n assert_eq!(6, max_sub_array_v2(vec!(-2, 1, -3, 4, -1, 2, 1, -5, 4)));\n\n }\n\n}\n", "file_path": "rust/leetcode/src/dp/maximum_subarray.rs", "rank": 87, "score": 10.541236422837514 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn test_find_kth_largest () {\n\n assert_eq!(10, Solution::find_kth_largest(vec![4, 1, 7, 2, 10, 20, 0, 2], 2));\n\n assert_eq!(5, Solution::find_kth_largest(vec![3,2,1,5,6,4], 2));\n\n assert_eq!(4, Solution::find_kth_largest(vec![3,2,3,1,2,4,5,5,6], 4));\n\n assert_eq!(3, 
Solution::find_kth_largest(vec![3], 1));\n\n }\n\n\n\n}\n", "file_path": "rust/leetcode/src/heap/p215_largest_element_in_an_array.rs", "rank": 88, "score": 10.519514860419056 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod dp_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_longest_valid_parentheses () {\n\n assert_eq!(Solution::longest_valid_parentheses(\"\".to_owned()), 0);\n\n assert_eq!(Solution::longest_valid_parentheses(\")\".to_owned()), 0);\n\n assert_eq!(Solution::longest_valid_parentheses(\"(\".to_owned()), 0);\n\n assert_eq!(Solution::longest_valid_parentheses(\")()())\".to_owned()), 4);\n\n assert_eq!(Solution::longest_valid_parentheses(\"(()\".to_owned()), 2);\n\n }\n\n\n\n\n\n #[test]\n\n fn test_longest_valid_parentheses_v2 () {\n\n assert_eq!(Solution::longest_valid_parentheses_v2(\"\".to_owned()), 0);\n\n assert_eq!(Solution::longest_valid_parentheses_v2(\")\".to_owned()), 0);\n\n assert_eq!(Solution::longest_valid_parentheses_v2(\"(\".to_owned()), 0);\n\n assert_eq!(Solution::longest_valid_parentheses_v2(\")()())\".to_owned()), 4);\n\n assert_eq!(Solution::longest_valid_parentheses_v2(\"(()\".to_owned()), 2);\n\n assert_eq!(Solution::longest_valid_parentheses_v2(\"()(()\".to_owned()), 2);\n\n }\n\n}\n\n\n", "file_path": "rust/leetcode/src/dp/p32_longest_valid_parentheses.rs", "rank": 89, "score": 10.0047028577644 }, { "content": "\n\n #[test]\n\n fn next_permutation_test () {\n\n let mut vec = vec!(1, 2, 3);\n\n Solution::next_permutation(&mut vec);\n\n assert_eq!(vec!(1, 3, 2), vec);\n\n\n\n let mut vec2 = vec!(3, 2, 1);\n\n Solution::next_permutation(&mut vec2);\n\n assert_eq!(vec!(1, 2, 3), vec2);\n\n\n\n let mut vec3 = vec!(1, 1, 5);\n\n Solution::next_permutation(&mut vec3);\n\n assert_eq!(vec!(1, 5, 1), vec3);\n\n\n\n let mut vec4 = vec!(1, 3, 2);\n\n Solution::next_permutation(&mut vec4);\n\n assert_eq!(vec!(2, 1, 3), vec4);\n\n\n\n let mut vec5 = vec!(2, 3, 1, 3, 3);\n\n Solution::next_permutation(&mut vec5);\n\n assert_eq!(vec!(2, 3, 3, 1, 3), vec5);\n\n\n\n }\n\n}\n", "file_path": "rust/leetcode/src/array/next_permutation.rs", "rank": 90, "score": 9.503864773889449 }, { "content": "/**\n\nMaximum Subarray\n\n\n\nGiven an integer array nums, find the contiguous subarray (containing at least one number) which has the largest sum and return its sum.\n\n\n\nExample:\n\n\n\nInput: [-2,1,-3,4,-1,2,1,-5,4],\n\nOutput: 6\n\nExplanation: [4,-1,2,1] has the largest sum = 6.\n\nFollow up:\n\n\n\nIf you have figured out the O(n) solution, try coding another solution using the divide and conquer approach, which is more subtle.\n\n**/\n\n\n", "file_path": "rust/leetcode/src/dp/maximum_subarray.rs", "rank": 91, "score": 9.075701476856006 }, { "content": " if position.is_some() {\n\n heights.swap_remove(position.unwrap());\n\n }\n\n }\n\n\n\n if let Some(&max_height) = heights.iter().max() {\n\n // a new point\n\n if max_height != last_record {\n\n res.push(vec![x, max_height]);\n\n last_record = max_height;\n\n }\n\n }\n\n }\n\n res\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod get_skyline_test {\n\n use super::*;\n", "file_path": "rust/leetcode/src/tree/p218_the_skyline_problem.rs", "rank": 92, "score": 8.944304866911018 }, { "content": " (_, 0) => state[row][col] = state[row-1][col],\n\n (_, _) => state[row][col] = state[row-1][col] + state[row][col-1]\n\n }\n\n }\n\n } \n\n }\n\n }\n\n\n\n state[len_rows-1][len_cols-1]\n\n}\n\n\n", "file_path": "rust/leetcode/src/dp/unique_paths_with_obstacles.rs", "rank": 93, "score": 8.725306462380175 }, { "content": " vec!('7', '.', '.', '.', 
'2', '.', '.', '.', '6'),\n\n vec!('.', '6', '.', '.', '.', '.', '2', '8', '.'),\n\n vec!('.', '.', '.', '4', '1', '9', '.', '.', '5'),\n\n vec!('.', '.', '.', '.', '8', '.', '.', '7', '9')\n\n );\n\n\n\n Solution::solve_sudoku(& mut board);\n\n\n\n let expected_board = vec!(\n\n vec!('5', '3', '4', '6', '7', '8', '9', '1', '2'),\n\n vec!('6', '7', '2', '1', '9', '5', '3', '4', '8'),\n\n vec!('1', '9', '8', '3', '4', '2', '5', '6', '7'),\n\n vec!('8', '5', '9', '7', '6', '1', '4', '2', '3'),\n\n vec!('4', '2', '6', '8', '5', '3', '7', '9', '1'),\n\n vec!('7', '1', '3', '9', '2', '4', '8', '5', '6'),\n\n vec!('9', '6', '1', '5', '3', '7', '2', '8', '4'),\n\n vec!('2', '8', '7', '4', '1', '9', '6', '3', '5'),\n\n vec!('3', '4', '5', '2', '8', '6', '1', '7', '9')\n\n );\n\n assert_eq!(board, expected_board);\n", "file_path": "rust/leetcode/src/hash_table/sudoku_solver.rs", "rank": 94, "score": 8.480811164472541 }, { "content": "///\n\n/// Given an array nums of n integers and an integer target, are there elements a, b, c, and d in nums such that a + b + c + d = target? Find all unique quadruplets in the array which gives the sum of target.\n\n///\n\n/// # Note:\n\n///\n\n/// The solution set must not contain duplicate quadruplets.\n\n///\n\n/// # Example:\n\n///\n\n/// Given array nums = [1, 0, -1, 0, -2, 2], and target = 0.\n\n///\n\n/// A solution set is:\n\n/// [\n\n/// [-1, 0, 0, 1],\n\n/// [-2, -1, 1, 2],\n\n/// [-2, 0, 0, 2]\n\n/// ]\n\n///\n\n/// pub fn four_sum (nums: Vec<i32>, target: i32) -> Vec<Vec<i32>> {\n\n/// let len_nums = nums.len();\n\n/// for i in 0..len_nums {\n\n/// if nums[i] < target {\n\n///\n\n/// }\n\n/// }\n\n/// }\n\n\n", "file_path": "rust/leetcode/src/hash_table/four_sum.rs", "rank": 95, "score": 8.26442473478227 }, { "content": " // store.push(result);\n\n // }\n\n \n\n } else {\n\n store.push(result);\n\n }\n\n\n\n }\n\n return ();\n\n }\n\n\n\n let len_nums = nums.len();\n\n \n\n for i in 0..len_nums {\n\n // find a number less than target\n\n let number = nums[i];\n\n\n\n // if target-number >= number, go on\n\n // if target >= number {\n\n let next_nums = &nums[i+1..];\n", "file_path": "rust/leetcode/src/hash_table/four_sum.rs", "rank": 96, "score": 8.195904492466621 }, { "content": " let len = nums.len();\n\n let mut tree_arr = vec![0; len];\n\n let mut tree_arr = [tree_arr, nums].concat();\n\n\n\n // (1..5).rev() => 4 3 2 1\n\n for i in (1..len).rev() {\n\n // now i from len-1 to 0 => [len-1, 0)\n\n dbg!(i);\n\n tree_arr[i] = tree_arr[left_child(i)] + tree_arr[right_child(i)];\n\n }\n\n \n\n NumArray {\n\n tree: tree_arr\n\n }\n\n }\n\n\n\n fn update (&mut self, i: usize, val: i32) {\n\n let n = self.tree.len() / 2;\n\n // i must between [0, n)\n\n if i >= n {\n", "file_path": "rust/leetcode/src/tree/p307_range_sum_query.rs", "rank": 97, "score": 8.045080383804143 }, { "content": " num\n\n }\n\n }).collect();\n\n\n\n \n\n for i in 0..len {\n\n let index = nums[i].abs() - 1; // as index of nums\n\n nums[index as usize] = if nums[index as usize] < 0 {\n\n nums[index as usize]\n\n } else {\n\n -nums[index as usize]\n\n }\n\n }\n\n\n\n for (index, val) in nums.iter().enumerate() {\n\n if *val > 0 {\n\n result = index + 1;\n\n break;\n\n }\n\n // the last element < 0, then result is length+1\n", "file_path": "rust/leetcode/src/array/p41_first_missing_positive.rs", "rank": 98, "score": 7.991298354771894 }, { "content": " let next_target = target - number;\n\n if next_target.abs() <= target.abs() {\n\n let mut next_result = result.clone();\n\n 
next_result.push(number);\n\n n_sum(next_nums, next_target, n-1, next_result, store);\n\n }\n\n }\n\n \n\n}\n\n\n\n\n", "file_path": "rust/leetcode/src/hash_table/four_sum.rs", "rank": 99, "score": 7.781167628841504 } ]
Rust
src/drive_operations.rs
EndaHallahan/Scrit
10431c049fef5f587b506f0e65b4f55443b3f91c
use map_operations::*;
use push::ScritFile;
use hyper;
use hyper::net::HttpsConnector;
use hyper::Client;
use hyper_native_tls::NativeTlsClient;
use yup_oauth2::{Authenticator, FlowType, ApplicationSecret, DiskTokenStorage, DefaultAuthenticatorDelegate, parse_application_secret};
use google_drive3::{Drive, File};
use std::io::Cursor;
use std::fs;
use client_info::CLIENT_SECRET;

fn read_client_secret(client_info: &'static str) -> ApplicationSecret {
    parse_application_secret(&client_info.to_string()).unwrap()
}

pub fn get_hub() -> Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> {
    let secret = read_client_secret(CLIENT_SECRET);
    let client = hyper::Client::with_connector(
        HttpsConnector::new(NativeTlsClient::new().unwrap()));
    let authenticator = Authenticator::new(&secret, DefaultAuthenticatorDelegate, client,
        DiskTokenStorage::new(&"Scrit/token_store.json".to_string()).unwrap(),
        Some(FlowType::InstalledInteractive));
    let client = hyper::Client::with_connector(
        HttpsConnector::new(NativeTlsClient::new().unwrap())
    );
    Drive::new(client, authenticator)
}

pub fn make_document(name: &String, contents: &String, dir_id: &String,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> String {
    let mut doc = File::default();
    doc.name = Some(name.to_string());
    doc.mime_type = Some("application/vnd.google-apps.document".to_string());
    doc.parents = Some(vec![dir_id.clone()]);
    match hub.files().create(doc)
        .param("fields", "id")
        .upload(Cursor::new(contents.as_bytes()), "text/html".parse().unwrap()) {
        Ok((_, y)) => {
            println!("OK! Successfully uploaded '{}'.", name);
            y.id.unwrap()
        },
        Err(x) => {panic!("ERROR! {:?}",x);}
    }
}

pub fn update_document(name: &String, contents: &String, dir_id: &String, file_id: &str,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> String {
    let mut doc = File::default();
    doc.mime_type = Some("application/vnd.google-apps.document".to_string());
    match hub.files().update(doc, file_id)
        .param("fields", "id")
        .upload(Cursor::new(contents.as_bytes()), "text/html".parse().unwrap()) {
        Ok((_, y)) => {
            println!("OK! Successfully updated '{}'.", name);
            y.id.unwrap()
        },
        Err(x) => {panic!("ERROR! {:?}",x);}
    }
}

pub fn make_directory(name: String,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> String {
    let mut dir = File::default();
    dir.name = Some(name.to_string());
    dir.mime_type = Some("application/vnd.google-apps.folder".to_string());
    match hub.files().create(dir)
        .param("fields", "id")
        .upload(Cursor::new(name.as_bytes()), "application/vnd.google-apps.folder".parse().unwrap()) {
        Ok((_, y)) => {
            println!("OK! Successfully created directory '{}'.", name);
            y.id.unwrap()
        },
        Err(x) => {panic!("ERROR! {:?}",x)}
    }
}

pub fn check_file_in_folder(file_id: &str, folder_id : &String, file_name: &String,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> bool {
    match hub.files().get(file_id).param("fields", "parents, trashed").doit() {
        Ok((_, y)) => {
            match y.parents {
                Some(parents) => {
                    if !y.trashed.unwrap() && parents.contains(folder_id) {
                        true
                    } else {
                        println!("Couldn't find file '{}' in project folder, creating new file...", file_name);
                        false
                    }
                }
                None => {
                    println!("Couldn't find file '{}' in project folder, creating new file...", file_name);
                    false
                }
            }
        },
        Err(_) => {
            println!("Couldn't find file '{}' in project folder, creating new file...", file_name);
            false
        }
    }
}

pub fn check_folder(folder_id : &String,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> bool {
    match hub.files().get(folder_id).param("fields", "trashed").doit() {
        Ok((_, y)) => {
            if !y.trashed.unwrap() {
                true
            } else {
                println!("Couldn't find project folder, creating new directory...");
                false
            }
        },
        Err(x) => {
            println!("Couldn't find project folder, creating new directory...");
            false
        }
    }
}

pub fn upload(compiled_set: &mut Vec<ScritFile>) {
    let hub = get_hub();
    let mut map = get_map();
    let title = get_title_text(&map);
    let mut dir_id = get_directory_id(&map);
    if dir_id.is_empty() || !check_folder(&dir_id, &hub) {
        dir_id = make_directory(title.to_string(), &hub);
        set_directory_id(&mut map, &dir_id);
    }
    for scrit_file in compiled_set {
        let mut file_id: String;
        match check_existing_files(&mut map, scrit_file.title()) {
            Some(ele) => {
                if check_file_in_folder(ele.attr("id").unwrap(), &dir_id, scrit_file.title(), &hub) {
                    file_id = update_document(scrit_file.title(), scrit_file.body(), &dir_id, ele.attr("id").unwrap(), &hub);
                } else {
                    file_id = make_document(scrit_file.title(), scrit_file.body(), &dir_id, &hub);
                    replace_file(ele, &file_id, scrit_file.title());
                }
            },
            None => {file_id = make_document(scrit_file.title(), scrit_file.body(), &dir_id, &hub);}
        }
        scrit_file.set_id(file_id);
    }
    map.write_to(&mut fs::File::create("Scrit/scrit_map.xml").unwrap());
}

pub fn download() {
    let hub = get_hub();
}
use map_operations::*;
use push::ScritFile;
use hyper;
use hyper::net::HttpsConnector;
use hyper::Client;
use hyper_native_tls::NativeTlsClient;
use yup_oauth2::{Authenticator, FlowType, ApplicationSecret, DiskTokenStorage, DefaultAuthenticatorDelegate, parse_application_secret};
use google_drive3::{Drive, File};
use std::io::Cursor;
use std::fs;
use client_info::CLIENT_SECRET;

fn read_client_secret(client_info: &'static str) -> ApplicationSecret {
    parse_application_secret(&client_info.to_string()).unwrap()
}

pub fn get_hub() -> Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>> {
    let secret = read_client_secret(CLIENT_SECRET);
    let client = hyper::Client::with_connector(
        HttpsConnector::new(NativeTlsClient::new().unwrap()));
    let authenticator = Authenticator::new(&secret, DefaultAuthenticatorDelegate, client,
        DiskTokenStorage::new(&"Scrit/token_store.json".to_string()).unwrap(),
        Some(FlowType::InstalledInteractive));
    let client = hyper::Client::with_connector(
        HttpsConnector::new(NativeTlsClient::new().unwrap())
    );
    Drive::new(client, authenticator)
}

pub fn make_document(name: &String, contents: &String, dir_id: &String,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> String {
    let mut doc = File::default();
    doc.name = Some(name.to_string());
    doc.mime_type = Some("application/vnd.google-apps.document".to_string());
    doc.parents = Some(vec![dir_id.clone()]);
    match hub.files().create(doc)
        .param("fields", "id")
        .upload(Cursor::new(contents.as_bytes()), "text/html".parse().unwrap()) {
        Ok((_, y)) => {
            println!("OK! Successfully uploaded '{}'.", name);
            y.id.unwrap()
        },
        Err(x) => {panic!("ERROR! {:?}",x);}
    }
}

pub fn update_document(name: &String, contents: &String, dir_id: &String, file_id: &str,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> String {
    let mut doc = File::default();
    doc.mime_type = Some("application/vnd.google-apps.document".to_string());
    match hub.files().update(doc, file_id)
        .param("fields", "id")
        .upload(Cursor::new(contents.as_bytes()), "text/html".parse().unwrap()) {
        Ok((_, y)) => {
            println!("OK! Successfully updated '{}'.", name);
            y.id.unwrap()
        },
        Err(x) => {panic!("ERROR! {:?}",x);}
    }
}

pub fn make_directory(name: String,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> String {
    let mut dir = File::default();
    dir.name = Some(name.to_string());
    dir.mime_type = Some("application/vnd.google-apps.folder".to_string());
    match hub.files().create(dir)
        .param("fields", "id")
        .upload(Cursor::new(name.as_bytes()), "application/vnd.google-apps.folder".parse().unwrap()) {
        Ok((_, y)) => {
            println!("OK! Successfully created directory '{}'.", name);
            y.id.unwrap()
        },
        Err(x) => {panic!("ERROR! {:?}",x)}
    }
}

pub fn check_file_in_folder(file_id: &str, folder_id : &String, file_name: &String,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> bool {
    match hub.files().get(file_id).param("fields", "parents, trashed").doit() {
        Ok((_, y)) => {
            match y.parents {
                Some(parents) => {
                }
                None => {
                    println!("Couldn't find file '{}' in project folder, creating new file...", file_name);
                    false
                }
            }
        },
        Err(_) => {
            println!("Couldn't find file '{}' in project folder, creating new file...", file_name);
            false
        }
    }
}

pub fn check_folder(folder_id : &String,
    hub: &Drive<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, DiskTokenStorage, Client>>
) -> bool {
    match hub.files().get(folder_id).param("fields", "trashed").doit() {
        Ok((_, y)) => {
            if !y.trashed.unwrap() {
                true
            } else {
                println!("Couldn't find project folder, creating new directory...");
                false
            }
        },
        Err(x) => {
            println!("Couldn't find project folder, creating new directory...");
            false
        }
    }
}

pub fn upload(compiled_set: &mut Vec<ScritFile>) {
    let hub = get_hub();
    let mut map = get_map();
    let title = get_title_text(&map);
    let mut dir_id = get_directory_id(&map);
    if dir_id.is_empty() || !check_folder(&dir_id, &hub) {
        dir_id = make_directory(title.to_string(), &hub);
        set_directory_id(&mut map, &dir_id);
    }
    for scrit_file in compiled_set {
        let mut file_id: String;
        match check_existing_files(&mut map, scrit_file.title()) {
            Some(ele) => {
                if check_file_in_folder(ele.attr("id").unwrap(), &dir_id, scrit_file.title(), &hub) {
                    file_id = update_document(scrit_file.title(), scrit_file.body(), &dir_id, ele.attr("id").unwrap(), &hub);
                } else {
                    file_id = make_document(scrit_file.title(), scrit_file.body(), &dir_id, &hub);
                    replace_file(ele, &file_id, scrit_file.title());
                }
            },
            None => {file_id = make_document(scrit_file.title(), scrit_file.body(), &dir_id, &hub);}
        }
        scrit_file.set_id(file_id);
    }
    map.write_to(&mut fs::File::create("Scrit/scrit_map.xml").unwrap());
}

pub fn download() {
    let hub = get_hub();
}
                    if !y.trashed.unwrap() && parents.contains(folder_id) {
                        true
                    } else {
                        println!("Couldn't find file '{}' in project folder, creating new file...", file_name);
                        false
                    }
if_condition
[ { "content": "pub fn replace_file(ele: &mut Element, in_id: &str, in_title: &String) {\n\n\tele.set_attr(\"id\", in_id);\n\n\tele.set_attr(\"title\", in_title);\n\n\tele.delete_children();\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 3, "score": 224990.306815553 }, { "content": "pub fn set_directory_id(map: &mut Element, id: &String) {\n\n\t{\n\n\t\tlet mut dir_ele = map.get_child_mut(\"Drive\", \"argabarga\").unwrap()\n\n\t\t\t.get_child_mut(\"Directory\", \"argabarga\").unwrap();\n\n\t\tif dir_ele.text().is_empty() {\n\n\t\t\tdir_ele.append_text_node(id.to_string());\n\n\t\t} else {\n\n\t\t\tmatch dir_ele.texts_mut().nth(0) {\n\n\t\t\t\tSome(text) => {text.clear()},\n\n\t\t\t\tNone => {}\n\n\t\t\t}\n\n\t\t\tdir_ele.append_text_node(id.to_string());\n\n\t\t}\t\n\n\t}\n\n\t*get_files(map) = Element::builder(\"Files\").ns(\"argabarga\").build();\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 4, "score": 223044.36726134963 }, { "content": "pub fn make_file<'a>(map: &'a mut Element, id: &'a String, title: &'a String) -> &'a mut Element {\n\n\tget_files(map)\n\n\t\t.append_child(Element::builder(\"File\")\n\n\t\t.attr(\"id\", id)\n\n\t\t.attr(\"title\", title)\n\n\t\t.build())\t\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 7, "score": 207036.97410529037 }, { "content": "pub fn does_file_exist(map: &Element, in_id: &String) -> bool {\n\n\tlet files = map.get_child(\"Drive\", \"argabarga\").unwrap()\n\n\t\t.get_child(\"Files\", \"argabarga\").unwrap()\n\n\t\t\t.children();\n\n\tlet mut found: bool = false;\n\n\tfor file in files {\n\n\t\tmatch file.attr(\"id\") {\n\n\t\t\tSome(id) => {\n\n\t\t\t\tif id == in_id {\n\n\t\t\t\t\treturn true\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tNone => {}\n\n\t\t}\n\n\t}\n\n\tfalse\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 8, "score": 198844.59334319888 }, { "content": "pub fn get_doc(id: &String) -> Option<String> {\n\n\tlet file_name_full = format!(\"{}.rtf\", id);\n\n\tfor entry in WalkDir::new(\"./Files/Docs\").into_iter().filter_map(|e| e.ok()) {\n\n\t\tif entry.file_name().to_str().unwrap() == file_name_full {\n\n\t\t\treturn Some(entry.path().to_path_buf().into_os_string().into_string().unwrap());\n\n\t\t}\n\n\t}\n\n\tNone\n\n}\n\n\n", "file_path": "src/scrivx_reader.rs", "rank": 10, "score": 197194.0930777646 }, { "content": "pub fn get_me_a_file_with_id_and_title<'a>(map: &'a mut Element, in_id: &'a String, in_title: &'a String) -> &'a mut Element {\n\n\tif does_file_exist(map, in_id) {\n\n\t\tlet ele = get_file_by_id(map, in_id).unwrap();\n\n\t\tele.delete_children();\n\n\t\tele.set_attr(\"title\", in_title);\n\n\t\tele\n\n\t} else {\n\n\t\tmake_file(map, in_id, in_title)\n\n\t}\t\n\n}", "file_path": "src/map_operations.rs", "rank": 11, "score": 195046.1472357277 }, { "content": "pub fn compile(documents: Vec<Document>, clean: bool, split: bool, name: Option<String>) -> Vec<ScritFile> {\n\n\tlet mut scrit_file_list: Vec<ScritFile> = Vec::new();\n\n\tif !split {\n\n\t\tlet mut scrit_file = ScritFile::new(documents);\n\n\t\tmatch name {\n\n\t\t\tSome(title) => scrit_file.set_title(title),\n\n\t\t\tNone => {}\n\n\t\t}\n\n\t\tscrit_file_list.push(scrit_file);\n\n\t} else {\n\n\t\tlet title_prefix = match name {\n\n\t\t\tSome(title) => format!(\"{} - \", title.to_string()),\n\n\t\t\tNone => String::new()\n\n\t\t};\n\n\t\tfor doc in documents {\n\n\t\t\tlet doc_title = doc.title().to_string();\n\n\t\t\tlet mut scrit_file = ScritFile::new(vec![doc]);\n\n\t\t\tscrit_file.set_title(format!(\"{}{}\", 
title_prefix, doc_title));\n\n\t\t\tscrit_file_list.push(scrit_file);\n\n\t\t}\n", "file_path": "src/compiler.rs", "rank": 12, "score": 193033.9761159515 }, { "content": "pub fn get_file_by_id<'b>(map: &'b mut Element, in_id: &'b String) -> Option<&'b mut Element> {\n\n\tlet files = get_files(map).children_mut();\n\n\tlet mut found: bool = false;\n\n\tfor file in files {\n\n\t\tmatch file.attr(\"id\") {\n\n\t\t\tSome(id) => {\n\n\t\t\t\tif id == in_id {\n\n\t\t\t\t\tfound = true;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tNone => {}\n\n\t\t}\n\n\t\tif found {\n\n\t\t\treturn Some(file);\n\n\t\t}\n\n\t}\n\n\tNone\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 13, "score": 190485.95997378093 }, { "content": "pub fn get_directory_id(map: &Element) -> String {\n\n\tmap.get_child(\"Drive\", \"argabarga\").unwrap()\n\n\t\t.get_child(\"Directory\", \"argabarga\").unwrap().text()\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 16, "score": 165162.2238828064 }, { "content": "pub fn check_existing_files<'a>(map: &'a mut Element, in_title: &'a String) -> Option<&'a mut Element> {\n\n\tlet files = get_files(map).children_mut();\n\n\tfor file in files {\n\n\t\tif file.attr(\"title\").unwrap() == in_title {\n\n\t\t\treturn Some(file);\n\n\t\t}\n\n\t}\n\n\tNone\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 17, "score": 162956.81725387424 }, { "content": "pub fn get_files(map: &mut Element) -> &mut Element {\n\n\tmap.get_child_mut(\"Drive\", \"argabarga\").unwrap()\n\n\t\t.get_child_mut(\"Files\", \"argabarga\").unwrap()\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 18, "score": 151955.1746265888 }, { "content": "pub fn get_scrivening<'a> (name: &String, list: &'a[Scrivening]) -> Option<&'a Scrivening> {\n\n\tfor scriv in list {\n\n\t\tif name.starts_with(\"#\") && scriv.id() == name.get(1..).unwrap() {\n\n\t\t\treturn Some(scriv);\n\n\t\t} else if scriv.title() == name {\n\n\t\t\treturn Some(scriv);\n\n\t\t} else {\n\n\t\t\tmatch scriv.children() {\n\n\t\t\t\tNone => continue,\n\n\t\t\t\tSome(children) => {\n\n\t\t\t\t\tmatch get_scrivening(name, children) {\n\n\t\t\t\t\t\tNone => continue,\n\n\t\t\t\t\t\tSome(s) => return Some(s)\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tNone\n\n}\n\n\n", "file_path": "src/scrivx_reader.rs", "rank": 19, "score": 141275.43102216435 }, { "content": "pub fn get_scrivx() -> Result<String, WrongDirError> {\n\n\tlet mut scrivx_name = String::new();\n\n\tfor entry in WalkDir::new(\".\").into_iter().filter_map(|e| e.ok()) {\n\n\t\tlet filename = entry.file_name().to_string_lossy();\n\n\t\tif filename.ends_with(\".scrivx\") {\n\n\t\t\tscrivx_name = filename.to_string();\n\n\t\t\tbreak;\n\n\t\t}\n\n\t}\n\n\tif scrivx_name != \"\" {\n\n\t\tOk(scrivx_name)\n\n\t} else {\n\n\t\tErr(WrongDirError)\n\n\t}\n\n}\n\n\n", "file_path": "src/scrivx_reader.rs", "rank": 20, "score": 140036.93569057365 }, { "content": "fn collect_ids(scrivening: &Scrivening, omit: &Option<Vec<String>>, include: &bool) -> Vec<String> {\n\n\tlet mut out_vec = Vec::new();\n\n\tif !include {\n\n\t\tif !scrivening.include() {return out_vec;}\n\n\t}\n\n\tmatch omit {\n\n\t\tNone => {},\n\n\t\tSome(omits) => {\n\n\t\t\tif omits.contains(scrivening.title()) {return out_vec;}\n\n\t\t}\n\n\t}\n\n\tmatch scrivening.filepath() {\n\n\t\tSome(_) => out_vec.push(scrivening.id().to_string()),\n\n\t\tNone => {}\n\n\t}\n\n\tmatch scrivening.children() {\n\n\t\tSome(kids) => {\n\n\t\t\tfor kid in kids {\n\n\t\t\t\t\tout_vec.extend(collect_ids(kid, &omit, 
&include));\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\tNone => {}\n\n\t}\n\n\tout_vec\n\n}\n\n\n", "file_path": "src/push.rs", "rank": 21, "score": 137908.4501548167 }, { "content": "pub fn check_init() -> bool {\n\n\tlet map_path = Path::new(\"Scrit/scrit_map.xml\");\n\n\tif map_path.exists() {\n\n\t\ttrue\n\n\t} else {\n\n\t\tfalse\n\n\t}\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 22, "score": 136002.39157785024 }, { "content": "pub fn help(args: &[String]) {\n\n\tif args.is_empty() {\n\n\t\tprintln!(\"\n\nFor more information on a command, type 'scrit help <command name>'. \n\nFor a basic runthrough of how to use this program, type 'scrit help tutorial'.\n\n\n\ninit \t\tInitializes Scrit in a .scriv directory.\n\npush \t\tPushes documents from a Scrivener project to a Google Drive.\n\npull \t\tPulls documents from a Google Drive and imports them into a Scrivener project.\n\ntree \t\tDisplays a filetree representation of a Scrivener project.\n\ninfo \t\tDisplays information about Scrit, including the installed version number.\n", "file_path": "src/help.rs", "rank": 23, "score": 130944.50594267028 }, { "content": "pub fn pull(args: &[String]) {\n\n\t\n\n}", "file_path": "src/pull.rs", "rank": 24, "score": 130944.5059426703 }, { "content": "pub fn push(args: &[String]) {\n\n\tif !check_init() {\n\n\t\tprintln!(\"\n\nScrit must be initialized for this project before you can use this command.\n\nType 'scrit init' to intialize, or type 'scrit help init' for more information.\n\n\t\t\t\");\n\n\t\treturn;\n\n\t}\n\n\tlet blueprint: Vec<Scrivening> = scrivx_reader::process_scrivx();\n\n\t\n\n\tlet mut exports: Vec<&Scrivening> = Vec::new();\n\n\tlet mut omit: Option<Vec<String>> = None;\n\n\tlet mut include: bool = false;\n\n\tlet mut split: bool = false;\n\n\tlet mut clean: bool = false;\n\n\tlet mut name: Option<String> = None;\n\n\n\n\t/* Process command line arguments */\n\n\tlet mut state: PushState = PushState::Initial;\n\n\tfor arg in args {\n", "file_path": "src/push.rs", "rank": 25, "score": 130944.50594267028 }, { "content": "pub fn update() {\n\n\n\n}", "file_path": "src/update.rs", "rank": 26, "score": 128538.97995373126 }, { "content": "fn list_scriv_contents(blueprint: &Vec<Scrivening>, depth: i32, active_levels: &mut Vec<bool>) {\n\n\tfor (index, scrivening) in blueprint.iter().enumerate() {\n\n\t\tlet mut out: String = String::new();\n\n\t\tlet mut i: i32 = 0;\n\n\t\twhile i < depth {\n\n\t\t\tif i == depth - 1 {\n\n\t\t\t\tif index == blueprint.len() - 1 {\n\n\t\t\t\t\tout.push_str(\"└── \");\n\n\t\t\t\t\tactive_levels[(depth - 1) as usize] = false;\n\n\t\t\t\t} else {\n\n\t\t\t\t\tout.push_str(\"├── \");\n\n\t\t\t\t}\t\n\n\t\t\t} else {\n\n\t\t\t\tif active_levels[i as usize] {\n\n\t\t\t\t\tout.push_str(\"│ \");\n\n\t\t\t\t} else {\n\n\t\t\t\t\tout.push_str(\" \");\n\n\t\t\t\t}\t\n\n\t\t\t}\t\t\n\n\t\t\ti += 1;\n", "file_path": "src/tree.rs", "rank": 27, "score": 126843.07129866329 }, { "content": "pub fn get_title_text(map: &Element) -> String {\n\n\tmap.get_child(\"Project\", \"argabarga\").unwrap()\n\n\t\t.get_child(\"Title\", \"argabarga\").unwrap().text()\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 28, "score": 119389.26681321478 }, { "content": "pub fn process_html(html: &String) -> Node<ASTElement> {\n\n\tlet mut document_root: Node<ASTElement> = Node::new(ASTElement::new(GroupType::Document));\n\n\tdocument_root\n\n}\n\n\n", "file_path": "src/html_operations.rs", "rank": 29, "score": 114834.06576149192 }, { "content": "pub fn process_rtf(rtf: &String) 
-> Node<ASTElement> {\n\n\tlet mut reader = RTFReader::new();\n\n\tlet mut builder = RTFBuilder::new();\t\n\n\tbuilder.build(reader.read(&rtf))\n\n}\n\n\n", "file_path": "src/rtf_operations.rs", "rank": 30, "score": 114834.06576149192 }, { "content": "pub fn write_html(dom: Node<ASTElement>) -> String {\n\n\tlet mut writer = HTMLWriter::new();\n\n\twriter.write(dom)\n\n}\n", "file_path": "src/html_operations.rs", "rank": 31, "score": 114834.06576149192 }, { "content": "pub fn write_rtf(dom: Node<ASTElement>) /*-> &String*/ {\n\n\t\n\n}", "file_path": "src/rtf_operations.rs", "rank": 32, "score": 114834.06576149192 }, { "content": "fn html_to_rtf(html: &String) /*-> &String*/ {\n\n\twrite_rtf(process_html(html))\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 33, "score": 98600.4677936382 }, { "content": "fn rtf_to_html<'t>(rtf: &String) -> String {\n\n\twrite_html(process_rtf(rtf))\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 34, "score": 96760.49203791728 }, { "content": "pub fn decompile() {\n\n\n\n}\n", "file_path": "src/compiler.rs", "rank": 35, "score": 93483.95728557903 }, { "content": "pub fn init() {\n\n\tif check_init() {\n\n\t\tprintln!(\"Scrit has already been initialized in this directory.\");\n\n\t\treturn\n\n\t} else {\n\n\t\tprintln!(\"Initializing...\");\n\n\t\tmatch fs::create_dir(\"Scrit\") {\n\n\t\t\tErr(why) => panic!(\"{}\", why.description()),\n\n\t\t\tOk(_) => println!(\"Successfully created scrit folder.\")\n\n\t\t}\n\n\t\tlet mut map = match fs::File::create(\"Scrit/scrit_map.xml\") {\n\n\t\t\tErr(why) => panic!(\"{}\", why.description()),\n\n\t\t\tOk(file) => file\n\n\t\t};\n\n\t\tmatch get_scrivx() {\n\n\t\t\tOk(name) => {\n\n\t\t\t\tlet project_name = name.trim_right_matches(\".scrivx\").clone();\n\n\t\t\t\tlet map_template = format!(r#\"\n\n<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<ScritMap xmlns=\"argabarga\">\n", "file_path": "src/map_operations.rs", "rank": 36, "score": 91718.63301317152 }, { "content": "pub fn binder_tree() {\n\n\tlet ref blueprint: Vec<Scrivening> = process_scrivx();\n\n\tprintln!(\"{}\", \"Binder\");\n\n\tlist_scriv_contents(&blueprint, 1, &mut vec![true]);\n\n}\n\n\n", "file_path": "src/tree.rs", "rank": 37, "score": 91718.63301317152 }, { "content": "pub fn get_map() -> Element {\n\n\tlet mut f = fs::File::open(\"Scrit/scrit_map.xml\").expect(\"File not found!\");\n\n\tlet mut contents = String::new();\n\n f.read_to_string(&mut contents)\n\n .expect(\"Something went wrong reading the file.\");\n\n let mut root: Element = contents.parse().unwrap();\n\n root\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 39, "score": 86088.5821748517 }, { "content": "pub fn process_scrivx() -> Vec<Scrivening> {\n\n\tlet mut scrivx_path: String;\n\n\tmatch get_scrivx() {\n\n\t\tOk(scrivx) => scrivx_path = scrivx,\n\n\t\tErr(e) => panic!(\"Error: {}\", e.description())\n\n\t}\n\n\tlet mut f = fs::File::open(scrivx_path).expect(\"File not found!\");\n\n\tlet mut contents = String::new();\n\n f.read_to_string(&mut contents)\n\n .expect(\"Something went wrong reading the file.\");\n\n let root: Element = contents.parse().unwrap();\n\n let mut outvec: Vec<Scrivening> = Vec::new();\n\n for child in root.get_named_child(\"Binder\").unwrap().children() {\n\n \tlet title: String = child.get_named_child(\"Title\").unwrap().text();\n\n \tlet id: String = child.attr(\"ID\").unwrap().to_string();\n\n \tlet include: bool = match child.get_named_child(\"MetaData\")\n\n \t\t\t\t\t\t\t\t.unwrap()\n\n 
\t\t\t\t\t\t\t\t.get_named_child(\"IncludeInCompile\") {\n\n \t\tNone => false,\n\n \t\tSome(t) => t.text().as_str() == \"Yes\"\n\n \t};\n\n \tlet children: Option<Vec<Scrivening>> = make_child_scrivenings(&child, 1);\n\n \tlet filepath = get_doc(&id);\n\n \tlet new_scriv: Scrivening = Scrivening {title, id, include, filepath, children, depth: 0};\n\n \toutvec.push(new_scriv);\n\n } \n\n return outvec;\n\n}\n\n\n", "file_path": "src/scrivx_reader.rs", "rank": 40, "score": 82468.07576297648 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n if !(args.len() > 1) {return;}\n\n match args[1].as_str() {\n\n \t\"init\" => map_operations::init(),\n\n \t\"push\" => push::push(&args[2..]),\n\n \t\"pull\" => pull::pull(&args[2..]),\n\n \t\"tree\" => tree::binder_tree(),\n\n \t\"help\" => help::help(&args[2..]),\n\n \t\"update\" => update::update(),\n\n \t\"info\" => {\n\n \t\tlet version: &'static str = env!(\"CARGO_PKG_VERSION\");\n\n \t\tprintln!(\"\n\nScrit - A push/pull interface between Scrivener and Google Docs\n\nVersion {}\n\nScrit was created and is maintained by Enda Hallahan. Source code available at https://github.com/EndaHallahan/Scrit\n\nScrit is protected under an MIT license. Scrit is not affiliated with Google LLC or Literature & Latte Ltd.\n\nScrivener © Literature & Latte Ltd. Google Drive and Google Docs © Google LLC. Please don't sue me.\n\n \t\t\t\", version);\n\n \t},\n\n \t_ => println!(\"Unknown command '{}'. Type 'scrit help' for a list of valid commands.\", args[1].as_str())\n\n }\t\n\n}", "file_path": "src/main.rs", "rank": 41, "score": 47952.057626853 }, { "content": "fn make_child_scrivenings(ele: &Element, depth: i32) -> Option<Vec<Scrivening>> {\n\n\tmatch ele.get_named_child(\"Children\") {\n\n\t\tNone => None,\n\n\t\tSome(e) => {\n\n\t\t\tlet mut outvec: Vec<Scrivening> = Vec::new();\n\n\t\t\tfor child in e.children() {\n\n\t\t\t\tlet title: String = child.get_named_child(\"Title\").unwrap().text();\n\n\t\t \tlet id: String = child.attr(\"ID\").unwrap().to_string();\n\n\t\t \tlet include: bool = match child.get_named_child(\"MetaData\").unwrap().get_named_child(\"IncludeInCompile\") {\n\n\t\t \t\tNone => false,\n\n\t\t \t\tSome(t) => t.text().as_str() == \"Yes\"\n\n\t\t \t};\n\n\t\t\t\tlet children: Option<Vec<Scrivening>> = make_child_scrivenings(&child, depth + 1);\n\n\t\t\t\tlet filepath = get_doc(&id);\n\n\t\t\t\tlet new_scriv = Scrivening {title, id, include, filepath, children, depth};\n\n\t\t\t\toutvec.push(new_scriv);\n\n\t\t\t}\n\n\t\t\treturn Some(outvec);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/scrivx_reader.rs", "rank": 42, "score": 34048.48366586527 }, { "content": "update \t\tChecks if an updated version of Scrit is available.\n\nhelp \t\tDisplays help information for Scrit commands.\n\n\t\t\t\");\n\n\t} else {\n\n\t\tfor query in args {\n\n\t\t\tmatch query.as_str() {\n\n\t\t\t\t\"init\" => {println!(\"\n\nscrit init\n\n\n\nWhen in a .scriv folder, initializes Scrit for that project. This command must be executed before \n\nScrit can interact with a Scrivener project, and must be executed for each project you wish to use\n\nwith Scrit.\n\n\t\t\t\t\t\")},\n\n\t\t\t\t\"push\" => {println!(\"\n\nscrit push <documents> <options>\n\n\n\nCompiles specified <documents> and uploads them to a Google Drive. Documents can be specified \n\nby name, or by id by prefacing the id with a #. To compile an entire project, use 'Binder'. 
\n\nSubdocuments of specified documents will be included in the compile unless otherwise specified. \n\n\n", "file_path": "src/help.rs", "rank": 43, "score": 32853.22337064177 }, { "content": "\tpub fn body(&self) -> &Vec<BodyText> {\n\n\t\t&self.body\n\n\t}\n\n\tpub fn body_build(&mut self) {\n\n\t\tfor id in &self.contents {\n\n\t\t\tlet mut f = File::open(format!(\"./Files/Docs\\\\{}.rtf\", id)).expect(\"file not found\");\n\n\t\t let mut contents = String::new();\n\n\t\t f.read_to_string(&mut contents)\n\n\t\t .expect(\"Something went wrong reading the file!\");\n\n\t\t &self.body.push(BodyText::new(id.to_string(), contents));\n\n\t\t}\n\n\t}\n\n}\n\n\n\npub struct ScritFile {\n\n\ttitle: String,\n\n\tid: String,\n\n\tcontents: Vec<Document>,\n\n\tbody: String\n\n}\n", "file_path": "src/push.rs", "rank": 44, "score": 16.389259319417302 }, { "content": "impl ScritFile {\n\n\tpub fn new(contents: Vec<Document>) -> ScritFile {\n\n\t\tlet id = String::new();\n\n\t\tlet title = String::new();\n\n\t\tlet body = String::new();\n\n\t\tScritFile{title, id, contents, body}\n\n\t}\n\n\tpub fn title(&self) -> &String {\n\n\t\t&self.title\n\n\t}\n\n\tpub fn id(&self) -> &String {\n\n\t\t&self.id\n\n\t}\n\n\tpub fn contents(&self) -> &Vec<Document> {\n\n\t\t&self.contents\n\n\t}\n\n\tpub fn set_title(&mut self, in_title: String) {\n\n\t\tself.title = in_title;\n\n\t}\n\n\tpub fn set_id(&mut self, in_id: String) {\n", "file_path": "src/push.rs", "rank": 45, "score": 15.563720746490292 }, { "content": "\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tif exports.is_empty() {println!(\"No documents selected for push!\"); return;}\n\n\tlet mut doc_list: Vec<Document> = Vec::new();\n\n\tfor item in exports {\n\n\t\tlet mut new_doc = Document::new(item.title().to_string(), collect_ids(item, &omit, &include));\n\n\t\tdoc_list.push(new_doc);\n\n\t}\n\n\n\n\tlet mut compiled_set = compiler::compile(doc_list, clean, split, name);\t\n\n\t/*for scrit_file in &compiled_set {\n\n\t\tprintln!(\"{:?}\\n\", scrit_file.body());\n\n\t}*/\n\n\tprintln!(\"Beginning upload...\");\n\n\tdrive_operations::upload(&mut compiled_set);\n\n\tprintln!(\"Ok! 
Upload successful.\");\n\n\n\n\t// Populate map\n", "file_path": "src/push.rs", "rank": 47, "score": 13.705699201449951 }, { "content": "\t\tself.id = in_id;\n\n\t}\n\n\tpub fn body(&self) -> &String {\n\n\t\t&self.body\n\n\t}\n\n\tpub fn set_body(&mut self, in_body: String) {\n\n\t\tself.body = in_body;\n\n\t}\n\n\tpub fn body_build(&mut self) {\n\n\t\tfor mut doc in &mut self.contents {\n\n\t\t\tdoc.body_build();\n\n\t\t}\n\n\t\tif self.title.is_empty() {\n\n\t\t\tself.title = self.contents[0].title().to_string();\n\n\t\t}\t\t\n\n\t}\n\n}\n\n\n", "file_path": "src/push.rs", "rank": 48, "score": 12.908504813594547 }, { "content": "\tprintln!(\"Updating map...\");\n\n\tlet mut map = get_map();\n\n\tfor scrit_file in compiled_set {\n\n\t\tlet file_element = get_me_a_file_with_id_and_title(&mut map, scrit_file.id(), scrit_file.title());\t\t\n\n\t\tfor doc in scrit_file.contents() {\n\n\t\t\tlet document_node = file_element.append_child(Element::bare(\"Document\"));\n\n\t\t\tfor subdoc in &doc.contents {\n\n\t\t\t\tdocument_node.append_child(Element::builder(\"ID\")\n\n\t\t\t\t\t\t\t\t\t.append(subdoc)\n\n\t\t\t\t\t\t\t\t\t.build());\n\n\t\t\t\tdocument_node.append_child(Element::builder(\"Checksum\")\n\n\t\t\t\t\t\t\t\t\t.append(\"argabarga\")\n\n\t\t\t\t\t\t\t\t\t.build());\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tmap.write_to(&mut File::create(\"Scrit/scrit_map.xml\").unwrap());\n\n\t\n\n\tprintln!(\"Done!\")\n\n}", "file_path": "src/push.rs", "rank": 49, "score": 12.309101400142005 }, { "content": "\t}\n\n\tfor mut scrit_file in &mut scrit_file_list {\n\n\t\tscrit_file.body_build();\n\n\t\tlet mut compiled_string = String::new();\n\n\t\tfor doc in scrit_file.contents() {\t\n\n\t\t\tcompiled_string.push_str(&format!(\"<h2 data-scrivtitle='true'>{}</h2>\", doc.title()));\n\n\t\t\tfor doc_text in doc.body() {\n\n\t\t\t\tif !clean {\n\n\t\t\t\t\tcompiled_string.push_str(&format!(\"<div data-scrivpath='true'>[[[{}]]]</div>\", doc_text.id()));\n\n\t\t\t\t}\n\n\t\t\t\tcompiled_string.push_str(&rtf_to_html(doc_text.body()));\n\n\t\t\t}\t\n\n\t\t}\n\n\t\tscrit_file.set_body(format!(\"<!DOCTYPE html><body>{}</body></html>\", compiled_string));\n\n\t}\n\n\tscrit_file_list\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 50, "score": 12.011337047903151 }, { "content": "use std::fs;\n\nuse std::io::Read;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse minidom::Element;\n\nuse walkdir::WalkDir;\n\n\n\n#[derive(Debug)]\n\npub struct WrongDirError;\n\nimpl fmt::Display for WrongDirError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", &self.description())\n\n }\n\n}\n\nimpl Error for WrongDirError {\n\n\t fn description(&self) -> &str {\n\n \"No scrivx found. 
Check that you are in a .scriv directory.\"\n\n }\n\n}\n\n\n", "file_path": "src/scrivx_reader.rs", "rank": 51, "score": 11.20062325787222 }, { "content": "#[derive(Debug)]\n\npub struct Scrivening {\n\n\ttitle: String,\n\n\tid: String,\n\n\tinclude: bool,\n\n\tdepth: i32,\n\n\tfilepath: Option<String>,\n\n\tchildren: Option<Vec<Scrivening>>\n\n}\n\nimpl Scrivening {\n\n\tpub fn title(&self) -> &String {\n\n\t\t&self.title\n\n\t}\n\n\tpub fn id(&self) -> &String {\n\n\t\t&self.id\n\n\t}\n\n\tpub fn include(&self) -> &bool {\n\n\t\t&self.include\n\n\t}\n\n\tpub fn depth(&self) -> &i32 {\n", "file_path": "src/scrivx_reader.rs", "rank": 52, "score": 10.90475111707045 }, { "content": "\t}\n\n\n\n\tfn end_group(&mut self) {\n\n\t\tmatch self.current_node.parent() {\n\n\t\t\tNone => {},\n\n\t\t\tSome(parent) => {self.current_node = parent;}\n\n\t\t};\n\n\t}\t\n\n\n\n\tfn cmd_emdash(&mut self) {\n\n\t\tself.current_node.borrow_mut().add_text(&\"—\".to_string());\n\n\t}\n\n\tfn cmd_endash(&mut self) {\n\n\t\tself.current_node.borrow_mut().add_text(&\"–\".to_string());\n\n\t}\n\n\tfn cmd_tab(&mut self) {\n\n\t\tself.current_node.borrow_mut().add_text(&\"\\t\".to_string());\n\n\t}\n\n\tfn cmd_line(&mut self) {\n\n\t\tself.current_node.borrow_mut().add_text(&\"\\n\".to_string());\n", "file_path": "src/rtf_operations.rs", "rank": 53, "score": 9.321189530299467 }, { "content": "\t\t\t\tmatch self.current_instruction {\n\n\t\t\t\t\tInstruction::Text(ref mut contents) => {\n\n\t\t\t\t\t\tcontents.push(character);\n\n\t\t\t\t\t},\n\n\t\t\t\t\t_ => {\n\n\t\t\t\t\t\tself.current_instruction = Instruction::Text(character.to_string());\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tfn parse_escape(&mut self, character: char) {\n\n\t\tmatch character {\n\n\t\t\t' '|'\\\\'|'{'|'}'|'\\n'|'\\r'|'\\t' => {\n\n\t\t\t\tself.mode = ReadMode::ParseText;\n\n\t\t\t\tmatch self.current_instruction {\n\n\t\t\t\t\tInstruction::Text(ref mut contents) => {\n\n\t\t\t\t\t\tcontents.push(character);\n\n\t\t\t\t\t},\n\n\t\t\t\t\t_ => {\n", "file_path": "src/rtf_operations.rs", "rank": 54, "score": 9.096689884558213 }, { "content": "use push::{Document, ScritFile};\n\nuse rtf_operations::{process_rtf, write_rtf};\n\nuse html_operations::{process_html, write_html};\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Attribute {\n\n\tIgnorable,\n\n\tItalics(bool),\n\n\tBold(bool),\n\n\tStrikethrough(bool),\n\n\tSmallcaps(bool),\n\n\tUnderline(bool),\n\n\tSubscript(bool),\n\n\tSuperscript(bool),\n\n\tFontSize(i32)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum GroupType {\n\n\tNull,\n", "file_path": "src/compiler.rs", "rank": 55, "score": 8.978387782113487 }, { "content": "\t<Project>\n\n\t\t<Title>{}</Title>\n\n\t</Project>\n\n\t<Drive>\n\n\t\t<Owner/>\n\n\t\t<Directory/>\n\n\t\t<LastPush/>\n\n\t\t<Files/>\n\n\t</Drive>\n\n</ScritMap>\n\n\"#, project_name);\n\n\t\t\t\tmatch map.write_all(map_template.as_bytes()) {\n\n\t\t\t\t\tErr(why) => panic!(\"{}\", why.description()),\n\n\t\t\t\t\tOk(_) => println!(\"Successfully wrote map file.\")\n\n\t\t\t\t}\n\n\t\t\t},\n\n\t\t\tErr(_) => {return;}\n\n\t\t}\n\n\t\t\n\n\t\tprintln!(\"Done!\");\n\n\t}\n\n}\n\n\n", "file_path": "src/map_operations.rs", "rank": 56, "score": 8.74724132472462 }, { "content": "\tattributes: Vec<Attribute>,\n\n\tele_type: GroupType,\n\n\ttext_contents: String\n\n}\n\nimpl ASTElement {\n\n\tpub fn new(ele_type: GroupType) -> ASTElement {\n\n\t\tlet attributes = Vec::new();\n\n\t\tlet text_contents = 
String::new();\n\n\t\tASTElement{attributes, ele_type, text_contents}\n\n\t}\n\n\tpub fn ele_type(&self) -> &GroupType {\n\n\t\t&self.ele_type\n\n\t}\n\n\tpub fn set_ele_type(&mut self, new_type: GroupType) {\n\n\t\tself.ele_type = new_type;\n\n\t}\n\n\tpub fn add_att(&mut self, att: Attribute) {\n\n\t\tself.attributes.push(att);\n\n\t}\n\n\tpub fn add_text(&mut self, new_text: &String) {\n", "file_path": "src/compiler.rs", "rank": 57, "score": 8.739333526950507 }, { "content": "\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tInstruction::ListBreak => {}\n\n\t\t\t_ => {}\n\n\t\t}\n\n\t}\n\n\n\n\tfn parse_control(&mut self, control: &str) {\n\n\t\tlet mut att_value = 0;\n\n\t\tlet mut control_name = control;\n\n\t\tfor (i, c) in control.chars().enumerate() {\n\n\t\t\tif c.is_digit(10) {\n\n\t\t\t\tlet (a, b) = control.split_at(i);\n\n\t\t\t\tcontrol_name = a;\n\n\t\t\t\tatt_value = match b.parse() {\n\n\t\t\t\t\tOk(val) => val,\n\n\t\t\t\t\tErr(_) => 1\n\n\t\t\t\t};\n\n\t\t\t\tbreak;\n\n\t\t\t}\n", "file_path": "src/rtf_operations.rs", "rank": 58, "score": 8.344749797839789 }, { "content": "\t\t\t}\n\n\t\t}\n\n\t\tlet tag_string = format!(\"<{}{} {}'>\", tag, attributes, styles);\n\n\t\tself.output_string = format!(\"{}{}{}\", self.output_string, tag_string, element.text_contents());\n\n\t}\n\n\tfn end_element(&mut self, element: Ref<ASTElement>) {\n\n\t\tlet tag: &str = match element.ele_type() {\n\n\t\t\tGroupType::Text => \"</span>\",\n\n\t\t\tGroupType::Paragraph => \"</p><br>\",\n\n\t\t\tGroupType::Hr => \"</hr>\",\n\n\t\t\tGroupType::Body => {self.in_body = false; return;},\n\n\t\t\t_ => return\n\n\t\t};\n\n\t\tif !self.in_body {return;}\n\n\t\tself.output_string = format!(\"{}{}\", self.output_string, tag);\n\n\t}\n\n}\n\n\n", "file_path": "src/html_operations.rs", "rank": 59, "score": 8.16530859231481 }, { "content": "\t\t\t\t\t_ => {\n\n\t\t\t\t\t\tself.current_instruction = Instruction::Control(character.to_string());\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tfn parse_hex(&mut self, character: char) {\n\n\t\tmatch self.current_instruction {\n\n\t\t\tInstruction::Hex(ref mut contents) => {\n\n\t\t\t\tif contents.len() < 2 {\n\n\t\t\t\t\tcontents.push(character);\n\n\t\t\t\t\treturn;\n\n\t\t\t\t} \n\n\t\t\t}\n\n\t\t\t_ => {}\n\n\t\t}\n\n\t\tself.set_cur_instruction();\n\n\t\tself.mode = ReadMode::ParseText;\n\n\t\tself.parse_text(character);\n", "file_path": "src/rtf_operations.rs", "rank": 60, "score": 7.930520087977126 }, { "content": "\t\tself.text_contents = format!(\"{}{}\", self.text_contents, new_text);\n\n\t}\n\n\tpub fn text_contents(&self) -> &String {\n\n\t\t&self.text_contents\n\n\t}\n\n\tpub fn attributes(&self) -> &Vec<Attribute> {\n\n\t\t&self.attributes\n\n\t}\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 61, "score": 7.780933902779946 }, { "content": "When in a .scriv folder, displays a filetree representation of the contents of a Scrivener project.\n\nFiles are displayed in the format 'filename [id]'.\n\n\t\t\t\t\t\")},\n\n\t\t\t\t\"info\" => {println!(\"\n\nscrit info\n\n\n\nDisplays information about Scrit, including the installed version number.\n\n\t\t\t\t\t\")},\n\n\t\t\t\t\"update\" => {println!(\"\n\nscrit update\n\n\n\nChecks if an update to Scrit is available. 
If one is, you will be prompted to update.\n\n\t\t\t\t\t\")},\n\n\t\t\t\t\"tutorial\" => {println!(\"\n", "file_path": "src/help.rs", "rank": 62, "score": 7.777970227172934 }, { "content": "\t\t&self.body\n\n\t}\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Document {\n\n\ttitle: String,\n\n\tcontents: Vec<String>, \n\n\tbody: Vec<BodyText>,\n\n\tlocation: String\n\n}\n\nimpl Document {\n\n\tfn new(title: String, contents: Vec<String>) -> Document {\n\n\t\tlet body: Vec<BodyText> = Vec::new();\n\n\t\tlet location: String = String::new();\n\n\t\tDocument {title, contents, body, location}\n\n\t}\n\n\tpub fn title(&self) -> &String {\n\n\t\t&self.title\n\n\t}\n", "file_path": "src/push.rs", "rank": 63, "score": 7.620298650977279 }, { "content": "\t\t\t\tmatch state {\n\n\t\t\t\t\tPushState::Initial => {\n\n\t\t\t\t\t\tmatch scrivx_reader::get_scrivening(&arg.trim().to_string(), &blueprint) {\n\n\t\t\t\t\t\t\tNone => {println!(\"File {} not found!\", arg);}\n\n\t\t\t\t\t\t\tSome(scrivening) => {exports.push(scrivening);}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t},\n\n\t\t\t\t\tPushState::Omit => {\n\n\t\t\t\t\t\tlet arg_list: Vec<String> = arg.trim().split(',').map(|s| s.trim().to_string()).collect();\n\n\t\t\t\t\t\tomit = Some(arg_list);\n\n\t\t\t\t\t\tstate = PushState::Null;\n\n\t\t\t\t\t},\n\n\t\t\t\t\tPushState::Name => {\n\n\t\t\t\t\t\tname = Some(arg.trim().to_string());\n\n\t\t\t\t\t\tstate = PushState::Null;\n\n\t\t\t\t\t},\n\n\t\t\t\t\t_ => {\n\n\t\t\t\t\t\tprintln!(\"Invalid argument: {}\", arg);\n\n\t\t\t\t\t\tstate = PushState::Null;\n\n\t\t\t\t\t}\n", "file_path": "src/push.rs", "rank": 64, "score": 7.402104537271645 }, { "content": "\t\t\t\"line\" => self.cmd_line(),\n\n\t\t\t\"hrule\" => self.cmd_hrule(),\n\n\t\t\t_ => {}\n\n\t\t}\n\n\t}\n\n\n\n\tfn parse_hex(&mut self, hex: &String) {\n\n\t\tlet re_hex = (i64::from_str_radix(hex, 16).unwrap()) as usize;\n\n\t\tif self.current_node.borrow_mut().ele_type() == &GroupType::Text {\n\n\t\t\tself.current_node.borrow_mut().add_text(&WIN_1252[re_hex].to_string());\n\n\t\t} else {\n\n\t\t\tself.new_group(GroupType::Fragment);\n\n\t\t\tself.current_node.borrow_mut().add_text(&WIN_1252[re_hex].to_string());\n\n\t\t\tself.end_group();\n\n\t\t}\n\n\t}\n\n\n\n\tfn new_group(&mut self, ele_type: GroupType) {\n\n\t\tself.current_node.append(Node::new(ASTElement::new(ele_type)));\n\n\t\tself.current_node = self.current_node.last_child().unwrap();\n", "file_path": "src/rtf_operations.rs", "rank": 65, "score": 7.192809600888504 }, { "content": "Options:\n\n-omit (-o) \t\tOmit specified files from compilation. Argument is a comma-separated \n\n\t\t\tlist of file names or ids between quotation marks.\n\n-include (-i)\t\tIgnore files' include/exclude value from compile when compiling.\n\n-split (-s)\t\tSplit pushed files into separate documents.\n\n-clean (-c)\t\tPushes to GDocs without break placeholders. Documents exported \n\n\t\t\tin this manner cannot be pulled back into Scrivener.\n\n-name (-n)\t\tSpecifies an alternate filename to use when uploading to a Google Drive. \n\n\t\t\tIf the split option is also specified, the filename will be appended as \n\n\t\t\ta prefix to all generated files, separated by a hyphen.\n\n\t\t\t\t\t\")},\n\n\t\t\t\t\"pull\" => {println!(\"\n\nscrit pull <documents> <options>\n\n\n\nDownloads specified documents from a Google Drive, decompiles them, and merges them with the local \n\ncontents of a project. 
Coming soon!\n\n\t\t\t\t\t\")},\n\n\t\t\t\t\"tree\" => {println!(\"\n\nscrit tree\n\n\n", "file_path": "src/help.rs", "rank": 66, "score": 6.997457654426571 }, { "content": "\t\tlet mut tag: &str;\n\n\t\tlet mut attributes = String::new();\n\n\t\tlet mut styles = \"style='\".to_string();\n\n\t\tmatch element.ele_type() {\n\n\t\t\tGroupType::Text | GroupType::Fragment => {tag = \"span\";},\n\n\t\t\tGroupType::Paragraph => {tag = \"p\";},\n\n\t\t\tGroupType::Hr => {tag = \"hr\";},\n\n\t\t\t//GroupType::Document => \"html\",\n\n\t\t\tGroupType::Body => {self.in_body = true; return;},\n\n\t\t\t_ => return\n\n\t\t};\n\n\t\tif !self.in_body {return;}\n\n\t\tlet atts = element.attributes();\n\n\t\tfor att in atts {\n\n\t\t\tmatch *att {\n\n\t\t\t\tAttribute::Italics(true) => {\n\n\t\t\t\t\tstyles = format!(\"{}font-style:italic;\", styles);\n\n\t\t\t\t},\n\n\t\t\t\tAttribute::Bold(true) => {\n\n\t\t\t\t\tstyles = format!(\"{}font-weight:bold;\", styles);\n", "file_path": "src/html_operations.rs", "rank": 67, "score": 6.7573610866206835 }, { "content": "\t\t}\n\n\t\tout.push_str(&format!(\"{} [{}]\", &scrivening.title(), &scrivening.id()));\n\n\t\tprintln!(\"{}\", out);\n\n\t\tmatch scrivening.children() {\n\n\t\t\tNone => continue,\n\n\t\t\tSome(ref children) => {\n\n\t\t\t\tif depth >= active_levels.len() as i32{\n\n\t\t\t\t\tactive_levels.push(true);\n\n\t\t\t\t} else {\n\n\t\t\t\t\tactive_levels[depth as usize] = true;\n\n\t\t\t\t}\n\n\t\t\t\tlist_scriv_contents(&children, depth + 1, active_levels);\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n}", "file_path": "src/tree.rs", "rank": 68, "score": 6.575348403654438 }, { "content": "extern crate hyper;\n\nextern crate hyper_native_tls;\n\nextern crate yup_oauth2;\n\nextern crate google_drive3;\n\nextern crate minidom;\n\nextern crate walkdir;\n\nextern crate rctree;\n\n\n\nmod scrivx_reader;\n\nmod drive_operations;\n\nmod map_operations;\n\nmod rtf_operations;\n\nmod html_operations;\n\nmod compiler;\n\nmod client_info;\n\nmod tree;\n\nmod push;\n\nmod pull;\n\nmod help;\n\nmod update;\n\n\n\nuse std::env;\n\n\n", "file_path": "src/main.rs", "rank": 69, "score": 6.117205806171286 }, { "content": "use scrivx_reader;\n\nuse scrivx_reader::Scrivening;\n\nuse drive_operations;\n\nuse compiler;\n\nuse map_operations::*;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse minidom::Element;\n\n\n", "file_path": "src/push.rs", "rank": 70, "score": 5.246794685810321 }, { "content": "\t\tmatch arg.as_str() {\n\n\t\t\t\"-omit\" | \"-o\" => {state = PushState::Omit;},\n\n\t\t\t\"-name\" | \"-n\" => {state = PushState::Name;},\n\n\t\t\t\"-include\" | \"-i\" => {\n\n\t\t\t\tinclude = true;\n\n\t\t\t\tstate = PushState::Null;\n\n\t\t\t},\n\n\t\t\t\"-split\" | \"-s\" => {\n\n\t\t\t\tsplit = true;\n\n\t\t\t\tstate = PushState::Null;\n\n\t\t\t},\n\n\t\t\t\"-clean\" | \"-c\" => {\n\n\t\t\t\tclean = true;\n\n\t\t\t\tstate = PushState::Null;\n\n\t\t\t},\n\n\t\t\t\"Binder\" => {\n\n\t\t\t\tfor item in &blueprint {exports.push(&item);}\n\n\t\t\t\tstate = PushState::Null;\n\n\t\t\t},\n\n\t\t\t_ => {\n", "file_path": "src/push.rs", "rank": 71, "score": 5.246309023437631 }, { "content": "\t\t&self.depth\n\n\t}\n\n\tpub fn filepath(&self) -> &Option<String> {\n\n\t\t&self.filepath\n\n\t}\n\n\tpub fn children(&self) -> &Option<Vec<Scrivening>> {\n\n\t\t&self.children\n\n\t}\n\n}\n\n\n", "file_path": "src/scrivx_reader.rs", "rank": 72, "score": 5.068070973465758 }, { "content": "\t\t\t},\n\n\t\t\t' ' => {\n\n\t\t\t\tself.set_cur_instruction();\n\n\t\t\t\tself.mode = 
ReadMode::ParseText;\n\n\t\t\t},\n\n\t\t\t';' => {\n\n\t\t\t\tself.set_cur_instruction();\n\n\t\t\t\tself.set_new_instruction(Instruction::ListBreak);\n\n\t\t\t\tself.mode = ReadMode::ParseText;\n\n\t\t\t},\n\n\t\t\t'\\\\'|'{'|'}'|'\\n'|'\\r'|'\\t' => {\n\n\t\t\t\tself.set_cur_instruction();\n\n\t\t\t\tself.mode = ReadMode::ParseText;\n\n\t\t\t\tself.parse_text(character);\n\n\t\t\t},\n\n\t\t\t_ => {\n\n\t\t\t\tmatch self.current_instruction {\n\n\t\t\t\t\tInstruction::Control(ref mut contents) => {\n\n\t\t\t\t\t\tcontents.push(character);\n\n\t\t\t\t\t},\n", "file_path": "src/rtf_operations.rs", "rank": 73, "score": 4.961640538948905 }, { "content": "\t\t\t\t\t\tself.set_cur_instruction();\n\n\t\t\t\t\t\tself.current_instruction = Instruction::Text(character.to_string());\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t},\n\n\t\t\t_ => {\n\n\t\t\t\tself.set_cur_instruction();\n\n\t\t\t\tself.mode = ReadMode::ParseControl;\n\n\t\t\t\tself.parse_control(character)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tfn parse_control(&mut self, character: char) {\n\n\t\tmatch character {\n\n\t\t\t'*' => {\n\n\t\t\t\tself.set_new_instruction(Instruction::Ignorable);\n\n\t\t\t},\n\n\t\t\t'\\'' => {\n\n\t\t\t\tself.mode = ReadMode::ParseHex;\n\n\t\t\t\tself.current_instruction = Instruction::Hex(String::new());\n", "file_path": "src/rtf_operations.rs", "rank": 74, "score": 4.932608446110989 }, { "content": "\t\tself.anchor.append(Node::new(ASTElement::new(GroupType::Document)));\n\n\t\tself.current_node = self.anchor.first_child().unwrap();\n\n\t\tfor instruction in instructions {\n\n\t\t\t//println!(\"{:?}\", instruction);\n\n\t\t\tself.execute(instruction);\n\n\t\t}\n\n\n\n\t\t/*for node in self.current_node.root().descendants() {\n\n\t\t\tprintln!(\"{:?}\",node.borrow());\n\n\t\t}*/\n\n\t\t\n\n\t\tself.current_node.root()\n\n\t}\n\n\n\n\tfn execute(&mut self, instruction: &Instruction) {\n\n\t\tif self.skip >0 {\n\n\t\t\tself.skip -= 1;\n\n\t\t\treturn;\n\n\t\t}\n\n\t\tmatch instruction {\n", "file_path": "src/rtf_operations.rs", "rank": 75, "score": 4.58482247345129 }, { "content": "use rctree::Node;\n\nuse compiler::{Attribute, ASTElement, GroupType};\n\n\n\nconst WIN_1252: [char; 255] = [' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',\n\n\t' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',' ',\n\n\t' ',' ',' ',' ',' ',' ',\n\n\t' ','!','\\\"','#','$','%','&','\\'','(',')','*','+',',','-','.','/',\n\n\t'0','1','2','3','4','5','6','7','8','9',':',';','<','=','>','@','A',\n\n\t'B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R',\n\n\t'S','T','U','V','W','X','Y','Z','[','\\\\',']','^','_','`','a','b','c',\n\n\t'd','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t',\n\n\t'u','v','w','x','y','z','{','|','}','~',' ','€','�','‚','ƒ','„','…',\n\n\t'†','‡','ˆ','‰','Š','‹','Œ','�','Ž','�','�','‘','’','“','”','•','–',\n\n\t'—','˜','™','š','›','œ','�','ž','Ÿ',' ','¡','¢','£','¤','¥','¦','§',\n\n\t'¨','©','ª','«','-','®','¯','°','±','²','³','´','µ','¶','·','¸','¹',\n\n\t'º','»','¼','½','¾','¿','À','Á','Â','Ã','Ä','Å','Æ','Ç','È','É','Ê',\n\n\t'Ë','Ì','Í','Î','Ï','Ð','Ñ','Ò','Ó','Ô','Õ','Ö','×','Ø','Ù','Ú','Û',\n\n\t'Ü','Ý','Þ','ß','à','á','â','ã','ä','å','æ','ç','è','é','ê','ë','ì',\n\n\t'í','î','ï','ð','ñ','ò','ó','ô','õ','ö','÷','ø','ù','ú','û','ü','ý',\n\n\t'þ','ÿ'];\n\n\n\n#[derive(Debug, PartialEq)]\n", "file_path": "src/rtf_operations.rs", "rank": 76, "score": 3.906004623013644 }, { "content": 
"\tAnchor,\n\n\tDocument,\n\n\tText,\n\n\tFragment,\n\n\tParagraph,\n\n\tBody,\n\n\tList(char),\n\n\tListItem,\n\n\tFontTable,\n\n\tFont(String),\n\n\tColourTable,\n\n\tColour,\n\n\tListTable,\n\n\tListLabel,\n\n\tListOverrideTable,\n\n\tHr\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ASTElement {\n", "file_path": "src/compiler.rs", "rank": 77, "score": 3.424023730405205 }, { "content": "use scrivx_reader::get_scrivx;\n\nuse std::path::Path;\n\nuse std::error::Error;\n\nuse std::io::{Read, Write};\n\nuse std::fs;\n\nuse minidom::Element;\n\n\n", "file_path": "src/map_operations.rs", "rank": 78, "score": 3.266159794866395 }, { "content": "use rctree::{Node, NodeEdge};\n\nuse std::cell::Ref;\n\nuse compiler::{Attribute, ASTElement, GroupType};\n\n\n", "file_path": "src/html_operations.rs", "rank": 79, "score": 3.0397901019707416 }, { "content": "\t\t\t}\n\n\t\t}\n\n\t\t&self.instructions\n\n\t}\n\n\tfn parse_text(&mut self, character: char) {\n\n\t\tmatch character {\n\n\t\t\t'\\\\' => {self.mode = ReadMode::ParseEscape},\n\n\t\t\t'{' => {\n\n\t\t\t\tself.set_cur_instruction();\n\n\t\t\t\tself.set_new_instruction(Instruction::GroupStart);\n\n\t\t\t}\n\n\t\t\t'}' => {\n\n\t\t\t\tself.set_cur_instruction();\n\n\t\t\t\tself.set_new_instruction(Instruction::GroupEnd);\n\n\t\t\t}\n\n\t\t\t'\\n' | '\\r' => {\n\n\t\t\t\tself.set_cur_instruction();\n\n\t\t\t\tself.set_new_instruction(Instruction::Break)\n\n\t\t\t}\n\n\t\t\t_ => {\n", "file_path": "src/rtf_operations.rs", "rank": 80, "score": 3.0164213217015927 }, { "content": "\t}\n\n\tfn cmd_ulnone(&mut self) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Underline(false));\n\n\t}\n\n\tfn cmd_sub(&mut self) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Subscript(true));\n\n\t}\n\n\tfn cmd_super(&mut self) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Superscript(true));\n\n\t}\n\n\tfn cmd_nosupersub(&mut self) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Superscript(false));\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Subscript(false));\n\n\t}\n\n\tfn cmd_fs(&mut self, val:i32) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::FontSize(val));\n\n\t}\n\n\n\n\tfn cmd_pgnrestart(&mut self) {\n\n\t\twhile self.current_node.borrow().ele_type() != &GroupType::Document {\n", "file_path": "src/rtf_operations.rs", "rank": 81, "score": 2.8307843098094247 }, { "content": "\t}\n\n\tfn cmd_hrule(&mut self) {\n\n\t\tself.new_group(GroupType::Hr);\n\n\t\tself.end_group();\n\n\t}\n\n\n\n\tfn cmd_b(&mut self, val: i32) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Bold(val == 1));\n\n\t}\n\n\tfn cmd_i(&mut self, val: i32) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Italics(val == 1));\n\n\t}\n\n\tfn cmd_strike(&mut self, val: i32) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Strikethrough(val == 1));\n\n\t}\n\n\tfn cmd_scaps(&mut self, val: i32) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Smallcaps(val == 1));\n\n\t}\n\n\tfn cmd_ul(&mut self, val: i32) {\n\n\t\tself.current_node.borrow_mut().add_att(Attribute::Underline(val == 1));\n", "file_path": "src/rtf_operations.rs", "rank": 82, "score": 2.8202277774719113 }, { "content": "use scrivx_reader::{Scrivening, process_scrivx};\n\n\n", "file_path": "src/tree.rs", "rank": 83, "score": 2.586632004766421 }, { "content": "\t\t\tInstruction::Control(param) => {self.parse_control(&param);},\n\n\t\t\tInstruction::Text(param) => {\n\n\t\t\t\tif self.current_node.borrow_mut().ele_type() == 
&GroupType::Null {\n\n\t\t\t\t\tself.current_node.borrow_mut().set_ele_type(GroupType::Text);\n\n\t\t\t\t\tself.current_node.borrow_mut().add_text(&param);\n\n\t\t\t\t} else if self.current_node.borrow_mut().ele_type() == &GroupType::Text {\n\n\t\t\t\t\tself.current_node.borrow_mut().add_text(&param);\n\n\t\t\t\t} else {\n\n\t\t\t\t\tself.new_group(GroupType::Fragment);\n\n\t\t\t\t\tself.current_node.borrow_mut().add_text(&param);\n\n\t\t\t\t\tself.end_group();\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tInstruction::GroupStart => {self.new_group(GroupType::Null);}\n\n\t\t\tInstruction::GroupEnd => {self.end_group();}\n\n\t\t\tInstruction::Ignorable => {self.current_node.borrow_mut().add_att(Attribute::Ignorable)}\n\n\t\t\tInstruction::Hex(param) => {self.parse_hex(&param);}\n\n\t\t\tInstruction::Break => {\n\n\t\t\t\tif self.current_node.borrow_mut().ele_type() == &GroupType::Fragment {\n\n\t\t\t\t\tself.end_group();\n", "file_path": "src/rtf_operations.rs", "rank": 84, "score": 2.4683936561643516 }, { "content": "\t\t}\n\n\n\n\t\t//Need to find a better way of doing this; hashmaps let me down.\n\n\t\tmatch control_name {\n\n\t\t\t\"b\" => self.cmd_b(att_value),\n\n\t\t\t\"i\" =>self.cmd_i(att_value),\n\n\t\t\t\"strike\" =>self.cmd_strike(att_value),\n\n\t\t\t\"scaps\" =>self.cmd_scaps(att_value),\n\n\t\t\t\"ul\" =>self.cmd_ul(att_value),\n\n\t\t\t\"ulnone\" =>self.cmd_ulnone(),\n\n\t\t\t\"sub\" =>self.cmd_sub(),\n\n\t\t\t\"super\" =>self.cmd_super(),\n\n\t\t\t\"nosupersub\" =>self.cmd_nosupersub(),\n\n\t\t\t\"fs\" =>self.cmd_fs(att_value),\n\n\t\t\t\"par\" => self.cmd_par(),\n\n\t\t\t\"pard\" => self.cmd_pard(),\n\n\t\t\t\"pgnrestart\" => self.cmd_pgnrestart(),\n\n\t\t\t\"emdash\" => self.cmd_emdash(),\n\n\t\t\t\"endash\" => self.cmd_endash(),\n\n\t\t\t\"tab\" => self.cmd_tab(),\n", "file_path": "src/rtf_operations.rs", "rank": 85, "score": 2.420880446447338 }, { "content": "instance, that I have in my project a file called 'example'. To push that to my Drive, I would type the following:\n\n\n\n\tscrit push example\n\n\n\nYou can also specify multiple documents, like so:\n\n\n\n\tscrit push example1 example2\n\n\n\nBy default, these documents will be compiled together into one document called example1, but that behaviour can\n\nbe changed with the -s option (more on options in a minute). Note also that each document specified this way will \n\nbe given a title header in the compiled document.\n\n\n\nNow, specifying documents by name can get tiresome. Worry not; you can also specify documents by their three-digit\n\nids. These can be found by running 'scrit tree' (a very handy command in general), and should be preceded by a \n\npound sign:\n\n\n\n\tscrit push #123 #456 #789\n\n\n\nMuch more efficient.\n\n\n", "file_path": "src/help.rs", "rank": 86, "score": 2.2287400554571137 }, { "content": "\t}\n\n\tfn set_cur_instruction(&mut self) {\n\n\t\tif self.current_instruction != Instruction::Null {\n\n\t\t\tself.instructions.push(self.current_instruction.clone());\n\n\t\t\tself.current_instruction = Instruction::Null;\n\n\t\t}\n\n\t}\n\n\tfn set_new_instruction(&mut self, instruction: Instruction) {\n\n\t\tself.instructions.push(instruction.clone());\n\n\t\tself.current_instruction = Instruction::Null;\n\n\t}\n\n}\n\n\n", "file_path": "src/rtf_operations.rs", "rank": 87, "score": 2.1848655186391923 }, { "content": "One last thing: it is important to note that when you specify a document, all sub-documents under it in the tree\n\nwill be compiled into it as well. 
For example, if I have chapters as folders containing multiple scene documents, \n\nI can compile all of the scenes into one by specifying the chapter. Additionally, if you ever want to push your \n\nentire project to your Google Drive as a single document, this can be done by specifying 'Binder' in place of \n\ndocument names or ids.\n\n\n\nThe options argument comes after all of your documents. It is completely optional. These commands affect the way\n\nthe document is pushed. Option arguments are preceded by a -, and you can have as many as you wish. Additionally, \n\nthey come in both long and short form. For example, the 'clean' option can be specified as either '-clean' or '-c':\n\n\n\n\tscrit push example1 example2 -c\n\n\n\nTo see a list of these options, and to see more about the push command itself, type 'scrit help push'. To move on,\n\ntype 'scrit help tutorial3'.\n\n\n\n\t\t\t\t\")},\n\n\t\t\t\t\"tutorial3\" => {println!(\"\n", "file_path": "src/help.rs", "rank": 88, "score": 2.0038531734988125 }, { "content": "\t\t\tself.end_group();\n\n\t\t}\n\n\t\tself.new_group(GroupType::Body);\n\n\t}\n\n\n\n\tfn cmd_par(&mut self) {\n\n\t\twhile self.current_node.borrow().ele_type() != &GroupType::Body {\n\n\t\t\tself.end_group();\n\n\t\t}\n\n\t\tself.new_group(GroupType::Paragraph);\n\n\t}\n\n\n\n\tfn cmd_pard(&mut self) {\n\n\t\tif self.current_node.borrow().ele_type() != &GroupType::Paragraph {\n\n\t\t\twhile self.current_node.borrow().ele_type() != &GroupType::Body {\n\n\t\t\t\tself.end_group();\n\n\t\t\t}\n\n\t\t\tself.new_group(GroupType::Paragraph);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "src/rtf_operations.rs", "rank": 89, "score": 1.9044909024362044 }, { "content": "pieces, along with any changes you may have made, and merged with what's already in your project.\n\n\n\nNote that I said merge, not replace. This is different to Push. Using the push command will replace files of the \n\nsame name on your Google Drive; however, pulling documents from your Drive does not automatically replace files in \n\nyour project. Instead, Scrit will compare the two documents and *merge* them. This means that you can still make \n\nchanges to your project locally without having to worry about losing them to changes made on the Drive.\n\n\n\nHowever, this poses one problem: what if the same piece of text has been changed on both the local copy and the \n\ncopy on the Drive? Worry not; if this happens, you will be shown both versions and given the choice of which to \n\naccept.\n\n\n\nAnd that's the basics of Scrit! For more information on the pull command, type 'scrit help push'. You've probably\n\nnoticed by now that you can recieve an explanation of any command by typing it after 'scrit help'. 
Additionally, \n\njust typing 'scrit help' by itself will give you a full list of all the commands scrit provides.\n\n\n\nGood luck, and happy trails!\n\n\t\t\t\t\t\")},\n\n\t\t\t\t_ => {}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n}", "file_path": "src/help.rs", "rank": 90, "score": 1.8257320528149337 }, { "content": "\t\t\t\t},\n\n\t\t\t\tAttribute::Underline(true) => {\n\n\t\t\t\t\tstyles = format!(\"{}text-decoration-line:underline;\", styles);\n\n\t\t\t\t},\n\n\t\t\t\tAttribute::Strikethrough(true) => {\n\n\t\t\t\t\tstyles = format!(\"{}text-decoration-line:line-through;\", styles);\n\n\t\t\t\t},\n\n\t\t\t\tAttribute::Smallcaps(true) => {\n\n\t\t\t\t\tstyles = format!(\"{}font-variant:small-caps;\", styles);\n\n\t\t\t\t},\n\n\t\t\t\tAttribute::Superscript(true) => {\n\n\t\t\t\t\tstyles = format!(\"{}vertical-align:super;font-size:smaller;\", styles);\n\n\t\t\t\t},\n\n\t\t\t\tAttribute::Subscript(true) => {\n\n\t\t\t\t\tstyles = format!(\"{}vertical-align:sub;font-size:smaller;\", styles);\n\n\t\t\t\t},\n\n\t\t\t\tAttribute::FontSize(val) => {\n\n\t\t\t\t\tstyles = format!(\"{}font-size:{}pt;\", styles, (val/2).to_string());\n\n\t\t\t\t},\n\n\t\t\t\t_ => {}\n", "file_path": "src/html_operations.rs", "rank": 91, "score": 1.1482317177662114 } ]
Rust
vk_tracer/src/render/renderer.rs
icanwalkonwater/rs-vk-tracer
424524af7e66de2b60137ec4aab80848e076efb5
use crate::{ command_recorder::QueueType, errors::{HandleType, Result}, render::{RenderablePipelineHandle, VkRecordable}, RenderPlanHandle, RenderTargetHandle, RendererHandle, VkTracerApp, }; use ash::{ version::{DeviceV1_0, DeviceV1_2}, vk, }; impl VkTracerApp { pub fn new_renderer_from_plan( &mut self, render_plan: RenderPlanHandle, render_target: RenderTargetHandle, ) -> RendererBuilder { RendererBuilder { app: self, render_plan, render_target, current_subpass: 0, pipelines_by_subpass: vec![Vec::with_capacity(1)], pipelines_amount: 0, } } pub fn recreate_renderer( &mut self, renderer: RendererHandle, render_target: RenderTargetHandle, ) -> Result<()> { let (render_plan, pipelines_by_subpass, pipelines_amount) = { let renderer = storage_access_mut!(self.renderer_storage, renderer, HandleType::Renderer); unsafe { let pool = self.command_pools.get(&QueueType::Graphics).unwrap().1; self.device .free_command_buffers(pool, &[renderer.main_commands]); self.device .free_command_buffers(pool, &renderer.secondary_commands); self.device.destroy_fence(renderer.render_fence, None); } ( renderer.render_plan, std::mem::take(&mut renderer.pipelines_by_subpass), renderer.pipelines_amount, ) }; let builder = RendererBuilder { app: self, render_plan, render_target, current_subpass: 0, pipelines_by_subpass, pipelines_amount, }; let ((main_commands, secondary_commands), fence) = builder.inner_build()?; let pipelines_by_subpass = builder.pipelines_by_subpass; let renderer = storage_access_mut!(self.renderer_storage, renderer, HandleType::Renderer); renderer.pipelines_by_subpass = pipelines_by_subpass; renderer.main_commands = main_commands; renderer.secondary_commands = secondary_commands; renderer.render_fence = fence; Ok(()) } } pub(crate) struct Renderer { pub(crate) main_commands: vk::CommandBuffer, secondary_commands: Box<[vk::CommandBuffer]>, pub(crate) render_fence: vk::Fence, render_plan: RenderPlanHandle, pipelines_by_subpass: Vec<Vec<RenderablePipelineHandle>>, pipelines_amount: u32, } pub struct RendererBuilder<'app> { app: &'app mut VkTracerApp, render_plan: RenderPlanHandle, render_target: RenderTargetHandle, current_subpass: usize, pipelines_by_subpass: Vec<Vec<RenderablePipelineHandle>>, pipelines_amount: u32, } type RendererData = ((vk::CommandBuffer, Box<[vk::CommandBuffer]>), vk::Fence); impl RendererBuilder<'_> { pub fn execute_pipeline(mut self, pipeline: RenderablePipelineHandle) -> Self { self.pipelines_by_subpass[self.current_subpass].push(pipeline); self.pipelines_amount += 1; self } pub fn next_subpass(mut self) -> Self { self.pipelines_by_subpass.push(Vec::with_capacity(1)); self.current_subpass += 1; self } fn inner_build(&self) -> Result<RendererData> { let render_plan = storage_access!( self.app.render_plan_storage, self.render_plan, HandleType::RenderPlan ); let render_target = storage_access!( self.app.render_target_storage, self.render_target, HandleType::RenderTarget ); let device = &self.app.device; let pool = self.app.command_pools.get(&QueueType::Graphics).unwrap(); let commands = unsafe { let mut secondary_commands_by_subpass = { let mut command_pool = device.allocate_command_buffers( &vk::CommandBufferAllocateInfo::builder() .command_pool(pool.1) .level(vk::CommandBufferLevel::SECONDARY) .command_buffer_count(self.pipelines_amount as u32), )?; let mut commands_by_subpass = Vec::with_capacity(self.pipelines_by_subpass.len()); for (i, subpass) in self.pipelines_by_subpass.iter().enumerate() { let mut subpass_commands = Vec::with_capacity(subpass.len()); for pipeline in 
subpass.iter().copied() { let commands = command_pool.pop().unwrap(); device.begin_command_buffer( commands, &vk::CommandBufferBeginInfo::builder() .flags(vk::CommandBufferUsageFlags::RENDER_PASS_CONTINUE) .inheritance_info( &vk::CommandBufferInheritanceInfo::builder() .render_pass(render_plan.render_pass) .subpass(i as u32) .framebuffer(render_target.framebuffer), ), )?; match pipeline { RenderablePipelineHandle::Forward(handle) => { let pipeline = storage_access!( self.app.forward_pipeline_storage, handle, HandleType::ForwardPipeline ); pipeline.record_commands( self.app, render_target.extent, commands, )?; } } device.end_command_buffer(commands)?; subpass_commands.push(commands); } commands_by_subpass.push(subpass_commands); } commands_by_subpass }; let top_level_commands = device.allocate_command_buffers( &vk::CommandBufferAllocateInfo::builder() .command_pool(pool.1) .level(vk::CommandBufferLevel::PRIMARY) .command_buffer_count(1), )?[0]; device .begin_command_buffer(top_level_commands, &vk::CommandBufferBeginInfo::default())?; device.cmd_begin_render_pass2( top_level_commands, &vk::RenderPassBeginInfo::builder() .render_pass(render_plan.render_pass) .framebuffer(render_target.framebuffer) .render_area( vk::Rect2D::builder() .offset(vk::Offset2D::default()) .extent(render_target.extent) .build(), ) .clear_values(&render_plan.clear_values), &vk::SubpassBeginInfo::builder() .contents(vk::SubpassContents::SECONDARY_COMMAND_BUFFERS), ); let mut secondary_commands = Vec::with_capacity(self.pipelines_amount as usize); loop { let subpass_commands = secondary_commands_by_subpass.pop().unwrap(); device.cmd_execute_commands(top_level_commands, &subpass_commands); secondary_commands.extend(subpass_commands); if secondary_commands_by_subpass.is_empty() { break; } device.cmd_next_subpass2( top_level_commands, &vk::SubpassBeginInfo::builder() .contents(vk::SubpassContents::SECONDARY_COMMAND_BUFFERS), &vk::SubpassEndInfo::default(), ); } device.cmd_end_render_pass2(top_level_commands, &vk::SubpassEndInfo::default()); device.end_command_buffer(top_level_commands)?; (top_level_commands, secondary_commands.into_boxed_slice()) }; let render_fence = unsafe { device.create_fence( &vk::FenceCreateInfo::builder().flags(vk::FenceCreateFlags::SIGNALED), None, )? }; Ok((commands, render_fence)) } pub fn build(self) -> Result<RendererHandle> { let (commands, render_fence) = self.inner_build()?; Ok(self.app.renderer_storage.insert(Renderer { main_commands: commands.0, secondary_commands: commands.1, render_fence, render_plan: self.render_plan, pipelines_by_subpass: self.pipelines_by_subpass, pipelines_amount: self.pipelines_amount, })) } }
use crate::{ command_recorder::QueueType, errors::{HandleType, Result}, render::{RenderablePipelineHandle, VkRecordable}, RenderPlanHandle, RenderTargetHandle, RendererHandle, VkTracerApp, }; use ash::{ version::{DeviceV1_0, DeviceV1_2}, vk, }; impl VkTracerApp { pub fn new_renderer_from_plan( &mut self, render_plan: RenderPlanHandle, render_target: RenderTargetHandle, ) -> RendererBuilder { RendererBuilder { app: self, render_plan, render_target, current_subpass: 0, pipelines_by_subpass: vec![Vec::with_capacity(1)], pipelines_amount: 0, } } pub fn recreate_renderer( &mut self, renderer: RendererHandle, render_target: RenderTargetHandle, ) -> Result<()> { let (render_plan, pipelines_by_subpass, pipelines_amount) = { let renderer = storage_access_mut!(self.renderer_storage, renderer, HandleType::Renderer); unsafe { let pool = self.command_pools.get(&QueueType::Graphics).unwrap().1; self.device .free_command_buffers(pool, &[renderer.main_commands]); self.device .free_command_buffers(pool, &renderer.secondary_commands); self.device.destroy_fence(renderer.render_fence, None); } ( renderer.render_plan, std::mem::take(&mut renderer.pipelines_by_subpass), renderer.pipelines_amount, ) }; let builder = RendererBuilder { app: self, render_plan, render_target, current_subpass: 0, pipelines_by_subpass, pipelines_amount, }; let ((main_commands, secondary_commands), fence) = builder.inner_build()?; let pipelines_by_subpass = builder.pipelines_by_subpass; let renderer = storage_access_mut!(self.renderer_storage, renderer, HandleType::Renderer); renderer.pipelines_by_subpass = pipelines_by_subpass; renderer.main_commands = main_commands; renderer.secondary_commands = secondary_commands; renderer.render_fence = fence; Ok(()) } } pub(crate) struct Renderer { pub(crate) main_commands: vk::CommandBuffer, secondary_commands: Box<[vk::CommandBuffer]>, pub(crate) render_fence: vk::Fence, render_plan: RenderPlanHandle, pipelines_by_subpass: Vec<Vec<RenderablePipelineHandle>>, pipelines_amount: u32, } pub struct RendererBuilder<'app> { app: &'app mut VkTracerApp, render_plan: RenderPlanHandle, render_target: RenderTargetHandle, current_subpass: usize, pipelines_by_subpass: Vec<Vec<RenderablePipelineHandle>>, pipelines_amount: u32, } type RendererData = ((vk::CommandBuffer, Box<[vk::CommandBuffer]>), vk::Fence); impl RendererBuilder<'_> { pub fn execute_pipeline(mut self, pipeline: RenderablePipelineHandle) -> Self { self.pipelines_by_subpass[self.current_subpass].push(pipeline); self.pipelines_amount += 1; self } pub fn next_subpass(mut self) -> Self { self.pipelines_by_subpass.push(Vec::with_capacity(1)); self.current_subpass += 1; self } fn inner_build(&self) -> Result<RendererData> { let render_plan = storage_access!( self.app.render_plan_storage, self.render_plan, HandleType::RenderPlan ); let render_target = storage_access!( self.app.render_target_storage, self.render_target, HandleType::RenderTarget ); let device = &self.app.device; let pool = self.app.command_pools.get(&QueueType::Graphics).unwrap(); let commands = unsafe { let mut secondary_commands_by_subpass = { let mut command_pool = device.allocate_command_buffers( &vk::CommandBufferAllocateInfo::builder() .command_pool(pool.1) .level(vk::CommandBufferLevel::SECONDARY) .command_buffer_count(self.pipelines_amount as u32), )?; let mut commands_by_subpass = Vec::with_capacity(self.pipelines_by_subpass.len()); for (i, subpass) in self.pipelines_by_subpass.iter().enumerate() { let mut subpass_commands = Vec::with_capacity(subpass.len()); for pipeline in 
subpass.iter().copied() { let commands = command_pool.pop().unwrap(); device.begin_command_buffer( commands, &vk::CommandBufferBeginInfo::builder() .flags(vk::CommandBufferUsageFlags::RENDER_PASS_CONTINUE) .inheritance_info( &vk::CommandBufferInheritanceInfo::builder() .render_pass(render_plan.render_pass) .subpass(i as u32) .framebuffer(render_target.framebuffer), ), )?; match pipeline { RenderablePipelineHandle::Forward(handle) => { let pipeline = storage_access!( self.app.forward_pipeline_storage, handle, HandleType::ForwardPipeline ); pipeline.record_commands( self.app, render_target.extent, commands, )?; } } device.end_command_buffer(commands)?; subpass_commands.push(commands); } commands_by_subpass.push(subpass_commands); } commands_by_subpass }; let top_level_commands = device.allocate_command_buffers( &vk::CommandBufferAllocateInfo::builder() .command_pool(pool.1) .level(vk::CommandBufferLevel::PRIMARY) .command_buffer_count(1), )?[0]; device .begin_command_buffer(top_level_commands, &vk::CommandBufferBeginInfo::default())?; device.cmd_begin_render_pass2( top_level_commands, &vk::RenderPassBeginInfo::builder() .render_pass(render_plan.render_pass) .framebuffer(render_target.framebuffer) .render_area( vk::Rect2D::builder() .offset(vk::Offset2D::default()) .extent(render_target.extent) .build(), ) .clear_values(&render_plan.clear_values), &vk::SubpassBeginInfo::builder() .contents(vk::SubpassContents::SECONDARY_COMMAND_BUFFERS), ); let mut secondary_commands = Vec::with_capacity(self.pipelines_amount as usize); loop { let subpass_commands = secondary_commands_by_subpass.pop().unwrap(); device.cmd_execute_commands(top_level_commands, &subpass_commands); secondary_commands.extend(subpass_commands); if secondary_commands_by_subpass.is_empty() { break; } device.cmd_next_subpass2( top_level_commands, &vk::SubpassBeginInfo::builder() .contents(vk::SubpassContents::SECONDARY_COMMAND_BUFFERS), &vk::SubpassEndInfo::default(), ); } device.cmd_end_render_pass2(top_level_commands, &vk::SubpassEndInfo::default()); device.end_command_buffer(top_level_commands)?; (top_level_commands, secondary_commands.into_boxed_slice()) }; let render_fence = unsafe { device.create_fence( &vk::FenceCreateInfo::builder().flags(vk::FenceCreateFlags::SIGNALED), None, )? }; Ok((commands, render_fence)) } pub fn build(self) -> Result<RendererHandle> { let (commands, render_fence) = self.inner_build()?;
} }
Ok(self.app.renderer_storage.insert(Renderer { main_commands: commands.0, secondary_commands: commands.1, render_fence, render_plan: self.render_plan, pipelines_by_subpass: self.pipelines_by_subpass, pipelines_amount: self.pipelines_amount, }))
call_expression
[ { "content": "pub fn dump_vma_stats(app: &VkTracerApp) {\n\n let stats = app.vma.build_stats_string(true).unwrap();\n\n let mut f = File::create(\"vma_stats.json\").unwrap();\n\n f.write_all(stats.as_bytes()).unwrap();\n\n}\n", "file_path": "vk_tracer/src/utils.rs", "rank": 1, "score": 152809.49754422286 }, { "content": "#[inline]\n\nfn find_depth_format(app: &VkTracerApp) -> Result<vk::Format> {\n\n find_supported_format(\n\n app,\n\n [\n\n vk::Format::D32_SFLOAT,\n\n vk::Format::D32_SFLOAT_S8_UINT,\n\n vk::Format::D24_UNORM_S8_UINT,\n\n ],\n\n vk::ImageTiling::OPTIMAL,\n\n vk::FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT,\n\n )\n\n}\n\n\n\n/// Needs to be kept in sync with [find_depth_format].\n", "file_path": "vk_tracer/src/mem/image.rs", "rank": 2, "score": 152587.225449464 }, { "content": "pub fn pick_adapter(\n\n instance: &ash::Instance,\n\n requirements: &AdapterRequirements,\n\n) -> Result<AdapterInfo> {\n\n let physical_devices = unsafe { instance.enumerate_physical_devices()? };\n\n\n\n let best_device = physical_devices\n\n .into_iter()\n\n .map(|physical_device| unsafe {\n\n let properties = instance.get_physical_device_properties(physical_device);\n\n let extensions = instance\n\n .enumerate_device_extension_properties(physical_device)\n\n .expect(\"Failed to enumerate device extensions\");\n\n let features = instance.get_physical_device_features(physical_device);\n\n let queue_families =\n\n instance.get_physical_device_queue_family_properties(physical_device);\n\n let memory_properties = instance.get_physical_device_memory_properties(physical_device);\n\n\n\n let surface_capabilities =\n\n requirements\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 3, "score": 121914.13154169137 }, { "content": "pub fn required_device_extensions() -> Vec<&'static CStr> {\n\n use ash::extensions::khr;\n\n // VK_KHR_create_renderpass2 promoted to vulkan 1.2\n\n vec![khr::Swapchain::name()]\n\n}\n", "file_path": "vk_tracer/src/setup/extensions.rs", "rank": 4, "score": 107680.27490021235 }, { "content": "fn vk_tracer_extensions_to_vk_extensions<'a>(\n\n extensions: impl Iterator<Item = &'a VkTracerExtensions>,\n\n) -> impl Iterator<Item = &'static CStr> {\n\n use ash::extensions::khr;\n\n\n\n let mut res = HashSet::new();\n\n\n\n for extension in extensions {\n\n match extension {\n\n VkTracerExtensions::PipelineRaytracing => {\n\n // VK_KHR_spirv_1_4 promoted to vulkan 1.2\n\n // VK_EXT_descriptor_indexing promoted to vulkan 1.2\n\n // VK_KHR_buffer_device_address promoted to vulkan 1.2\n\n res.insert(khr::DeferredHostOperations::name());\n\n res.insert(khr::AccelerationStructure::name());\n\n res.insert(khr::RayTracingPipeline::name());\n\n }\n\n }\n\n }\n\n\n\n res.into_iter()\n\n}\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 5, "score": 103788.08163871002 }, { "content": "fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n // Compile shaders\n\n let (vertex_shader, fragment_shader) = {\n\n let mut compiler = ShaderCompiler::new()?;\n\n compiler.set_optimization_level(OptimizationLevel::Performance);\n\n\n\n (\n\n compiler.compile_and_return_file(\n\n \"vk_tracer/examples/shaders/triangle.vert.glsl\".into(),\n\n ShaderKind::Vertex,\n\n \"main\",\n\n )?,\n\n compiler.compile_and_return_file(\n\n \"vk_tracer/examples/shaders/triangle.frag.glsl\".into(),\n\n ShaderKind::Fragment,\n\n \"main\",\n\n )?,\n\n )\n", "file_path": "vk_tracer/examples/triangle.rs", "rank": 6, "score": 96308.96519414618 }, { "content": "fn main() -> 
anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n // Compile shaders\n\n let (vertex_shader, fragment_shader) = {\n\n let mut compiler = ShaderCompiler::new()?;\n\n compiler.set_optimization_level(OptimizationLevel::Performance);\n\n (\n\n compiler.compile_and_return_file(\n\n \"vk_tracer/examples/shaders/model.vert.glsl\".into(),\n\n ShaderKind::Vertex,\n\n \"main\",\n\n )?,\n\n compiler.compile_and_return_file(\n\n \"vk_tracer/examples/shaders/model.frag.glsl\".into(),\n\n ShaderKind::Fragment,\n\n \"main\",\n\n )?,\n\n )\n\n };\n", "file_path": "vk_tracer/examples/model.rs", "rank": 7, "score": 96308.96519414618 }, { "content": "fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n // Compile shaders\n\n let (vertex_shader, fragment_shader) = {\n\n let mut compiler = ShaderCompiler::new()?;\n\n compiler.set_optimization_level(OptimizationLevel::Performance);\n\n (\n\n compiler.compile_and_return_file(\n\n \"vk_tracer/examples/shaders/camera.vert.glsl\".into(),\n\n ShaderKind::Vertex,\n\n \"main\",\n\n )?,\n\n compiler.compile_and_return_file(\n\n \"vk_tracer/examples/shaders/camera.frag.glsl\".into(),\n\n ShaderKind::Fragment,\n\n \"main\",\n\n )?,\n\n )\n\n };\n", "file_path": "vk_tracer/examples/camera.rs", "rank": 8, "score": 96308.96519414618 }, { "content": "/// Get extensions required for the instance and to present to the given surface.\n\npub fn required_instance_extensions_with_surface(\n\n with_debug_utils: bool,\n\n handle: &dyn HasRawWindowHandle,\n\n) -> Result<Vec<*const c_char>> {\n\n let mut extensions = required_instance_extensions(with_debug_utils);\n\n\n\n extensions.extend(\n\n ash_window::enumerate_required_extensions(handle)\n\n .expect(\"That's not supposed to happen, damn\")\n\n .iter()\n\n .map(|ext| ext.as_ptr()),\n\n );\n\n\n\n Ok(extensions)\n\n}\n\n\n", "file_path": "vk_tracer/src/setup/extensions.rs", "rank": 9, "score": 93398.76413883996 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum PhysicalDevicePreference {\n\n Best,\n\n}\n\n\n\n#[derive(Hash, Eq, PartialEq, Copy, Clone, Debug)]\n\npub enum VkTracerExtensions {\n\n PipelineRaytracing,\n\n}\n\n\n\npub struct VkTracerAppBuilder {\n\n physical_device_preference: PhysicalDevicePreference,\n\n app_name: Cow<'static, str>,\n\n version: (u32, u32, u32),\n\n debug_utils: bool,\n\n extensions: HashSet<VkTracerExtensions>,\n\n}\n\n\n\nimpl VkTracerApp {\n\n pub fn builder() -> VkTracerAppBuilder {\n\n VkTracerAppBuilder {\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 10, "score": 91752.23977027673 }, { "content": "fn process_physical_device(\n\n info: PhysicalDeviceInfo,\n\n requirements: &AdapterRequirements,\n\n) -> Option<AdapterInfo> {\n\n info!(\n\n \"Processing physical device {:?}\",\n\n cstr_to_str(info.properties.device_name.as_ptr())\n\n );\n\n\n\n // *** Check vulkan version (I think its useless but whatever\n\n\n\n {\n\n debug!(\" Checking Vulkan version...\");\n\n\n\n let device_version = info.properties.api_version;\n\n let major = vk::version_major(device_version);\n\n let minor = vk::version_minor(device_version);\n\n let patch = vk::version_patch(device_version);\n\n let device_version_str = format!(\"{}.{}.{}\", major, minor, patch);\n\n\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 11, "score": 86266.94382396487 }, { "content": "fn corrected_perspective(mut p: glm::Mat4) -> glm::Mat4 {\n\n *p.get_mut((1, 1)).unwrap() *= -1.0;\n\n p\n\n}\n", "file_path": "vk_tracer/src/utils/camera.rs", "rank": 12, "score": 80248.00743044438 
}, { "content": "/// Get extensions required for the instance.\n\npub fn required_instance_extensions(with_debug_utils: bool) -> Vec<*const c_char> {\n\n if with_debug_utils {\n\n vec![ash::extensions::ext::DebugUtils::name().as_ptr()]\n\n } else {\n\n vec![]\n\n }\n\n}\n\n\n", "file_path": "vk_tracer/src/setup/extensions.rs", "rank": 13, "score": 75240.52917804412 }, { "content": "fn find_supported_format<const N: usize>(\n\n app: &VkTracerApp,\n\n candidates: [vk::Format; N],\n\n tiling: vk::ImageTiling,\n\n features: vk::FormatFeatureFlags,\n\n) -> Result<vk::Format> {\n\n for format in candidates {\n\n let mut props = vk::FormatProperties2::default();\n\n unsafe {\n\n app.instance.get_physical_device_format_properties2(\n\n app.adapter.handle,\n\n format,\n\n &mut props,\n\n );\n\n }\n\n\n\n let available_features = match tiling {\n\n vk::ImageTiling::LINEAR => props.format_properties.linear_tiling_features,\n\n vk::ImageTiling::OPTIMAL => props.format_properties.optimal_tiling_features,\n\n _ => unreachable!(),\n", "file_path": "vk_tracer/src/mem/image.rs", "rank": 14, "score": 71955.00712971881 }, { "content": "\n\n pub fn with_debug_utils(mut self) -> Self {\n\n self.debug_utils = true;\n\n self\n\n }\n\n\n\n pub fn with_extensions(mut self, extensions: &[VkTracerExtensions]) -> Self {\n\n self.extensions.extend(extensions.iter());\n\n self\n\n }\n\n\n\n pub fn build<W: HasRawWindowHandle>(\n\n self,\n\n window: Option<(&W, (u32, u32))>,\n\n ) -> Result<VkTracerApp> {\n\n let entry = unsafe { ash::Entry::new()? };\n\n debug!(\"Entry created\");\n\n\n\n let instance = {\n\n // Convert app info\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 15, "score": 67987.80952338284 }, { "content": " physical_device_preference: PhysicalDevicePreference::Best,\n\n app_name: Cow::Borrowed(\"Unnamed\"),\n\n version: (0, 0, 1),\n\n debug_utils: false,\n\n extensions: HashSet::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl VkTracerAppBuilder {\n\n pub fn pick_best_physical_device(mut self) -> Self {\n\n self.physical_device_preference = PhysicalDevicePreference::Best;\n\n self\n\n }\n\n\n\n pub fn with_app_info(mut self, app_name: Cow<'static, str>, version: (u32, u32, u32)) -> Self {\n\n self.app_name = app_name;\n\n self.version = version;\n\n self\n\n }\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 16, "score": 67982.9897008256 }, { "content": " // Pool creation macro\n\n let pool_creator = |queue_index: u32, flags: vk::CommandPoolCreateFlags| unsafe {\n\n let queue = device.get_device_queue(queue_index, 0);\n\n let pool = device.create_command_pool(\n\n &vk::CommandPoolCreateInfo::builder()\n\n .flags(flags)\n\n .queue_family_index(queue_index),\n\n None,\n\n )?;\n\n Result::Ok((queue, pool))\n\n };\n\n\n\n let (graphics_pool, transfer_pool) =\n\n if adapter.info.graphics_queue.index == adapter.info.transfer_queue.index {\n\n let pool = pool_creator(\n\n adapter.info.graphics_queue.index,\n\n vk::CommandPoolCreateFlags::empty(),\n\n )?;\n\n (pool, pool)\n\n } else {\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 17, "score": 67982.41861755072 }, { "content": "use crate::{\n\n command_recorder::QueueType,\n\n errors::Result,\n\n present::Surface,\n\n setup::{\n\n debug_utils::DebugUtils,\n\n extensions::{required_instance_extensions, required_instance_extensions_with_surface},\n\n pick_adapter, Adapter, AdapterRequirements, QueueFamilyIndices,\n\n },\n\n utils::str_to_cstr,\n\n VkTracerApp, VULKAN_VERSION,\n\n};\n\nuse ash::{\n\n version::{DeviceV1_0, 
EntryV1_0, InstanceV1_0},\n\n vk,\n\n};\n\nuse log::debug;\n\nuse raw_window_handle::HasRawWindowHandle;\n\nuse slotmap::SlotMap;\n\nuse std::{\n\n borrow::Cow,\n\n collections::{HashMap, HashSet},\n\n ffi::{CStr, CString},\n\n};\n\n\n\n#[derive(Copy, Clone, Debug)]\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 18, "score": 67978.5526679316 }, { "content": " };\n\n\n\n if let Some(surface) = surface.as_mut() {\n\n surface.complete(&adapter);\n\n debug!(\"Surface complete\");\n\n }\n\n\n\n let vma = vk_mem::Allocator::new(&vk_mem::AllocatorCreateInfo {\n\n physical_device: adapter.handle,\n\n device: device.clone(),\n\n instance: instance.clone(),\n\n flags: vk_mem::AllocatorCreateFlags::NONE,\n\n preferred_large_heap_block_size: 0,\n\n frame_in_use_count: 0,\n\n heap_size_limits: None,\n\n })?;\n\n\n\n debug!(\"VMA allocator created\");\n\n\n\n let command_pools = {\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 19, "score": 67977.15051451915 }, { "content": " let graphics_pool = pool_creator(\n\n adapter.info.graphics_queue.index,\n\n vk::CommandPoolCreateFlags::empty(),\n\n )?;\n\n let transfer_pool = pool_creator(\n\n adapter.info.transfer_queue.index,\n\n vk::CommandPoolCreateFlags::TRANSIENT,\n\n )?;\n\n (graphics_pool, transfer_pool)\n\n };\n\n\n\n let mut command_pools = HashMap::with_capacity(2);\n\n command_pools.insert(QueueType::Graphics, graphics_pool);\n\n command_pools.insert(QueueType::Transfer, transfer_pool);\n\n command_pools\n\n };\n\n\n\n debug!(\"Command pools created\");\n\n\n\n Ok(VkTracerApp {\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 20, "score": 67977.07903094283 }, { "content": " } else {\n\n required_instance_extensions(self.debug_utils)\n\n };\n\n\n\n // Create instance\n\n let info = vk::InstanceCreateInfo::builder()\n\n .application_info(&vk_app_info)\n\n .enabled_extension_names(&vk_extensions);\n\n\n\n unsafe { entry.create_instance(&info, None)? 
}\n\n };\n\n debug!(\"Instance created\");\n\n\n\n let debug_utils = if self.debug_utils {\n\n Some(DebugUtils::new(&entry, &instance).unwrap())\n\n } else {\n\n None\n\n };\n\n\n\n let mut surface = if let Some((window, size)) = window.as_ref() {\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 21, "score": 67971.71430493945 }, { "content": " .map(|ext| ext.as_ptr())\n\n .collect::<Vec<_>>();\n\n\n\n // Queues create info\n\n let queues_create_info =\n\n QueueFamilyIndices::from(&adapter.info).into_queue_create_info();\n\n\n\n unsafe {\n\n instance.create_device(\n\n adapter.handle,\n\n &vk::DeviceCreateInfo::builder()\n\n .enabled_extension_names(&enable_extensions)\n\n .queue_create_infos(&queues_create_info),\n\n None,\n\n )?\n\n }\n\n };\n\n debug!(\"Created device\");\n\n\n\n (adapter, device)\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 22, "score": 67971.13123156595 }, { "content": " entry,\n\n instance,\n\n debug_utils,\n\n surface,\n\n adapter,\n\n device,\n\n vma,\n\n command_pools,\n\n mesh_storage: SlotMap::with_key(),\n\n ubo_storage: SlotMap::with_key(),\n\n swapchain_storage: SlotMap::with_key(),\n\n render_plan_storage: SlotMap::with_key(),\n\n render_target_storage: SlotMap::with_key(),\n\n forward_pipeline_storage: SlotMap::with_key(),\n\n renderer_storage: SlotMap::with_key(),\n\n descriptor_pool_storage: SlotMap::with_key(),\n\n descriptor_set_storage: SlotMap::with_key(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 23, "score": 67968.97788332534 }, { "content": " Some(Surface::create(&entry, &instance, *window, *size).unwrap())\n\n } else {\n\n None\n\n };\n\n\n\n let (adapter, device) = {\n\n // Build adapter requirements\n\n let adapter_requirements = {\n\n let mut requirements = if let (Some((window, _)), Some(surface)) =\n\n (window.as_ref(), surface.as_ref())\n\n {\n\n AdapterRequirements::default_from_window(surface, *window).unwrap()\n\n } else {\n\n AdapterRequirements::default()\n\n };\n\n\n\n requirements\n\n .required_extensions\n\n .extend(vk_tracer_extensions_to_vk_extensions(\n\n self.extensions.iter(),\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 24, "score": 67968.38709780778 }, { "content": " let app_name = CString::new(self.app_name.as_bytes()).unwrap();\n\n\n\n let vk_app_info = vk::ApplicationInfo::builder()\n\n .application_name(&app_name)\n\n .application_version({\n\n let (major, minor, patch) = self.version;\n\n vk::make_version(major, minor, patch)\n\n })\n\n .engine_name(str_to_cstr(\"VK Tracer\\0\"))\n\n .engine_version({\n\n let major = env!(\"CARGO_PKG_VERSION_MAJOR\").parse().unwrap();\n\n let minor = env!(\"CARGO_PKG_VERSION_MINOR\").parse().unwrap();\n\n let patch = env!(\"CARGO_PKG_VERSION_PATCH\").parse().unwrap();\n\n vk::make_version(major, minor, patch)\n\n })\n\n .api_version(VULKAN_VERSION);\n\n\n\n // Gather extensions, window is optional\n\n let vk_extensions = if let Some((window, _)) = window {\n\n required_instance_extensions_with_surface(self.debug_utils, window).unwrap()\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 25, "score": 67964.23936516559 }, { "content": " ));\n\n requirements\n\n };\n\n\n\n // Query adapter\n\n let adapter_info = pick_adapter(&instance, &adapter_requirements).unwrap();\n\n let adapter = Adapter::new(\n\n adapter_info.physical_device_info.handle,\n\n adapter_info,\n\n adapter_requirements,\n\n );\n\n\n\n debug!(\"Created adapter\");\n\n\n\n // Create device\n\n let device = {\n\n let 
enable_extensions = adapter\n\n .requirements\n\n .required_extensions\n\n .iter()\n", "file_path": "vk_tracer/src/setup/app_builder.rs", "rank": 26, "score": 67961.68523583004 }, { "content": "#[inline]\n\nfn has_stencil(format: vk::Format) -> bool {\n\n format == vk::Format::D32_SFLOAT_S8_UINT || format == vk::Format::D24_UNORM_S8_UINT\n\n}\n\n\n", "file_path": "vk_tracer/src/mem/image.rs", "rank": 27, "score": 58497.65844693927 }, { "content": "fn severity_to_level(severity: vk::DebugUtilsMessageSeverityFlagsEXT) -> Level {\n\n match severity {\n\n vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => Level::Error,\n\n vk::DebugUtilsMessageSeverityFlagsEXT::WARNING => Level::Warn,\n\n vk::DebugUtilsMessageSeverityFlagsEXT::INFO => Level::Info,\n\n vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE => Level::Trace,\n\n _ => Level::Trace,\n\n }\n\n}\n", "file_path": "vk_tracer/src/setup/debug_utils.rs", "rank": 28, "score": 53563.92566823696 }, { "content": "fn recreate_swapchain(\n\n graphics: &mut VkTracerApp,\n\n new_size: (u32, u32),\n\n swapchain: SwapchainHandle,\n\n render_plan: RenderPlanHandle,\n\n render_targets: &[RenderTargetHandle],\n\n renderers: &[RendererHandle],\n\n) -> anyhow::Result<()> {\n\n // Recreate swapchain\n\n graphics.recreate_swapchain(swapchain, new_size)?;\n\n let swapchain_images = graphics.get_images_from_swapchain(swapchain)?;\n\n\n\n // Recreate render targets\n\n for (render_target, image) in render_targets.iter().zip(swapchain_images.into_iter()) {\n\n graphics.recreate_render_target(render_plan, new_size, *render_target, [image])?;\n\n }\n\n\n\n // Recreate renderers\n\n for (renderer, render_target) in renderers\n\n .iter()\n\n .copied()\n\n .zip(render_targets.iter().copied())\n\n {\n\n graphics.recreate_renderer(renderer, render_target)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "vk_tracer/examples/triangle.rs", "rank": 29, "score": 52694.711165665794 }, { "content": "fn recreate_swapchain(\n\n graphics: &mut VkTracerApp,\n\n new_size: (u32, u32),\n\n swapchain: SwapchainHandle,\n\n render_plan: RenderPlanHandle,\n\n render_targets: &[RenderTargetHandle],\n\n renderers: &[RendererHandle],\n\n) -> anyhow::Result<()> {\n\n graphics.recreate_swapchain(swapchain, new_size)?;\n\n let swapchain_images = graphics.get_images_from_swapchain(swapchain)?;\n\n for (render_target, image) in render_targets.iter().zip(swapchain_images.into_iter()) {\n\n graphics.recreate_render_target(render_plan, new_size, *render_target, [image])?;\n\n }\n\n for (renderer, render_target) in renderers\n\n .iter()\n\n .copied()\n\n .zip(render_targets.iter().copied())\n\n {\n\n graphics.recreate_renderer(renderer, render_target)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "vk_tracer/examples/camera.rs", "rank": 30, "score": 52694.711165665794 }, { "content": "fn recreate_swapchain(\n\n graphics: &mut VkTracerApp,\n\n new_size: (u32, u32),\n\n swapchain: SwapchainHandle,\n\n render_plan: RenderPlanHandle,\n\n render_targets: &[RenderTargetHandle],\n\n renderers: &[RendererHandle],\n\n) -> anyhow::Result<()> {\n\n graphics.recreate_swapchain(swapchain, new_size)?;\n\n let swapchain_images = graphics.get_images_from_swapchain(swapchain)?;\n\n for (render_target, image) in render_targets.iter().zip(swapchain_images.into_iter()) {\n\n graphics.recreate_render_target(render_plan, new_size, *render_target, [image])?;\n\n }\n\n for (renderer, render_target) in renderers\n\n .iter()\n\n .copied()\n\n .zip(render_targets.iter().copied())\n\n {\n\n graphics.recreate_renderer(renderer, render_target)?;\n\n 
}\n\n Ok(())\n\n}\n", "file_path": "vk_tracer/examples/model.rs", "rank": 31, "score": 52694.711165665794 }, { "content": "pub trait MeshVertex: Copy + 'static {\n\n fn binding_description() -> &'static [vk::VertexInputBindingDescription];\n\n fn attribute_description() -> &'static [vk::VertexInputAttributeDescription];\n\n}\n\n\n\n#[cfg(feature = \"math\")]\n\nlazy_static! {\n\n static ref VERTEX_XYZ_UV_NORM_BINDING_DESC: [vk::VertexInputBindingDescription; 1] =\n\n [vk::VertexInputBindingDescription::builder()\n\n .binding(0)\n\n .stride(std::mem::size_of::<VertexXyzUvNorm>() as u32)\n\n .input_rate(vk::VertexInputRate::VERTEX)\n\n .build(),];\n\n static ref VERTEX_XYZ_UV_NORM_ATTRIBUTE_DESC: [vk::VertexInputAttributeDescription; 3] = [\n\n vk::VertexInputAttributeDescription::builder()\n\n .binding(0)\n\n .location(0)\n\n .format(vk::Format::R32G32B32_SFLOAT)\n\n .offset(offset_of!(VertexXyzUvNorm => xyz).get_byte_offset() as u32)\n\n .build(),\n", "file_path": "vk_tracer/src/mesh.rs", "rank": 32, "score": 47740.36378349019 }, { "content": "pub trait MeshIndex: Copy + 'static {\n\n fn ty() -> vk::IndexType;\n\n}\n\n\n\nimpl MeshIndex for u16 {\n\n fn ty() -> vk::IndexType {\n\n vk::IndexType::UINT16\n\n }\n\n}\n\n\n\nimpl MeshIndex for u32 {\n\n fn ty() -> vk::IndexType {\n\n vk::IndexType::UINT32\n\n }\n\n}\n\n\n\npub struct Mesh {\n\n pub(crate) vertices: RawBufferAllocation,\n\n pub(crate) vertex_desc: (\n\n TypeId, // For future use\n", "file_path": "vk_tracer/src/mesh.rs", "rank": 33, "score": 47740.36378349019 }, { "content": "pub trait GltfToVertex: MeshVertex + Sized {\n\n fn is_compatible(primitive: &gltf::Primitive) -> bool;\n\n fn from_gltf(primitive: &gltf::Primitive, buffers: &[gltf::buffer::Data]) -> Result<Vec<Self>>;\n\n}\n\n\n\nimpl GltfToVertex for VertexXyz {\n\n fn is_compatible(primitive: &gltf::Primitive) -> bool {\n\n primitive.get(&gltf::Semantic::Positions).is_some()\n\n }\n\n\n\n fn from_gltf(primitive: &gltf::Primitive, buffers: &[gltf::buffer::Data]) -> Result<Vec<Self>> {\n\n Ok(primitive\n\n .reader(|b| Some(&buffers[b.index()]))\n\n .read_positions()\n\n .unwrap()\n\n .map(|pos| VertexXyz(glm::make_vec3(&pos)))\n\n .collect())\n\n }\n\n}\n\n\n", "file_path": "vk_tracer/src/utils/model_loader.rs", "rank": 34, "score": 45254.442407524446 }, { "content": "use crate::{errors::Result, mem::ImageViewFatHandle, RenderPlanHandle, VkTracerApp};\n\nuse ash::{version::DeviceV1_2, vk, vk::ClearColorValue};\n\n\n\nimpl VkTracerApp {\n\n pub fn new_render_plan(&mut self) -> RenderPlanBuilder {\n\n RenderPlanBuilder {\n\n app: self,\n\n clear_values: Vec::new(),\n\n attachments: Vec::new(),\n\n references: Vec::new(),\n\n dependencies: Vec::new(),\n\n subpasses: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\npub(crate) struct RenderPlan {\n\n pub(crate) render_pass: vk::RenderPass,\n\n // Data used to recreate the render pass when necessary\n\n pub(crate) clear_values: Vec<vk::ClearValue>,\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 47, "score": 43223.906479716556 }, { "content": " pub(crate) attachments: Vec<vk::AttachmentDescription2>,\n\n pub(crate) references: Vec<vk::AttachmentReference2>,\n\n pub(crate) subpasses: Vec<SubpassBuilder>,\n\n}\n\n\n\npub struct RenderPlanBuilder<'app> {\n\n app: &'app mut VkTracerApp,\n\n clear_values: Vec<vk::ClearValue>,\n\n attachments: Vec<vk::AttachmentDescription2>,\n\n references: Vec<vk::AttachmentReference2>,\n\n dependencies: Vec<vk::SubpassDependency2>,\n\n subpasses: Vec<SubpassBuilder>,\n\n}\n\n\n\nimpl 
RenderPlanBuilder<'_> {\n\n /// Add a color attachment that will be used for presentation.\n\n pub fn add_color_attachment_present(mut self, image: ImageViewFatHandle) -> Result<Self> {\n\n let description = vk::AttachmentDescription2::builder()\n\n .format(image.format)\n\n .samples(vk::SampleCountFlags::TYPE_1)\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 48, "score": 43223.0855034231 }, { "content": "use crate::{\n\n errors::{HandleType, Result},\n\n mem::ImageViewFatHandle,\n\n RenderPlanHandle, RenderTargetHandle, VkTracerApp,\n\n};\n\nuse ash::{version::DeviceV1_0, vk};\n\n\n\nimpl VkTracerApp {\n\n /// The first attachment must be the color attachment\n\n pub fn allocate_render_target(\n\n &mut self,\n\n render_plan: RenderPlanHandle,\n\n attachments: &[ImageViewFatHandle],\n\n ) -> Result<RenderTargetHandle> {\n\n let render_plan = storage_access!(\n\n self.render_plan_storage,\n\n render_plan,\n\n HandleType::RenderPlan\n\n );\n\n debug_assert_eq!(render_plan.attachments.len(), attachments.len());\n", "file_path": "vk_tracer/src/render/render_target.rs", "rank": 49, "score": 43220.382627818966 }, { "content": "\n\n subpasses.push(subpass_description.build());\n\n\n\n subpasses_references.push(color_attachments);\n\n }\n\n\n\n let render_pass = unsafe {\n\n self.app.device.create_render_pass2(\n\n &vk::RenderPassCreateInfo2::builder()\n\n .attachments(&self.attachments)\n\n .dependencies(&self.dependencies)\n\n .subpasses(&subpasses),\n\n None,\n\n )?\n\n };\n\n\n\n Ok(self.app.render_plan_storage.insert(RenderPlan {\n\n render_pass,\n\n clear_values: self.clear_values,\n\n attachments: self.attachments,\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 50, "score": 43214.25072650882 }, { "content": " depth_stencil: vk::ClearDepthStencilValue { depth, stencil },\n\n };\n\n self\n\n }\n\n\n\n pub fn add_subpass(\n\n mut self,\n\n subpass: SubpassBuilder,\n\n dependency: Option<vk::SubpassDependency2>,\n\n ) -> Self {\n\n self.subpasses.push(subpass);\n\n if let Some(dependency) = dependency {\n\n self.dependencies.push(dependency);\n\n }\n\n self\n\n }\n\n\n\n pub fn build(self) -> Result<RenderPlanHandle> {\n\n let mut subpasses = Vec::with_capacity(self.subpasses.len());\n\n let mut subpasses_references = Vec::with_capacity(self.subpasses.len());\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 51, "score": 43214.10248772955 }, { "content": " }\n\n}\n\n\n\nimpl SubpassBuilder {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn graphics(mut self) -> Self {\n\n self.bind_point = vk::PipelineBindPoint::GRAPHICS;\n\n self\n\n }\n\n\n\n pub fn compute(mut self) -> Self {\n\n self.bind_point = vk::PipelineBindPoint::COMPUTE;\n\n self\n\n }\n\n\n\n pub fn color_attachments<const N: usize>(mut self, attachments: [usize; N]) -> Self {\n\n self.color_attachments = Vec::from(attachments).into_boxed_slice();\n\n self\n\n }\n\n\n\n pub fn depth_stencil_attachment(mut self, attachment: usize) -> Self {\n\n self.depth_stencil_attachment = Some(attachment);\n\n self\n\n }\n\n}\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 52, "score": 43213.73063812588 }, { "content": " references: self.references,\n\n subpasses: self.subpasses,\n\n }))\n\n }\n\n}\n\n\n\npub struct SubpassBuilder {\n\n bind_point: vk::PipelineBindPoint,\n\n color_attachments: Box<[usize]>,\n\n depth_stencil_attachment: Option<usize>,\n\n}\n\n\n\nimpl Default for SubpassBuilder {\n\n #[inline]\n\n fn default() -> Self {\n\n Self {\n\n bind_point: 
vk::PipelineBindPoint::GRAPHICS,\n\n color_attachments: Box::default(),\n\n depth_stencil_attachment: None,\n\n }\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 53, "score": 43213.46063179888 }, { "content": "\n\n pub fn recreate_render_target<const N: usize>(\n\n &mut self,\n\n render_plan: RenderPlanHandle,\n\n new_window_size: (u32, u32),\n\n render_target: RenderTargetHandle,\n\n attachments: [ImageViewFatHandle; N],\n\n ) -> Result<()> {\n\n let render_plan = storage_access!(\n\n self.render_plan_storage,\n\n render_plan,\n\n HandleType::RenderPlan\n\n );\n\n let render_target = storage_access_mut!(\n\n self.render_target_storage,\n\n render_target,\n\n HandleType::RenderTarget\n\n );\n\n\n\n unsafe {\n", "file_path": "vk_tracer/src/render/render_target.rs", "rank": 54, "score": 43212.46420330942 }, { "content": " Ok(self)\n\n }\n\n\n\n pub fn add_depth_attachment(mut self, image: ImageViewFatHandle) -> Result<Self> {\n\n let description = vk::AttachmentDescription2::builder()\n\n .format(image.format)\n\n .samples(vk::SampleCountFlags::TYPE_1)\n\n .load_op(vk::AttachmentLoadOp::CLEAR)\n\n .store_op(vk::AttachmentStoreOp::DONT_CARE)\n\n .stencil_load_op(vk::AttachmentLoadOp::DONT_CARE)\n\n .stencil_store_op(vk::AttachmentStoreOp::DONT_CARE)\n\n .initial_layout(vk::ImageLayout::UNDEFINED)\n\n .final_layout(vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL)\n\n .build();\n\n\n\n let reference = vk::AttachmentReference2::builder()\n\n .attachment(self.attachments.len() as u32)\n\n .layout(vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL)\n\n .build();\n\n\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 55, "score": 43208.740865728156 }, { "content": "\n\n render_target.extent = vk::Extent2D::builder()\n\n .width(new_window_size.0)\n\n .height(new_window_size.1)\n\n .build();\n\n render_target.framebuffer = framebuffer;\n\n Ok(())\n\n }\n\n}\n\n\n\npub(crate) struct RenderTarget {\n\n pub(crate) framebuffer: vk::Framebuffer,\n\n pub(crate) extent: vk::Extent2D,\n\n}\n", "file_path": "vk_tracer/src/render/render_target.rs", "rank": 56, "score": 43208.220951416755 }, { "content": " self.device\n\n .destroy_framebuffer(render_target.framebuffer, None);\n\n }\n\n\n\n let mut attachment_views = [vk::ImageView::null(); N];\n\n for (i, attachment) in attachments.iter().enumerate() {\n\n attachment_views[i] = attachment.view;\n\n }\n\n\n\n let framebuffer = unsafe {\n\n self.device.create_framebuffer(\n\n &vk::FramebufferCreateInfo::builder()\n\n .render_pass(render_plan.render_pass)\n\n .attachments(&attachment_views)\n\n .width(new_window_size.0)\n\n .height(new_window_size.1)\n\n .layers(1),\n\n None,\n\n )?\n\n };\n", "file_path": "vk_tracer/src/render/render_target.rs", "rank": 57, "score": 43205.36311695722 }, { "content": " self.attachments.push(description);\n\n self.references.push(reference);\n\n self.clear_values.push(vk::ClearValue {\n\n depth_stencil: vk::ClearDepthStencilValue {\n\n depth: 1.0,\n\n stencil: 0,\n\n },\n\n });\n\n Ok(self)\n\n }\n\n\n\n pub fn set_clear_color(mut self, index: usize, color: [f32; 4]) -> Self {\n\n self.clear_values[index] = vk::ClearValue {\n\n color: vk::ClearColorValue { float32: color },\n\n };\n\n self\n\n }\n\n\n\n pub fn set_clear_depth_stencil(mut self, index: usize, depth: f32, stencil: u32) -> Self {\n\n self.clear_values[index] = vk::ClearValue {\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 58, "score": 43205.286493834894 }, { "content": "\n\n let attachment_views = 
attachments.iter().map(|a| a.view).collect::<Vec<_>>();\n\n\n\n let framebuffer = unsafe {\n\n self.device.create_framebuffer(\n\n &vk::FramebufferCreateInfo::builder()\n\n .render_pass(render_plan.render_pass)\n\n .attachments(&attachment_views)\n\n .width(attachments[0].extent.width)\n\n .height(attachments[0].extent.height)\n\n .layers(1),\n\n None,\n\n )?\n\n };\n\n\n\n Ok(self.render_target_storage.insert(RenderTarget {\n\n framebuffer,\n\n extent: attachments[0].extent,\n\n }))\n\n }\n", "file_path": "vk_tracer/src/render/render_target.rs", "rank": 59, "score": 43203.687149735146 }, { "content": "\n\n for subpass in &self.subpasses {\n\n let color_attachments = subpass\n\n .color_attachments\n\n .iter()\n\n .copied()\n\n .map(|i| self.references[i])\n\n .collect::<Box<[_]>>();\n\n\n\n // Ok we can build because we know that the attachments will not move or drop\n\n let mut subpass_description = vk::SubpassDescription2::builder()\n\n .pipeline_bind_point(subpass.bind_point)\n\n .color_attachments(&color_attachments);\n\n\n\n if let Some(i) = subpass.depth_stencil_attachment {\n\n subpass_description =\n\n subpass_description.depth_stencil_attachment(&self.references[i]);\n\n\n\n subpasses_references.push(Vec::from([self.references[i]]).into_boxed_slice());\n\n }\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 60, "score": 43203.34797453986 }, { "content": " .load_op(vk::AttachmentLoadOp::CLEAR)\n\n .store_op(vk::AttachmentStoreOp::STORE)\n\n .stencil_load_op(vk::AttachmentLoadOp::DONT_CARE)\n\n .stencil_store_op(vk::AttachmentStoreOp::DONT_CARE)\n\n .initial_layout(vk::ImageLayout::UNDEFINED)\n\n .final_layout(vk::ImageLayout::PRESENT_SRC_KHR)\n\n .build();\n\n\n\n let reference = vk::AttachmentReference2::builder()\n\n .attachment(self.attachments.len() as u32)\n\n .layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL)\n\n .build();\n\n\n\n self.attachments.push(description);\n\n self.references.push(reference);\n\n self.clear_values.push(vk::ClearValue {\n\n color: ClearColorValue {\n\n float32: Default::default(),\n\n },\n\n });\n", "file_path": "vk_tracer/src/render/render_plan.rs", "rank": 61, "score": 43195.567169562855 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]\n\npub enum QueueType {\n\n Graphics,\n\n Transfer,\n\n}\n", "file_path": "vk_tracer/src/command_recorder.rs", "rank": 62, "score": 37458.88424108882 }, { "content": "use crate::{\n\n command_recorder::QueueType,\n\n errors::{HandleType, Result},\n\n ForwardPipelineHandle, RendererHandle, SwapchainHandle, VkTracerApp,\n\n};\n\nuse ash::{version::DeviceV1_0, vk};\n\nuse std::slice::from_ref;\n\n\n\nmod forward;\n\nmod render_plan;\n\nmod render_target;\n\nmod renderer;\n\n\n\npub(crate) use forward::*;\n\npub use render_plan::*;\n\npub(crate) use render_target::*;\n\npub use renderer::*;\n\n\n\n#[derive(Copy, Clone)]\n\npub enum RenderablePipelineHandle {\n\n Forward(ForwardPipelineHandle),\n\n}\n\n\n\nimpl Into<RenderablePipelineHandle> for ForwardPipelineHandle {\n\n fn into(self) -> RenderablePipelineHandle {\n\n RenderablePipelineHandle::Forward(self)\n\n }\n\n}\n\n\n", "file_path": "vk_tracer/src/render.rs", "rank": 63, "score": 37380.09277960609 }, { "content": " .swapchains(from_ref(&swapchain.handle))\n\n .wait_semaphores(from_ref(&render_semaphore))\n\n .image_indices(from_ref(&render_target_index));\n\n\n\n let graphics_queue = self.command_pools.get(&QueueType::Graphics).unwrap().0;\n\n let should_recreate_swapchain = unsafe {\n\n // Launch render\n\n 
self.device.queue_submit(\n\n graphics_queue,\n\n from_ref(&submit_info),\n\n renderer.render_fence,\n\n )?;\n\n\n\n match swapchain\n\n .loader\n\n .queue_present(graphics_queue, &present_info)\n\n {\n\n Err(vk::Result::ERROR_OUT_OF_DATE_KHR) => true,\n\n err @ Err(_) => err?,\n\n Ok(is_suboptimal) => is_suboptimal,\n", "file_path": "vk_tracer/src/render.rs", "rank": 64, "score": 37367.33500775707 }, { "content": " let render_semaphore = unsafe {\n\n self.device\n\n .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)?\n\n };\n\n\n\n // Reset render fence\n\n unsafe {\n\n // Should return immediately but its a precaution\n\n self.device\n\n .wait_for_fences(from_ref(&renderer.render_fence), true, u64::MAX)?;\n\n self.device.reset_fences(from_ref(&renderer.render_fence))?;\n\n }\n\n\n\n let submit_info = vk::SubmitInfo::builder()\n\n .wait_dst_stage_mask(from_ref(&vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT))\n\n .wait_semaphores(from_ref(&swapchain.image_available_semaphore))\n\n .signal_semaphores(from_ref(&render_semaphore))\n\n .command_buffers(from_ref(&renderer.main_commands));\n\n\n\n let present_info = vk::PresentInfoKHR::builder()\n", "file_path": "vk_tracer/src/render.rs", "rank": 65, "score": 37366.2575615459 }, { "content": " }\n\n };\n\n\n\n unsafe {\n\n // Wait for the end of the render\n\n self.device\n\n .wait_for_fences(from_ref(&renderer.render_fence), true, u64::MAX)?;\n\n\n\n // Now we can free the semaphore\n\n self.device.destroy_semaphore(render_semaphore, None);\n\n }\n\n\n\n Ok(should_recreate_swapchain)\n\n }\n\n}\n", "file_path": "vk_tracer/src/render.rs", "rank": 66, "score": 37359.590558486256 }, { "content": "use std::{\n\n io::{Read, Seek},\n\n slice::from_ref,\n\n};\n\n\n\nuse ash::{version::DeviceV1_0, vk, vk::CommandBuffer};\n\n\n\nuse crate::{\n\n errors::{HandleType, Result},\n\n mesh::Mesh,\n\n render::{RenderPlan, VkRecordable},\n\n utils::str_to_cstr,\n\n DescriptorSetHandle, ForwardPipelineHandle, MeshHandle, RenderPlanHandle, VkTracerApp,\n\n};\n\n\n\nimpl VkTracerApp {\n\n pub fn create_forward_pipeline(\n\n &mut self,\n\n render_plan: RenderPlanHandle,\n\n subpass: u32,\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 67, "score": 36258.95298570133 }, { "content": "pub(crate) struct ForwardPipeline {\n\n pub(crate) pipeline: vk::Pipeline,\n\n pub(crate) pipeline_layout: vk::PipelineLayout,\n\n pub(crate) descriptor_sets: Box<[vk::DescriptorSet]>,\n\n pub(crate) mesh: MeshHandle,\n\n}\n\n\n\nimpl ForwardPipeline {\n\n pub fn new(\n\n device: &ash::Device,\n\n render_plan: &RenderPlan,\n\n subpass: u32,\n\n descriptor_layouts: &[vk::DescriptorSetLayout],\n\n descriptor_sets: Box<[vk::DescriptorSet]>,\n\n mut vertex_shader: impl Read + Seek,\n\n mut fragment_shader: impl Read + Seek,\n\n mesh_handle: MeshHandle,\n\n mesh: &Mesh,\n\n ) -> Result<Self> {\n\n let vertex_module = unsafe {\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 68, "score": 36255.45407562013 }, { "content": " };\n\n\n\n unsafe {\n\n device.destroy_shader_module(vertex_module, None);\n\n device.destroy_shader_module(fragment_module, None);\n\n }\n\n\n\n Ok(Self {\n\n pipeline,\n\n pipeline_layout,\n\n descriptor_sets,\n\n mesh: mesh_handle,\n\n })\n\n }\n\n}\n\n\n\nimpl VkRecordable for ForwardPipeline {\n\n unsafe fn record_commands(\n\n &self,\n\n app: &VkTracerApp,\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 69, "score": 36247.553673454706 }, { "content": " let spv = ash::util::read_spv(&mut vertex_shader)?;\n\n 
device.create_shader_module(&vk::ShaderModuleCreateInfo::builder().code(&spv), None)?\n\n };\n\n\n\n let fragment_module = unsafe {\n\n let spv = ash::util::read_spv(&mut fragment_shader)?;\n\n device.create_shader_module(&vk::ShaderModuleCreateInfo::builder().code(&spv), None)?\n\n };\n\n\n\n let stage_vertex = vk::PipelineShaderStageCreateInfo::builder()\n\n .stage(vk::ShaderStageFlags::VERTEX)\n\n .module(vertex_module)\n\n .name(str_to_cstr(\"main\\0\"));\n\n\n\n let stage_fragment = vk::PipelineShaderStageCreateInfo::builder()\n\n .stage(vk::ShaderStageFlags::FRAGMENT)\n\n .module(fragment_module)\n\n .name(str_to_cstr(\"main\\0\"));\n\n\n\n let stages = [stage_vertex.build(), stage_fragment.build()];\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 70, "score": 36237.665255012165 }, { "content": "\n\n if !self.descriptor_sets.is_empty() {\n\n app.device.cmd_bind_descriptor_sets(\n\n commands,\n\n vk::PipelineBindPoint::GRAPHICS,\n\n self.pipeline_layout,\n\n 0,\n\n &self.descriptor_sets,\n\n &[],\n\n );\n\n }\n\n\n\n app.device\n\n .cmd_bind_pipeline(commands, vk::PipelineBindPoint::GRAPHICS, self.pipeline);\n\n\n\n app.device.cmd_set_viewport(\n\n commands,\n\n 0,\n\n from_ref(\n\n &vk::Viewport::builder()\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 71, "score": 36234.55749868182 }, { "content": " viewport: vk::Extent2D,\n\n commands: CommandBuffer,\n\n ) -> Result<()> {\n\n let mesh = storage_access!(app.mesh_storage, self.mesh, HandleType::Mesh);\n\n\n\n app.device.cmd_bind_vertex_buffers(\n\n commands,\n\n 0,\n\n from_ref(&mesh.vertices.buffer),\n\n &[0],\n\n //from_ref(&(mesh.vertices.info.get_offset() as vk::DeviceSize)),\n\n );\n\n\n\n app.device.cmd_bind_index_buffer(\n\n commands,\n\n mesh.indices.buffer,\n\n 0,\n\n // mesh.indices.info.get_offset() as vk::DeviceSize,\n\n mesh.index_ty.1,\n\n );\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 72, "score": 36234.002625601555 }, { "content": " descriptor_sets_handles: &[DescriptorSetHandle],\n\n vertex_shader: impl Read + Seek,\n\n fragment_shader: impl Read + Seek,\n\n mesh_handle: MeshHandle,\n\n ) -> Result<ForwardPipelineHandle> {\n\n let mesh = storage_access!(self.mesh_storage, mesh_handle, HandleType::Mesh);\n\n let render_plan = storage_access!(\n\n self.render_plan_storage,\n\n render_plan,\n\n HandleType::RenderPlan\n\n );\n\n\n\n let mut descriptor_layouts = Vec::with_capacity(descriptor_sets_handles.len());\n\n let mut descriptor_sets = Vec::with_capacity(descriptor_sets_handles.len());\n\n for handle in descriptor_sets_handles.iter().copied() {\n\n let set = storage_access!(\n\n self.descriptor_set_storage,\n\n handle,\n\n HandleType::DescriptorSet\n\n );\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 73, "score": 36233.76107535567 }, { "content": "\n\n let pipeline = unsafe {\n\n let create_info = vk::GraphicsPipelineCreateInfo::builder()\n\n .stages(&stages)\n\n .vertex_input_state(&vertex_input_info)\n\n .input_assembly_state(&input_assembly_info)\n\n .rasterization_state(&raster_state_info)\n\n .multisample_state(&msaa_info)\n\n .depth_stencil_state(&depth_stencil_info)\n\n .color_blend_state(&color_blend_state)\n\n .viewport_state(&viewport_state_info)\n\n .dynamic_state(&dynamic_state)\n\n .layout(pipeline_layout)\n\n .render_pass(render_plan.render_pass)\n\n .subpass(subpass);\n\n\n\n let pipelines = device\n\n .create_graphics_pipelines(vk::PipelineCache::null(), from_ref(&create_info), None)\n\n .map_err(|(_, err)| err)?;\n\n pipelines[0]\n", 
"file_path": "vk_tracer/src/render/forward.rs", "rank": 74, "score": 36233.45664474542 }, { "content": " .height(viewport.height as f32)\n\n .width(viewport.width as f32)\n\n .x(0.0)\n\n .y(0.0)\n\n .min_depth(0.0)\n\n .max_depth(1.0),\n\n ),\n\n );\n\n\n\n app.device.cmd_set_scissor(\n\n commands,\n\n 0,\n\n from_ref(\n\n &vk::Rect2D::builder()\n\n .extent(viewport)\n\n .offset(vk::Offset2D::default()),\n\n ),\n\n );\n\n\n\n app.device\n\n .cmd_draw_indexed(commands, mesh.indices_len, 1, 0, 0, 1);\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 75, "score": 36231.390051081 }, { "content": " descriptor_layouts.push(set.layout);\n\n descriptor_sets.push(set.handle);\n\n }\n\n\n\n let pipeline = ForwardPipeline::new(\n\n &self.device,\n\n render_plan,\n\n subpass,\n\n &descriptor_layouts,\n\n descriptor_sets.into_boxed_slice(),\n\n vertex_shader,\n\n fragment_shader,\n\n mesh_handle,\n\n mesh,\n\n )?;\n\n\n\n Ok(self.forward_pipeline_storage.insert(pipeline))\n\n }\n\n}\n\n\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 76, "score": 36231.25744798035 }, { "content": " .viewport_count(1)\n\n .scissor_count(1);\n\n\n\n // TODO: attachments\n\n let color_blend_state = vk::PipelineColorBlendStateCreateInfo::builder()\n\n .logic_op_enable(false)\n\n .logic_op(vk::LogicOp::COPY)\n\n .attachments(from_ref(&color_blend_info));\n\n\n\n let dynamic_state = vk::PipelineDynamicStateCreateInfo::builder()\n\n .dynamic_states(&[vk::DynamicState::VIEWPORT, vk::DynamicState::SCISSOR]);\n\n\n\n let pipeline_layout = unsafe {\n\n device.create_pipeline_layout(\n\n &vk::PipelineLayoutCreateInfo::builder()\n\n .set_layouts(descriptor_layouts)\n\n .push_constant_ranges(&[]),\n\n None,\n\n )?\n\n };\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 77, "score": 36229.95917475244 }, { "content": " .rasterization_samples(vk::SampleCountFlags::TYPE_1)\n\n .min_sample_shading(1.0)\n\n .alpha_to_coverage_enable(false)\n\n .alpha_to_one_enable(false);\n\n\n\n let depth_stencil_info = vk::PipelineDepthStencilStateCreateInfo::builder()\n\n .depth_test_enable(true)\n\n .depth_write_enable(true)\n\n .depth_compare_op(vk::CompareOp::LESS)\n\n .depth_bounds_test_enable(false)\n\n .min_depth_bounds(0.0)\n\n .max_depth_bounds(1.0)\n\n .stencil_test_enable(false);\n\n\n\n let color_blend_info = vk::PipelineColorBlendAttachmentState::builder()\n\n .color_write_mask(vk::ColorComponentFlags::all())\n\n .blend_enable(false);\n\n\n\n // Dynamic state\n\n let viewport_state_info = vk::PipelineViewportStateCreateInfo::builder()\n", "file_path": "vk_tracer/src/render/forward.rs", "rank": 78, "score": 36224.49170972117 }, { "content": "\n\n let vertex_input_info = vk::PipelineVertexInputStateCreateInfo::builder()\n\n .vertex_binding_descriptions(mesh.vertex_desc.1)\n\n .vertex_attribute_descriptions(mesh.vertex_desc.2);\n\n\n\n let input_assembly_info = vk::PipelineInputAssemblyStateCreateInfo::builder()\n\n .topology(vk::PrimitiveTopology::TRIANGLE_LIST)\n\n .primitive_restart_enable(false);\n\n\n\n let raster_state_info = vk::PipelineRasterizationStateCreateInfo::builder()\n\n .depth_clamp_enable(false)\n\n .rasterizer_discard_enable(false)\n\n .polygon_mode(vk::PolygonMode::FILL)\n\n .cull_mode(vk::CullModeFlags::BACK)\n\n .front_face(vk::FrontFace::CLOCKWISE)\n\n .depth_bias_enable(false)\n\n .line_width(1.0);\n\n\n\n let msaa_info = vk::PipelineMultisampleStateCreateInfo::builder()\n\n .sample_shading_enable(false)\n", "file_path": "vk_tracer/src/render/forward.rs", 
"rank": 79, "score": 36223.23384621622 }, { "content": "trait VkRecordable {\n\n /// Only record bind and draw commands, no begin or end !\n\n unsafe fn record_commands(\n\n &self,\n\n app: &VkTracerApp,\n\n viewport: vk::Extent2D,\n\n commands: vk::CommandBuffer,\n\n ) -> Result<()>;\n\n}\n\n\n\nimpl VkTracerApp {\n\n pub fn render_and_present(\n\n &mut self,\n\n renderer: RendererHandle,\n\n swapchain: SwapchainHandle,\n\n render_target_index: u32,\n\n ) -> Result<bool> {\n\n let renderer = storage_access!(self.renderer_storage, renderer, HandleType::Renderer);\n\n let swapchain = storage_access!(self.swapchain_storage, swapchain, HandleType::Swapchain);\n\n\n", "file_path": "vk_tracer/src/render.rs", "rank": 80, "score": 36210.25422517279 }, { "content": "use std::{collections::HashSet, ffi::CStr};\n\n\n\nuse ash::{version::InstanceV1_0, vk};\n\nuse log::{debug, error, info};\n\n\n\nuse crate::{\n\n errors::{Result, VkTracerError},\n\n present::choose_surface_format,\n\n setup::AdapterRequirements,\n\n utils::cstr_to_str,\n\n VULKAN_VERSION, VULKAN_VERSION_STR,\n\n};\n\n\n\n#[derive(Debug)]\n\npub struct PhysicalDeviceInfo {\n\n pub handle: vk::PhysicalDevice,\n\n pub properties: vk::PhysicalDeviceProperties,\n\n pub extensions: Vec<vk::ExtensionProperties>,\n\n pub features: vk::PhysicalDeviceFeatures,\n\n pub queue_families: Vec<vk::QueueFamilyProperties>,\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 81, "score": 34944.061508165665 }, { "content": " // Fallback to using the graphics queue\n\n .unwrap_or_else(|| graphics_queue.clone());\n\n\n\n if transfer_queue.index == graphics_queue.index {\n\n debug!(\" - Using the graphics queue for transfer operations\");\n\n } else {\n\n debug!(\n\n \" - Using dedicated transfer queue (ID: {}) (x{}) [{:?}]\",\n\n transfer_queue.index,\n\n transfer_queue.properties.queue_count,\n\n transfer_queue.properties.queue_flags\n\n );\n\n }\n\n\n\n // Score additional properties\n\n\n\n let mut score = 0u32;\n\n // Prefer dedicated hardware\n\n if info.properties.device_type == vk::PhysicalDeviceType::DISCRETE_GPU {\n\n score += 1000;\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 82, "score": 34937.65232042179 }, { "content": " pub memory_properties: vk::PhysicalDeviceMemoryProperties,\n\n\n\n pub surface_capabilities: Option<vk::SurfaceCapabilitiesKHR>,\n\n pub surface_formats: Option<Vec<vk::SurfaceFormatKHR>>,\n\n pub surface_format_properties: Option<Vec<vk::FormatProperties>>,\n\n pub surface_present_modes: Option<Vec<vk::PresentModeKHR>>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct QueueFamilyInfo {\n\n pub index: u32,\n\n pub properties: vk::QueueFamilyProperties,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct AdapterInfo {\n\n pub physical_device_info: PhysicalDeviceInfo,\n\n pub graphics_queue: QueueFamilyInfo,\n\n pub transfer_queue: QueueFamilyInfo,\n\n pub score: u32,\n\n}\n\n\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 83, "score": 34934.996160920906 }, { "content": " if device_version >= VULKAN_VERSION {\n\n debug!(\" Detected Vulkan {} [OK]\", device_version_str);\n\n } else {\n\n error!(\n\n \" Vulkan {} required but only version {} found [FATAL]\",\n\n VULKAN_VERSION_STR, device_version_str\n\n );\n\n return None;\n\n }\n\n }\n\n\n\n // *** Check extensions\n\n\n\n {\n\n debug!(\" Checking extensions...\");\n\n let mut missing_extensions = requirements\n\n .required_extensions\n\n .iter()\n\n .cloned()\n\n .collect::<HashSet<_>>();\n", 
"file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 84, "score": 34932.981139515134 }, { "content": " }\n\n\n\n // Count device memory\n\n let physical_device_memory_size = info\n\n .memory_properties\n\n .memory_heaps\n\n .iter()\n\n .take(info.memory_properties.memory_heap_count as usize)\n\n .filter(|heap| heap.flags.contains(vk::MemoryHeapFlags::DEVICE_LOCAL))\n\n .map(|heap| heap.size)\n\n .sum::<vk::DeviceSize>();\n\n // Count gigabytes of memory\n\n score += (physical_device_memory_size / 1_073_741_800) as u32;\n\n\n\n debug!(\" Additional score of {}\", score);\n\n\n\n Some(AdapterInfo {\n\n physical_device_info: info,\n\n graphics_queue,\n\n transfer_queue,\n\n score,\n\n })\n\n}\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 85, "score": 34932.556890452746 }, { "content": " if let Some(res) = &best_device {\n\n info!(\n\n \"Choosed physical device '{}'\",\n\n cstr_to_str(res.physical_device_info.properties.device_name.as_ptr())\n\n )\n\n }\n\n\n\n best_device.ok_or(VkTracerError::NoSuitableAdapterError)\n\n}\n\n\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 86, "score": 34931.128713003716 }, { "content": " }\n\n\n\n // *** Check queue families\n\n debug!(\" Checking queue families...\");\n\n\n\n // Graphics\n\n\n\n let graphics_queue = info\n\n .queue_families\n\n .iter()\n\n .enumerate()\n\n .find(|(_, queue)| queue.queue_flags.contains(vk::QueueFlags::GRAPHICS))\n\n .map(|(index, &properties)| QueueFamilyInfo {\n\n index: index as u32,\n\n properties,\n\n });\n\n\n\n if graphics_queue.is_none() {\n\n debug!(\" - No graphics queue found !\");\n\n return None;\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 87, "score": 34929.72415121488 }, { "content": " error!(\" - Graphics queue doesn't support presentation, that's a problem.\");\n\n return None;\n\n }\n\n }\n\n\n\n // Transfer\n\n\n\n let transfer_queue = info\n\n .queue_families\n\n .iter()\n\n .enumerate()\n\n // Try to find a queue exclusively for transfers\n\n .find(|(_, queue)| {\n\n queue.queue_flags.contains(vk::QueueFlags::TRANSFER)\n\n && !queue.queue_flags.contains(vk::QueueFlags::GRAPHICS)\n\n })\n\n .map(|(index, &properties)| QueueFamilyInfo {\n\n index: index as u32,\n\n properties,\n\n })\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 88, "score": 34929.25835532584 }, { "content": " for extension in info.extensions.iter() {\n\n let name = unsafe { CStr::from_ptr(extension.extension_name.as_ptr()) };\n\n if missing_extensions.remove(name) {\n\n debug!(\" - {} [OK]\", name.to_str().unwrap());\n\n }\n\n }\n\n\n\n if !missing_extensions.is_empty() {\n\n for missing in missing_extensions {\n\n debug!(\" - {} [NOT FOUND]\", missing.to_str().unwrap());\n\n }\n\n return None;\n\n }\n\n }\n\n\n\n // *** Check swapchain formats\n\n debug!(\" Checking swapchain formats...\");\n\n\n\n if let (Some(surface_formats), Some(surface_format_properties)) =\n\n (&info.surface_formats, &info.surface_format_properties)\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 89, "score": 34928.6660449083 }, { "content": " }\n\n let graphics_queue = graphics_queue.unwrap();\n\n debug!(\n\n \" - Graphics queue found (ID: {}) (x{}) [{:?}]\",\n\n graphics_queue.index,\n\n graphics_queue.properties.queue_count,\n\n graphics_queue.properties.queue_flags,\n\n );\n\n\n\n // Present\n\n\n\n // Juste check if the graphics queue supports it and fail otherwise\n\n if let Some((loader, 
surface)) = requirements.compatible_surface.as_ref() {\n\n let support = unsafe {\n\n loader\n\n .get_physical_device_surface_support(info.handle, graphics_queue.index, *surface)\n\n .unwrap()\n\n };\n\n\n\n if !support {\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 90, "score": 34928.39279066361 }, { "content": " features,\n\n queue_families,\n\n memory_properties,\n\n surface_capabilities,\n\n surface_formats,\n\n surface_format_properties,\n\n surface_present_modes,\n\n }\n\n })\n\n .filter_map(|device_info| {\n\n if let Some(res) = process_physical_device(device_info, requirements) {\n\n info!(\" => Device is eligible\");\n\n Some(res)\n\n } else {\n\n info!(\" => Device not suitable\");\n\n None\n\n }\n\n })\n\n .max_by(|left, right| Ord::cmp(&left.score, &right.score));\n\n\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 91, "score": 34928.052358236804 }, { "content": " .map(|format| {\n\n instance\n\n .get_physical_device_format_properties(physical_device, format.format)\n\n })\n\n .collect::<Vec<_>>()\n\n });\n\n let surface_present_modes =\n\n requirements\n\n .compatible_surface\n\n .as_ref()\n\n .map(|(loader, surface)| {\n\n loader\n\n .get_physical_device_surface_present_modes(physical_device, *surface)\n\n .expect(\"Failed to get surface present modes\")\n\n });\n\n\n\n PhysicalDeviceInfo {\n\n handle: physical_device,\n\n properties,\n\n extensions,\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 92, "score": 34927.87587178443 }, { "content": " {\n\n debug!(\" Available formats:\");\n\n for format in surface_formats.iter() {\n\n debug!(\n\n \" - Format {:?} / Color space {:?}\",\n\n format.format, format.color_space\n\n );\n\n }\n\n\n\n if let Some(format) =\n\n choose_surface_format(&surface_formats, &surface_format_properties, requirements)\n\n {\n\n debug!(\" - Format {:?} [OK]\", format.format);\n\n debug!(\" - Color space {:?} [OK]\", format.color_space);\n\n } else {\n\n debug!(\" - Can't find the required color space and format !\");\n\n return None;\n\n }\n\n } else {\n\n debug!(\" No surface provided, skipping.\")\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 93, "score": 34927.20603366864 }, { "content": " .compatible_surface\n\n .as_ref()\n\n .map(|(loader, surface)| {\n\n loader\n\n .get_physical_device_surface_capabilities(physical_device, *surface)\n\n .expect(\"Failed to get surface capabilities\")\n\n });\n\n\n\n let surface_formats =\n\n requirements\n\n .compatible_surface\n\n .as_ref()\n\n .map(|(loader, surface)| {\n\n loader\n\n .get_physical_device_surface_formats(physical_device, *surface)\n\n .expect(\"Faild to get surface formats\")\n\n });\n\n let surface_format_properties = surface_formats.as_ref().map(|surface_formats| {\n\n surface_formats\n\n .iter()\n", "file_path": "vk_tracer/src/setup/physical_device_selection.rs", "rank": 94, "score": 34925.1248095031 }, { "content": "def TypeToColor(sType, iUsage):\n\n if sType == 'FREE':\n\n return 220, 220, 220, 255\n\n elif sType == 'BUFFER':\n\n if (iUsage & 0x1C0) != 0: # INDIRECT_BUFFER | VERTEX_BUFFER | INDEX_BUFFER\n\n return 255, 148, 148, 255 # Red\n\n elif (iUsage & 0x28) != 0: # STORAGE_BUFFER | STORAGE_TEXEL_BUFFER\n\n return 255, 187, 121, 255 # Orange\n\n elif (iUsage & 0x14) != 0: # UNIFORM_BUFFER | UNIFORM_TEXEL_BUFFER\n\n return 255, 255, 0, 255 # Yellow\n\n else:\n\n return 255, 255, 165, 255 # Light yellow\n\n elif sType == 'IMAGE_OPTIMAL':\n\n if (iUsage & 0x20) != 0: 
# DEPTH_STENCIL_ATTACHMENT\n\n return 246, 128, 255, 255 # Pink\n\n elif (iUsage & 0xD0) != 0: # INPUT_ATTACHMENT | TRANSIENT_ATTACHMENT | COLOR_ATTACHMENT\n\n return 179, 179, 255, 255 # Blue\n\n elif (iUsage & 0x4) != 0: # SAMPLED\n\n return 0, 255, 255, 255 # Aqua\n\n else:\n\n return 183, 255, 255, 255 # Light aqua\n\n elif sType == 'IMAGE_LINEAR':\n\n return 0, 255, 0, 255 # Green\n\n elif sType == 'IMAGE_UNKNOWN':\n\n return 0, 255, 164, 255 # Green/aqua\n\n elif sType == 'UNKNOWN':\n\n return 175, 175, 175, 255 # Gray\n\n assert False\n", "file_path": "tools/VmaDumpVis.py", "rank": 95, "score": 31691.568943442344 }, { "content": "def GetDataForMemoryType(iMemTypeIndex):\n\n global data\n\n if iMemTypeIndex in data:\n\n return data[iMemTypeIndex]\n\n else:\n\n newMemTypeData = {'DedicatedAllocations':[], 'DefaultPoolBlocks':[], 'CustomPools':{}}\n\n data[iMemTypeIndex] = newMemTypeData\n", "file_path": "tools/VmaDumpVis.py", "rank": 96, "score": 30151.741724811684 }, { "content": "#\n\n# Copyright (c) 2018-2020 Advanced Micro Devices, Inc. All rights reserved.\n\n#\n\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n\n# of this software and associated documentation files (the \"Software\"), to deal\n\n# in the Software without restriction, including without limitation the rights\n\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\n# copies of the Software, and to permit persons to whom the Software is\n\n# furnished to do so, subject to the following conditions:\n\n#\n\n# The above copyright notice and this permission notice shall be included in\n\n# all copies or substantial portions of the Software.\n\n#\n\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n\n# THE SOFTWARE.\n\n#\n\n\n\nimport argparse\n\nimport json\n\nfrom PIL import Image, ImageDraw, ImageFont\n\n\n\n\n\nPROGRAM_VERSION = 'VMA Dump Visualization 2.0.1'\n\nIMG_SIZE_X = 1200\n\nIMG_MARGIN = 8\n\nFONT_SIZE = 10\n\nMAP_SIZE = 24\n\nCOLOR_TEXT_H1 = (0, 0, 0, 255)\n\nCOLOR_TEXT_H2 = (150, 150, 150, 255)\n\nCOLOR_OUTLINE = (155, 155, 155, 255)\n\nCOLOR_OUTLINE_HARD = (0, 0, 0, 255)\n\nCOLOR_GRID_LINE = (224, 224, 224, 255)\n\n\n\n\n\nargParser = argparse.ArgumentParser(description='Visualization of Vulkan Memory Allocator JSON dump.')\n\nargParser.add_argument('DumpFile', type=argparse.FileType(mode='r', encoding='UTF-8'), help='Path to source JSON file with memory dump created by Vulkan Memory Allocator library')\n\nargParser.add_argument('-v', '--version', action='version', version=PROGRAM_VERSION)\n\nargParser.add_argument('-o', '--output', required=True, help='Path to destination image file (e.g. 
PNG)')\n\nargs = argParser.parse_args()\n\n\n\ndata = {}\n\n\n\n\n\ndef ProcessBlock(dstBlockList, iBlockId, objBlock, sAlgorithm):\n\n iBlockSize = int(objBlock['TotalBytes'])\n\n arrSuballocs = objBlock['Suballocations']\n\n dstBlockObj = {'ID': iBlockId, 'Size':iBlockSize, 'Suballocations':[]}\n\n dstBlockObj['Algorithm'] = sAlgorithm\n\n for objSuballoc in arrSuballocs:\n\n dstBlockObj['Suballocations'].append((objSuballoc['Type'], int(objSuballoc['Size']), int(objSuballoc['Usage']) if ('Usage' in objSuballoc) else 0))\n\n dstBlockList.append(dstBlockObj)\n\n\n\n\n\ndef GetDataForMemoryType(iMemTypeIndex):\n\n global data\n\n if iMemTypeIndex in data:\n\n return data[iMemTypeIndex]\n\n else:\n\n newMemTypeData = {'DedicatedAllocations':[], 'DefaultPoolBlocks':[], 'CustomPools':{}}\n\n data[iMemTypeIndex] = newMemTypeData\n\n return newMemTypeData\n\n\n\n\n\ndef IsDataEmpty():\n\n global data\n\n for dictMemType in data.values():\n\n if 'DedicatedAllocations' in dictMemType and len(dictMemType['DedicatedAllocations']) > 0:\n\n return False\n\n if 'DefaultPoolBlocks' in dictMemType and len(dictMemType['DefaultPoolBlocks']) > 0:\n\n return False\n\n if 'CustomPools' in dictMemType:\n\n for lBlockList in dictMemType['CustomPools'].values():\n\n if len(lBlockList) > 0:\n\n return False\n\n return True\n\n\n\n\n\n# Returns tuple:\n\n# [0] image height : integer\n\n# [1] pixels per byte : float\n\ndef CalcParams():\n\n global data\n\n iImgSizeY = IMG_MARGIN\n\n iImgSizeY += FONT_SIZE + IMG_MARGIN # Grid lines legend - sizes\n\n iMaxBlockSize = 0\n\n for dictMemType in data.values():\n\n iImgSizeY += IMG_MARGIN + FONT_SIZE\n\n lDedicatedAllocations = dictMemType['DedicatedAllocations']\n\n iImgSizeY += len(lDedicatedAllocations) * (IMG_MARGIN * 2 + FONT_SIZE + MAP_SIZE)\n\n for tDedicatedAlloc in lDedicatedAllocations:\n\n iMaxBlockSize = max(iMaxBlockSize, tDedicatedAlloc[1])\n\n lDefaultPoolBlocks = dictMemType['DefaultPoolBlocks']\n\n iImgSizeY += len(lDefaultPoolBlocks) * (IMG_MARGIN * 2 + FONT_SIZE + MAP_SIZE)\n\n for objBlock in lDefaultPoolBlocks:\n\n iMaxBlockSize = max(iMaxBlockSize, objBlock['Size'])\n\n dCustomPools = dictMemType['CustomPools']\n\n for lBlocks in dCustomPools.values():\n\n iImgSizeY += len(lBlocks) * (IMG_MARGIN * 2 + FONT_SIZE + MAP_SIZE)\n\n for objBlock in lBlocks:\n\n iMaxBlockSize = max(iMaxBlockSize, objBlock['Size'])\n\n fPixelsPerByte = (IMG_SIZE_X - IMG_MARGIN * 2) / float(iMaxBlockSize)\n\n return iImgSizeY, fPixelsPerByte\n\n\n\n\n\ndef TypeToColor(sType, iUsage):\n\n if sType == 'FREE':\n\n return 220, 220, 220, 255\n\n elif sType == 'BUFFER':\n\n if (iUsage & 0x1C0) != 0: # INDIRECT_BUFFER | VERTEX_BUFFER | INDEX_BUFFER\n\n return 255, 148, 148, 255 # Red\n\n elif (iUsage & 0x28) != 0: # STORAGE_BUFFER | STORAGE_TEXEL_BUFFER\n\n return 255, 187, 121, 255 # Orange\n\n elif (iUsage & 0x14) != 0: # UNIFORM_BUFFER | UNIFORM_TEXEL_BUFFER\n\n return 255, 255, 0, 255 # Yellow\n\n else:\n\n return 255, 255, 165, 255 # Light yellow\n\n elif sType == 'IMAGE_OPTIMAL':\n\n if (iUsage & 0x20) != 0: # DEPTH_STENCIL_ATTACHMENT\n\n return 246, 128, 255, 255 # Pink\n\n elif (iUsage & 0xD0) != 0: # INPUT_ATTACHMENT | TRANSIENT_ATTACHMENT | COLOR_ATTACHMENT\n\n return 179, 179, 255, 255 # Blue\n\n elif (iUsage & 0x4) != 0: # SAMPLED\n\n return 0, 255, 255, 255 # Aqua\n\n else:\n\n return 183, 255, 255, 255 # Light aqua\n\n elif sType == 'IMAGE_LINEAR':\n\n return 0, 255, 0, 255 # Green\n\n elif sType == 'IMAGE_UNKNOWN':\n\n return 0, 255, 164, 255 # Green/aqua\n\n elif 
sType == 'UNKNOWN':\n\n return 175, 175, 175, 255 # Gray\n\n assert False\n\n return 0, 0, 0, 255\n\n\n\n\n\ndef DrawDedicatedAllocationBlock(draw, y, tDedicatedAlloc): \n\n global fPixelsPerByte\n\n iSizeBytes = tDedicatedAlloc[1]\n\n iSizePixels = int(iSizeBytes * fPixelsPerByte)\n\n draw.rectangle([IMG_MARGIN, y, IMG_MARGIN + iSizePixels, y + MAP_SIZE], fill=TypeToColor(tDedicatedAlloc[0], tDedicatedAlloc[2]), outline=COLOR_OUTLINE)\n\n\n\n\n\ndef DrawBlock(draw, y, objBlock):\n\n global fPixelsPerByte\n\n iSizeBytes = objBlock['Size']\n\n iSizePixels = int(iSizeBytes * fPixelsPerByte)\n\n draw.rectangle([IMG_MARGIN, y, IMG_MARGIN + iSizePixels, y + MAP_SIZE], fill=TypeToColor('FREE', 0), outline=None)\n\n iByte = 0\n\n iX = 0\n\n iLastHardLineX = -1\n\n for tSuballoc in objBlock['Suballocations']:\n\n sType = tSuballoc[0]\n\n iByteEnd = iByte + tSuballoc[1]\n\n iXEnd = int(iByteEnd * fPixelsPerByte)\n\n if sType != 'FREE':\n\n if iXEnd > iX + 1:\n\n iUsage = tSuballoc[2]\n\n draw.rectangle([IMG_MARGIN + iX, y, IMG_MARGIN + iXEnd, y + MAP_SIZE], fill=TypeToColor(sType, iUsage), outline=COLOR_OUTLINE)\n\n # Hard line was been overwritten by rectangle outline: redraw it.\n\n if iLastHardLineX == iX:\n\n draw.line([IMG_MARGIN + iX, y, IMG_MARGIN + iX, y + MAP_SIZE], fill=COLOR_OUTLINE_HARD)\n\n else:\n\n draw.line([IMG_MARGIN + iX, y, IMG_MARGIN + iX, y + MAP_SIZE], fill=COLOR_OUTLINE_HARD)\n\n iLastHardLineX = iX\n\n iByte = iByteEnd\n\n iX = iXEnd\n\n\n\n\n\ndef BytesToStr(iBytes):\n\n if iBytes < 1024:\n\n return \"%d B\" % iBytes\n\n iBytes /= 1024\n\n if iBytes < 1024:\n\n return \"%d KiB\" % iBytes\n\n iBytes /= 1024\n\n if iBytes < 1024:\n\n return \"%d MiB\" % iBytes\n\n iBytes /= 1024\n\n return \"%d GiB\" % iBytes\n\n\n\n\n\njsonSrc = json.load(args.DumpFile)\n\nif 'DedicatedAllocations' in jsonSrc:\n\n for tType in jsonSrc['DedicatedAllocations'].items():\n\n sType = tType[0]\n\n assert sType[:5] == 'Type '\n\n iType = int(sType[5:])\n\n typeData = GetDataForMemoryType(iType)\n\n for objAlloc in tType[1]:\n\n typeData['DedicatedAllocations'].append((objAlloc['Type'], int(objAlloc['Size']), int(objAlloc['Usage']) if ('Usage' in objAlloc) else 0))\n\nif 'DefaultPools' in jsonSrc:\n\n for tType in jsonSrc['DefaultPools'].items():\n\n sType = tType[0]\n\n assert sType[:5] == 'Type '\n\n iType = int(sType[5:])\n\n typeData = GetDataForMemoryType(iType)\n\n for sBlockId, objBlock in tType[1]['Blocks'].items():\n\n ProcessBlock(typeData['DefaultPoolBlocks'], int(sBlockId), objBlock, '')\n\nif 'Pools' in jsonSrc:\n\n objPools = jsonSrc['Pools']\n\n for sPoolId, objPool in objPools.items():\n\n iType = int(objPool['MemoryTypeIndex'])\n\n typeData = GetDataForMemoryType(iType)\n\n objBlocks = objPool['Blocks']\n\n sAlgorithm = objPool.get('Algorithm', '')\n\n sName = objPool.get('Name', None)\n\n if sName:\n\n sFullName = sPoolId + ' \"' + sName + '\"'\n\n else:\n\n sFullName = sPoolId\n\n dstBlockArray = []\n\n typeData['CustomPools'][sFullName] = dstBlockArray\n\n for sBlockId, objBlock in objBlocks.items():\n\n ProcessBlock(dstBlockArray, int(sBlockId), objBlock, sAlgorithm)\n\n\n\nif IsDataEmpty():\n\n print(\"There is nothing to put on the image. 
Please make sure you generated the stats string with detailed map enabled.\")\n\n exit(1)\n\n\n\niImgSizeY, fPixelsPerByte = CalcParams()\n\n\n\nimg = Image.new('RGB', (IMG_SIZE_X, iImgSizeY), 'white')\n\ndraw = ImageDraw.Draw(img)\n\n\n\ntry:\n\n font = ImageFont.truetype('segoeuib.ttf')\n\nexcept:\n\n font = ImageFont.load_default()\n\n\n\ny = IMG_MARGIN\n\n\n\n# Draw grid lines\n\niBytesBetweenGridLines = 32\n\nwhile iBytesBetweenGridLines * fPixelsPerByte < 64:\n\n iBytesBetweenGridLines *= 2\n\niByte = 0\n\nTEXT_MARGIN = 4\n\nwhile True:\n\n iX = int(iByte * fPixelsPerByte)\n\n if iX > IMG_SIZE_X - 2 * IMG_MARGIN:\n\n break\n\n draw.line([iX + IMG_MARGIN, 0, iX + IMG_MARGIN, iImgSizeY], fill=COLOR_GRID_LINE)\n\n if iByte == 0:\n\n draw.text((iX + IMG_MARGIN + TEXT_MARGIN, y), \"0\", fill=COLOR_TEXT_H2, font=font)\n\n else:\n\n text = BytesToStr(iByte)\n\n textSize = draw.textsize(text, font=font)\n\n draw.text((iX + IMG_MARGIN - textSize[0] - TEXT_MARGIN, y), text, fill=COLOR_TEXT_H2, font=font)\n\n iByte += iBytesBetweenGridLines\n\ny += FONT_SIZE + IMG_MARGIN\n\n\n\n# Draw main content\n\nfor iMemTypeIndex in sorted(data.keys()):\n\n dictMemType = data[iMemTypeIndex]\n\n draw.text((IMG_MARGIN, y), \"Memory type %d\" % iMemTypeIndex, fill=COLOR_TEXT_H1, font=font)\n\n y += FONT_SIZE + IMG_MARGIN\n\n index = 0\n\n for tDedicatedAlloc in dictMemType['DedicatedAllocations']:\n\n draw.text((IMG_MARGIN, y), \"Dedicated allocation %d\" % index, fill=COLOR_TEXT_H2, font=font)\n\n y += FONT_SIZE + IMG_MARGIN\n\n DrawDedicatedAllocationBlock(draw, y, tDedicatedAlloc)\n\n y += MAP_SIZE + IMG_MARGIN\n\n index += 1\n\n for objBlock in dictMemType['DefaultPoolBlocks']:\n\n draw.text((IMG_MARGIN, y), \"Default pool block %d\" % objBlock['ID'], fill=COLOR_TEXT_H2, font=font)\n\n y += FONT_SIZE + IMG_MARGIN\n\n DrawBlock(draw, y, objBlock)\n\n y += MAP_SIZE + IMG_MARGIN\n\n index = 0\n\n for sPoolName, listPool in dictMemType['CustomPools'].items():\n\n for objBlock in listPool:\n\n if 'Algorithm' in objBlock and objBlock['Algorithm']:\n\n sAlgorithm = ' (Algorithm: %s)' % (objBlock['Algorithm'])\n\n else:\n\n sAlgorithm = ''\n\n draw.text((IMG_MARGIN, y), \"Custom pool %s%s block %d\" % (sPoolName, sAlgorithm, objBlock['ID']), fill=COLOR_TEXT_H2, font=font)\n\n y += FONT_SIZE + IMG_MARGIN\n\n DrawBlock(draw, y, objBlock)\n\n y += MAP_SIZE + IMG_MARGIN\n\n index += 1\n\ndel draw\n\nimg.save(args.output)\n\n\n\n\"\"\"\n\nMain data structure - variable `data` - is a dictionary. Key is integer - memory type index. Value is dictionary of:\n\n- Fixed key 'DedicatedAllocations'. Value is list of tuples, each containing:\n\n - [0]: Type : string\n\n - [1]: Size : integer\n\n - [2]: Usage : integer (0 if unknown)\n\n- Fixed key 'DefaultPoolBlocks'. Value is list of objects, each containing dictionary with:\n\n - Fixed key 'ID'. Value is int.\n\n - Fixed key 'Size'. Value is int.\n\n - Fixed key 'Suballocations'. Value is list of tuples as above.\n\n- Fixed key 'CustomPools'. Value is dictionary.\n\n - Key is string with pool ID/name. Value is list of objects representing memory blocks, each containing dictionary with:\n\n - Fixed key 'ID'. Value is int.\n\n - Fixed key 'Size'. Value is int.\n\n - Fixed key 'Algorithm'. Optional. Value is string.\n\n - Fixed key 'Suballocations'. 
Value is list of tuples as above.\n\n\"\"\"\n", "file_path": "tools/VmaDumpVis.py", "rank": 97, "score": 16500.514253388606 }, { "content": " pub(crate) forward_pipeline_storage: SlotMap<ForwardPipelineHandle, ForwardPipeline>,\n\n pub(crate) renderer_storage: SlotMap<RendererHandle, Renderer>,\n\n pub(crate) descriptor_pool_storage: SlotMap<DescriptorPoolHandle, DescriptorPool>,\n\n pub(crate) descriptor_set_storage: SlotMap<DescriptorSetHandle, DescriptorSet>,\n\n}\n\n\n\nimpl Drop for VkTracerApp {\n\n fn drop(&mut self) {\n\n let device = &self.device;\n\n let graphics_pool = self.command_pools.get(&QueueType::Graphics).unwrap();\n\n let transfer_pool = self.command_pools.get(&QueueType::Transfer).unwrap();\n\n\n\n unsafe {\n\n for (_, pool) in &self.descriptor_pool_storage {\n\n device.destroy_descriptor_pool(pool.handle, None);\n\n }\n\n\n\n for (_, set) in &self.descriptor_set_storage {\n\n device.destroy_descriptor_set_layout(set.layout, None);\n\n }\n", "file_path": "vk_tracer/src/lib.rs", "rank": 98, "score": 4385.409837395754 }, { "content": " errors::Result, glsl_layout::Uniform, mem::DescriptorSetBuilder, mesh::MeshIndex,\n\n render::SubpassBuilder, setup::VkTracerExtensions, ForwardPipelineHandle, MeshHandle,\n\n RenderPlanHandle, RenderTargetHandle, RendererHandle, SwapchainHandle, VkTracerApp,\n\n };\n\n pub use ash::vk::{\n\n AccessFlags, PipelineStageFlags, SubpassDependency2 as SubpassDependency, SUBPASS_EXTERNAL,\n\n };\n\n}\n\n\n\nnew_key_type! {\n\n // Higher level objects\n\n pub struct MeshHandle;\n\n pub struct UboHandle;\n\n\n\n pub struct SwapchainHandle;\n\n pub struct RenderPlanHandle;\n\n pub struct RenderTargetHandle;\n\n pub struct ForwardPipelineHandle;\n\n pub struct RendererHandle;\n\n pub struct DescriptorPoolHandle;\n", "file_path": "vk_tracer/src/lib.rs", "rank": 99, "score": 4380.707634656461 } ]
Rust
src/main.rs
gourlaysama/girouette
c82e9f2b4eb8b684e46c54f0f11ab1bee6034141
use anyhow::{anyhow, Context, Result}; use env_logger::{Builder, Env}; use girouette::{ cli::ProgramOptions, config::ProgramConfig, show, Girouette, Location, WeatherClient, }; use log::*; use std::{env, time::Duration}; use structopt::StructOpt; use termcolor::*; use tokio::runtime; static DEFAULT_CONFIG: &str = include_str!("../config.yml"); const DEFAULT_TIMEOUT_SEC: u64 = 10; const LOG_ENV_VAR: &str = "GIROUETTE_LOG"; fn main() -> Result<(), Box<dyn std::error::Error>> { let options = ProgramOptions::from_args(); let mut b = Builder::default(); b.format_timestamp(None); b.filter_level(LevelFilter::Warn); b.parse_env(Env::from(LOG_ENV_VAR)); if let Some(level) = options.log_level_with_default(2) { b.filter_level(level); }; b.try_init()?; let rt = runtime::Builder::new_current_thread() .enable_all() .build()?; std::process::exit(match rt.block_on(run_async()) { Ok(()) => 0, Err(e) => { let causes = e.chain().skip(1); if causes.len() != 0 { if log_enabled!(Level::Info) { show!("Error: {}", e); for cause in e.chain().skip(1) { info!("cause: {}", cause); } } else { show!("Error: {}; rerun with '-v' for more information", e); } } else { show!("Error: {}", e); } 1 } }) } async fn run_async() -> Result<()> { let options_matches = ProgramOptions::clap().get_matches(); let options = ProgramOptions::from_clap(&options_matches); if options.version { let i = options_matches .index_of("version") .ok_or_else(|| anyhow!("should never happen: version set yet no version flag"))?; if std::env::args().nth(i).unwrap_or_default() == "-V" { print_version(false); } else { print_version(true); } return Ok(()); } if options.clean_cache { return WeatherClient::clean_cache(); } if options.print_default_config { print!("{}", DEFAULT_CONFIG); return Ok(()); } let conf = make_config(&options)?; let cache_length = match conf.cache { Some(c) => Some( humantime::parse_duration(&c) .context("failed to parse cache length: not a valid duration")?, ), None => None, }; let timeout = match conf.timeout { Some(c) => humantime::parse_duration(&c) .context("failed to parse timeout: not a valid duration")?, None => Duration::from_secs(DEFAULT_TIMEOUT_SEC), }; let location = match conf.location { Some(loc) => loc, None => find_location(timeout).await?, }; let key = conf.key.clone().ok_or_else(|| { anyhow!( "no API key for OpenWeather was found you can get a key over at https://openweathermap.org/appid", ) })?; let lib = Girouette::new( conf.display_config, cache_length, timeout, key, conf.language, ); let mut stdout = StandardStream::stdout(ColorChoice::Auto); lib.display(&location, &mut stdout).await } #[cfg(feature = "geoclue")] async fn find_location(timeout: Duration) -> Result<Location> { info!("no location to query, trying geoclue"); girouette::geoclue::get_location(timeout) .await .map_err(|e| { e.context("geoclue couldn't report your location; use `-l/--location' argument`") }) } #[cfg(not(feature = "geoclue"))] async fn find_location(_timeout: Duration) -> Result<Location> { use anyhow::bail; info!("no location to query, trying geoclue"); bail!("geolocalization unsupported: set a location with '-l/--location' or in the config file") } fn make_config(options: &ProgramOptions) -> Result<ProgramConfig> { let mut empty = false; let mut conf = config::Config::default(); if let Some(path) = &options.config { debug!("looking for config file '{}'", path.display()); conf.merge(config::File::from(path.as_ref()))?; info!("using config from '{}'", path.canonicalize()?.display()); } else if let Some(p) = WeatherClient::directories() 
{ let f = p.config_dir().join("config.yml"); debug!("looking for config file '{}'", f.display()); if f.exists() { info!("using config from '{}'", f.canonicalize()?.display()); conf.merge(config::File::from(f))?; } else { empty = true; } }; if empty { warn!("no config file found, using fallback"); conf.merge(config::File::from_str( DEFAULT_CONFIG, config::FileFormat::Yaml, ))?; }; fn set_conf_from_options( conf: &mut config::Config, option: &Option<String>, key: &str, ) -> Result<()> { if let Some(value) = option { conf.set(key, Some(value.as_str()))?; } Ok(()) } set_conf_from_options(&mut conf, &options.key, "key")?; set_conf_from_options(&mut conf, &options.location, "location")?; set_conf_from_options(&mut conf, &options.cache, "cache")?; set_conf_from_options(&mut conf, &options.language, "language")?; set_conf_from_options(&mut conf, &options.units, "units")?; if let Some(cache) = conf.get::<Option<String>>("cache").unwrap_or(None) { if cache == "none" { conf.set::<Option<String>>("cache", None)?; } } match conf.get::<Option<Location>>("location").unwrap_or(None) { Some(Location::Place(loc)) if loc == "auto" => { conf.set::<Option<String>>("location", None)?; } _ => {} }; let conf: ProgramConfig = conf.try_into()?; trace!("full config: {:#?}", conf); Ok(conf) } fn print_version(long: bool) { if long { println!( "{} {} ({})", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"), option_env!("BUILD_ID").unwrap_or("unknown") ); println!("rustc {} ({})", env!("BUILD_RUSTC"), env!("BUILD_INFO")); if let Some(p) = WeatherClient::directories() { println!( "\nconfig location: {}", p.config_dir().join("config.yml").display() ); println!("cache location: {}", p.cache_dir().display()); } if cfg!(feature = "geoclue") { println!("features: geoclue") } } else { println!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); } }
use anyhow::{anyhow, Context, Result}; use env_logger::{Builder, Env}; use girouette::{ cli::ProgramOptions, config::ProgramConfig, show, Girouette, Location, WeatherClient, }; use log::*; use std::{env, time::Duration}; use structopt::StructOpt; use termcolor::*; use tokio::runtime; static DEFAULT_CONFIG: &str = include_str!("../config.yml"); const DEFAULT_TIMEOUT_SEC: u64 = 10; const LOG_ENV_VAR: &str = "GIROUETTE_LOG"; fn main() -> Result<(), Box<dyn std::error::Error>> { let options = ProgramOptions::from_args(); let mut b = Builder::default(); b.format_timestamp(None); b.filter_level(LevelFilter::Warn); b.parse_env(Env::from(LOG_ENV_VAR)); if let Some(level) = options.log_level_with_default(2) { b.filter_level(level); }; b.try_init()?; let rt = runtime::Builder::new_current_thread() .enable_all() .build()?; std::process::exit(match rt.block_on(run_async()) { Ok(()) => 0, Err(e) => { let causes = e.chain().skip(1); if causes.len() != 0 { if log_enabled!(Level::Info) { show!("Error: {}", e); for cause in e.chain().skip(1) { info!("cause: {}", cause); } } else { show!("Error: {}; rerun with '-v' for more information", e); } } else { show!("Error: {}", e); } 1 } }) } async fn run_async() -> Result<()> { let options_matches = ProgramOptions::clap().get_matches(); let options = ProgramOptions::from_clap(&options_matches); if options.version { let i = options_matches .index_of("version") .ok_or_else(|| anyhow!("should never happen: version set yet no version flag"))?; if std::env::args().nth(i).unwrap_or_default() == "-V" { print_version(false); } else { print_version(true); } return Ok(()); } if options.clean_cache { return WeatherClient::clean_cache(); } if options.print_default_config { print!("{}", DEFAULT_CONFIG); return Ok(()); } let conf = make_config(&options)?; let cache_length = match conf.cache { Some(c) => Some( humantime::parse_duration(&c) .context("failed to parse cache length: not a valid duration")?, ), None => None, }; let timeout = match conf.timeout { Some(c) => humantime::parse_duration(&c) .context("failed to parse timeout: not a valid duration")?, None => Duration::from_secs(DEFAULT_TIMEOUT_SEC), }; let location = match conf.location { Some(loc) => loc, None => find_location(
#[cfg(feature = "geoclue")] async fn find_location(timeout: Duration) -> Result<Location> { info!("no location to query, trying geoclue"); girouette::geoclue::get_location(timeout) .await .map_err(|e| { e.context("geoclue couldn't report your location; use `-l/--location' argument`") }) } #[cfg(not(feature = "geoclue"))] async fn find_location(_timeout: Duration) -> Result<Location> { use anyhow::bail; info!("no location to query, trying geoclue"); bail!("geolocalization unsupported: set a location with '-l/--location' or in the config file") } fn make_config(options: &ProgramOptions) -> Result<ProgramConfig> { let mut empty = false; let mut conf = config::Config::default(); if let Some(path) = &options.config { debug!("looking for config file '{}'", path.display()); conf.merge(config::File::from(path.as_ref()))?; info!("using config from '{}'", path.canonicalize()?.display()); } else if let Some(p) = WeatherClient::directories() { let f = p.config_dir().join("config.yml"); debug!("looking for config file '{}'", f.display()); if f.exists() { info!("using config from '{}'", f.canonicalize()?.display()); conf.merge(config::File::from(f))?; } else { empty = true; } }; if empty { warn!("no config file found, using fallback"); conf.merge(config::File::from_str( DEFAULT_CONFIG, config::FileFormat::Yaml, ))?; }; fn set_conf_from_options( conf: &mut config::Config, option: &Option<String>, key: &str, ) -> Result<()> { if let Some(value) = option { conf.set(key, Some(value.as_str()))?; } Ok(()) } set_conf_from_options(&mut conf, &options.key, "key")?; set_conf_from_options(&mut conf, &options.location, "location")?; set_conf_from_options(&mut conf, &options.cache, "cache")?; set_conf_from_options(&mut conf, &options.language, "language")?; set_conf_from_options(&mut conf, &options.units, "units")?; if let Some(cache) = conf.get::<Option<String>>("cache").unwrap_or(None) { if cache == "none" { conf.set::<Option<String>>("cache", None)?; } } match conf.get::<Option<Location>>("location").unwrap_or(None) { Some(Location::Place(loc)) if loc == "auto" => { conf.set::<Option<String>>("location", None)?; } _ => {} }; let conf: ProgramConfig = conf.try_into()?; trace!("full config: {:#?}", conf); Ok(conf) } fn print_version(long: bool) { if long { println!( "{} {} ({})", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"), option_env!("BUILD_ID").unwrap_or("unknown") ); println!("rustc {} ({})", env!("BUILD_RUSTC"), env!("BUILD_INFO")); if let Some(p) = WeatherClient::directories() { println!( "\nconfig location: {}", p.config_dir().join("config.yml").display() ); println!("cache location: {}", p.cache_dir().display()); } if cfg!(feature = "geoclue") { println!("features: geoclue") } } else { println!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); } }
timeout).await?,
    };

    let key = conf.key.clone().ok_or_else(|| {
        anyhow!(
            "no API key for OpenWeather was found; you can get a key over at https://openweathermap.org/appid",
        )
    })?;

    let lib = Girouette::new(
        conf.display_config,
        cache_length,
        timeout,
        key,
        conf.language,
    );

    let mut stdout = StandardStream::stdout(ColorChoice::Auto);

    lib.display(&location, &mut stdout).await
}
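The `cache_length` and `timeout` values consumed by this completion come from `humantime::parse_duration` calls in the prefix. A quick illustration of what that parser accepts, using the sample strings the README cells further down mention ("1m", "2 days 6h", "5 sec"); nothing here is girouette-specific, only the `humantime` crate is assumed.

```rust
use std::time::Duration;

fn main() {
    // Sample values from the --cache documentation, plus one invalid string.
    for s in ["1m", "2 days 6h", "5 sec", "not a duration"] {
        match humantime::parse_duration(s) {
            Ok(d) => println!("{:>16} -> {:?}", s, d),
            Err(e) => println!("{:>16} -> rejected: {}", s, e),
        }
    }

    assert_eq!(
        humantime::parse_duration("10s").unwrap(),
        Duration::from_secs(10)
    );
}
```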
function_block-function_prefixed
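`function_block-function_prefixed` is this row's `strategy` label; it appears to describe how the split above was made (the held-out middle is a function body, with the function's opening left in the prefix). The toy sketch below shows the invariant the prefix/suffix/middle columns suggest; the short strings are made-up stand-ins for the real cells, not data from this row.

```rust
fn main() {
    // Tiny stand-ins for a row's prefix, middle and suffix cells.
    let prefix = "fn answer() -> i32 {\n    ";
    let middle = "42\n";
    let suffix = "}\n";

    // Re-joining the three pieces should give back the original file contents.
    let file_code = format!("{}{}{}", prefix, middle, suffix);
    assert_eq!(file_code, "fn answer() -> i32 {\n    42\n}\n");
    print!("{}", file_code);
}
```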
[ { "content": "fn main() {\n\n let outdir = match env::var_os(\"OUT_DIR\") {\n\n None => return,\n\n Some(outdir) => outdir,\n\n };\n\n let mut app = ProgramOptions::clap();\n\n\n\n app.gen_completions(\"girouette\", Shell::Bash, &outdir);\n\n\n\n app.gen_completions(\"girouette\", Shell::Zsh, &outdir);\n\n\n\n app.gen_completions(\"girouette\", Shell::Fish, outdir);\n\n\n\n if let Some(v) = version_check::Version::read() {\n\n println!(\"cargo:rustc-env=BUILD_RUSTC={}\", v)\n\n }\n\n\n\n if let Some(hash) = get_commit_hash().or_else(|| env::var(\"BUILD_ID\").ok()) {\n\n println!(\"cargo:rustc-env=BUILD_ID={}\", hash);\n\n }\n\n\n\n println!(\n\n \"cargo:rustc-env=BUILD_INFO={}-{}-{}-{}\",\n\n env::var(\"CARGO_CFG_TARGET_ARCH\").unwrap(),\n\n env::var(\"CARGO_CFG_TARGET_VENDOR\").unwrap(),\n\n env::var(\"CARGO_CFG_TARGET_OS\").unwrap(),\n\n env::var(\"CARGO_CFG_TARGET_ENV\").unwrap(),\n\n );\n\n}\n\n\n", "file_path": "build.rs", "rank": 0, "score": 122214.08836122403 }, { "content": "fn handle_error(error_code: StatusCode, message: &str, location: &Location) -> Result<Response> {\n\n match error_code {\n\n StatusCode::NOT_FOUND => bail!(\"location error: '{}' for '{}'\", message, location),\n\n StatusCode::TOO_MANY_REQUESTS => bail!(\"Too many calls to the API! If you not using your own API key, please get your own for free over at http://openweathermap.org\"),\n\n _ => bail!(\"error from OpenWeather API: {}: {}\", error_code, message),\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, serde::Deserialize, serde::Serialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum DisplayMode {\n\n NerdFonts,\n\n Unicode,\n\n Ascii,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum QueryKind {\n\n Current,\n\n ForeCast,\n", "file_path": "src/lib.rs", "rank": 4, "score": 98659.13728344862 }, { "content": "fn get_commit_hash() -> Option<String> {\n\n Command::new(\"git\")\n\n .args(&[\"rev-parse\", \"--short\", \"HEAD\"])\n\n .output()\n\n .ok()\n\n .and_then(|r| {\n\n if r.status.success() {\n\n String::from_utf8(r.stdout).ok()\n\n } else {\n\n None\n\n }\n\n })\n\n}\n", "file_path": "build.rs", "rank": 5, "score": 96187.51500710918 }, { "content": "fn get_unicode(id: u16, night: bool) -> &'static str {\n\n match (night, id) {\n\n // thunderstorm + rain\n\n (_, 200..=209) => \"\\u{26c8}\",\n\n // thunderstorm\n\n (_, 210..=219) | (_, 221) | (_, 230..=239) => \"\\u{1f329}\",\n\n // rain (all types)\n\n (true, 300..=309)\n\n | (true, 310..=312)\n\n | (true, 500..=509)\n\n | (true, 511)\n\n | (true, 520..=529)\n\n | (true, 313..=319)\n\n | (true, 531)\n\n | (true, 611..=615)\n\n | (true, 620..=629)\n\n | (true, 616) => \"\\u{1f327}\",\n\n (false, 300..=309)\n\n | (false, 310..=312)\n\n | (false, 500..=509)\n", "file_path": "src/segments.rs", "rank": 6, "score": 87318.83873655839 }, { "content": "fn make_openweather_language_codes(s: &str) -> Cow<str> {\n\n // openweather supports these directly\n\n if let \"zh_CN\" | \"zh_TW\" | \"pt_BR\" = s {\n\n return s.to_lowercase().into();\n\n };\n\n\n\n let l_code = s.split_once('_').map(|t| t.0).unwrap_or(s);\n\n\n\n // openweather uses country codes for those\n\n match l_code {\n\n \"sq\" => \"al\", // Albanian\n\n \"cs\" => \"cz\", // Czech\n\n \"ko\" => \"kr\", // Korean\n\n \"lv\" => \"la\", // Latvian\n\n \"nb\" | \"nn\" => \"no\", // Norwegian\n\n s => s,\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 76972.19189312746 }, { "content": "fn get_icon(id: u16, night: bool, wind_type: &WindType) -> &'static str 
{\n\n match (night, id) {\n\n // thunderstorm + rain\n\n (true, 200..=209) => \"\\u{e32a}\",\n\n (false, 200..=209) => \"\\u{e30f}\",\n\n // thunderstorm\n\n (true, 210..=219) | (true, 221) => \"\\u{e332}\",\n\n (false, 210..=219) | (false, 221) => \"\\u{e305}\",\n\n // thunderstorm + sleet/drizzle\n\n (true, 230..=239) => \"\\u{e364}\",\n\n (false, 230..=239) => \"\\u{e362}\",\n\n // sprinkle\n\n (true, 300..=309) | (true, 310..=312) => \"\\u{e328}\",\n\n (false, 300..=309) | (false, 310..=312) => \"\\u{e30b}\",\n\n // rain\n\n (true, 500..=509) => \"\\u{e325}\",\n\n (false, 500..=509) => \"\\u{e308}\",\n\n // freezing rain\n\n (true, 511) => \"\\u{e321}\",\n\n (false, 511) => \"\\u{e304}\",\n", "file_path": "src/segments.rs", "rank": 8, "score": 74939.6332968454 }, { "content": "fn fg_copied(c: &ColorSpec) -> Option<Color> {\n\n c.fg().copied()\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 9, "score": 64572.69701392403 }, { "content": "fn bg_copied(c: &ColorSpec) -> Option<Color> {\n\n c.bg().copied()\n\n}\n", "file_path": "src/config.rs", "rank": 10, "score": 64572.69701392403 }, { "content": " Location to query (required if not set in config).\n\n\n\n Possible values are: \n\n * Location names: \"London, UK\", \"Dubai\"\n\n * Geographic coordinates (lat,lon): \"35.68,139.69\" This option overrides\n\n the corresponding value from the config.\n\n\n\n-u, --units <units>\n\n Units to use when displaying temperatures and speeds.\n\n\n\n Possible units are:\n\n\n\n - metric: Celsius temperatures and kilometers/hour speeds (the default),\n\n\n\n - imperial: Fahrenheit temperatures and miles/hour speeds,\n\n\n\n - standard: Kelvin temperatures and meters/second speeds.\n\n\n\n This option overrides the corresponding value from the config.\n\n\n\n --clean-cache\n\n Removes all cached responses and exits.\n\n\n\n This empties the cache directory used when caching responses with \"-c/--cache\".\n\n\n\n By default, girouette puts the cache in:\n\n\n\n - on Linux in \"$XDG_CACHE_HOME/girouette/results/\" or\n\n \"$HOME/.cache/girouette/results/\"\n\n - on MacOS in \"$HOME/Library/Caches/rs.Girouette/results/\"\n\n - on Windows in \"%AppData%\\Girouette\\cache\\results\\\"\n\n\n\n --print-default-config \n\n Prints the contents of the default configuration and exits.\n\n\n\n-q, --quiet\n\n Pass for less log output\n\n\n\n-v, --verbose\n\n Pass for more log output\n\n\n\n-h, --help \n\n Prints help information.\n\n\n\n-V, --version \n\n Prints version information.\n\n```\n\n\n", "file_path": "README.md", "rank": 11, "score": 50141.30038479901 }, { "content": "### Changed\n\n\n\n* Geoclue must now return a location within the `timeout` given in the configuration (instead of a hard-coded 1 second) before we give up and return an error.\n\n\n\n### Fixed\n\n\n\n* Unicode weather icons are now printed in Emoji mode, if supported by the font (using the emoji variation selector).\n\n\n\n## [0.5.0] - 2021-03-29\n\n\n\n### Added\n\n\n\n* Localization support: added new `-L/--language` CLI & config option to choose the output language for location names and weather descriptions. 
Possible values are any 2-letter language code supported by OpenWeather.\n\n\n\n## [0.4.3] - 2021-02-10\n\n\n\n### Added\n\n\n\n* New `snow` segment to show the current snowfall level (in mm in the last hour, like with rainfall).\n\n* More descriptive `--version` output: now shows the build environment and if geolocation is supported.\n\n\n\n## [0.4.2] - 2021-02-09\n\n\n\n### Added\n\n\n\n* Allow colors to be set with hexadecimal color codes (e.g. `\"#00e8ed\"`).\n\n\n\n### Changed\n\n\n\n* The hard-coded location was removed from the default config. The default is now `auto` if geolocation is enabled, and setting it using `-l/--location` (or in the config) is needed otherwise.\n\n\n\n## [0.4.1] - 2021-02-03\n\n\n\n### Added\n\n\n\n* Shell completions for girouette are now provided (for bash, zsh and fish).\n\n\n\n## [0.4.0] - 2021-02-02\n\n\n\n### Added\n\n\n\n* New `--print-default-config` option to print the content of the default configuration file.\n\n* New `auto` value for `-l/--location`: girouette will use geoclue (and thus dbus) to find the location.\n\n This is the default if there is no location set in the config file.\n\n\n\n### Fixed\n\n\n\n* Fixed parsing error that prevented passing on the CLI a location starting with a minus sign (negative latitude).\n\n\n\n## [0.3.2] - 2021-01-14\n\n\n\n### Added\n\n\n\n* Release binaries are now published to the corresponding GitHub release.\n\n\n\n## [0.3.1] - 2021-01-14\n\n\n\n### Added\n\n\n\n* Trying girouette without registering an OpenWeather API key is now possible (using an hard-coded key).\n\n* New `--clean-cache` option to empty the cache used when API responses are cached to disk.\n\n\n", "file_path": "CHANGELOG.md", "rank": 12, "score": 50137.14964897452 }, { "content": "## Options\n\n\n\n```\n\n-c, --cache <cache> \n\n Cache responses for this long (e.g. \"1m\", \"2 days 6h\", \"5 sec\"), or `none` to\n\n disable it.\n\n\n\n If there is a cached response younger than the duration given as argument, it \n\n is returned directly. Otherwise, it queries the API and write the response to\n\n the cache for use by a later invocation.\n\n\n\n NOTE: No response is written to the cache if this option isn't set. The\n\n invocation doing the caching and the one potentially querying it *both* need\n\n this option set.\n\n\n\n Recognized durations go from seconds (\"seconds, second, sec, s\") to\n\n years (\"years, year, y\"). This option overrides the corresponding value from\n\n the config.\n\n\n\n --config <config> \n\n Use the specified configuration file instead of the default.\n\n\n\n By default, girouette looks for a configuration file:\n\n\n\n - on Linux in \"$XDG_CONFIG_HOME/girouette/config.yml\" or\n\n \"$HOME/.config/girouette/config.yml\"\n\n - on MacOS in \"$HOME/Library/Application Support/rs.Girouette/config.yml\"\n\n - on Windows in \"%AppData%\\Girouette\\config\\config.yml\"\n\n\n\n-k, --key <key> \n\n OpenWeather API key (required for anything more than light testing).\n\n\n\n This option overrides the corresponding value from the config.\n\n\n\n-L, --language <language>\n\n Use this language for location names, weather descriptions and date formatting.\n\n\n\n This asks OpenWeather to provide location names and weather descriptions in the\n\n given language, and uses it to format date and times.\n\n\n\n Possible values are of the form 'aa_AA' like 'en_US' or 'fr_FR'. 
Note that\n\n OpenWeather only supports a subset of all valid LANG values.\n\n\n\n-l, --location <location> \n", "file_path": "README.md", "rank": 13, "score": 50136.59417443324 }, { "content": "## Configuration\n\n\n\n### Configuration file location\n\n\n\ngirouette doesn't create a configuration file for you, but looks for it in the following locations:\n\n\n\n* on Linux in `$XDG_CONFIG_HOME/girouette/config.yml` or `$HOME/.config/girouette/config.yml`\n\n* on MacOS in `$HOME/Library/Application Support/rs.Girouette/config.yml`\n\n* on Windows in `%AppData%\\Girouette\\config\\config.yml`\n\n\n\nThe `--print-default-config` option displays the content of the default config. It can be use to initialize a custom configuration file:\n\n\n\n```sh\n\ngirouette --print-default-config > myconfig.yml\n\n```\n\n\n\n### Global configuration keys\n\n\n\n* `key` (string): the OpenWeather API key to use (can be overridden on the command-line with `-k/--key`). Registering a key is required for anything more than light testing.\n\n* `location` (string): a default location to query (can be overridden on the command-line with `-l/--location`).\n\n * If built with geolocation support (`geoclue` feature), can be `auto` or left empty to attempt geolocation.\n\n * Can be any name of a place.\n\n * Can be a tuple of latitude, longitude (e.g. `\"35.68,139.69\"`)\n\n* `separator` (string): a separator string to use between segments. Defaults to two white-space characters `\" \"`.\n\n* `units` (string: `metric`, `imperial` or `standard`): unit system to use when displaying temperatures and speeds. Defaults to `metric`.\n\n* `cache` (string): how long to cache responses from the API (can be overridden on the command-line with `-c/--cache`), or `none` to disable it.\n\n If there is a cached response younger than the duration given as argument, it is returned directly. Otherwise, it queries the API and write the response to the cache for use by a later invocation.\n\n NOTE: No response is written to the cache if this option isn't set. The invocation doing the caching and the one potentially querying it *both* need this option set.\n\n Recognized durations go from seconds (\"seconds, second, sec, s\") to years (\"years, year, y\").\n", "file_path": "README.md", "rank": 14, "score": 50134.61922971842 }, { "content": "### Changed\n\n\n\n* Responses are now cached by default for one minute (the `cache` key in the default config is set to `1m`).\n\n\n\n### Fixed\n\n\n\n* Never return an error if a response is not in cache, just query the API instead.\n\n\n\n## [0.3.0] - 2020-04-01\n\n\n\n### Added\n\n\n\n* The new `-c/--cache <duration>` option will cache API responses and reuse them on following invocations,\n\n within a time limit like `1m 15s`, `2 days`, etc. 
The cache is only written when the option is present.\n\n* The `rain` and `pressure` can now be styled like the other segments in the configuration.\n\n\n\n### Changed\n\n\n\n* Tweaked the fallback color theme used when users do not have a configuration file.\n\n\n\n## [0.2.1] - 2020-03-27\n\n\n\n### Changed\n\n\n\n* girouette supports a `GIROUETTE_LOG` that can be set to `warn/info/debug/trace` and supports the\n\n usual `env_logger` features.\n\n* LTO is now enabled for release builds.\n\n\n\n### Fixed\n\n\n\n* High winds are now properly indicated (above 35 km/h).\n\n* Much improved error output.\n\n\n\n## [0.2.0] - 2020-03-26\n\n\n\n### Added\n\n\n\n* Support for ASCII and Unicode (emoji) output.\n\n* Support reading configuration from a file at `$XDG_CONFIG_HOME/girouette/config.yml`\n\n (`{%USERPROFILE%\\AppData\\Roaming\\girouette\\config.yml` on Windows,\n\n `$HOME/Library/Preferences/rs.girouette/config.yml` on macOS). Fallback is the\n\n example config at the root of the project.\n\n* `--config` option to specify a different (and only) config file.\n\n\n\n### Changed\n\n\n\n* The default display mode is now `unicode`: it will only use Unicode (including emoji)\n\n characters. Support for Nerd Fonts is still available with `display_mode: \"nerd_fonts\"`,\n\n both globally and per-segment.\n\n* Renamed to project to girouette; weather was just a placeholder, really.\n\n* The apparent temperature is only displayed when `feels_like: true` for the temperature segment.\n\n* Users can opt-out of the color scale for temp/wind/humidity by specifying a style in the\n\n segment config, instead of the default of `style: \"scaled\"`.\n\n\n", "file_path": "CHANGELOG.md", "rank": 15, "score": 50131.991802736244 }, { "content": "# girouette\n\n\n\n**girouette** is a command line tool that displays the current weather (from [OpenWeather])\n\nin the terminal.\n\n\n\nIt supports advanced fonts like Nerd Fonts with full color output:\n\n\n\n![example of styled girouette output](screenshots/girouette_tests.png)\n\n\n\nAnd customizable output segments, including fallback to 16/256-color and simpler ASCII/Unicode support:\n\n\n\n![examples of girouette output](screenshots/girouette_main.png)\n\n\n\ngirouette requires an [OpenWeather API key] (free for 1 call per second). A default key is hard-coded for people to try things, but it will get rate limited quickly.\n\n\n\n[![ci status][ci image]][ci link]\n\n\n\n## Installation\n\n\n\nPre-compiled binaries are available on the [Release Page] for x86_64 Linux. They are statically linked and do not support the `geoclue` feature (no auto-geolocation).\n\n\n\nIf you are a **Fedora/RHEL/CentOS-Stream** user, you can install girouette with:\n\n\n\n```sh\n\nsudo dnf copr enable gourlaysama/girouette\n\nsudo dnf install girouette\n\n```\n\n\n\n(Supported archs are: aarch64, armhfp, ppc64le or x86_64 on Fedora, and the subset supported by COPR for RHEL/CentOS)\n\n\n\nOtherwise you will need to [build from source](#building-from-source).\n\n\n\n## Usage\n\n\n\nShow the weather at a location:\n\n\n\n```sh\n\ngirouette -l \"Los Angeles\"\n\ngirouette -l \"35.68,139.69\"\n\ngirouette -l auto # if built with geoclue support (not available in static build)\n\n```\n\n\n\nThe location can be set and the output customized in the [configuration file](#configuration).\n\n\n", "file_path": "README.md", "rank": 16, "score": 50129.124338905465 }, { "content": "## Building from source\n\n\n\ngirouette is written in Rust, so you need a [Rust install] to build it. 
girouette compiles with\n\nRust 1.53 or newer.\n\n\n\nBuilding a dynamically-linked girouette (the default) also requires d-bus (for geolocalization support) and OpenSSL\n\n(`libdbus-1-dev` and `libssl-dev` on Ubuntu, `dbus-devel` and `openssl-devel` on Fedora).\n\n\n\nBuild the latest release (0.6.7) from source with:\n\n\n\n```sh\n\n$ git clone https://github.com/gourlaysama/girouette -b v0.6.7\n\n$ cd girouette\n\n$ cargo build --release\n\n$ ./target/release/girouette --version\n\ngirouette 0.6.7\n\n```\n\n\n\nYou can disable geolocation (and the need for d-bus and Geoclue) by building instead with:\n\n\n\n```sh\n\ncargo build --release --no-default-features --features dynamic\n\n```\n\n\n\nYou can also build a fully static Linux binary using the musl libc. After installing musl\n\n(`musl-tools` on Ubuntu, `musl-libc-static` on Fedora), run:\n\n\n\n```sh\n\nrustup target add x86_64-unknown-linux-musl # run this only once\n\ncargo build --release --no-default-features --features default-static --target x86_64-unknown-linux-musl\n\n./target/x86_64-unknown-linux-musl/release/girouette\n\n```\n\n\n", "file_path": "README.md", "rank": 17, "score": 50128.92757983744 }, { "content": "### Features\n\n\n\n* New `daily_forecast` segment to show the temperature and general weather for the next 1 to 7 days. A `days` option controls the number of days to display (defaults to 3).\n\n* New `hourly_forecast` segment to show the temperature and general weather for each hour in the next 48 hours (defaults to 3). An `hours` option controls the number of hours to display (defaults to 3). A `step` controls how many hours to step over between forecasts (defaults to 2).\n\n\n\n### Changes\n\n\n\n* **Breaking change**: the `-L/--language` command line option and `language` config option now take a locale value of the form `aa_AA`, like `en_US` or `zh_CN`, instead of a 2-letter country code. girouette will warn if the value is not recognized and then fall back to `en_US` for date/time formatting.\n\n* If the `language` option is unset, girouette will try to use the `LANG` environment variable, before falling back to `en_US`.\n\n\n\n## [0.5.2] - 2021-07-23\n\n\n\n### Changes\n\n\n\n* In Unicode mode, temperature is now indicated by a Unicode thermometer (🌡️ ,`U+1f321 U+fe0f`) instead of the letter `T`.\n\n\n\n### Features\n\n\n\n* The temperature segment can now display the local min/max temperature, when setting the segment's new `min_max` option to `true`. Those values give a range of the temperature around the queried area at the current moment. The default is `false`.\n\n\n\n## [0.5.1] - 2021-05-20\n\n\n\n### Added\n\n\n\n* New `cloud_cover` segment to show the current cloud cover in %.\n\n* New `-v/-q` pair of short options to respectively increase/decrease verbosity. The options can be stacked (`-vvv`). The default verbosity level is `warn` (from `off`, `error`, `warn`, `info`, `debug`, `trace`), with the CLI arguments overriding the `GIROUETTE_LOG` environment variable. `-qq` silences all output except weather segments.\n\n* New `timeout` config option to decide how long to wait for a response from Openweather, or for a location from Geoclue. 
The default is 10 seconds.\n\n\n", "file_path": "CHANGELOG.md", "rank": 18, "score": 50127.70058620588 }, { "content": "# Changelog\n\n\n\n**girouette** is a command line tool that displays the current weather (from [OpenWeather]) in the terminal.\n\n\n\n<!-- next-header -->\n\n## [Unreleased] - TBD\n\n\n\n## [0.6.7] - 2021-12-30\n\n\n\n### Features\n\n\n\n* New `description` and `sender` boolean option for `alerts` segment (both default to false). If true, the full description (resp. the organization sending the alert) is displayed for each alert.\n\n\n\n### Fixed\n\n\n\n* Alerts for extreme temperature events and snow/ice events were missing icons.\n\n\n\n## [0.6.6] - 2021-12-15\n\n\n\n### Features\n\n\n\n* New `units` config option and `-u/--units` to choose the unit system used to display temperatures and wind speeds. Available values are: `metric` (to use `°C` and `km/h`), `imperial` (to use `°F` and `mph`) and `standard` (to use `K` and `m/s`).\n\n\n\n### Fixes\n\n\n\n* Fixed error when OpenWeather doesn't provide a description for a weather condition.\n\n* Fixed index-out-of-bounds crash when using the temperature color scale with very high temperatures.\n\n\n\n## [0.6.5] - 2021-11-26\n\n\n\n### Fixes\n\n\n\n* Hourly forecast icons now properly indicate daytime/nighttime.\n\n\n\n## [0.6.4] - 2021-11-17\n\n\n\n### Features\n\n\n\n* New `daytime` segment to display sunrise and sunset times for the current day.\n\n\n\n## [0.6.3] - 2021-10-25\n\n\n\n### Features\n\n\n\n* New `alerts` segment to display weather alerts for the current location.\n\n\n\n### Fixes\n\n\n\n* Unknown weather codes used to cause a `N/A` icon to be used as weather icon: we now use basic day/night icons instead (the sun or the moon).\n\n* The unicode icon for haze is now the same for night as it was for day (the fog emoji).\n\n\n\n## [0.6.2] - 2021-09-10\n\n\n\n### Fixes\n\n\n\n* Querying some locations with a hourly/daily forecast segment could sometimes fail when local forecast data wasn't provided by OpenWeather.\n\n\n\n## [0.6.1] - 2021-09-10\n\n\n\n### Changes\n\n\n\n* The default `config.yml` has been changed to include the hourly forecast; the color scheme has also been updated.\n\n\n\n## [0.6.0] - 2021-09-03\n\n\n\n### Packaging\n\n\n\n* The Minimum Supported Rust Version for girouette is now 1.53.\n\n\n", "file_path": "CHANGELOG.md", "rank": 19, "score": 50126.0535941035 }, { "content": "### Style configuration keys\n\n\n\n* `base_style` (style): the default style for the whole output. This is used as the style to render the separator around the segments, and as the parent style for the segments themselves. Individual segments only need to override the specific things they want to change (e.g. the foreground color), leaving the rest intact.\n\n* `display_mode` (string): what kind of characters to use in the output. Three values are possible:\n\n * `ascii`: only use ASCII characters. Icons will be very limited or even nonexistent. Will display fine on any terminal emulator.\n\n * `unicode`: use standard Unicode characters. This will use the somewhat limited list of weather-related icons in Unicode. This is the default.\n\n Note that most of these icons are emoji and their rendering in a terminal emulator will depend on the font, font fallbacks, and so on. Terminal emulators will also usually ignore the foreground color when emoji are not rendered \"text-style\".\n\n * `nerd_fonts`: use characters from the Nerd Fonts package. 
These provide the most specific and best-looking icons for weather, but the Nerd Font variant of your font must be [installed][Nerd Fonts] first (and your terminal emulator configured to use it).\n\n* `segments` (list of segments): the list of information segments to display. Each segment contains an optional `style` attribute (described below) in addition to their own attributes.\n\n Available segments:\n\n * `instant`: the date/time of the weather measure. Has a single attribute:\n\n * `date_format` (string): how to format the date/time. Takes a [mostly C-style format][chrono format].\n\n * `location_name` (string): the location name from the weather measure. This isn't always the same as the queried location.\n\n * `temperature`: the temperature. The `style` attribute takes an additional `scaled` value to use an (hard-coded) color scale that varies with the temperature (this is the default). Has a two attributes:\n\n * `feels_like` (boolean): if yes, also displays the (estimated) felt-like temperature (takes into account wind/humidity/...).\n\n * `min_max` (boolean): if yes, also show the current local minimum and maximum temperatures. Those values give a range of the temperature around the queried area at the current moment. Defaults to false.\n\n * `weather_icon`: a single icon summarizing the weather.\n\n * `weather_description`: a textual description of the weather.\n\n * `rain`: the amount of falling rain (if any).\n\n * `snow`: the amount of falling snow (if any).\n\n * `wind_speed`: the measured wind speed.\n\n * `humidity`: the measured humidity.\n\n * `pressure`: the measured pressure.\n\n * `cloud_cover`: the overall fraction of the sky covered by clouds.\n\n * `daily_forecast`: the temperature and general weather for the next 1 to 7 days. Has three optional attributes:\n\n * `style` (style): the style used to display the weather icon.\n\n * `temp_style` (style): the style used to display the temperature, see the `temperature` segment.\n\n * `days` (integer): the number of days to display (between 1 and 7, defaults to 3).\n\n * `hourly_forecast`: the temperature and general weather for each hour in the next 48 hours. 
Has four attributes:\n\n * `style` (style): the style used to display the weather icon.\n\n * `temp_style` (style): the style used to display the temperature, see the `temperature` segment.\n\n * `hours` (integer): the number of hours to display (defaults to 3).\n\n * `step` (integer): the number of hours to step over between forecasts (defaults to 2).\n\n * `alerts`: the weather alerts for the current location.\n\n * `description` (boolean): if yes, show the alert's full description.\n\n * `sender` (boolean): if yes, show the name of the organization sending the alert.\n", "file_path": "README.md", "rank": 20, "score": 50123.7943996181 }, { "content": "### Fixed\n\n\n\n* Ignore the `visibility` value from OpenWeather (instead of throwing an error if missing).\n\n* Avoid adding double separators when a segment has no output (if there is no rain, etc.).\n\n\n\n## [0.1.0] - 2020-03-23\n\n\n\n* Currently requires [Nerd Fonts] to be installed.\n\n* Requires the `-k/--key` option to choose the OpenWeather API key\n\n* Requires the `-l/--location` option to choose the location (text or `lat,lon`)\n\n\n\n<!-- next-url -->\n\n[Unreleased]: https://github.com/gourlaysama/girouette/compare/v0.6.7...HEAD\n\n[0.6.7]: https://github.com/gourlaysama/girouette/compare/v0.6.6...v0.6.7\n\n[0.6.6]: https://github.com/gourlaysama/girouette/compare/v0.6.5...v0.6.6\n\n[0.6.5]: https://github.com/gourlaysama/girouette/compare/v0.6.4...v0.6.5\n\n[0.6.4]: https://github.com/gourlaysama/girouette/compare/v0.6.3...v0.6.4\n\n[0.6.3]: https://github.com/gourlaysama/girouette/compare/v0.6.2...v0.6.3\n\n[0.6.2]: https://github.com/gourlaysama/girouette/compare/v0.6.1...v0.6.2\n\n[0.6.1]: https://github.com/gourlaysama/girouette/compare/v0.6.0...v0.6.1\n\n[0.6.0]: https://github.com/gourlaysama/girouette/compare/v0.5.2...v0.6.0\n\n[0.5.2]: https://github.com/gourlaysama/girouette/compare/v0.5.1...v0.5.2\n\n[0.5.1]: https://github.com/gourlaysama/girouette/compare/v0.5.0...v0.5.1\n\n[0.5.0]: https://github.com/gourlaysama/girouette/compare/v0.4.3...v0.5.0\n\n[0.4.3]: https://github.com/gourlaysama/girouette/compare/v0.4.2...v0.4.3\n\n[0.4.2]: https://github.com/gourlaysama/girouette/compare/v0.4.1...v0.4.2\n\n[0.4.1]: https://github.com/gourlaysama/girouette/compare/v0.4.0...v0.4.1\n\n[0.4.0]: https://github.com/gourlaysama/girouette/compare/v0.3.2...v0.4.0\n\n[0.3.2]: https://github.com/gourlaysama/girouette/compare/v0.3.1...v0.3.2\n\n[0.3.1]: https://github.com/gourlaysama/girouette/compare/v0.3.0...v0.3.1\n\n[0.3.0]: https://github.com/gourlaysama/girouette/compare/v0.2.1...v0.3.0\n\n[0.2.1]: https://github.com/gourlaysama/girouette/compare/v0.2.0...v0.2.1\n\n[0.2.0]: https://github.com/gourlaysama/girouette/compare/v0.1.0...v0.2.0\n\n[0.1.0]: https://github.com/gourlaysama/girouette/compare/e1ab692...v0.1.0\n\n[Nerd Fonts]: https://www.nerdfonts.com/\n\n[OpenWeather]: https://openweathermap.org\n", "file_path": "CHANGELOG.md", "rank": 21, "score": 50123.27191722913 }, { "content": "#### License\n\n\n\n<sub>\n\ngirouette is licensed under either of <a href=\"LICENSE-APACHE\">Apache License, Version 2.0</a> or <a href=\"LICENSE-MIT\">MIT license</a> at your option.\n\n</sub>\n\n\n\n<br>\n\n\n\n<sub>\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in girouette by you, as defined in the Apache-2.0 license, shall be\n\ndual licensed as above, without any additional terms or conditions.\n\n</sub>\n\n\n\n[OpenWeather]: https://openweathermap.org\n\n[OpenWeather API key]: 
https://openweathermap.org/appid\n\n[Rust install]: https://www.rust-lang.org/tools/install\n\n[Release Page]: https://github.com/gourlaysama/girouette/releases/latest\n\n[ci image]: https://github.com/gourlaysama/girouette/workflows/Continuous%20integration/badge.svg?branch=master\n\n[ci link]: https://github.com/gourlaysama/girouette/actions?query=workflow%3A%22Continuous+integration%22\n\n[Nerd Fonts]: https://www.nerdfonts.com/\n\n[chrono format]: https://docs.rs/chrono/0.4.19/chrono/format/strftime/index.html#specifiers\n", "file_path": "README.md", "rank": 22, "score": 50122.38084747695 }, { "content": " * `daytime`: the sunrise and sunset times for the current day.\n\n\n\nA style attribute is an object with 6 attributes, all optional:\n\n\n\n* `bg` (color): the background color.\n\n* `fg` (color): the foreground color.\n\n* `bold` (boolean): if yes, render the text bold.\n\n* `intense` (boolean): if yes, mark the text as intense. Terminal support varies.\n\n* `underline` (boolean): if yes, mark the text as underlined. Terminal support varies.\n\n* `italic` (boolean): if yes, mark the text as italic. Terminal support varies.\n\n\n\nA color attribute used in styles can be either:\n\n\n\n* a string containing a color name (black, blue, green, red, cyan, magenta, yellow, white),\n\n* a integer between 0 and 255 representing an ANSI color code (e.g. `122`),\n\n* a hexadecimal color code (e.g. `\"#002b3f\"`),\n\n* a triple of integer representing an RGB color (e.g. `[15, 55, 84]`).\n\n\n", "file_path": "README.md", "rank": 23, "score": 50119.90092854305 }, { "content": "* `language` (string): the language used for location names, weather descriptions and date/time formatting. Possible values are of the form 'aa_AA' like 'en_US' or 'fr_FR'.\n\n\n\nSee the default configuration file [config.yml](config.yml) and browse the [example_configs](example_configs/) directory for examples (the example output shown above displays the default and both example configurations).\n\n\n", "file_path": "README.md", "rank": 24, "score": 50116.91183383808 }, { "content": "fn display_temp(\n\n color_scale: &ScaledColor,\n\n out: &mut StandardStream,\n\n temp: f32,\n\n base_style: &ColorSpec,\n\n units: UnitMode,\n\n) -> Result<()> {\n\n match color_scale {\n\n ScaledColor::Scaled => {\n\n let c = match units {\n\n UnitMode::Standard => (temp - 273.15),\n\n UnitMode::Metric => temp,\n\n UnitMode::Imperial => (temp - 32f32) * 0.555_555_6,\n\n };\n\n let temp_idx = (c.round() + 16f32).min(56f32).max(0f32) as usize;\n\n\n\n out.set_color(\n\n base_style\n\n .clone()\n\n .set_fg(Some(Color::Ansi256(TEMP_COLORS[temp_idx])))\n", "file_path": "src/segments.rs", "rank": 25, "score": 41452.03153118842 }, { "content": "use std::env;\n\nuse std::process::Command;\n\nuse structopt::clap::Shell;\n\n\n\ninclude!(\"src/cli.rs\");\n\n\n", "file_path": "build.rs", "rank": 26, "score": 31222.975188255405 }, { "content": "pub trait OrgFreedesktopGeoClue2Location {\n\n fn latitude(&self) -> nonblock::MethodReply<f64>;\n\n fn longitude(&self) -> nonblock::MethodReply<f64>;\n\n fn accuracy(&self) -> nonblock::MethodReply<f64>;\n\n fn altitude(&self) -> nonblock::MethodReply<f64>;\n\n fn speed(&self) -> nonblock::MethodReply<f64>;\n\n fn heading(&self) -> nonblock::MethodReply<f64>;\n\n fn description(&self) -> nonblock::MethodReply<String>;\n\n fn timestamp(&self) -> nonblock::MethodReply<(u64, u64)>;\n\n}\n\n\n\nimpl<'a, T: nonblock::NonblockReply, C: ::std::ops::Deref<Target = T>>\n\n OrgFreedesktopGeoClue2Location for 
nonblock::Proxy<'a, C>\n\n{\n\n fn latitude(&self) -> nonblock::MethodReply<f64> {\n\n <Self as nonblock::stdintf::org_freedesktop_dbus::Properties>::get(\n\n self,\n\n \"org.freedesktop.GeoClue2.Location\",\n\n \"Latitude\",\n\n )\n", "file_path": "src/geoclue/location.rs", "rank": 35, "score": 28020.366570884846 }, { "content": "fn get_wind_type(speed: f32, units: UnitMode) -> WindType {\n\n match units {\n\n UnitMode::Standard => {\n\n if speed >= 9.722_222_f32 {\n\n WindType::High\n\n } else if speed >= 5.555_555_3_f32 {\n\n WindType::Mid\n\n } else {\n\n WindType::Low\n\n }\n\n }\n\n UnitMode::Metric => {\n\n if speed >= 35f32 {\n\n WindType::High\n\n } else if speed >= 20_f32 {\n\n WindType::Mid\n\n } else {\n\n WindType::Low\n\n }\n\n }\n", "file_path": "src/segments.rs", "rank": 36, "score": 27042.472091559444 }, { "content": "\n\nimpl arg::AppendAll for PropertiesPropertiesChanged {\n\n fn append(&self, i: &mut arg::IterAppend) {\n\n arg::RefArg::append(&self.interface_name, i);\n\n arg::RefArg::append(&self.changed_properties, i);\n\n arg::RefArg::append(&self.invalidated_properties, i);\n\n }\n\n}\n\n\n\nimpl arg::ReadAll for PropertiesPropertiesChanged {\n\n fn read(i: &mut arg::Iter) -> Result<Self, arg::TypeMismatchError> {\n\n Ok(PropertiesPropertiesChanged {\n\n interface_name: i.read()?,\n\n changed_properties: i.read()?,\n\n invalidated_properties: i.read()?,\n\n })\n\n }\n\n}\n\n\n\nimpl dbus::message::SignalArgs for PropertiesPropertiesChanged {\n\n const NAME: &'static str = \"PropertiesChanged\";\n\n const INTERFACE: &'static str = \"org.freedesktop.DBus.Properties\";\n\n}\n\n\n", "file_path": "src/geoclue/location.rs", "rank": 37, "score": 26812.71063441274 }, { "content": " interface_name: &str,\n\n property_name: &str,\n\n ) -> nonblock::MethodReply<arg::Variant<Box<dyn arg::RefArg + 'static>>> {\n\n self.method_call(\n\n \"org.freedesktop.DBus.Properties\",\n\n \"Get\",\n\n (interface_name, property_name),\n\n )\n\n .and_then(|r: (arg::Variant<Box<dyn arg::RefArg + 'static>>,)| Ok(r.0))\n\n }\n\n\n\n fn get_all(&self, interface_name: &str) -> nonblock::MethodReply<arg::PropMap> {\n\n self.method_call(\n\n \"org.freedesktop.DBus.Properties\",\n\n \"GetAll\",\n\n (interface_name,),\n\n )\n\n .and_then(|r: (arg::PropMap,)| Ok(r.0))\n\n }\n\n\n", "file_path": "src/geoclue/location.rs", "rank": 38, "score": 26811.075090950948 }, { "content": "// This code was autogenerated with `dbus-codegen-rust -c nonblock -m None -s -d org.freedesktop.GeoClue2 -p /org/freedesktop/GeoClue2/Client/2/Location/0 -i org.freedesktop.DBus.`, see https://github.com/diwic/dbus-rs\n\n#[allow(unused_imports)]\n\nuse dbus::arg;\n\nuse dbus::nonblock;\n\n\n", "file_path": "src/geoclue/location.rs", "rank": 39, "score": 26810.175956113002 }, { "content": " }\n\n\n\n fn description(&self) -> nonblock::MethodReply<String> {\n\n <Self as nonblock::stdintf::org_freedesktop_dbus::Properties>::get(\n\n self,\n\n \"org.freedesktop.GeoClue2.Location\",\n\n \"Description\",\n\n )\n\n }\n\n\n\n fn timestamp(&self) -> nonblock::MethodReply<(u64, u64)> {\n\n <Self as nonblock::stdintf::org_freedesktop_dbus::Properties>::get(\n\n self,\n\n \"org.freedesktop.GeoClue2.Location\",\n\n \"Timestamp\",\n\n )\n\n }\n\n}\n", "file_path": "src/geoclue/location.rs", "rank": 40, "score": 26808.76745727538 }, { "content": " fn set(\n\n &self,\n\n interface_name: &str,\n\n property_name: &str,\n\n value: arg::Variant<Box<dyn arg::RefArg>>,\n\n ) -> nonblock::MethodReply<()> {\n\n self.method_call(\n\n 
\"org.freedesktop.DBus.Properties\",\n\n \"Set\",\n\n (interface_name, property_name, value),\n\n )\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PropertiesPropertiesChanged {\n\n pub interface_name: String,\n\n pub changed_properties: arg::PropMap,\n\n pub invalidated_properties: Vec<String>,\n\n}\n", "file_path": "src/geoclue/location.rs", "rank": 41, "score": 26808.26399215471 }, { "content": " self,\n\n \"org.freedesktop.GeoClue2.Location\",\n\n \"Altitude\",\n\n )\n\n }\n\n\n\n fn speed(&self) -> nonblock::MethodReply<f64> {\n\n <Self as nonblock::stdintf::org_freedesktop_dbus::Properties>::get(\n\n self,\n\n \"org.freedesktop.GeoClue2.Location\",\n\n \"Speed\",\n\n )\n\n }\n\n\n\n fn heading(&self) -> nonblock::MethodReply<f64> {\n\n <Self as nonblock::stdintf::org_freedesktop_dbus::Properties>::get(\n\n self,\n\n \"org.freedesktop.GeoClue2.Location\",\n\n \"Heading\",\n\n )\n", "file_path": "src/geoclue/location.rs", "rank": 42, "score": 26805.955688733953 }, { "content": " }\n\n\n\n fn longitude(&self) -> nonblock::MethodReply<f64> {\n\n <Self as nonblock::stdintf::org_freedesktop_dbus::Properties>::get(\n\n self,\n\n \"org.freedesktop.GeoClue2.Location\",\n\n \"Longitude\",\n\n )\n\n }\n\n\n\n fn accuracy(&self) -> nonblock::MethodReply<f64> {\n\n <Self as nonblock::stdintf::org_freedesktop_dbus::Properties>::get(\n\n self,\n\n \"org.freedesktop.GeoClue2.Location\",\n\n \"Accuracy\",\n\n )\n\n }\n\n\n\n fn altitude(&self) -> nonblock::MethodReply<f64> {\n\n <Self as nonblock::stdintf::org_freedesktop_dbus::Properties>::get(\n", "file_path": "src/geoclue/location.rs", "rank": 43, "score": 26805.36920513223 }, { "content": "struct RenderConf<'a> {\n\n base_style: &'a ColorSpec,\n\n display_mode: DisplayMode,\n\n locale: Locale,\n\n units: UnitMode,\n\n}\n\n\n\nimpl Renderer {\n\n pub fn from(display_config: &DisplayConfig) -> Self {\n\n let mut display_config = display_config.clone();\n\n\n\n // reset stays false for segments but we hardcode it to true\n\n // for the base style. 
TODO: find a better way to do this\n\n display_config.base_style.set_reset(true);\n\n\n\n Renderer { display_config }\n\n }\n\n\n\n pub fn render(\n\n &mut self,\n", "file_path": "src/segments.rs", "rank": 44, "score": 25406.17799519225 }, { "content": "pub trait Introspectable {\n\n fn introspect(&self) -> nonblock::MethodReply<String>;\n\n}\n\n\n\nimpl<'a, T: nonblock::NonblockReply, C: ::std::ops::Deref<Target = T>> Introspectable\n\n for nonblock::Proxy<'a, C>\n\n{\n\n fn introspect(&self) -> nonblock::MethodReply<String> {\n\n self.method_call(\"org.freedesktop.DBus.Introspectable\", \"Introspect\", ())\n\n .and_then(|r: (String,)| Ok(r.0))\n\n }\n\n}\n\n\n", "file_path": "src/geoclue/location.rs", "rank": 45, "score": 23644.297749672267 }, { "content": "pub trait Properties {\n\n fn get(\n\n &self,\n\n interface_name: &str,\n\n property_name: &str,\n\n ) -> nonblock::MethodReply<arg::Variant<Box<dyn arg::RefArg + 'static>>>;\n\n fn get_all(&self, interface_name: &str) -> nonblock::MethodReply<arg::PropMap>;\n\n fn set(\n\n &self,\n\n interface_name: &str,\n\n property_name: &str,\n\n value: arg::Variant<Box<dyn arg::RefArg>>,\n\n ) -> nonblock::MethodReply<()>;\n\n}\n\n\n\nimpl<'a, T: nonblock::NonblockReply, C: ::std::ops::Deref<Target = T>> Properties\n\n for nonblock::Proxy<'a, C>\n\n{\n\n fn get(\n\n &self,\n", "file_path": "src/geoclue/location.rs", "rank": 46, "score": 23644.297749672267 }, { "content": "pub trait Peer {\n\n fn ping(&self) -> nonblock::MethodReply<()>;\n\n fn get_machine_id(&self) -> nonblock::MethodReply<String>;\n\n}\n\n\n\nimpl<'a, T: nonblock::NonblockReply, C: ::std::ops::Deref<Target = T>> Peer\n\n for nonblock::Proxy<'a, C>\n\n{\n\n fn ping(&self) -> nonblock::MethodReply<()> {\n\n self.method_call(\"org.freedesktop.DBus.Peer\", \"Ping\", ())\n\n }\n\n\n\n fn get_machine_id(&self) -> nonblock::MethodReply<String> {\n\n self.method_call(\"org.freedesktop.DBus.Peer\", \"GetMachineId\", ())\n\n .and_then(|r: (String,)| Ok(r.0))\n\n }\n\n}\n\n\n", "file_path": "src/geoclue/location.rs", "rank": 47, "score": 23644.297749672267 }, { "content": " /// By default, girouette puts the cache in:\n\n ///\n\n /// - on Linux in \"$XDG_CACHE_HOME/girouette/results/\" or \"$HOME/.cache/girouette/results/\"\n\n ///\n\n /// - on MacOS in \"$HOME/Library/Caches/rs.Girouette/results/\"\n\n ///\n\n /// - on Windows in \"%AppData%\\Girouette\\cache\\results\\\"\n\n pub clean_cache: bool,\n\n\n\n #[structopt(long)]\n\n /// Prints the contents of the default configuration and exits.\n\n ///\n\n /// This allows creating a new configuration using the default configuration as a template.\n\n pub print_default_config: bool,\n\n\n\n /// Prints version information.\n\n #[structopt(short = \"V\", long = \"version\")]\n\n pub version: bool,\n\n\n\n /// Pass for more log output.\n", "file_path": "src/cli.rs", "rank": 48, "score": 23.689142153040887 }, { "content": "\n\n let (incoming, mut stream) = conn\n\n .add_match(OrgFreedesktopGeoClue2ClientLocationUpdated::match_rule(\n\n None, None,\n\n ))\n\n .await\n\n .context(\"D-bus error\")?\n\n .stream();\n\n\n\n // required to be able to query geoclue\n\n client\n\n .set_desktop_id(\"girouette\".to_string())\n\n .await\n\n .context(\"D-bus error\")?;\n\n\n\n client.start().await.context(\"D-bus error\")?;\n\n\n\n let res: (_, OrgFreedesktopGeoClue2ClientLocationUpdated) =\n\n timeout(timeout_duration, stream.next())\n\n .await\n", "file_path": "src/geoclue/mod.rs", "rank": 49, "score": 20.970220171398253 }, { "content": "\n\n Ok(())\n\n 
}\n\n}\n\n\n\npub struct WeatherClient {\n\n client: reqwest::Client,\n\n cache_length: Option<Duration>,\n\n timeout: Duration,\n\n}\n\n\n\nimpl WeatherClient {\n\n pub fn new(cache_length: Option<Duration>, timeout: Duration) -> Self {\n\n WeatherClient {\n\n client: reqwest::Client::new(),\n\n cache_length,\n\n timeout,\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 50, "score": 20.113627343331462 }, { "content": " let sp: Vec<_> = s.split(',').collect();\n\n if sp.len() == 2 {\n\n if let (Ok(lat), Ok(lon)) = (sp[0].parse(), sp[1].parse()) {\n\n return Location::LatLon(lat, lon);\n\n }\n\n\n\n debug!(\n\n \"could not parse '{}' as 'lat,lon', assuming it is a place\",\n\n s\n\n );\n\n }\n\n\n\n Location::Place(s.to_owned())\n\n }\n\n}\n\n\n\npub struct Girouette {\n\n config: DisplayConfig,\n\n cache_length: Option<Duration>,\n\n timeout: Duration,\n", "file_path": "src/lib.rs", "rank": 51, "score": 19.969118604183198 }, { "content": " .context(\"Unable to connect to openweathermap.org\")?;\n\n\n\n if log_enabled!(Level::Trace) {\n\n trace!(\"received response: {}\", std::str::from_utf8(&bytes)?);\n\n }\n\n\n\n match kind {\n\n QueryKind::Current => {\n\n let resp: CResponse = serde_json::from_slice(&bytes)?;\n\n match resp {\n\n CResponse::Success(w) => {\n\n if self.cache_length.is_some() {\n\n if let Err(e) =\n\n self.write_cache(kind, location, language, units, &bytes)\n\n {\n\n warn!(\"error while writing cached response: {}\", e);\n\n }\n\n }\n\n Ok(Response::from_current(w))\n\n }\n", "file_path": "src/lib.rs", "rank": 52, "score": 19.67656680884144 }, { "content": "pub mod client;\n\npub mod location;\n\npub mod manager;\n\n\n\nuse crate::geoclue::client::*;\n\nuse crate::geoclue::location::*;\n\nuse crate::{geoclue::manager::*, Location};\n\nuse anyhow::*;\n\nuse dbus::message::SignalArgs;\n\nuse dbus::nonblock;\n\nuse dbus_tokio::connection;\n\nuse futures_util::*;\n\nuse log::*;\n\nuse std::time::Duration;\n\nuse tokio::time::timeout;\n\n\n\npub async fn get_location(timeout_duration: Duration) -> Result<Location> {\n\n let (resource, conn) = connection::new_system_sync()?;\n\n\n\n tokio::spawn(async {\n", "file_path": "src/geoclue/mod.rs", "rank": 53, "score": 19.188450655165894 }, { "content": " pub fn clean_cache() -> Result<()> {\n\n if let Some(p) = WeatherClient::directories() {\n\n let results = p.cache_dir().join(\"results\");\n\n if results.exists() {\n\n std::fs::remove_dir_all(&results)?;\n\n println!(\"Cleaned cache directory ({})\", results.to_string_lossy());\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn directories() -> Option<ProjectDirs> {\n\n ProjectDirs::from(\"rs\", \"\", \"Girouette\")\n\n }\n\n\n\n fn find_cache_for(\n\n &self,\n\n kind: QueryKind,\n\n location: &Location,\n\n language: Option<&str>,\n", "file_path": "src/lib.rs", "rank": 54, "score": 19.116737351538514 }, { "content": " key: String,\n\n language: Option<String>,\n\n}\n\n\n\nimpl Girouette {\n\n pub fn new(\n\n config: DisplayConfig,\n\n cache_length: Option<Duration>,\n\n timeout: Duration,\n\n key: String,\n\n language: Option<String>,\n\n ) -> Self {\n\n Self {\n\n config,\n\n cache_length,\n\n timeout,\n\n key,\n\n language,\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 55, "score": 19.0701890505946 }, { "content": "\n\n pub async fn display(&self, loc: &Location, out: &mut StandardStream) -> Result<()> {\n\n let mut renderer = Renderer::from(&self.config);\n\n\n\n let kind = renderer.display_kind()?;\n\n\n\n let mut response = Response::empty();\n\n if kind != 
QueryKind::ForeCast {\n\n let res = WeatherClient::new(self.cache_length, self.timeout)\n\n .query(\n\n QueryKind::Current,\n\n loc,\n\n self.key.clone(),\n\n self.language.as_deref(),\n\n self.config.units,\n\n )\n\n .await?;\n\n response.merge(res);\n\n }\n\n let new_loc = if let Location::Place(_) = loc {\n", "file_path": "src/lib.rs", "rank": 56, "score": 19.04576380584565 }, { "content": " let mut current_part = parts.next();\n\n while let Some(part) = current_part {\n\n let value = part.to_lowercase();\n\n buf.push_str(&value);\n\n current_part = parts.next();\n\n if current_part.is_some() {\n\n buf.push('_');\n\n }\n\n }\n\n buf\n\n }\n\n\n\n fn query_cache(\n\n &self,\n\n kind: QueryKind,\n\n location: &Location,\n\n language: Option<&str>,\n\n units: UnitMode,\n\n ) -> Result<Option<Response>> {\n\n if let Some(cache_length) = self.cache_length {\n", "file_path": "src/lib.rs", "rank": 57, "score": 18.96538734031112 }, { "content": " out: &mut StandardStream,\n\n resp: &Response,\n\n language: Option<&str>,\n\n ) -> Result<()> {\n\n if self.display_config.segments.is_empty() {\n\n warn!(\"there are not segments to display!\");\n\n return Ok(());\n\n }\n\n\n\n out.set_color(&self.display_config.base_style)?;\n\n\n\n let env_locale = std::env::var(\"LANG\").ok();\n\n let locale = language\n\n .or_else(|| env_locale.as_deref())\n\n .and_then(|l| {\n\n let l = if let Some(s) = l.split_once('.') {\n\n s.0\n\n } else {\n\n l\n\n };\n", "file_path": "src/segments.rs", "rank": 58, "score": 17.689079169946982 }, { "content": "pub(crate) mod option_color {\n\n use hex::FromHex;\n\n use serde::de::{self, SeqAccess, Visitor};\n\n use serde::ser::SerializeTuple;\n\n use termcolor::Color;\n\n const FIELDS: &[&str] = &[\n\n \"black\", \"blue\", \"green\", \"red\", \"cyan\", \"magenta\", \"yellow\", \"white\",\n\n ];\n\n\n\n struct OVisitor;\n\n\n\n impl<'de> Visitor<'de> for OVisitor {\n\n type Value = Option<Color>;\n\n\n\n fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n f.write_str(\"a color string, an ansi int value or a triple of RGB values\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Option<Color>, E>\n\n where\n", "file_path": "src/serde_utils.rs", "rank": 59, "score": 17.550198643442172 }, { "content": " // ISO 3166 country codes and locale-like codes...\n\n let language = language.map(make_openweather_language_codes);\n\n\n\n match self.query_cache(kind, location, language.as_deref(), units) {\n\n Ok(Some(resp)) => return Ok(resp),\n\n Ok(None) => {}\n\n Err(e) => {\n\n warn!(\"error while looking for cache: {}\", e);\n\n }\n\n }\n\n\n\n self.query_api(kind, location, key, language.as_deref(), units)\n\n .await\n\n }\n\n\n\n async fn query_api(\n\n &self,\n\n kind: QueryKind,\n\n location: &Location,\n\n key: String,\n", "file_path": "src/lib.rs", "rank": 60, "score": 17.41819269540244 }, { "content": " }\n\n }\n\n QueryKind::Both => bail!(\"internal error: query_cache(Both)\"),\n\n }\n\n } else {\n\n info!(\"ignoring expired cached response for {}\", location);\n\n }\n\n } else {\n\n info!(\"no cached response found for {}\", location);\n\n }\n\n }\n\n\n\n Ok(None)\n\n }\n\n\n\n fn write_cache(\n\n &self,\n\n kind: QueryKind,\n\n location: &Location,\n\n language: Option<&str>,\n", "file_path": "src/lib.rs", "rank": 61, "score": 17.306772988391582 }, { "content": " }\n\n\n\n fn visit_i64<E>(self, value: i64) -> Result<Option<Color>, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(Some(Color::Ansi256(value as u8)))\n\n }\n\n\n\n fn 
visit_seq<A>(self, mut seq: A) -> Result<Option<Color>, A::Error>\n\n where\n\n A: SeqAccess<'de>,\n\n {\n\n let r = seq\n\n .next_element()?\n\n .ok_or_else(|| de::Error::invalid_length(0, &self))?;\n\n let g = seq\n\n .next_element()?\n\n .ok_or_else(|| de::Error::invalid_length(1, &self))?;\n\n let b = seq\n", "file_path": "src/serde_utils.rs", "rank": 62, "score": 17.025781629971924 }, { "content": " ///\n\n /// If there is a cached response younger than the duration given as argument, it is returned directly.\n\n /// Otherwise, it queries the API and write the response to the cache for use by a later invocation.\n\n ///\n\n /// NOTE: No response is written to the cache if this option isn't set. The invocation doing the caching and\n\n /// the one potentially querying it *both* need this option set.\n\n ///\n\n /// Recognized durations go from seconds (\"seconds, second, sec, s\") to years (\"years, year, y\").\n\n /// This option overrides the corresponding value from the config.\n\n pub cache: Option<String>,\n\n\n\n #[structopt(short = \"L\", long)]\n\n /// Use this language for location names, weather descriptions and date formatting.\n\n ///\n\n /// This asks OpenWeather to provide location names and weather descriptions\n\n /// in the given language, and uses it to format date and times.\n\n ///\n\n /// Possible values are of the form 'aa_AA' like 'en_US' or 'fr_FR'. Note that\n\n /// OpenWeather only supports a subset of all valid LANG values.\n\n pub language: Option<String>,\n", "file_path": "src/cli.rs", "rank": 63, "score": 16.429778301557704 }, { "content": " units: UnitMode,\n\n bytes: &[u8],\n\n ) -> Result<()> {\n\n let path = self.find_cache_for(kind, location, language, units)?;\n\n debug!(\"writing cache for {}\", location);\n\n std::fs::write(path, bytes)?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub async fn query(\n\n &self,\n\n kind: QueryKind,\n\n location: &Location,\n\n key: String,\n\n language: Option<&str>,\n\n units: UnitMode,\n\n ) -> Result<Response> {\n\n // Adapt between locales and Openweather language codes:\n\n // the codes OW accepts are a mix of ISO 639-1 language codes,\n", "file_path": "src/lib.rs", "rank": 64, "score": 16.34040844593956 }, { "content": " for (i, a) in alerts.iter().enumerate() {\n\n write!(out, \"{}. 
\", i + 1)?;\n\n\n\n let mut seen_tags = HashSet::new();\n\n for t in &a.tags {\n\n let t = t.to_ascii_lowercase();\n\n if !seen_tags.contains(&t) {\n\n match t.as_str() {\n\n \"flood\" => {\n\n display_print!(\n\n out,\n\n conf.display_mode,\n\n \"\\u{e375} \",\n\n \"\\u{1f4a7}\\u{fe0f} \",\n\n \"\"\n\n )\n\n }\n\n \"wind\" => display_print!(\n\n out,\n\n conf.display_mode,\n", "file_path": "src/segments.rs", "rank": 65, "score": 15.58733229690763 }, { "content": " Some(ref value) => FakeColorSpec::serialize(value, ser),\n\n None => ser.serialize_none(),\n\n }\n\n }\n\n}\n\n\n\npub(crate) mod scaled_color {\n\n use serde::de::{self, Visitor};\n\n\n\n struct SVisitor;\n\n\n\n impl<'de> Visitor<'de> for SVisitor {\n\n type Value = ();\n\n\n\n fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n f.write_str(\"either 'scaled' or a style definition\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<(), E>\n\n where\n", "file_path": "src/serde_utils.rs", "rank": 66, "score": 15.449101763427915 }, { "content": "use crate::{segments::*, serde_utils::*, DisplayMode, Location, UnitMode};\n\nuse serde::{Deserialize, Serialize};\n\nuse termcolor::{Color, ColorSpec};\n\n\n\n#[derive(Debug, Default, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct ProgramConfig {\n\n pub key: Option<String>,\n\n\n\n pub location: Option<Location>,\n\n\n\n pub cache: Option<String>,\n\n\n\n pub language: Option<String>,\n\n\n\n pub timeout: Option<String>,\n\n\n\n #[serde(flatten)]\n\n pub display_config: DisplayConfig,\n\n}\n", "file_path": "src/config.rs", "rank": 67, "score": 15.413790920182448 }, { "content": " E: de::Error,\n\n {\n\n match value {\n\n \"scaled\" => Ok(()),\n\n _ => Err(de::Error::invalid_value(\n\n de::Unexpected::Str(value),\n\n &\"scaled\",\n\n )),\n\n }\n\n }\n\n }\n\n\n\n pub(crate) fn deserialize<'de, D>(d: D) -> Result<(), D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n d.deserialize_str(SVisitor)\n\n }\n\n\n\n pub(crate) fn serialize<S>(ser: S) -> Result<S::Ok, S::Error>\n", "file_path": "src/serde_utils.rs", "rank": 68, "score": 14.777901418819171 }, { "content": " let path = self.find_cache_for(kind, location, language, units)?;\n\n\n\n if path.exists() {\n\n let m = std::fs::metadata(&path)?;\n\n let elapsed = m.modified()?.elapsed()?;\n\n if elapsed <= cache_length {\n\n let f = std::fs::File::open(&path)?;\n\n match kind {\n\n QueryKind::Current => {\n\n if let CResponse::Success(resp) = serde_json::from_reader(f)? {\n\n info!(\"using cached response for {}\", location);\n\n\n\n return Ok(Some(Response::from_current(resp)));\n\n }\n\n }\n\n QueryKind::ForeCast => {\n\n if let OResponse::Success(resp) = serde_json::from_reader(f)? 
{\n\n info!(\"using cached response for {}\", location);\n\n\n\n return Ok(Some(Response::from_forecast(*resp)));\n", "file_path": "src/lib.rs", "rank": 69, "score": 14.720827934646191 }, { "content": " display_mode: DisplayMode,\n\n ) -> Result<()> {\n\n display_print!(stdout, display_mode, \"\\u{e372}\", \"P\", \"P\");\n\n\n\n if let Some(ref style) = self.style {\n\n stdout.set_color(style)?;\n\n }\n\n write!(stdout, \" {}\", pressure)?;\n\n stdout.set_color(base_style)?;\n\n write!(stdout, \" hPa\")?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn render(\n\n &self,\n\n out: &mut StandardStream,\n\n conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n", "file_path": "src/segments.rs", "rank": 70, "score": 14.697483314399099 }, { "content": " .map_err(|_| anyhow!(\"geoclue timed-out trying to find your location\"))?\n\n .ok_or_else(|| anyhow!(\"no location\"))?;\n\n\n\n conn.remove_match(incoming.token())\n\n .await\n\n .context(\"D-bus error\")?;\n\n\n\n let location_path = res.1.new;\n\n\n\n trace!(\"location path: {}\", location_path);\n\n\n\n let location = nonblock::Proxy::new(\n\n \"org.freedesktop.GeoClue2\",\n\n &location_path,\n\n Duration::from_secs(1),\n\n conn.clone(),\n\n );\n\n\n\n let lat = location.latitude().await.context(\"D-bus error\")?;\n\n let lon = location.longitude().await.context(\"D-bus error\")?;\n\n\n\n Ok(Location::LatLon(lat, lon))\n\n}\n", "file_path": "src/geoclue/mod.rs", "rank": 71, "score": 14.641636171993124 }, { "content": " &self,\n\n out: &mut StandardStream,\n\n conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n\n let snow = resp.as_current()?.snow.as_ref();\n\n\n\n if let Some(r) = snow {\n\n if let Some(mm) = r.one_h.or(r.three_h) {\n\n display_print!(out, conf.display_mode, \"\\u{f2dc}\", \"\\u{2744}\\u{fe0f}\", \"S\");\n\n if let Some(ref style) = self.style {\n\n out.set_color(style)?;\n\n }\n\n write!(out, \" {:.1} \", mm)?;\n\n out.set_color(conf.base_style)?;\n\n write!(out, \"mm/h\")?;\n\n\n\n return Ok(RenderStatus::Rendered);\n\n }\n\n }\n", "file_path": "src/segments.rs", "rank": 72, "score": 14.26273885308217 }, { "content": "use log::LevelFilter;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt, Debug, Serialize, Deserialize)]\n\n#[structopt(\n\n about = \"Display the current weather using the Openweather API.\",\n\n setting = structopt::clap::AppSettings::DisableVersion,\n\n)]\n\npub struct ProgramOptions {\n\n /// OpenWeather API key (required for anything more than light testing).\n\n ///\n\n /// This option overrides the corresponding value from the config.\n\n #[structopt(short, long)]\n\n pub key: Option<String>,\n\n\n\n #[structopt(short, long, allow_hyphen_values(true))]\n\n /// Location to query (required if not set in config).\n\n ///\n", "file_path": "src/cli.rs", "rank": 73, "score": 14.027597765592025 }, { "content": " }\n\n }\n\n}\n\n\n\npub(crate) mod option_color_spec {\n\n use crate::config::FakeColorSpec;\n\n use termcolor::ColorSpec;\n\n\n\n pub(crate) fn deserialize<'de, D>(d: D) -> Result<Option<ColorSpec>, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n FakeColorSpec::deserialize(d).map(Some)\n\n }\n\n\n\n pub(crate) fn serialize<S>(c: &Option<ColorSpec>, ser: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n match c {\n", "file_path": "src/serde_utils.rs", "rank": 74, "score": 13.914266871211579 }, { "content": " fn render(\n\n &self,\n\n out: &mut StandardStream,\n\n 
conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n\n let humidity = resp.as_current()?.main.humidity;\n\n\n\n self.display_humidity(out, humidity, conf.base_style, conf.display_mode)?;\n\n\n\n Ok(RenderStatus::Rendered)\n\n }\n\n}\n\n\n\n#[derive(Clone, Default, Debug, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct Rain {\n\n pub display_mode: Option<DisplayMode>,\n\n #[serde(with = \"option_color_spec\")]\n\n pub style: Option<ColorSpec>,\n", "file_path": "src/segments.rs", "rank": 75, "score": 13.807064739250354 }, { "content": " format!(\"results/{}{}-{}-{}.json\", prefix, prefix2, lang, suffix)\n\n } else {\n\n format!(\"results/{}{}-{}.json\", prefix, prefix2, suffix)\n\n };\n\n let file = p.cache_dir().join(f);\n\n debug!(\"looking for cache file at '{}'\", file.display());\n\n\n\n if let Some(p) = file.parent() {\n\n std::fs::create_dir_all(p)?;\n\n }\n\n\n\n Ok(file)\n\n } else {\n\n bail!(\"Count not locate project directory!\");\n\n }\n\n }\n\n\n\n fn clean_up_for_path(&self, name: &str) -> String {\n\n let mut buf = String::with_capacity(name.len());\n\n let mut parts = name.split_whitespace();\n", "file_path": "src/lib.rs", "rank": 76, "score": 13.750608940980369 }, { "content": " cloud_cover: u16,\n\n base_style: &ColorSpec,\n\n display_mode: DisplayMode,\n\n ) -> Result<()> {\n\n display_print!(stdout, display_mode, \"\\u{e33d}\", \"\\u{2601}\\u{fe0f}\", \"C\");\n\n\n\n if let Some(ref style) = self.style {\n\n stdout.set_color(style)?;\n\n }\n\n write!(stdout, \" {}\", cloud_cover)?;\n\n stdout.set_color(base_style)?;\n\n write!(stdout, \" %\")?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn render(\n\n &self,\n\n out: &mut StandardStream,\n\n conf: &RenderConf,\n", "file_path": "src/segments.rs", "rank": 77, "score": 13.632374873697096 }, { "content": " arg::RefArg::append(&self.new, i);\n\n }\n\n}\n\n\n\nimpl arg::ReadAll for OrgFreedesktopGeoClue2ClientLocationUpdated {\n\n fn read(i: &mut arg::Iter) -> Result<Self, arg::TypeMismatchError> {\n\n Ok(OrgFreedesktopGeoClue2ClientLocationUpdated {\n\n old: i.read()?,\n\n new: i.read()?,\n\n })\n\n }\n\n}\n\n\n\nimpl dbus::message::SignalArgs for OrgFreedesktopGeoClue2ClientLocationUpdated {\n\n const NAME: &'static str = \"LocationUpdated\";\n\n const INTERFACE: &'static str = \"org.freedesktop.GeoClue2.Client\";\n\n}\n", "file_path": "src/geoclue/client.rs", "rank": 78, "score": 13.616249508876578 }, { "content": " CResponse::OtherInt { cod, message } => {\n\n handle_error(StatusCode::from_u16(cod)?, &message, location)\n\n }\n\n CResponse::OtherString { cod, message } => {\n\n handle_error(cod.parse()?, &message, location)\n\n }\n\n }\n\n }\n\n QueryKind::ForeCast => {\n\n let resp: OResponse = serde_json::from_slice(&bytes)?;\n\n match resp {\n\n OResponse::Success(w) => {\n\n if self.cache_length.is_some() {\n\n if let Err(e) =\n\n self.write_cache(kind, location, language, units, &bytes)\n\n {\n\n warn!(\"error while writing cached response: {}\", e);\n\n }\n\n }\n\n Ok(Response::from_forecast(*w))\n", "file_path": "src/lib.rs", "rank": 79, "score": 13.483399129642262 }, { "content": "pub mod api;\n\npub mod cli;\n\npub mod config;\n\n#[cfg(feature = \"geoclue\")]\n\npub mod geoclue;\n\npub mod segments;\n\nmod serde_utils;\n\n\n\nuse std::{borrow::Cow, fmt::Display, time::Duration};\n\n\n\nuse crate::config::DisplayConfig;\n\nuse anyhow::{bail, Context, Result};\n\nuse api::{current::ApiResponse as CResponse, one_call::ApiResponse as OResponse, Response};\n\nuse 
directories_next::ProjectDirs;\n\nuse log::*;\n\nuse reqwest::StatusCode;\n\nuse segments::Renderer;\n\nuse serde::{Deserialize, Serialize};\n\nuse termcolor::StandardStream;\n\nuse tokio::time::timeout;\n", "file_path": "src/lib.rs", "rank": 80, "score": 13.377919952972213 }, { "content": " E: de::Error,\n\n {\n\n match value {\n\n \"black\" => Ok(Some(Color::Black)),\n\n \"blue\" => Ok(Some(Color::Blue)),\n\n \"green\" => Ok(Some(Color::Green)),\n\n \"red\" => Ok(Some(Color::Red)),\n\n \"cyan\" => Ok(Some(Color::Cyan)),\n\n \"magenta\" => Ok(Some(Color::Magenta)),\n\n \"yellow\" => Ok(Some(Color::Yellow)),\n\n \"white\" => Ok(Some(Color::White)),\n\n a if a.starts_with('#') => match <[u8; 3]>::from_hex(a.trim_start_matches('#')) {\n\n Ok(c) => Ok(Some(Color::Rgb(c[0], c[1], c[2]))),\n\n Err(_) => Err(de::Error::invalid_value(\n\n de::Unexpected::Str(a),\n\n &\"a sharp '#' character followed by 6 hex digits\",\n\n )),\n\n },\n\n _ => Err(de::Error::unknown_field(value, FIELDS)),\n\n }\n", "file_path": "src/serde_utils.rs", "rank": 81, "score": 13.329788052442868 }, { "content": " /// Possible values are:\n\n /// * Location names: \"London, UK\", \"Dubai\"\n\n /// * Geographic coordinates (lat,lon): \"35.68,139.69\"\n\n /// This option overrides the corresponding value from the config.\n\n pub location: Option<String>,\n\n\n\n #[structopt(long)]\n\n /// Use the specified configuration file instead of the default.\n\n ///\n\n /// By default, girouette looks for a configuration file:\n\n ///\n\n /// - on Linux in \"$XDG_CONFIG_HOME/girouette/config.yml\" or \"$HOME/.config/girouette/config.yml\"\n\n ///\n\n /// - on MacOS in \"$HOME/Library/Application Support/rs.Girouette/config.yml\"\n\n ///\n\n /// - on Windows in \"%AppData%\\Girouette\\config\\config.yml\"\n\n pub config: Option<PathBuf>,\n\n\n\n #[structopt(short, long)]\n\n /// Cache responses for this long (e.g. \"1m\", \"2 days 6h\", \"5 sec\"), or `none` to disable it.\n", "file_path": "src/cli.rs", "rank": 82, "score": 13.179284681066278 }, { "content": " impl<'de> Visitor<'de> for SVisitor {\n\n type Value = Vec<Segment>;\n\n\n\n fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n f.write_str(\"a segment name, alone or as key to a mapping of options\")\n\n }\n\n\n\n fn visit_seq<S>(self, mut seq: S) -> Result<Self::Value, S::Error>\n\n where\n\n S: de::SeqAccess<'de>,\n\n {\n\n let mut vec = Vec::new();\n\n while let Some(inner) = seq.next_element()? 
{\n\n match inner {\n\n Inner::Struct(s) => vec.push(s),\n\n Inner::Name(name) => {\n\n vec.push(match name.as_ref() {\n\n \"instant\" => Segment::Instant(Instant::default()),\n\n \"location_name\" => Segment::LocationName(LocationName::default()),\n\n \"temperature\" => Segment::Temperature(Temperature::default()),\n", "file_path": "src/serde_utils.rs", "rank": 83, "score": 12.893471849475052 }, { "content": "}\n\n\n\nimpl Rain {\n\n fn render(\n\n &self,\n\n out: &mut StandardStream,\n\n conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n\n let rain = resp.as_current()?.rain.as_ref();\n\n\n\n if let Some(r) = rain {\n\n if let Some(mm) = r.one_h.or(r.three_h) {\n\n display_print!(out, conf.display_mode, \"\\u{e371}\", \"\\u{2614}\", \"R\");\n\n if let Some(ref style) = self.style {\n\n out.set_color(style)?;\n\n }\n\n write!(out, \" {:.1} \", mm)?;\n\n out.set_color(conf.base_style)?;\n\n write!(out, \"mm/h\")?;\n", "file_path": "src/segments.rs", "rank": 84, "score": 12.517241761685325 }, { "content": " fn render(\n\n &self,\n\n out: &mut StandardStream,\n\n _conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n\n let description = &resp.as_current()?.weather[0].description;\n\n\n\n let description = if let Some(d) = description {\n\n d\n\n } else {\n\n return Ok(RenderStatus::Empty);\n\n };\n\n\n\n if let Some(ref style) = self.style {\n\n out.set_color(style)?;\n\n }\n\n\n\n write!(out, \"{}\", description)?;\n\n\n", "file_path": "src/segments.rs", "rank": 85, "score": 12.492728296745492 }, { "content": " }\n\n}\n\n\n\nimpl std::fmt::Display for Location {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Location::LatLon(lat, lon) => write!(f, \"{}, {}\", lat, lon),\n\n Location::Place(place) => write!(f, \"{}\", place),\n\n }\n\n }\n\n}\n\n\n\npub enum WindType {\n\n Low,\n\n Mid,\n\n High,\n\n}\n\n\n\nimpl Location {\n\n pub fn new(s: &str) -> Location {\n", "file_path": "src/lib.rs", "rank": 86, "score": 12.441237662068389 }, { "content": " out: &mut StandardStream,\n\n display_mode: DisplayMode,\n\n units: UnitMode,\n\n style: &Option<ColorSpec>,\n\n night: bool,\n\n wind: Option<&Wind>,\n\n id: u16,\n\n ) -> Result<RenderStatus> {\n\n if let Some(ref style) = style {\n\n out.set_color(style)?;\n\n }\n\n\n\n display_print!(\n\n out,\n\n display_mode,\n\n {\n\n let wind_type = wind.map_or(WindType::Low, |w| {\n\n let speed = match units {\n\n UnitMode::Metric => w.speed * 3.6,\n\n _ => w.speed,\n", "file_path": "src/segments.rs", "rank": 87, "score": 12.416085528127747 }, { "content": " .next_element()?\n\n .ok_or_else(|| de::Error::invalid_length(2, &self))?;\n\n\n\n Ok(Some(Color::Rgb(r, g, b)))\n\n }\n\n }\n\n\n\n pub(crate) fn deserialize<'de, D>(d: D) -> Result<Option<Color>, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n d.deserialize_any(OVisitor)\n\n }\n\n\n\n #[allow(clippy::trivially_copy_pass_by_ref)]\n\n pub(crate) fn serialize<S>(c: &Option<Color>, ser: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n match c {\n", "file_path": "src/serde_utils.rs", "rank": 88, "score": 12.292716426488328 }, { "content": " language: Option<&str>,\n\n units: UnitMode,\n\n ) -> Result<Response> {\n\n debug!(\"querying {:?} with '{:?}' OpenWeather API\", location, kind);\n\n let mut params = Vec::with_capacity(3);\n\n match location {\n\n Location::LatLon(lat, lon) => {\n\n params.push((\"lat\", lat.to_string()));\n\n params.push((\"lon\", 
lon.to_string()));\n\n }\n\n Location::Place(place) => params.push((\"q\", place.to_string())),\n\n };\n\n\n\n if let Some(language) = language {\n\n params.push((\"lang\", language.to_owned()));\n\n }\n\n\n\n params.push((\"appid\", key));\n\n\n\n params.push((\"units\", units.to_string()));\n", "file_path": "src/lib.rs", "rank": 89, "score": 12.12062508401344 }, { "content": " write!(stdout, \" {}\", unit)?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn render(\n\n &self,\n\n out: &mut StandardStream,\n\n conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n\n let wind = resp.as_current()?.wind.as_ref();\n\n\n\n if let Some(w) = wind {\n\n self.display_wind(out, w, conf)?;\n\n Ok(RenderStatus::Rendered)\n\n } else {\n\n Ok(RenderStatus::Empty)\n\n }\n\n }\n", "file_path": "src/segments.rs", "rank": 90, "score": 11.55392705683688 }, { "content": " let pressure = resp.as_current()?.main.pressure;\n\n\n\n self.display_pressure(out, pressure, conf.base_style, conf.display_mode)?;\n\n\n\n Ok(RenderStatus::Rendered)\n\n }\n\n}\n\n\n\n#[derive(Clone, Default, Debug, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct CloudCover {\n\n pub display_mode: Option<DisplayMode>,\n\n #[serde(with = \"option_color_spec\")]\n\n pub style: Option<ColorSpec>,\n\n}\n\n\n\nimpl CloudCover {\n\n fn display_cover(\n\n &self,\n\n stdout: &mut StandardStream,\n", "file_path": "src/segments.rs", "rank": 91, "score": 11.403137241238388 }, { "content": " let resp = response.as_current()?;\n\n Location::LatLon(resp.coord.lat, resp.coord.lon)\n\n } else {\n\n loc.clone()\n\n };\n\n\n\n if kind != QueryKind::Current {\n\n let res = WeatherClient::new(self.cache_length, self.timeout)\n\n .query(\n\n QueryKind::ForeCast,\n\n &new_loc,\n\n self.key.clone(),\n\n self.language.as_deref(),\n\n self.config.units,\n\n )\n\n .await?;\n\n response.merge(res);\n\n }\n\n\n\n renderer.render(out, &response, self.language.as_deref())?;\n", "file_path": "src/lib.rs", "rank": 92, "score": 11.066915182146358 }, { "content": " }\n\n\n\n out.reset()?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn display_kind(&self) -> Result<QueryKind> {\n\n let mut current = false;\n\n let mut forecast = false;\n\n for s in &self.display_config.segments {\n\n if s.is_forecast() {\n\n forecast = true;\n\n } else {\n\n current = true;\n\n }\n\n }\n\n\n\n match (current, forecast) {\n\n (true, true) => Ok(QueryKind::Both),\n", "file_path": "src/segments.rs", "rank": 93, "score": 11.047440651584008 }, { "content": "use crate::api::Response;\n\nuse crate::{api::current::Wind, DisplayMode, WindType};\n\nuse crate::{config::*, serde_utils::*, QueryKind, UnitMode};\n\nuse anyhow::*;\n\nuse chrono::{Datelike, FixedOffset, Locale, TimeZone, Utc};\n\nuse log::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashSet;\n\nuse std::convert::TryInto;\n\nuse std::io::Write;\n\nuse termcolor::{Color, ColorSpec, StandardStream, WriteColor};\n\n\n\nmacro_rules! 
display_print {\n\n ($out:ident, $display:expr, $a:expr, $b:expr, $c:expr) => {\n\n match $display {\n\n DisplayMode::NerdFonts => write!($out, \"{}\", $a)?,\n\n DisplayMode::Unicode => write!($out, \"{}\", $b)?,\n\n DisplayMode::Ascii => write!($out, \"{}\", $c)?,\n\n }\n\n };\n\n}\n\n\n\npub struct Renderer {\n\n pub display_config: DisplayConfig,\n\n}\n\n\n", "file_path": "src/segments.rs", "rank": 94, "score": 10.937752204798421 }, { "content": " CloudCover(CloudCover),\n\n DailyForecast(DailyForecast),\n\n HourlyForecast(HourlyForecast),\n\n Alerts(Alerts),\n\n DayTime(DayTime),\n\n}\n\n\n\nimpl Segment {\n\n fn render(\n\n &self,\n\n out: &mut StandardStream,\n\n conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n\n match self {\n\n Segment::Instant(i) => i.render(out, conf, resp),\n\n Segment::LocationName(i) => i.render(out, conf, resp),\n\n Segment::Temperature(i) => i.render(out, conf, resp),\n\n Segment::WeatherIcon(i) => i.render(out, conf, resp),\n\n Segment::WeatherDescription(i) => i.render(out, conf, resp),\n", "file_path": "src/segments.rs", "rank": 95, "score": 10.758764651630244 }, { "content": " _conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n\n let name = &resp.as_current()?.name;\n\n\n\n if let Some(ref style) = self.style {\n\n out.set_color(style)?;\n\n }\n\n\n\n write!(out, \"{}\", name)?;\n\n\n\n Ok(RenderStatus::Rendered)\n\n }\n\n}\n\n\n\nconst TEMP_COLORS: [u8; 57] = [\n\n 57, 63, 63, 63, 27, 27, 27, 33, 33, 33, 39, 39, 39, 45, 45, 45, 51, 51, 50, 50, 49, 49, 48, 48,\n\n 47, 47, 46, 46, 46, 82, 82, 82, 118, 118, 118, 154, 154, 154, 190, 190, 190, 226, 226, 226,\n\n 220, 220, 220, 214, 214, 214, 208, 208, 208, 202, 202, 202, 196,\n\n];\n", "file_path": "src/segments.rs", "rank": 96, "score": 10.62193904953305 }, { "content": " fn default() -> Self {\n\n ScaledColor::Scaled\n\n }\n\n}\n\n\n\nimpl Temperature {\n\n fn render(\n\n &self,\n\n out: &mut StandardStream,\n\n conf: &RenderConf,\n\n resp: &Response,\n\n ) -> Result<RenderStatus> {\n\n let resp = resp.as_current()?;\n\n let temp = resp.main.temp;\n\n let feels_like = resp.main.feels_like;\n\n let temp_min = resp.main.temp_min;\n\n let temp_max = resp.main.temp_max;\n\n\n\n let display_mode = self.display_mode.unwrap_or(conf.display_mode);\n\n\n", "file_path": "src/segments.rs", "rank": 97, "score": 10.557970025805858 }, { "content": " &self.style,\n\n night,\n\n Some(&wind),\n\n hour.weather[0].id,\n\n )?;\n\n display_print!(out, conf.display_mode, \" \", \" \", \"\");\n\n\n\n display_temp(&self.temp_style, out, t, conf.base_style, conf.units)?;\n\n\n\n out.set_color(conf.base_style)?;\n\n }\n\n\n\n i += 1;\n\n }\n\n\n\n Ok(RenderStatus::Rendered)\n\n }\n\n}\n\n\n\n#[derive(Clone, Default, Debug, Deserialize, Serialize)]\n", "file_path": "src/segments.rs", "rank": 98, "score": 10.546101435276835 }, { "content": " match &self.style {\n\n ScaledColor::Scaled => {\n\n let speed_color_idx = speed.floor() as usize;\n\n let mut tmp_style = conf.base_style.clone();\n\n stdout.set_color(\n\n tmp_style.set_fg(Some(Color::Ansi256(WIND_COLORS[speed_color_idx]))),\n\n )?;\n\n }\n\n ScaledColor::Spec(Some(style)) => {\n\n stdout.set_color(style)?;\n\n }\n\n _ => {}\n\n };\n\n write!(stdout, \" {:.1}\", speed)?;\n\n stdout.set_color(conf.base_style)?;\n\n let unit = match conf.units {\n\n UnitMode::Standard => \"m/s\",\n\n UnitMode::Metric => \"km/h\",\n\n UnitMode::Imperial => \"mph\",\n\n };\n", "file_path": "src/segments.rs", "rank": 99, "score": 10.524618037743696 } ]
Rust
src/operator/helmert.rs
busstoptaktik/geodesy
2718c094001b0a2168deb4fbaaa09c4ab9f78a7c
#![allow(non_snake_case)] use crate::operator_construction::*; use crate::Context; use crate::CoordinateTuple; use crate::GeodesyError; #[derive(Debug)] pub struct Helmert { R: [[f64; 3]; 3], T0: [f64; 3], R0: [f64; 3], dR: [f64; 3], dT: [f64; 3], t_epoch: f64, t_obs: f64, scale: f64, dscale: f64, exact: bool, position_vector: bool, rotation: bool, inverted: bool, args: OperatorArgs, } fn rotation_matrix(rx: f64, ry: f64, rz: f64, exact: bool, position_vector: bool) -> [[f64; 3]; 3] { let rx = (rx / 3600.).to_radians(); let ry = (ry / 3600.).to_radians(); let rz = (rz / 3600.).to_radians(); let (mut sx, mut sy, mut sz) = (rx, ry, rz); let (mut cx, mut cy, mut cz) = (1.0, 1.0, 1.0); if exact { let scx = rx.sin_cos(); let scy = ry.sin_cos(); let scz = rz.sin_cos(); sx = scx.0; cx = scx.1; sy = scy.0; cy = scy.1; sz = scz.0; cz = scz.1; } let r11 = cy * cz; let mut r12 = cx * sz; let mut r13 = -cx * sy * cz; let r21 = -cy * sz; let mut r22 = cx * cz; let mut r23 = sx * cz; let r31 = sy; let r32 = -sx * cy; let r33 = cx * cy; if exact { r12 += sx * sy * cz; r13 += sx * sz; r22 -= sx * sy * sz; r23 += cx * sy * sz; } if position_vector { return [[r11, r21, r31], [r12, r22, r32], [r13, r23, r33]]; } [[r11, r12, r13], [r21, r22, r23], [r31, r32, r33]] } impl Helmert { fn new(args: &mut OperatorArgs) -> Result<Helmert, GeodesyError> { let x = args.numeric_value("x", 0.0)?; let y = args.numeric_value("y", 0.0)?; let z = args.numeric_value("z", 0.0)?; let rx = args.numeric_value("rx", 0.0)?; let ry = args.numeric_value("ry", 0.0)?; let rz = args.numeric_value("rz", 0.0)?; let dx = args.numeric_value("dx", 0.0)?; let dy = args.numeric_value("dy", 0.0)?; let dz = args.numeric_value("dz", 0.0)?; let drx = args.numeric_value("drx", 0.0)?; let dry = args.numeric_value("dry", 0.0)?; let drz = args.numeric_value("drz", 0.0)?; let t_epoch = args.numeric_value("t_epoch", std::f64::NAN)?; let t_obs = args.numeric_value("t_obs", std::f64::NAN)?; let scale = args.numeric_value("s", 0.0)?; let dscale = args.numeric_value("ds", 0.0)? * 1e-6; let convention = args.value("convention", ""); let exact = args.flag("exact"); let rotation = !((rx, ry, rz) == (0., 0., 0.) 
&& (drx, dry, drz) == (0., 0., 0.)); if rotation { if convention.is_empty() { return Err(GeodesyError::General( "Helmert: Need value for convention when rotating", )); } if convention != "position_vector" && convention != "coordinate_frame" { return Err(GeodesyError::General( "Helmert: value for convention must be one of {position_vector, coordinate_frame}", )); } } let inverted = args.flag("inv"); let argsc = args.clone(); let scale = 1.0 + scale * 1e-6; let T0 = [x, y, z]; let dT = [dx, dy, dz]; let R0 = [rx, ry, rz]; let dR = [drx, dry, drz]; let position_vector = convention == "position_vector"; let R = rotation_matrix(rx, ry, rz, exact, position_vector); Ok(Helmert { R, R0, dR, T0, dT, scale, dscale, t_epoch, t_obs, exact, position_vector, rotation, inverted, args: argsc, }) } pub(crate) fn operator(args: &mut OperatorArgs) -> Result<Operator, GeodesyError> { let op = crate::operator::helmert::Helmert::new(args)?; Ok(Operator(Box::new(op))) } } impl OperatorCore for Helmert { fn fwd(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool { let mut scale = self.scale; let mut R = self.R; let mut T = self.T0; let mut prev_t = std::f64::NAN; for c in operands { if !self.t_epoch.is_nan() { let t = if self.t_obs.is_nan() { c[3] } else { self.t_obs }; #[allow(clippy::float_cmp)] if t != prev_t { prev_t = t; let dt = t - self.t_epoch; T[0] += dt * self.dT[0]; T[1] += dt * self.dT[1]; T[2] += dt * self.dT[2]; let rx = self.R0[0] + dt * self.dR[0]; let ry = self.R0[1] + dt * self.dR[1]; let rz = self.R0[2] + dt * self.dR[2]; if self.rotation { R = rotation_matrix(rx, ry, rz, self.exact, self.position_vector); } scale += dt * self.dscale; } } if self.rotation { let x = c[0] * R[0][0] + c[1] * R[0][1] + c[2] * R[0][2]; let y = c[0] * R[1][0] + c[1] * R[1][1] + c[2] * R[1][2]; let z = c[0] * R[2][0] + c[1] * R[2][1] + c[2] * R[2][2]; c[0] = scale * x + T[0]; c[1] = scale * y + T[1]; c[2] = scale * z + T[2]; continue; } c[0] = scale * c[0] + T[0]; c[1] = scale * c[1] + T[1]; c[2] = scale * c[2] + T[2]; } true } fn inv(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool { let mut scale = self.scale; let mut R = self.R; let mut T = self.T0; let mut prev_t = std::f64::NAN; for c in operands { #[allow(clippy::float_cmp)] if !self.t_epoch.is_nan() { let t = if self.t_obs.is_nan() { c[3] } else { self.t_obs }; if t != prev_t { prev_t = t; let dt = t - self.t_epoch; T[0] += dt * self.dT[0]; T[1] += dt * self.dT[1]; T[2] += dt * self.dT[2]; let rx = self.R0[0] + dt * self.dR[0]; let ry = self.R0[1] + dt * self.dR[1]; let rz = self.R0[2] + dt * self.dR[2]; if self.rotation { R = rotation_matrix(rx, ry, rz, self.exact, self.position_vector); } scale += dt * self.dscale; } } let x = (c[0] - T[0]) / scale; let y = (c[1] - T[1]) / scale; let z = (c[2] - T[2]) / scale; if self.rotation { c[0] = x * R[0][0] + y * R[1][0] + z * R[2][0]; c[1] = x * R[0][1] + y * R[1][1] + z * R[2][1]; c[2] = x * R[0][2] + y * R[1][2] + z * R[2][2]; } else { c[0] = x; c[1] = y; c[2] = z; } } true } fn name(&self) -> &'static str { "helmert" } fn is_inverted(&self) -> bool { self.inverted } fn args(&self, _step: usize) -> &OperatorArgs { &self.args } } #[cfg(test)] mod tests { use crate::operator::operator_factory; #[test] fn helmert() { use super::*; let mut ctx = Context::new(); let mut args = OperatorArgs::new(); args.name("helmert"); args.insert("x", "foo"); args.insert("y", "-96"); args.insert("z", "-120"); let h = operator_factory(&mut args, &mut ctx, 0); assert!(h.is_err()); args.insert("x", 
"-87"); assert_eq!(args.value("x", ""), "-87"); assert_eq!(args.value("y", ""), "-96"); assert_eq!(args.value("z", ""), "-120"); let h = operator_factory(&mut args, &mut ctx, 0).unwrap(); let mut operands = [CoordinateTuple::origin()]; h.fwd(&mut ctx, operands.as_mut()); assert_eq!(operands[0].first(), -87.); assert_eq!(operands[0].second(), -96.); assert_eq!(operands[0].third(), -120.); h.inv(&mut ctx, operands.as_mut()); assert_eq!(operands[0].first(), 0.); assert_eq!(operands[0].second(), 0.); assert_eq!(operands[0].third(), 0.); let definition = "helmert: { convention: coordinate_frame, x: 0.06155, rx: -0.0394924, y: -0.01087, ry: -0.0327221, z: -0.04019, rz: -0.0328979, s: -0.009994 }"; let op = ctx.operation(definition).unwrap(); let GDA94 = CoordinateTuple([-4052051.7643, 4212836.2017, -2545106.0245, 0.0]); let GDA2020 = CoordinateTuple([-4052052.7379, 4212835.9897, -2545104.5898, 0.0]); let mut operands = [GDA94]; ctx.fwd(op, &mut operands); assert!(GDA2020.hypot3(&operands[0]) < 75e-6); ctx.inv(op, &mut operands); assert!(GDA94.hypot3(&operands[0]) < 75e-7); let definition = "helmert: { exact: true, convention: coordinate_frame, x: 0, rx: 0, dx: 0, drx: 0.00150379, y: 0, ry: 0, dy: 0, dry: 0.00118346, z: 0, rz: 0, dz: 0, drz: 0.00120716, s: 0, ds: 0, t_epoch: 2020.0 }"; let op = ctx.operation(definition).unwrap(); let ITRF2014 = CoordinateTuple([-4052052.6588, 4212835.9938, -2545104.6946, 2018.0]); let GDA2020 = CoordinateTuple([-4052052.7373, 4212835.9835, -2545104.5867, 2020.0]); let mut operands = [ITRF2014]; ctx.fwd(op, &mut operands); assert!(GDA2020.hypot3(&operands[0]) < 40e-6); ctx.inv(op, &mut operands); assert!(ITRF2014.hypot3(&operands[0]) < 40e-8); } }
#![allow(non_snake_case)] use crate::operator_construction::*; use crate::Context; use crate::CoordinateTuple; use crate::GeodesyError; #[derive(Debug)] pub struct Helmert { R: [[f64; 3]; 3], T0: [f64; 3], R0: [f64; 3], dR: [f64; 3], dT: [f64; 3], t_epoch: f64, t_obs: f64, scale: f64, dscale: f64, exact: bool, position_vector: bool, rotation: bool, inverted: bool, args: OperatorArgs, } fn rotation_matrix(rx: f64, ry: f64, rz: f64, exact: bool, position_vector: bool) -> [[f64; 3]; 3] { let rx = (rx / 3600.).to_radians(); let ry = (ry / 3600.).to_radians(); let rz = (rz / 3600.).to_radians(); let (mut sx, mut sy, mut sz) = (rx, ry, rz); let (mut cx, mut cy, mut cz) = (1.0, 1.0, 1.0); if exact { let scx = rx.sin_cos(); let scy = ry.sin_cos(); let scz = rz.sin_cos(); sx = scx.0; cx = scx.1; sy = scy.0; cy = scy.1; sz = scz.0; cz = scz.1; } let r11 = cy * cz; let mut r12 = cx * sz; let mut r13 = -cx * sy * cz; let r21 = -cy * sz; let mut r22 = cx * cz; let mut r23 = sx * cz; let r31 = sy; let r32 = -sx * cy; let r33 = cx * cy; if exact { r12 += sx * sy * cz; r13 += sx * sz; r22 -= sx * sy * sz; r23 += cx * sy * sz; } if position_vector { return [[r11, r21, r31], [r12, r22, r32], [r13, r23, r33]]; } [[r11, r12, r13], [r21, r22, r23], [r31, r32, r33]] } impl Helmert { fn new(args: &mut OperatorArgs) -> Result<Helmert, GeodesyError> { let x = args.numeric_value("x", 0.0)?; let y = args.numeric_value("y", 0.0)?; let z = args.numeric_value("z", 0.0)?; let rx = args.numeric_value("rx", 0.0)?; let ry = args.numeric_value("ry", 0.0)?; let rz = args.numeric_value("rz", 0.0)?; let dx = args.numeric_value("dx", 0.0)?; let dy = args.numeric_value("dy", 0.0)?; let dz = args.numeric_value("dz", 0.0)?; let drx = args.numeric_value("drx", 0.0)?; let dry = args.numeric_value("dry", 0.0)?; let drz = args.numeric_value("drz", 0.0)?; let t_epoch = args.numeric_value("t_epoch", std::f64::NAN)?; let t_obs = args.numeric_value("t_obs", std::f64::NAN)?; let scale = args.numeric_value("s", 0.0)?; let dscale = args.numeric_value("ds", 0.0)? * 1e-6; let convention = args.value("convention", ""); let exact = args.flag("exact"); let rotation = !((rx, ry, rz) == (0., 0., 0.) 
&& (drx, dry, drz) == (0., 0., 0.)); if rotation { if convention.is_empty() { return Err(GeodesyError::General( "Helmert: Need value for convention when rotating", )); } if convention != "position_vector" && convention != "coordinate_frame" { return Err(GeodesyError::General( "Helmert: value for convention must be one of {position_vector, coordinate_frame}", )); } } let inverted = args.flag("inv"); let argsc = args.clone(); let scale = 1.0 + scale * 1e-6; let T0 = [x, y, z]; let dT = [dx, dy, dz]; let R0 = [rx, ry, rz]; let dR = [drx, dry, drz]; let position_vector = convention == "position_vector"; let R = rotation_matrix(rx, ry, rz, exact, position_vector); Ok(Helmert { R, R0, dR, T0, dT, scale, dscale, t_epoch, t_obs, exact, position_vector, rotation, inverted, args: argsc, }) } pub(crate) fn operator(args: &mut OperatorArgs) -> Result<Operator, GeodesyError> { let op = crate::operator::helmert::Helmert::new(args)?; Ok(Operator(Box::new(op))) } } impl OperatorCore for Helmert { fn fwd(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool { let mut scale = self.scale; let mut R = self.R; let mut T = self.T0; let mut prev_t = std::f64::NAN; for c in operands { if !self.t_epoch.is_nan() { let t = if self.t_obs.is_nan() { c[3] } else { self.t_obs }; #[allow(clippy::float_cmp)] if t != prev_t { prev_t = t; let dt = t - self.t_epoch; T[0] += dt * self.dT[0]; T[1] += dt * self.dT[1]; T[2] += dt * self.dT[2]; let rx = self.R0[0] + dt * self.dR[0]; let ry = self.R0[1] + dt * self.dR[1]; let rz = self.R0[2] + dt * self.dR[2]; if self.rotation { R = rotation_matrix(rx, ry, rz, self.exact, self.position_vector); } scale += dt * self.dscale; } } if self.rotation { let x = c[0] * R[0][0] + c[1] * R[0][1] + c[2] * R[0][2]; let y = c[0] * R[1][0] + c[1] * R[1][1] + c[2] * R[1][2]; let z = c[0] * R[2][0] + c[1] * R[2][1] + c[2] * R[2][2]; c[0] = scale * x + T[0]; c[1] = scale * y + T[1]; c[2] = scale * z + T[2]; continue; } c[0] = scale * c[0] + T[0]; c[1] = scale * c[1] + T[1]; c[2] = scale * c[2] + T[2]; } true } fn inv(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool { let mut scale = self.scale; let mut R = self.R; let mut T = self.T0; let mut prev_t = std::f64::NAN; for c in operands { #[allow(clippy::float_cmp)] if !self.t_epoch.is_nan() { let t = if self.t_obs.is_nan() { c[3] } else { self.t_obs }; if t != prev_t { prev_t = t; let dt = t - self.t_epoch; T[0] += dt * self.dT[0]; T[1] += dt * self.dT[1]; T[2] += dt * self.dT[2]; let rx = self.R0[0] + dt * self.dR[0]; let ry = self.R0[1] + dt * self.dR[1]; let rz = self.R0[2] + dt * self.dR[2];
scale += dt * self.dscale; } } let x = (c[0] - T[0]) / scale; let y = (c[1] - T[1]) / scale; let z = (c[2] - T[2]) / scale; if self.rotation { c[0] = x * R[0][0] + y * R[1][0] + z * R[2][0]; c[1] = x * R[0][1] + y * R[1][1] + z * R[2][1]; c[2] = x * R[0][2] + y * R[1][2] + z * R[2][2]; } else { c[0] = x; c[1] = y; c[2] = z; } } true } fn name(&self) -> &'static str { "helmert" } fn is_inverted(&self) -> bool { self.inverted } fn args(&self, _step: usize) -> &OperatorArgs { &self.args } } #[cfg(test)] mod tests { use crate::operator::operator_factory; #[test] fn helmert() { use super::*; let mut ctx = Context::new(); let mut args = OperatorArgs::new(); args.name("helmert"); args.insert("x", "foo"); args.insert("y", "-96"); args.insert("z", "-120"); let h = operator_factory(&mut args, &mut ctx, 0); assert!(h.is_err()); args.insert("x", "-87"); assert_eq!(args.value("x", ""), "-87"); assert_eq!(args.value("y", ""), "-96"); assert_eq!(args.value("z", ""), "-120"); let h = operator_factory(&mut args, &mut ctx, 0).unwrap(); let mut operands = [CoordinateTuple::origin()]; h.fwd(&mut ctx, operands.as_mut()); assert_eq!(operands[0].first(), -87.); assert_eq!(operands[0].second(), -96.); assert_eq!(operands[0].third(), -120.); h.inv(&mut ctx, operands.as_mut()); assert_eq!(operands[0].first(), 0.); assert_eq!(operands[0].second(), 0.); assert_eq!(operands[0].third(), 0.); let definition = "helmert: { convention: coordinate_frame, x: 0.06155, rx: -0.0394924, y: -0.01087, ry: -0.0327221, z: -0.04019, rz: -0.0328979, s: -0.009994 }"; let op = ctx.operation(definition).unwrap(); let GDA94 = CoordinateTuple([-4052051.7643, 4212836.2017, -2545106.0245, 0.0]); let GDA2020 = CoordinateTuple([-4052052.7379, 4212835.9897, -2545104.5898, 0.0]); let mut operands = [GDA94]; ctx.fwd(op, &mut operands); assert!(GDA2020.hypot3(&operands[0]) < 75e-6); ctx.inv(op, &mut operands); assert!(GDA94.hypot3(&operands[0]) < 75e-7); let definition = "helmert: { exact: true, convention: coordinate_frame, x: 0, rx: 0, dx: 0, drx: 0.00150379, y: 0, ry: 0, dy: 0, dry: 0.00118346, z: 0, rz: 0, dz: 0, drz: 0.00120716, s: 0, ds: 0, t_epoch: 2020.0 }"; let op = ctx.operation(definition).unwrap(); let ITRF2014 = CoordinateTuple([-4052052.6588, 4212835.9938, -2545104.6946, 2018.0]); let GDA2020 = CoordinateTuple([-4052052.7373, 4212835.9835, -2545104.5867, 2020.0]); let mut operands = [ITRF2014]; ctx.fwd(op, &mut operands); assert!(GDA2020.hypot3(&operands[0]) < 40e-6); ctx.inv(op, &mut operands); assert!(ITRF2014.hypot3(&operands[0]) < 40e-8); } }
if self.rotation { R = rotation_matrix(rx, ry, rz, self.exact, self.position_vector); }
if_condition
[ { "content": "/// Handle instantiation of built-in operators.\n\nfn builtins(ctx: &mut Context, args: &mut OperatorArgs) -> Result<Operator, GeodesyError> {\n\n // Pipelines are not characterized by the name \"pipeline\", but simply by containing steps.\n\n if let Ok(steps) = args.numeric_value(\"_nsteps\", 0.0) {\n\n if steps > 0.0 {\n\n match crate::operator::pipeline::Pipeline::new(args, ctx) {\n\n Err(err) => {\n\n ctx.error(&err.to_string(), \"pipeline\");\n\n return Err(err);\n\n }\n\n Ok(ok) => {\n\n return Ok(Operator(Box::new(ok)));\n\n }\n\n }\n\n }\n\n }\n\n\n\n // The operator name may be prefixed with \"builtin_\", so operator-named\n\n // macros can delegate the hard work to the operators they shadow.\n\n let mut opname = args.name.clone().to_lowercase();\n\n if opname.starts_with(\"builtin_\") {\n", "file_path": "src/operator/mod.rs", "rank": 1, "score": 131908.03735265275 }, { "content": "/// Expand gys ARGS and translate to YAML\n\nfn expand_gys(definition: &str, args: &mut OperatorArgs) -> String {\n\n let mut gysargs = String::new();\n\n for (key, value) in &args.args {\n\n if key == \"ellps\" || key == \"_definition\" {\n\n continue;\n\n }\n\n let elt = format!(\" {key}:{value}\", key = key, value = value);\n\n gysargs += &elt;\n\n }\n\n let definition = definition.replace(\"ARGS\", &gysargs);\n\n\n\n // Then translate to YAML and return\n\n Context::gys_to_yaml(&definition)\n\n}\n\n\n\n// --------------------------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::CoordinateTuple;\n", "file_path": "src/operator/mod.rs", "rank": 2, "score": 92777.20580533835 }, { "content": "fn main() {\n\n let mut ctx = geodesy::Context::new();\n\n ctx.register_operator(\"geodesic_shift\", GeodesicShift::operator);\n\n let op = \"geodesic_shift: {lat_0: 55, lon_0: 12, lat_1: 48, lon_1: 16.}\";\n\n\n\n let cph_to_vie = match ctx.operation(op) {\n\n Ok(value) => value,\n\n _ => {\n\n println!(\"Awful!\");\n\n return;\n\n }\n\n };\n\n\n\n // Same test coordinates as in example 00, but no conversion to radians.\n\n let cph = CoordinateTuple::geo(55., 12., 0., 0.); // Copenhagen\n\n let osl = CoordinateTuple::geo(60., 10., 0., 0.); // Oslo\n\n let sth = CoordinateTuple::geo(59., 18., 0., 0.); // Stockholm\n\n let hel = CoordinateTuple::geo(60., 25., 0., 0.); // Helsinki\n\n\n\n let mut data = [osl, cph, sth, hel];\n", "file_path": "examples/04-rotating_the_earth.rs", "rank": 3, "score": 73015.1680044385 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct CoordinateOrderDescriptor {\n\n post: [usize; 4],\n\n mult: [f64; 4],\n\n noop: bool,\n\n}\n\n\n", "file_path": "src/operator/adapt.rs", "rank": 4, "score": 44421.166980739355 }, { "content": "fn main() {\n\n // The CoordinateTuple type is much used, so\n\n // we give it a very brief abbreviation\n\n use geodesy::CoordinateTuple as C;\n\n\n\n // The context is the entry point to all transformation functionality:\n\n let mut ctx = geodesy::Context::new();\n\n // The concept of a \"context data structure\" will be well known to\n\n // PROJ users, where the context plays a somewhat free-flowing role,\n\n // and only becomes truly visible in multithreaded cases.\n\n // In Rust Geodesy, the context plays a much more visible role, as\n\n // most transformation functionality is implemented directly as\n\n // methods of the context data structure.\n\n\n\n // We need some coordinates to test the code. 
The convenience methods\n\n // `gis` and `geo` produces a 4D coordinate tuple and automatically handles\n\n // conversion of the angular parts to radians, and geographical coordinates\n\n // in latitude/longitude order, to the GIS convention of longitude/latitude.\n\n let cph = C::gis(12., 55., 0., 0.); // Copenhagen\n\n let osl = C::gis(10., 60., 0., 0.); // Oslo\n", "file_path": "examples/00-transformations.rs", "rank": 5, "score": 42738.80844981363 }, { "content": "/// The core functionality exposed by the individual operator implementations.\n\n/// This is not immediately intended for application program consumption: The\n\n/// actual API is in the `impl`ementation for the [`Operator`](Operator) newtype struct,\n\n/// which builds on this `trait` (which only holds `pub`ness in order to support\n\n/// construction of user-defined operators).\n\npub trait OperatorCore {\n\n fn fwd(&self, ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool;\n\n\n\n // implementations must override at least one of {inv, invertible}\n\n #[allow(unused_variables)]\n\n fn inv(&self, ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool {\n\n ctx.error(self.name(), \"Operator not invertible\");\n\n false\n\n }\n\n\n\n fn invertible(&self) -> bool {\n\n true\n\n }\n\n\n\n fn is_noop(&self) -> bool {\n\n false\n\n }\n\n\n\n // operate fwd/inv, taking operator inversion into account.\n\n fn operate(&self, ctx: &mut Context, operands: &mut [CoordinateTuple], forward: bool) -> bool {\n", "file_path": "src/operator/mod.rs", "rank": 6, "score": 42697.30015672036 }, { "content": "fn main() {\n\n // In example 00, we saw that the `Context` data structure is the\n\n // coordinating element for all things related to transformations\n\n // in Rust Geodesy. For generic geometric geodesy the same can be\n\n // said about the `Ellipsoid`. So to do anything, we must first\n\n // instantiate an `Ellipsoid`. 
We can do that in two ways - either\n\n // by asking, by name, for one of the built in ellipsoids, or by\n\n // providing our own ellipsoid parameters:\n\n\n\n // The GRS 1980 ellipsoid is built in, so we use the ::named function.\n\n let GRS80 = geodesy::Ellipsoid::named(\"GRS80\");\n\n\n\n // The Maupertuis 1738 ellipsoid is not built in, so we provide `a`,\n\n // the semimajor axis, and `f`, the flattening to the `new()`\n\n // constructor.\n\n let Mau38 = geodesy::Ellipsoid::new(6_397_300., 1.0 / 191.);\n\n\n\n // Now, let's compute som ancillary ellipsoidal parameters:\n\n let E = GRS80.linear_eccentricity();\n\n let b = GRS80.semiminor_axis();\n", "file_path": "examples/01-geometric_geodesy.rs", "rank": 7, "score": 41124.99157760569 }, { "content": "fn main() {\n\n let mut ctx = geodesy::Context::new();\n\n ctx.register_operator(\"add42\", Add42::operator);\n\n\n\n let add42 = match ctx.operation(\"add42: {}\") {\n\n Ok(value) => value,\n\n _ => {\n\n println!(\"Awful!\");\n\n return;\n\n }\n\n };\n\n\n\n // Same test coordinates as in example 00, but no conversion to radians.\n\n let cph = CoordinateTuple::raw(12., 55., 0., 0.); // Copenhagen\n\n let osl = CoordinateTuple::raw(10., 60., 0., 0.); // Oslo\n\n let sth = CoordinateTuple::raw(59., 18., 0., 0.); // Stockholm\n\n let hel = CoordinateTuple::raw(60., 25., 0., 0.); // Helsinki\n\n\n\n let mut data = [osl, cph, sth, hel];\n\n\n", "file_path": "examples/03-user_defined_operators.rs", "rank": 8, "score": 39659.566771897706 }, { "content": "fn main() {\n\n use geodesy::CoordinateTuple as C;\n\n let mut ctx = geodesy::Context::new();\n\n\n\n // Same test coordinates as in example 00.\n\n let cph = C::gis(12., 55., 0., 0.); // Copenhagen\n\n let osl = C::gis(10., 60., 0., 0.); // Oslo\n\n let sth = C::geo(59., 18., 0., 0.); // Stockholm\n\n let hel = C::geo(60., 25., 0., 0.); // Helsinki\n\n\n\n let mut data = [osl, cph, sth, hel];\n\n\n\n // In example 00, we instantiated a pipeline comprising a Helmert\n\n // transformation sandwiched between conversions between geodetic/\n\n // cartesian coordinates.\n\n // Since this cartesian|helmert|geodetic triplet is quite useful in\n\n // its own right, then why not create a macro, making it immediately\n\n // available under the name `geohelmert`?\n\n\n\n let geohelmert_macro_text = \"pipeline: {\n", "file_path": "examples/02-user_defined_macros.rs", "rank": 9, "score": 39659.566771897706 }, { "content": "#[allow(clippy::float_cmp)]\n\nfn combine_descriptors(\n\n from: &CoordinateOrderDescriptor,\n\n to: &CoordinateOrderDescriptor,\n\n) -> CoordinateOrderDescriptor {\n\n let mut give = CoordinateOrderDescriptor::default();\n\n for i in 0..4 {\n\n give.mult[i] = from.mult[i] / to.mult[i];\n\n give.post[i] = from.post.iter().position(|&p| p == to.post[i]).unwrap();\n\n }\n\n give.noop = give.mult == [1.0; 4] && give.post == [0_usize, 1, 2, 3];\n\n give\n\n}\n\n\n\nimpl Adapt {\n\n pub fn new(args: &mut OperatorArgs) -> Result<Adapt, GeodesyError> {\n\n let inverted = args.flag(\"inv\");\n\n\n\n // What we go `from` and what we go `to` both defaults to the internal\n\n // representation - i.e. 
\"do nothing\", neither on in- or output.\n\n let mut from = args.value(\"from\", \"enut\");\n", "file_path": "src/operator/adapt.rs", "rank": 10, "score": 39659.566771897706 }, { "content": " pub fn operator(args: &mut OperatorArgs) -> Result<Operator, GeodesyError> {\n\n let op = GeodesicShift::new(args)?;\n\n Ok(Operator(Box::new(op)))\n\n }\n\n}\n\n\n\nimpl OperatorCore for GeodesicShift {\n\n fn fwd(&self, _ctx: &mut Context, operands: &mut [CoordinateTuple]) -> bool {\n\n for coord in operands {\n\n let res = self.ellps.geodesic_fwd(&coord, self.bearing, self.distance);\n\n coord[0] = res[0];\n\n coord[1] = res[1];\n\n }\n\n true\n\n }\n\n\n\n // This operator is not invertible (because the return azimuth depends\n\n // on the destination), so we implement `invertible()` as false, let the\n\n // empty default implementation from the trait take care of `inv()`, and\n\n // leave it for a rainy day to implement an iterative inverse solution.\n", "file_path": "examples/04-rotating_the_earth.rs", "rank": 11, "score": 33533.25102820458 }, { "content": "// The technique implemented here is not in general geodetic use,\n\n// so consider it more of a geodetic pun in the pursuit of a reasonable\n\n// answer to the oft-occurring question \"If we shift Copenhagen to Vienna,\n\n// where would Helsinki end up?\".\n\n//\n\n// Also note that since the return-bearing depends on the destination,\n\n// this operator is **not** directly invertible (although an iterative\n\n// solution is feasible)\n\n\n\nuse geodesy::{operator_construction::*, GeodesyError};\n\nuse geodesy::{Context, CoordinateTuple, Ellipsoid};\n\n\n\npub struct GeodesicShift {\n\n args: OperatorArgs,\n\n inverted: bool,\n\n\n\n ellps: Ellipsoid,\n\n\n\n bearing: f64,\n\n distance: f64,\n", "file_path": "examples/04-rotating_the_earth.rs", "rank": 12, "score": 33526.35739557385 }, { "content": "}\n\n\n\nimpl GeodesicShift {\n\n fn new(args: &mut OperatorArgs) -> Result<GeodesicShift, GeodesyError> {\n\n let ellps = Ellipsoid::named(&args.value(\"ellps\", \"GRS80\"));\n\n let inverted = args.flag(\"inv\");\n\n\n\n // Coordinate of the origin\n\n let lat_0 = args.numeric_value(\"lat_0\", std::f64::NAN)?;\n\n let lon_0 = args.numeric_value(\"lon_0\", std::f64::NAN)?;\n\n\n\n // Coordinate of the target\n\n let lat_1 = args.numeric_value(\"lat_1\", std::f64::NAN)?;\n\n let lon_1 = args.numeric_value(\"lon_1\", std::f64::NAN)?;\n\n\n\n if [lat_0, lon_0, lat_1, lon_1].iter().any(|&f| f.is_nan()) {\n\n return Err(GeodesyError::General(\n\n \"Missing lat_0, lon_0, lat_1 or lon_1\",\n\n ));\n\n }\n", "file_path": "examples/04-rotating_the_earth.rs", "rank": 13, "score": 33524.349185865714 }, { "content": " fn invertible(&self) -> bool {\n\n false\n\n }\n\n\n\n fn name(&self) -> &'static str {\n\n \"geodesic_shift\"\n\n }\n\n\n\n fn is_inverted(&self) -> bool {\n\n self.inverted\n\n }\n\n\n\n fn args(&self, _step: usize) -> &OperatorArgs {\n\n &self.args\n\n }\n\n}\n\n\n", "file_path": "examples/04-rotating_the_earth.rs", "rank": 14, "score": 33518.578501113356 }, { "content": "// examples/04-rotating_the_earth\n\n\n\n// See also 03-user_defined_operators.rs\n\n// Run with:\n\n// cargo run --example 04-rotating_the_earth\n\n\n\n// In this example we implement a user defined operator. 
In contrast\n\n// to the previous example in 03-user_defined_operators.rs, this one\n\n// actually carries out something marginally geodetically useful: It\n\n// formally shifts the geodetic reference frame, such that a certain\n\n// surface point `A` ends where another point `B` used to be. And it\n\n// does so using the **distance and bearing** between A and B, so\n\n// while it may sound somewhat like a Helmert transformation, it really\n\n// isnt: This operator moves all points the same amount, and in the same\n\n// direction, **as measured on the *surface* of the earth**, i.e. taking\n\n// care of variations in local ellipsoidal curvature. The Helmert\n\n// transformation on the other hand, works in cartesian coordinates, and\n\n// generally moves points on the earth's surface away from the surface\n\n// when the system rotates.\n\n//\n", "file_path": "examples/04-rotating_the_earth.rs", "rank": 15, "score": 33516.937073576504 }, { "content": "\n\n // Now find the distance and bearing between the origin and the target\n\n let origin = CoordinateTuple::geo(lat_0, lon_0, 0., 0.);\n\n let target = CoordinateTuple::geo(lat_1, lon_1, 0., 0.);\n\n\n\n let d = ellps.geodesic_inv(&origin, &target);\n\n let bearing = d[0];\n\n let distance = d[2];\n\n\n\n Ok(GeodesicShift {\n\n args: args.clone(),\n\n ellps,\n\n inverted,\n\n bearing,\n\n distance,\n\n })\n\n }\n\n\n\n // This is the interface to the Rust Geodesy library: Construct a\n\n // GeodesicShift element, and wrap it properly for consumption.\n", "file_path": "examples/04-rotating_the_earth.rs", "rank": 16, "score": 33511.214166507256 }, { "content": "\n\n // Now do the transformation\n\n ctx.fwd(cph_to_vie, &mut data);\n\n println!(\"cph_to_vie (fwd):\");\n\n let mut result = data.clone();\n\n CoordinateTuple::geo_all(&mut result);\n\n for coord in result {\n\n println!(\" {:?}\", coord);\n\n }\n\n\n\n // And assert there is no way back...\n\n assert_eq!(false, ctx.inv(cph_to_vie, &mut data));\n\n}\n", "file_path": "examples/04-rotating_the_earth.rs", "rank": 17, "score": 33509.100860852785 }, { "content": "use crate::operator::OperatorCore;\n\nuse crate::Context;\n\nuse crate::CoordinateTuple;\n\n\n\nimpl Context {\n\n /// Roundtrip test that `operation` yields `results` when given `operands`.\n\n #[allow(clippy::too_many_arguments)]\n\n pub fn test(\n\n operation: &str,\n\n fwd_metric: u8,\n\n fwd_delta: f64,\n\n inv_metric: u8,\n\n inv_delta: f64,\n\n operands: &mut [CoordinateTuple],\n\n results: &mut [CoordinateTuple],\n\n ) -> bool {\n\n let mut ctx = Context::new();\n\n let op = ctx.operation(operation);\n\n if op.is_err() {\n\n println!(\"{}\", ctx.report());\n", "file_path": "src/context/test.rs", "rank": 36, "score": 32562.04108192576 }, { "content": "\n\n #[test]\n\n fn operate() {\n\n use crate::Context;\n\n use crate::CoordinateTuple;\n\n\n\n let pipeline = \"ed50_etrs89: {\n\n steps: [\n\n cart: {ellps: intl},\n\n helmert: {x: -87, y: -96, z: -120},\n\n cart: {inv: true, ellps: GRS80}\n\n ]\n\n }\";\n\n\n\n let mut ctx = Context::new();\n\n let op = ctx.operation(pipeline);\n\n assert!(op.is_ok());\n\n let op = op.unwrap();\n\n let geo = CoordinateTuple::gis(12., 55., 100., 0.);\n\n let mut operands = [geo];\n", "file_path": "src/context/mod.rs", "rank": 37, "score": 32557.306407898384 }, { "content": " operator.operate(self, operands, forward)\n\n }\n\n\n\n pub fn operate(\n\n &mut self,\n\n operation: usize,\n\n operands: &mut [CoordinateTuple],\n\n forward: bool,\n\n ) -> bool {\n\n if operation >= 
self.operations.len() {\n\n self.last_failing_operation = String::from(\"Invalid\");\n\n self.cause = String::from(\"Attempt to access an invalid operator from context\");\n\n return false;\n\n }\n\n let mut i = 0_usize;\n\n let mut result = true;\n\n for chunk in operands.chunks_mut(Self::CHUNK_SIZE) {\n\n // Need a bit more std::thread-Rust-fu to do actual mutithreading.\n\n // For now, we just split the input data in chunks, process them\n\n // and verify that the parallel stack-functionality works.\n", "file_path": "src/context/mod.rs", "rank": 38, "score": 32554.509288627756 }, { "content": " result &= self.minions[i]._operate(&self.operations[operation], chunk, forward);\n\n self.minions[i].stack.clear();\n\n i = (i + 1) % Self::CHUNKS;\n\n }\n\n result\n\n }\n\n\n\n /// Forward operation.\n\n pub fn fwd(&mut self, operation: usize, operands: &mut [CoordinateTuple]) -> bool {\n\n self.operate(operation, operands, true)\n\n }\n\n\n\n /// Inverse operation.\n\n pub fn inv(&mut self, operation: usize, operands: &mut [CoordinateTuple]) -> bool {\n\n self.operate(operation, operands, false)\n\n }\n\n\n\n pub fn error(&mut self, which: &str, why: &str) {\n\n self.last_failing_operation = String::from(which);\n\n self.cause = String::from(why);\n", "file_path": "src/context/mod.rs", "rank": 39, "score": 32552.083698448463 }, { "content": "use crate::Context;\n\n\n\nimpl Context {\n\n /// Convert \"Geodetic YAML Shorthand\" to YAML\n\n pub fn gys_to_yaml(gys: &str) -> String {\n\n let lines = gys.lines();\n\n let mut s = Vec::new();\n\n for line in lines {\n\n if line.trim().starts_with('#') {\n\n continue;\n\n }\n\n s.push(line);\n\n }\n\n let gys = s.join(\"\\n\").trim().to_string();\n\n\n\n // Appears to be YAML already - do nothing!\n\n if !Context::is_gys(&gys) {\n\n return gys;\n\n }\n\n\n", "file_path": "src/context/gys.rs", "rank": 40, "score": 32551.607992450772 }, { "content": " steps: [\n\n cart: {ellps: intl},\n\n helmert: {x: -87, y: -96, z: -120},\n\n cart: {inv: true, ellps: GRS80}\n\n ]\n\n }\";\n\n\n\n // Same pipeline in Geodetic YAML Shorthand (GYS), with some nasty\n\n // inline comments to stress test gys_to_yaml().\n\n let gys = \"# bla bla\\n\\n cart ellps: intl # another comment ending at newline\\n | helmert x:-87 y:-96 z:-120 # inline comment ending at step, not at newline | cart inv ellps:GRS80\";\n\n\n\n // Check that GYS instantiates exactly as the corresponding YAML\n\n let op_yaml = ctx.operation(pipeline).unwrap();\n\n let op_gys = ctx.operation(gys).unwrap();\n\n\n\n let copenhagen = C::geo(55., 12., 0., 0.);\n\n let stockholm = C::geo(59., 18., 0., 0.);\n\n let mut yaml_data = [copenhagen, stockholm];\n\n let mut gys_data = [copenhagen, stockholm];\n\n\n", "file_path": "src/context/gys.rs", "rank": 41, "score": 32550.67124851827 }, { "content": " {\n\n return true;\n\n }\n\n\n\n // Otherwise not a GYS - hopefully it's YAML then!\n\n false\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n #[test]\n\n fn gys() {\n\n use crate::Context;\n\n use crate::CoordinateTuple as C;\n\n\n\n let mut ctx = Context::new();\n\n\n\n // Test the corner case of giving just \"inv\" as operation name\n", "file_path": "src/context/gys.rs", "rank": 42, "score": 32550.436810963198 }, { "content": " continue;\n\n }\n\n println!(\n\n \"Failure in forward test[{}]: delta = {:.4e} (expected delta < {:e})\",\n\n i, delta, fwd_delta\n\n );\n\n println!(\" got {:?}\", operands[i]);\n\n println!(\" expected {:?}\", results[i]);\n\n return false;\n\n }\n\n\n\n if 
!ctx.operations[op].invertible() {\n\n return true;\n\n }\n\n\n\n // Roundtrip\n\n if !ctx.inv(op, results) {\n\n println!(\"{}\", ctx.report());\n\n return false;\n\n }\n", "file_path": "src/context/test.rs", "rank": 43, "score": 32550.047046516265 }, { "content": " }\n\n\n\n pub fn report(&mut self) -> String {\n\n format!(\n\n \"Last failure in {}: {}\\n{}\",\n\n self.last_failing_operation, self.cause, self.last_failing_operation_definition\n\n )\n\n }\n\n}\n\n\n\n//----------------------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn operand() {\n\n use crate::Context;\n\n let ctx = Context::new();\n\n assert_eq!(ctx.stack.len(), 0);\n\n }\n", "file_path": "src/context/mod.rs", "rank": 44, "score": 32549.413673373867 }, { "content": " ctx.fwd(op_yaml, &mut yaml_data);\n\n ctx.fwd(op_gys, &mut gys_data);\n\n\n\n C::geo_all(&mut yaml_data);\n\n C::geo_all(&mut gys_data);\n\n\n\n // We assert that the difference is exactly zero, since the operations\n\n // should be identical. But float equality comparisons are frowned at...\n\n assert!(yaml_data[0].hypot3(&gys_data[0]) < 1e-30);\n\n assert!(yaml_data[1].hypot3(&gys_data[1]) < 1e-30);\n\n }\n\n}\n", "file_path": "src/context/gys.rs", "rank": 45, "score": 32547.975616637887 }, { "content": " continue;\n\n };\n\n\n\n // Ultra compact notation: key:value, no whitespace\n\n if e.contains(':') {\n\n yaml += &e.replace(\":\", \": \");\n\n if i + 1 < n {\n\n yaml += \", \";\n\n }\n\n continue;\n\n }\n\n\n\n // Key with no value? provide \"true\"\n\n yaml += &e;\n\n yaml += \": true\";\n\n if i + 1 < n {\n\n yaml += \", \";\n\n }\n\n }\n\n yaml += \"}\";\n", "file_path": "src/context/gys.rs", "rank": 46, "score": 32546.556263405957 }, { "content": " return false;\n\n }\n\n let op = op.unwrap();\n\n\n\n // We need a copy of the operands as \"expected results\" in the roundtrip case\n\n // Note that the .to_vec() method actually copies, so .clone() is not needed.\n\n let roundtrip = operands.to_vec();\n\n\n\n // Forward test\n\n if !ctx.fwd(op, operands) {\n\n println!(\"{}\", ctx.report());\n\n return false;\n\n }\n\n for i in 0..operands.len() {\n\n let delta = match fwd_metric {\n\n 0 => operands[i].hypot2(&results[i]),\n\n 2 => operands[i].hypot2(&results[i]),\n\n _ => operands[i].hypot3(&results[i]),\n\n };\n\n if delta < fwd_delta {\n", "file_path": "src/context/test.rs", "rank": 47, "score": 32546.318493922412 }, { "content": "\n\n ctx.fwd(op, &mut operands);\n\n let result = operands[0].to_degrees();\n\n assert!((result[0] - 11.998815342385206861).abs() < 1e-10);\n\n assert!((result[1] - 54.999382648950991381).abs() < 1e-10);\n\n\n\n ctx.inv(op, &mut operands);\n\n let result = operands[0].to_degrees();\n\n assert!((result[0] - 12.).abs() < 1e-12);\n\n assert!((result[1] - 55.).abs() < 1e-12);\n\n }\n\n}\n", "file_path": "src/context/mod.rs", "rank": 48, "score": 32546.254366527322 }, { "content": " Context {\n\n stack: Vec::new(),\n\n minions: Vec::new(),\n\n last_failing_operation_definition: String::new(),\n\n last_failing_operation: String::new(),\n\n cause: String::new(),\n\n user_defined_operators: HashMap::new(),\n\n user_defined_macros: HashMap::new(),\n\n operations: Vec::new(),\n\n pile: thepile,\n\n }\n\n }\n\n\n\n // Parallel execution helper for `operate`, below\n\n fn _operate(\n\n &mut self,\n\n operator: &Operator,\n\n operands: &mut [CoordinateTuple],\n\n forward: bool,\n\n ) -> bool {\n", "file_path": "src/context/mod.rs", "rank": 49, "score": 
32545.62461174592 }, { "content": " .to_string();\n\n let mut elements: Vec<&str> = strip.split_whitespace().collect();\n\n let n = elements.len();\n\n if n == 0 {\n\n return String::from(\"Error: Empty step!\");\n\n }\n\n\n\n // changing indent after use to get linebreaks after the first step\n\n yaml += indent;\n\n indent = \",\\n \";\n\n\n\n yaml += elements[0];\n\n yaml += \":\";\n\n\n\n // No args? Then insert an empty argument list\n\n if n == 1 {\n\n yaml += \" {}\";\n\n continue;\n\n }\n\n\n", "file_path": "src/context/gys.rs", "rank": 50, "score": 32545.273357714515 }, { "content": "}\n\n\n\nmod gys;\n\nmod test;\n\nmod user_defined;\n\n\n\nimpl Default for Context {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Context {\n\n /// Number of chunks to process in (principle in) parallel.\n\n const CHUNKS: usize = 3;\n\n\n\n /// Maximum size of each chunk.\n\n const CHUNK_SIZE: usize = 1000;\n\n\n\n pub fn new() -> Context {\n", "file_path": "src/context/mod.rs", "rank": 51, "score": 32545.01684964403 }, { "content": " // Handle args\n\n yaml += \" {\";\n\n\n\n for i in 1..n {\n\n // We constructed a key-value par in last iteration?\n\n if elements[i].is_empty() {\n\n continue;\n\n }\n\n let e = elements[i].to_string();\n\n if e.ends_with(':') {\n\n if i == n - 1 {\n\n return String::from(\"Missing value for key '\") + &e + \"'\";\n\n }\n\n yaml += &e;\n\n yaml += \" \";\n\n yaml += elements[i + 1];\n\n if i + 2 < n {\n\n yaml += \", \";\n\n }\n\n elements[i + 1] = \"\";\n", "file_path": "src/context/gys.rs", "rank": 52, "score": 32544.493264854606 }, { "content": " }\n\n\n\n if nsteps > 1 {\n\n yaml += \"\\n ]\\n}\";\n\n }\n\n\n\n yaml\n\n }\n\n\n\n // True if a str appears to be in GYS format\n\n pub fn is_gys(gys: &str) -> bool {\n\n // GYS if contains a whitespace-wrapped pipe\n\n if gys.contains(\" | \") {\n\n return true;\n\n }\n\n\n\n // GYS if starting or ending with an empty step\n\n if gys.starts_with('|') {\n\n return true;\n\n }\n", "file_path": "src/context/gys.rs", "rank": 53, "score": 32544.374331534626 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::File;\n\n\n\nuse log::info;\n\n\n\nuse crate::operator_construction::*;\n\nuse crate::CoordinateTuple;\n\n\n\n/// The central administration of the transformation functionality\n\n// #[derive(Default)]\n\npub struct Context {\n\n pub stack: Vec<Vec<CoordinateTuple>>,\n\n minions: Vec<Context>,\n\n user_defined_operators: HashMap<String, OperatorConstructor>,\n\n user_defined_macros: HashMap<String, String>,\n\n operations: Vec<Operator>,\n\n last_failing_operation_definition: String,\n\n last_failing_operation: String,\n\n cause: String,\n\n pile: Result<File, std::io::Error>,\n", "file_path": "src/context/mod.rs", "rank": 54, "score": 32544.05808402893 }, { "content": " for i in 0..operands.len() {\n\n let delta = match inv_metric {\n\n 0 => roundtrip[i].default_ellps_dist(&results[i]),\n\n 2 => roundtrip[i].hypot2(&results[i]),\n\n _ => roundtrip[i].hypot3(&results[i]),\n\n };\n\n if delta < inv_delta {\n\n continue;\n\n }\n\n println!(\n\n \"Failure in inverse test[{}]: delta = {:.4e} (expected delta < {:e})\",\n\n i, delta, inv_delta\n\n );\n\n println!(\" got {:?}\", results[i]);\n\n println!(\" expected {:?}\", roundtrip[i]);\n\n return false;\n\n }\n\n true\n\n }\n\n}\n", "file_path": "src/context/test.rs", "rank": 55, "score": 32542.387300057042 }, { "content": " info!(\"Creating new Context\");\n\n let mut ctx = Context::_new();\n\n for _ in 0..Self::CHUNKS {\n\n 
ctx.minions.push(Context::_new());\n\n }\n\n ctx\n\n }\n\n\n\n fn _new() -> Context {\n\n let mut pile_path = dirs::data_local_dir().unwrap_or_default();\n\n pile_path.push(\"geodesy\");\n\n pile_path.push(\"assets.pile\");\n\n let pile_name = pile_path.clone();\n\n let thepile = File::open(pile_path);\n\n if thepile.is_err() {\n\n info!(\"Could not find asset pile {:?}\", pile_name);\n\n } else {\n\n info!(\"Found asset pile {:?}\", pile_name);\n\n }\n\n\n", "file_path": "src/context/mod.rs", "rank": 56, "score": 32541.036570335145 }, { "content": " let inv = ctx.operation(\"[inv]\");\n\n assert!(inv.is_err());\n\n\n\n // Test that an inv-operator actually instantiates\n\n let invcart = ctx.operation(\"[cart inv]\");\n\n assert!(invcart.is_ok());\n\n\n\n // Check that the GYS syntactical indicators trigger\n\n assert!(Context::is_gys(\"[cart]\"));\n\n assert!(Context::is_gys(\"|cart|\"));\n\n assert!(Context::is_gys(\"|cart\"));\n\n assert!(Context::is_gys(\"cart|\"));\n\n assert!(!Context::is_gys(\"[cart\"));\n\n assert!(!Context::is_gys(\"cart]\"));\n\n\n\n // Now a more complete test of YAML vs. GYS\n\n\n\n // A pipeline in YAML\n\n let pipeline = \"ed50_etrs89: {\n\n # with cucumbers\n", "file_path": "src/context/gys.rs", "rank": 57, "score": 32538.203552072846 }, { "content": " // Strip off superfluous GYS indicators\n\n let gys = gys.trim_matches('|');\n\n let gys = gys.trim_matches('[');\n\n let gys = gys.trim_matches(']');\n\n\n\n let mut yaml = String::new();\n\n let mut indent = \"\";\n\n let steps: Vec<&str> = gys.split('|').collect();\n\n let nsteps = steps.len();\n\n if nsteps > 1 {\n\n yaml += \"pipeline_from_gys: {\\n steps: [\\n\";\n\n indent = \" \";\n\n }\n\n for step in steps {\n\n // Strip inline comments\n\n let strip = step\n\n .find('#')\n\n .map(|index| &step[..index])\n\n .unwrap_or(step)\n\n .trim()\n", "file_path": "src/context/gys.rs", "rank": 58, "score": 32536.96475174455 }, { "content": " if gys.ends_with('|') {\n\n return true;\n\n }\n\n\n\n // GYS if wrapped in square brackets: [gys]. 
Note that\n\n // we cannot merge these two ifs without damaging the\n\n // following test for \"no trailing colon\"\n\n if gys.starts_with('[') {\n\n return gys.ends_with(']');\n\n }\n\n if gys.ends_with(']') {\n\n return gys.starts_with('[');\n\n }\n\n\n\n // GYS if no trailing colon on first token\n\n if !gys\n\n .split_whitespace()\n\n .next()\n\n .unwrap_or_default()\n\n .ends_with(':')\n", "file_path": "src/context/gys.rs", "rank": 59, "score": 32536.69549994993 }, { "content": "\n\n // dx and dy are straightforward\n\n args.insert(\"dx\", \"11\");\n\n args.insert(\"dy\", \"22\");\n\n\n\n // But we hide dz behind two levels of indirection\n\n args.insert(\"dz\", \"^ddz\");\n\n args.insert(\"ddz\", \"^dddz\");\n\n args.insert(\"dddz\", \"33\");\n\n\n\n assert_eq!(\"00\", args.value(\"\", \"00\"));\n\n assert_eq!(\"11\", args.value(\"dx\", \"\"));\n\n assert_eq!(\"22\", args.value(\"dy\", \"\"));\n\n assert_eq!(args.used.len(), 2);\n\n\n\n args.insert(\"dx\", \"^dx\");\n\n assert_eq!(\"11\", args.value(\"dx\", \"\"));\n\n args.insert(\"dx\", \"^^^^dx\");\n\n assert_eq!(\"11\", args.value(\"dx\", \"\"));\n\n\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 60, "score": 31767.771062481872 }, { "content": " Err(GeodesyError::Syntax(format!(\n\n \"Numeric value expected for '{}.{}' - got [{}: {}].\",\n\n self.name, key, key, arg\n\n )))\n\n }\n\n\n\n // If key is given, and value != false: true; else: false\n\n pub fn flag(&mut self, key: &str) -> bool {\n\n self.value(key, \"false\") != \"false\"\n\n }\n\n}\n\n\n\n//----------------------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn operator_args() {\n\n use super::*;\n\n let mut args = OperatorArgs::new();\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 61, "score": 31766.64359930364 }, { "content": "use std::collections::HashMap;\n\nuse yaml_rust::{Yaml, YamlEmitter, YamlLoader};\n\n\n\nuse crate::GeodesyError;\n\n\n\n#[derive(Debug, Clone, Default, PartialEq)]\n\npub struct OperatorArgs {\n\n pub name: String,\n\n pub args: HashMap<String, String>,\n\n pub used: HashMap<String, String>,\n\n pub all_used: HashMap<String, String>,\n\n}\n\n\n\nimpl OperatorArgs {\n\n #[must_use]\n\n pub fn new() -> OperatorArgs {\n\n let mut op = OperatorArgs {\n\n name: String::new(),\n\n args: HashMap::new(),\n\n used: HashMap::new(),\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 62, "score": 31765.771202343618 }, { "content": " all_used: HashMap::new(),\n\n };\n\n\n\n // Global defaults\n\n op.insert(\"ellps\", \"GRS80\");\n\n op\n\n }\n\n\n\n /// Provides an `OperatorArgs` object, populated by the defaults from an existing\n\n /// `OperatorArgs`, combined with a new object definition.\n\n ///\n\n /// This is the mechanism for inheritance of global args in pipelines.\n\n #[must_use]\n\n pub fn spawn(&self, definition: &str) -> OperatorArgs {\n\n let mut oa = OperatorArgs::new();\n\n for (arg, val) in &self.args {\n\n if arg.starts_with('_') || (arg == \"inv\") {\n\n continue;\n\n }\n\n oa.insert(arg, val);\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 63, "score": 31765.70772114439 }, { "content": " if arg != default {\n\n self.used.insert(key.to_string(), arg.to_string());\n\n }\n\n arg\n\n }\n\n\n\n pub fn numeric_value(&mut self, key: &str, default: f64) -> Result<f64, GeodesyError> {\n\n let arg = self.value(key, \"\");\n\n\n\n // key not given: return default\n\n if arg.is_empty() {\n\n 
return Ok(default);\n\n }\n\n\n\n // key given, value numeric: return value\n\n if let Ok(v) = arg.parse::<f64>() {\n\n return Ok(v);\n\n }\n\n\n\n // key given, but not numeric: return error string\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 64, "score": 31764.616198243213 }, { "content": " assert_eq!(\"33\", args.value(\"dz\", \"\"));\n\n assert_eq!(33.0, args.numeric_value(\"dz\", 42.0).unwrap());\n\n assert_eq!(42.0, args.numeric_value(\"bar\", 42.0).unwrap());\n\n\n\n assert_eq!(args.used.len(), 3);\n\n assert_eq!(args.all_used.len(), 5);\n\n\n\n // println!(\"used: {:?}\", &args.used);\n\n // println!(\"all_used: {:?}\", &args.all_used);\n\n\n\n assert_eq!(\"\", args.value(\"abcdefg\", \"\"));\n\n\n\n // Finally one for testing 'err' returned for non-numerics\n\n args.insert(\"ds\", \"foo\");\n\n assert!(args.numeric_value(\"ds\", 0.0).is_err());\n\n // if let Err(msg) = args.numeric_value(\"bar\", \"ds\", 0.0) {\n\n // println!(\"**** err: {}\", msg)\n\n // }\n\n }\n\n\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 65, "score": 31762.984137825344 }, { "content": " let arg = match arg {\n\n Some(arg) => arg.to_string(),\n\n None => return default.to_string(),\n\n };\n\n // all_used includes intermediate steps in indirect definitions\n\n self.all_used.insert(key.to_string(), arg.to_string());\n\n\n\n if let Some(arg) = arg.strip_prefix('^') {\n\n // Default if looking for an out-of-scope arg.\n\n if self.args.get(arg).is_none() {\n\n return default.to_string();\n\n }\n\n return self.value_search(arg, default, recursions + 1);\n\n }\n\n arg\n\n }\n\n\n\n /// Return the arg for a given key; maintain usage info.\n\n pub fn value(&mut self, key: &str, default: &str) -> String {\n\n let arg = self.value_search(key, default, 0);\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 66, "score": 31762.898847031887 }, { "content": " #[test]\n\n fn preparing_args() {\n\n use super::*;\n\n let mut args = OperatorArgs::new();\n\n\n\n // Explicitly stating the name of the pipeline\n\n let txt = std::fs::read_to_string(\"tests/tests.yml\").unwrap_or_default();\n\n assert!(args.populate(&txt, \"a_pipeline_for_testing\"));\n\n assert_eq!(&args.value(\"_step_0\", \" \")[0..4], \"cart\");\n\n\n\n // Let populate() figure out what we want\n\n let mut args = OperatorArgs::new();\n\n assert!(args.populate(&txt, \"\"));\n\n assert_eq!(&args.value(\"x\", \"5\"), \"3\");\n\n\n\n // When op is not a pipeline\n\n let mut args = OperatorArgs::new();\n\n assert!(args.populate(\"cart: {ellps: intl}\", \"\"));\n\n assert_eq!(args.name, \"cart\");\n\n assert_eq!(&args.value(\"ellps\", \"\"), \"intl\");\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 67, "score": 31762.157809152228 }, { "content": " self.name = name.to_string();\n\n }\n\n\n\n pub fn insert(&mut self, key: &str, value: &str) {\n\n // Self-referencing keys (x: ^x), are no-ops.\n\n // The syntax \"x: ^x\" makes sense in a nested command (or a pipeline\n\n // step), but the meaning is \"use the value already in the hashmap\".\n\n // Actually inserting it will lead to overwriting of the actual\n\n // value-of-interest, and to infinite recursion on lookup.\n\n if key != value.trim_start_matches('^') {\n\n self.args.insert(key.to_string(), value.to_string());\n\n }\n\n }\n\n\n\n // Recursive workhorse, tracing indirect definitions for ::value\n\n fn value_search(&mut self, key: &str, default: &str, recursions: usize) -> String {\n\n if recursions > 100 {\n\n return 
default.to_string();\n\n }\n\n let arg = self.args.get(key);\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 68, "score": 31761.655263033772 }, { "content": " ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use geodesy::operator_construction::OperatorArgs;\n\n ///\n\n /// let mut args = OperatorArgs::new();\n\n /// let txt = std::fs::read_to_string(\"tests/tests.yml\").unwrap_or_default();\n\n ///\n\n /// assert!(args.populate(&txt, \"a_pipeline_for_testing\"));\n\n /// assert_eq!(&args.value(\"_step_0\", \"\")[0..4], \"cart\");\n\n /// ```\n\n ///\n\n ///\n\n pub fn populate(&mut self, definition: &str, which: &str) -> bool {\n\n // First, we copy the full text in the args, to enable recursive definitions\n\n self.insert(\"_definition\", definition);\n\n\n\n // Read the entire YAML-document and try to locate the `which` document\n\n let docs = YamlLoader::load_from_str(definition);\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 69, "score": 31760.42758202316 }, { "content": "\n\n // Inheritance\n\n let mut moreargs = args.spawn(\"foo: {bar: baz}\");\n\n assert_eq!(&moreargs.name, \"foo\");\n\n assert_eq!(&moreargs.value(\"ellps\", \"\"), \"intl\");\n\n assert_eq!(&moreargs.value(\"bar\", \"\"), \"baz\");\n\n }\n\n\n\n #[test]\n\n fn bad_value() {\n\n use super::*;\n\n let v = Yaml::BadValue;\n\n assert!(v.is_badvalue());\n\n let v = Yaml::Null;\n\n assert!(v.is_null());\n\n let v = Yaml::Integer(77);\n\n assert!(v == Yaml::Integer(77));\n\n }\n\n}\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 70, "score": 31755.9650816498 }, { "content": " let mut step_definition = String::new();\n\n let mut emitter = YamlEmitter::new(&mut step_definition);\n\n emitter.dump(step).unwrap();\n\n\n\n // Remove the initial doc separator \"---\\n\"\n\n let stripped_definition = step_definition.trim_start_matches(\"---\\n\");\n\n let step_key = format!(\"_step_{}\", index);\n\n self.insert(&step_key, stripped_definition);\n\n }\n\n\n\n true\n\n }\n\n\n\n fn badvalue(&mut self, cause: &str) -> bool {\n\n self.name = \"badvalue\".to_string();\n\n self.insert(\"cause\", cause);\n\n false\n\n }\n\n\n\n pub fn name(&mut self, name: &str) {\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 71, "score": 31755.346034302424 }, { "content": " let mut main_entry_name = which;\n\n if main_entry_name.is_empty() {\n\n for (arg, val) in main {\n\n if val.is_badvalue() {\n\n return self.badvalue(\"Cannot parse definition\");\n\n }\n\n let name = &arg.as_str().unwrap();\n\n if name.starts_with('_') {\n\n continue;\n\n }\n\n if !main_entry_name.is_empty() {\n\n return self.badvalue(\"Too many items in definition root\");\n\n }\n\n main_entry_name = name;\n\n }\n\n }\n\n self.name = main_entry_name.to_string();\n\n\n\n // Grab the sub-tree defining the 'main_entry_name'\n\n let main_entry = &docs[index][main_entry_name];\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 72, "score": 31751.94903722092 }, { "content": " }\n\n oa.populate(definition, \"\");\n\n oa\n\n }\n\n\n\n ///\n\n /// Insert PROJ style operator definition arguments, converted from a YAML\n\n /// setup string.\n\n ///\n\n /// If `which` is set to the empty string, we first look for a pipeline\n\n /// definition. 
If that is not found, and there is only one list element\n\n /// in the setup string, we assert that this is the element to handle.\n\n ///\n\n /// If `which` is not the empty string, we look for a list element with\n\n /// that name, and handle that either as a pipeline definition, or as a\n\n /// single operator definition.\n\n ///\n\n /// # Returns\n\n ///\n\n /// `true` on success, `false` on sseccus.\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 73, "score": 31751.74238110045 }, { "content": " }\n\n }\n\n\n\n // Try to locate the step definitions, to determine whether we\n\n // are handling a pipeline or a plain operator definition\n\n let steps = main_entry[\"steps\"].as_vec();\n\n\n\n // Not a pipeline? Just insert the operator args and return\n\n if steps.is_none() {\n\n let args = main_entry.as_hash();\n\n if args.is_none() {\n\n return self.badvalue(\"Cannot read args\");\n\n }\n\n let args = args.unwrap();\n\n for (arg, val) in args {\n\n let thearg = arg.as_str().unwrap_or(\"\");\n\n if thearg.is_empty() {\n\n continue;\n\n }\n\n let theval = match val {\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 74, "score": 31751.202614408656 }, { "content": " if main_entry.is_badvalue() {\n\n return self.badvalue(\"Cannot locate definition\");\n\n }\n\n\n\n // Loop over all globals and create the corresponding OperatorArgs entries\n\n if let Some(globals) = main_entry[\"globals\"].as_hash() {\n\n for (arg, val) in globals {\n\n let thearg = arg.as_str().unwrap();\n\n if thearg != \"inv\" {\n\n let theval = match val {\n\n Yaml::Integer(val) => val.to_string(),\n\n Yaml::Real(val) => val.as_str().to_string(),\n\n Yaml::String(val) => val.to_string(),\n\n Yaml::Boolean(val) => val.to_string(),\n\n _ => \"\".to_string(),\n\n };\n\n if !theval.is_empty() {\n\n self.insert(thearg, &theval);\n\n }\n\n }\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 75, "score": 31747.42364403856 }, { "content": " if docs.is_err() {\n\n return false;\n\n }\n\n let docs = docs.unwrap();\n\n let mut index = Some(0_usize);\n\n\n\n if !which.is_empty() {\n\n index = docs.iter().position(|doc| !doc[which].is_badvalue());\n\n if index.is_none() {\n\n return self.badvalue(\"Cannot locate definition\");\n\n }\n\n }\n\n let index = index.unwrap();\n\n let main = &docs[index].as_hash();\n\n if main.is_none() {\n\n return self.badvalue(\"Cannot parse definition\");\n\n }\n\n let main = main.unwrap();\n\n\n\n // Is it conforming?\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 76, "score": 31746.295779084663 }, { "content": " Yaml::Integer(val) => val.to_string(),\n\n Yaml::Real(val) => val.as_str().to_string(),\n\n Yaml::String(val) => val.to_string(),\n\n Yaml::Boolean(val) => val.to_string(),\n\n _ => \"\".to_string(),\n\n };\n\n if !theval.is_empty() {\n\n self.insert(thearg, &theval);\n\n }\n\n }\n\n return true;\n\n }\n\n\n\n // It's a pipeline - insert the number of steps into the argument list.\n\n let steps = steps.unwrap();\n\n self.insert(\"_nsteps\", &steps.len().to_string());\n\n\n\n // Insert each step into the argument list, formatted as YAML.\n\n for (index, step) in steps.iter().enumerate() {\n\n // Write the step definition to a new string\n", "file_path": "src/operator_construction/operatorargs.rs", "rank": 77, "score": 31746.01909917239 }, { "content": "use std::path::PathBuf;\n\n\n\nuse crate::operator_construction::Operator;\n\nuse crate::operator_construction::OperatorConstructor;\n\nuse 
crate::Context;\n\nuse crate::GeodesyError;\n\n\n\nimpl Context {\n\n pub fn register_operator(&mut self, name: &str, constructor: OperatorConstructor) {\n\n self.user_defined_operators\n\n .insert(name.to_string(), constructor);\n\n }\n\n\n\n pub(crate) fn locate_operator(&mut self, name: &str) -> Option<&OperatorConstructor> {\n\n self.user_defined_operators.get(name)\n\n }\n\n\n\n #[must_use]\n\n pub fn register_macro(&mut self, name: &str, definition: &str) -> bool {\n\n // Registering a macro under the same name as its definition name\n", "file_path": "src/context/user_defined.rs", "rank": 78, "score": 30987.268989965658 }, { "content": " pub fn operation(&mut self, definition: &str) -> Result<usize, GeodesyError> {\n\n self.last_failing_operation_definition = definition.to_string();\n\n self.last_failing_operation.clear();\n\n self.cause.clear();\n\n let op = Operator::new(definition, self)?;\n\n let index = self.operations.len();\n\n self.operations.push(op);\n\n Ok(index)\n\n }\n\n\n\n /// Get definition string from the assets in the shared assets directory\n\n /// ($HOME/share or whatever passes for data_local_dir on the platform)\n\n pub fn get_shared_asset(branch: &str, name: &str, ext: &str) -> Option<String> {\n\n if let Some(mut dir) = dirs::data_local_dir() {\n\n dir.push(\"geodesy\");\n\n return Context::get_asset(&mut dir, branch, name, ext);\n\n }\n\n None\n\n }\n\n\n", "file_path": "src/context/user_defined.rs", "rank": 79, "score": 30977.58629751383 }, { "content": " // leads to infinite nesting - so we prohibit that\n\n let illegal_start = name.to_string() + \":\";\n\n if definition.trim_start().starts_with(&illegal_start) {\n\n return false;\n\n }\n\n\n\n if self\n\n .user_defined_macros\n\n .insert(name.to_string(), definition.to_string())\n\n .is_some()\n\n {\n\n return false;\n\n }\n\n true\n\n }\n\n\n\n pub(crate) fn locate_macro(&mut self, name: &str) -> Option<&String> {\n\n self.user_defined_macros.get(name)\n\n }\n\n\n", "file_path": "src/context/user_defined.rs", "rank": 80, "score": 30974.229920095582 }, { "content": " /// Get definition string from the assets in the current directory\n\n pub fn get_private_asset(branch: &str, name: &str, ext: &str) -> Option<String> {\n\n let mut dir = PathBuf::from(\".\");\n\n Context::get_asset(&mut dir, branch, name, ext)\n\n }\n\n\n\n /// Workhorse for `get_shared_asset` and `get_private_asset`\n\n fn get_asset(dir: &mut PathBuf, branch: &str, name: &str, ext: &str) -> Option<String> {\n\n // This is the base directory we look in\n\n //dir.push(\"geodesy\");\n\n\n\n // This is the filename we're looking for\n\n let mut filename = name.to_string();\n\n filename += ext;\n\n\n\n // We first look for standalone files that match\n\n let mut fullpath = dir.clone();\n\n fullpath.push(\"assets\");\n\n fullpath.push(branch);\n\n fullpath.push(filename.clone());\n", "file_path": "src/context/user_defined.rs", "rank": 81, "score": 30973.803332739208 }, { "content": " if let Ok(definition) = std::fs::read_to_string(fullpath) {\n\n return Some(definition);\n\n }\n\n\n\n // If not found as a freestanding file, try assets.zip\n\n use std::io::prelude::*;\n\n dir.push(\"assets.zip\");\n\n // Open the physical zip file\n\n if let Ok(zipfile) = std::fs::File::open(dir) {\n\n // Hand it over to the zip archive reader\n\n if let Ok(mut archive) = zip::ZipArchive::new(zipfile) {\n\n // Is there a file with the name we're looking for in the zip archive?\n\n let mut full_filename = String::from(\"assets/\");\n\n full_filename += branch;\n\n 
full_filename += \"/\";\n\n full_filename += &filename;\n\n if let Ok(mut file) = archive.by_name(&full_filename) {\n\n let mut definition = String::new();\n\n if file.read_to_string(&mut definition).is_ok() {\n\n return Some(definition);\n\n }\n\n }\n\n }\n\n }\n\n None\n\n }\n\n}\n", "file_path": "src/context/user_defined.rs", "rank": 82, "score": 30971.756369053437 }, { "content": "#[allow(clippy::float_cmp)]\n\nfn descriptor(desc: &str) -> Option<CoordinateOrderDescriptor> {\n\n let mut post = [0_usize, 1, 2, 3];\n\n let mut mult = [1_f64, 1., 1., 1.];\n\n if desc == \"pass\" {\n\n return Some(CoordinateOrderDescriptor {\n\n post,\n\n mult,\n\n noop: true,\n\n });\n\n }\n\n\n\n if desc.len() != 4 && desc.len() != 8 {\n\n return None;\n\n }\n\n\n\n let mut torad = 1_f64;\n\n if desc.len() == 8 {\n\n let good_angular = desc.ends_with(\"_deg\")\n\n || desc.ends_with(\"_gon\")\n\n || desc.ends_with(\"_rad\")\n", "file_path": "src/operator/adapt.rs", "rank": 83, "score": 30834.03294216388 }, { "content": "# Geodesy\n\n\n\n*Rust Geodesy* (RG), is a platform for experiments with geodetic software, transformations, and standards. *RG* vaguely resembles the [PROJ](https://proj.org) transformation system, and was built in part on the basis of experiments with alternative data flow models for PROJ. The actual transformation functionality of *RG* is, however, minimal: At time of writing, it includes just a few low level operations, including:\n\n\n\n- The three, six, seven, and fourteen-parameter versions of the *Helmert transformation*\n\n- Helmert's companion, the *cartesian/geographic* coordinate conversion\n\n- The full and abridged versions of the *Molodensky transformation*\n\n- Three widely used conformal projections: The *Mercator*, the *Transverse Mercator*, and the *Lambert Conformal Conic* projection\n\n- The *Adapt* operator, which mediates between various conventions for coordinate units and order\n\n\n\nWhile this is sufficient to test the architecture, it is very far from enough to get through a geodesist's daily work.\n\n\n\nHence, viewing *RG* as *another PROJ*, or *PROJ [RiiR](https://acronyms.thefreedictionary.com/RIIR)*, will lead to bad disappointment. At best, you may catch a weak mirage of a *potential* [shape of jazz to come](https://en.wikipedia.org/wiki/The_Shape_of_Jazz_to_Come) for the PROJ internal dataflow.\n\n\n\nBut dataflow experimentation is just one aspect of *RG*. Overall, the aims are fourfold:\n\n\n\n1. Support experiments for evolution of geodetic standards.\n\n2. Support development of geodetic transformations.\n\n3. Hence, provide easy access to a number of basic geodetic operations, not limited to coordinate operations.\n\n4. Support experiments with data flow and alternative abstractions. 
Mostly as a tool for aims (1, 2, 3)\n\n\n\nAll four aims are guided by a wish to amend explicitly identified shortcomings in the existing geodetic system landscape.\n\n\n", "file_path": "README.md", "rank": 84, "score": 22356.971658846025 }, { "content": "## Documentation\n\n\n\nThe documentation is currently limited, but take a look at:\n\n\n\n- The coordinate operator [documentation](/ruminations/002-rumination.md)\n\n- The [description of `kp`](/ruminations/003-rumination.md), the *Rust Geodesy* coordinate processing program\n\n- This essayistic [rumination](/ruminations/000-rumination.md), outlining the overall philosophy and architecture of *Rust Geodesy*.\n\n- The API documentation at [Docs.rs](https://docs.rs/geodesy)\n\n- The [examples](examples)\n\n- The tests embedded in the [source code](/src/)\n\n\n\n## License\n\n\n\n*Rust Geodesy*: Copyright 2020, 2021 by Thomas Knudsen <knudsen.thomas@gmail.com>.\n\n\n\nLicensed under either of\n\n\n\n- Apache License, Version 2.0\n\n ([LICENSE-APACHE](LICENSE-APACHE) or [here](http://www.apache.org/licenses/LICENSE-2.0))\n\n- MIT license\n\n ([LICENSE-MIT](LICENSE-MIT) or [here](http://opensource.org/licenses/MIT))\n\n\n\nat your option.\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 85, "score": 22349.445385499894 }, { "content": "Rather, geodetic reference frames are empirical constructions, realised using datum specific rules for survey and adjustment. Hence, coordinate tuples subjected to a given similarity transform, do not magically become realised using the survey rules of the target datum. But they gain a degree of interoperability with coordinate tuples from the target: The transformed (aligned) values represent our best knowledge about *what coordinates we would obtain,* if we re-surveyed the same physical point, using the survey rules of the target datum.\n\n\n\n**Warning:**\n\nTwo different conventions are common in Helmert transformations involving rotations. In some cases the rotations define a rotation of the reference frame. This is called the \"coordinate frame\" convention (EPSG methods 1032 and 9607). In other cases, the rotations define a rotation of the vector from the origin to the position indicated by the coordinate tuple. This is called the \"position vector\" convention (EPSG methods 1033 and 9606).\n\n\n\nBoth conventions are common, and trivially converted between as they differ by sign only. To reduce this great source of confusion, the `convention` parameter must be set to either `position vector` or `coordinate_frame` whenever the operation involved rotations. In all other cases, all parameters are optional.\n\n\n\n| Parameter | Description |\n\n|-----------|-------------|\n\n| `inv` | Inverse operation: output-to-input datum. Mathematically, a sign reversion of all parameters. 
|\n\n| `x` | offset along the first axis |\n\n| `y` | offset along the second axis |\n\n| `z` | offset along the third axis |\n\n| `rx` | rotation around the first axis |\n\n| `ry` | rotation around the second axis |\n\n| `rz` | rotation around the third axis |\n\n| `s` | scaling factor given in parts-per-million |\n\n| `dx` | rate-of-change for offset along the first axis |\n\n| `dy` | rate-of-change for offset along the second axis |\n\n| `dz` | rate-of-change for offset along the third axis |\n\n| `drx` | rate-of-change for rotation around the first axis |\n\n| `dry` | rate-of-change for rotation around the second axis |\n\n| `drz` | rate-of-change for rotation around the third axis |\n\n| `ds` | rate-of-change for scaling factor |\n\n| `t_epoch` | origin of the time evolution |\n\n| `t_obs` | fixed value for observation time. Ignore fourth coordinate |\n\n| `exact` | Do not use small-angle approximations when constructing the rotation matrix |\n\n| `convention` | Either `position_vector` or `coordinate_frame`, as described above. Mandatory if any of the rotation parameters are used. |\n\n\n", "file_path": "ruminations/002-rumination.md", "rank": 86, "score": 21627.740870009944 }, { "content": "### Going ellipsoidal\n\n\n\nMuch functionality related to geometrical geodesy can be associated with the ellipsoid model in use, and hence, in a software context, be modelled as methods on the ellipsoid object.\n\n\n\nIn RG, ellipsoids are represented by the `Ellipsoid` data type:\n\n\n\n```rust\n\npub struct Ellipsoid {\n\n a: f64,\n\n ay: f64,\n\n f: f64,\n\n}\n\n```\n\n\n\nIn most cases, the ellipsoid in use will be rotationally symmetrical, but RG anticipates the use of triaxial ellipsoids. As can be seen, the `Ellipsoid` data type is highly restricted, containing only the bare essentials for defining the ellipsoidal size and shape. All other items are implemented as methods:\n\n\n\n```rust\n\nlet GRS80 = geodesy::Ellipsoid::named(\"GRS80\");\n\n\n\nlet E = GRS80.linear_eccentricity();\n\nlet b = GRS80.semiminor_axis();\n\nlet c = GRS80.polar_radius_of_curvature();\n\nlet n = GRS80.third_flattening();\n\nlet es = GRS80.eccentricity_squared();\n\n```\n\n\n\nThe functionality also includes ancillary latitudes, and computation of geodesics on the ellipsoid - see [example 01](../examples/01-geometrical-geodesy.rs) for details.\n\n\n\n### Recent additions\n\n\n\n#### GYS: The Geodetic YAML Shorthand\n\n\n\nAs YAML is somewhat verbose, GYS, the \"Geodetic YAML Shorthand\" was introduced with RG version 0.6.0. GYS can be discerned from YAML by not containing any curly braces, using pipe symbols (`|`) to indicate pipeline steps, and in general leaving out syntactical elements which are superfluous given that we know the context is RG.\n\n\n\nInternally, GYS is transformed to YAML by a simple mechanical rule set, so YAML is still the cornerstone of the RG descriptor system. The two pipelines shown below demonstrate the essentials of speaking GYS:\n\n\n\n##### **A pipeline in YAML**\n\n\n\n```yaml\n\ned50_etrs89: {\n\n steps: [\n\n cart: {ellps: intl},\n\n helmert: {x: -87, y: -96, z: -120},\n\n cart: {inv: true, ellps: GRS80}\n\n ]\n\n}\n\n```\n\n\n", "file_path": "ruminations/000-rumination.md", "rank": 87, "score": 21616.29073629774 }, { "content": "### Redefining the world\n\n\n\nBeing intended for authoring of geodetic functionality, customization is a very important aspect of the RG design. 
Hence, RG allows temporal overshadowing of built in functionality by registering user defined macros and operators. This is treated in detail in examples [02 (macros)](/examples/02-user_defined_macros.rs) and [03 (operators)](/examples/03-user_defined_operators.rs). Here, let's just take a minimal look at the workflow, which can be described briefly as *define, register, instantiate, and use:*\n\n\n\nFirst a macro:\n\n\n\n```rust\n\n// Define a macro, using hat notation (^) for the macro parameters\n\nlet macro_text = \"pipeline: {\n\n steps: [\n\n cart: {ellps: ^left},\n\n helmert: {x: ^x, y: ^y, z: ^z},\n\n cart: {inv: true, ellps: ^right}\n\n ]\n\n }\";\n\n\n\n// Register the macro, under the name \"geohelmert\"\n\nctx.register_macro(\"geohelmert\", macro_text);\n\n\n\n// Instantiate the geohelmert macro with replacement values\n\n// for the parameters left, right, x, y, z\n\ned50_wgs84 = ctx.operation(\"geohelmert: {\n\n left: intl,\n\n right: GRS80,\n\n x: -87, y: -96, z: -120\n\n}\").unwrap();\n\n\n\n// ... and use:\n\nctx.fwd(ed50_wgs84, data);\n\n```\n\n\n\nThen a user defined operator:\n\n\n\n```rust\n\nuse geodesy::operator_construction::*;\n\n\n\n// See examples/03-user-defined-operators.rs for implementation details\n\npub struct MyNewOperator {\n\n args: OperatorArgs,\n\n foo: f64,\n\n ...\n\n}\n\n\n\n// Register\n\nctx.register_operator(\"my_new_operator\", MyNewOperator::operator);\n\n\n\n// Instantiate\n\nlet my_new_operator_with_foo_as_42 = ctx.operation(\n\n \"my_new_operator: {foo: 42}\"\n\n).unwrap();\n\n\n\n// ... and use:\n\nctx.fwd(my_new_operator_with_foo_as_42, data);\n\n```\n\n\n\nEssentially, once they are registered, macros and user defined operators work exactly like the built-ins. Also, they overshadow the built-in names, so testing alternative implementations of built-in operators is as easy as registering a new operator with the same name as a built-in.\n\n\n", "file_path": "ruminations/000-rumination.md", "rank": 88, "score": 21615.15693072077 }, { "content": "(see also [ellps implied](#note-ellps-implied) in the Notes section).\n\n\n\nSo essentially, PROJ and RG uses identical operator parametrisations, but RG, being 40 years younger than PROJ, is able to leverage YAML, an already 20 years old generic, JSON compatible, data representation format. PROJ, on the other hand, was born 20 years prior to YAML, and had to implement its own domain specific format.\n\n\n\nNote, however, that contrary to PROJ, when we instantiate an operator in RG, we do not actually get an `Operator` object back, but just a handle to an `Operator`, living its entire life embedded inside the `Context`.\n\nAnd while the `Context` is mutable, the `Operator`, once created, is *immutable*.\n\n\n\nThis makes `Operator`s thread-sharable, so the `Context` will eventually (although still not implemented), be able to automatically parallelize large transformation jobs, eliminating some of the need for separate thread handling at the application program level.\n\n\n\nNote, by the way, that the method for instantiating an `Operator` is called `Context::opera`**`tion`**`(...)`, not `Context::opera`**`tor`**`(...)`: Conceptually, an **operation** is an *instantiation of an operator*, i.e. an operator with parameters fixed, and ready for work. An **operator** on the other hand, is formally a datatype, i.e. 
just a description of a memory layout of the parameters.\n\n\n\nHence, the `operation(...)` method returns a handle to an **operation**, which can be used to **operate** on a set of **operands**. It's op...s all the way down!\n\n\n\n---\n\n\n\n```rust\n\n// [3] Coordinates of some Scandinavian capitals\n\nlet copenhagen = Coord::geo(55., 12., 0., 0.);\n\nlet stockholm = Coord::geo(59., 18., 0., 0.);\n\n\n\n// [4] We put the coordinates into an array\n\nlet mut data = [copenhagen, stockholm];\n\n```\n\n\n", "file_path": "ruminations/000-rumination.md", "rank": 89, "score": 21613.432352815205 }, { "content": "(See also [Idiomatic Rust](#note-idiomatic-rust) in the Notes section)\n\n\n\nAt comment `[0]`, we start by renaming the library functionality for coordinate handling, from `geodesy::CoordinateTuple` to `Coord`. Since coordinates are at the heart of what we're doing, it should have a brief and clear name. Then why give it such a long name by design, you may wonder - well, `CoordinateTuple` is the ISO-19111 standard designation of what we colloquially would call *the coordinates*.\n\n\n\n---\n\n\n\n```rust\n\n// [1] Build some context\n\nlet mut ctx = geodesy::Context::new();\n\n```\n\n\n\nAt comment `[1]` we instantiate a `Context`, which should come as no surprise for anyone having used [PROJ](https:://proj.org) within the last fifteen years. The `Context` provides the interface to the messy world external to RG (files, threads, communication), and in general centralizes all the *mutable state* of the system.\n\n\n\nAlso, the `Context` is the sole interface between the `RG` transformation functionality and the application program: You may instantiate a transformation object, but the `Context` handles it for you. While you need a separate `Context` for each thread of your program, the `Context` itself is designed to eventually do its work in parallel, using several threads.\n\n\n\n---\n\n\n\n```rust\n\n// [2] Obtain a handle to the utm-operator\n\nlet utm32 = ctx.operation(\"utm: {zone: 32}\").unwrap();\n\n```\n\n\n\nAt comment `[2]`, we use the `operation` method of the `Context` to instantiate an `Operator` (closely corresponding to the `PJ` object in PROJ). The parametrisation of the operator, i.e. the text `utm: {zone: 32}` is expressed in [YAML](https://en.wikipedia.org/wiki/YAML) using parameter naming conventions closely corresponding to those used in PROJ, where the same operator would be described as `proj=utm zone=32`\n", "file_path": "ruminations/000-rumination.md", "rank": 90, "score": 21612.388083290152 }, { "content": "#### Why Rust Geodesy?\n\n\n\nThe motivation for these aims, i.e. the **why** of the project, is the **wish to amend explicitly identified shortcommings** in the existing landscape of geodetic software and standards.\n\n\n\n#### How will it emerge?\n\n\n\nThe development work driven by this motivation is supported by a few basic design principles, the **how** of the project:\n\n\n\n- An architectural scaffolding of four dimensional data flow paths, enabling the constrution of complex operations from simpler elements\n\n- A design philosophy of keeping things flexible by not overspecifying\n\n- A geodetic focus on transformations, i.e. 
relations *between* systems, rather than definition *of* systems\n\n\n\nor in fewer words: *Don't overdo it*.\n\n\n\n### Getting beefy\n\n\n\nBut talking architecture and design philosophy out of thin air is at best counterproductive, so let's start with a brief example, demonstrating the RG idiom for converting geographical coordinates to UTM zone 32 coordinates (for the corresponding operation using the RG coordinate processing command line program `kp`, see [Rumination 003](/ruminations/003-rumination.md)).\n\n\n\n```rust\n\nfn main() {\n\n // [0] Use a brief name for some much used functionality\n\n use geodesy::CoordinateTuple as Coord;\n\n\n\n // [1] Build some context\n\n let mut ctx = geodesy::Context::new();\n\n\n\n // [2] Obtain a handle to the utm-operator\n\n let utm32 = ctx.operation(\"utm: {zone: 32}\").unwrap();\n\n\n\n // [3] Coordinates of some Scandinavian capitals\n\n let copenhagen = Coord::geo(55., 12., 0., 0.);\n\n let stockholm = Coord::geo(59., 18., 0., 0.);\n\n\n\n // [4] We put the coordinates into an array\n\n let mut data = [copenhagen, stockholm];\n\n\n\n // [5] Then do the forward conversion, i.e. geo -> utm\n\n ctx.fwd(utm32, &mut data);\n\n println!({:?}, data);\n\n\n\n // [6] And go back, i.e. utm -> geo\n\n ctx.inv(utm32, &mut data);\n\n Coord::geo_all(&mut data);\n\n println!({:?}, data);\n\n}\n\n```\n\n\n", "file_path": "ruminations/000-rumination.md", "rank": 91, "score": 21611.463664024915 }, { "content": "### Conclusion\n\n\n\nRust Geodesy is a new, still functionally limited, system for experimentation with, and authoring of, new geodetic transformations, concepts, algorithms and standards. Go get it while it's hot!\n\n\n\n### References\n\n\n\n**Reference:** `[Knudsen et al, 2019]`\n\n\n\nThomas Knudsen, Kristian Evers, Geir Arne Hjelle, Guðmundur Valsson, Martin Lidberg and Pasi Häkli: *The Bricks and Mortar for Contemporary Reimplementation of Legacy Nordic Transformations*. Geophysica (2019), 54(1), 107–116.\n\n\n\n### Notes\n\n\n\n#### **Note:** ellps implied\n\n\n\nIn both cases, the use of the GRS80 ellipsoid is implied, but may be expressly stated as `utm: {zone: 32, ellps: GRS80}` resp. `proj=utm zone=32 ellps=GRS80`\n\n\n\n#### **Note:** Idiomatic Rust\n\n\n\nIn production, we would check the return of `ctx.operation(...)`, rather than just `unwrap()`ping:\n\n\n\n```rust\n\nif let Some(utm32) = ctx.operation(\"utm: {zone: 32}\") {\n\n let copenhagen = C::geo(55., 12., 0., 0.);\n\n let stockholm = C::geo(59., 18., 0., 0.);\n\n ...\n\n}\n\n```\n\n\n\nIn C, using PROJ, the demo program would resemble this (untested) snippet:\n\n\n\n```C\n\n#include <proj.h>\n\n\n\n#int main() {\n\n PJ_CONTEXT *C = proj_context_create();\n\n PJ *P = proj_create(C, \"proj=utm zone=32\");\n\n\n\n PJ_COORD copenhagen = proj_coord(12, 55, 0, 0);\n\n PJ_COORD stockholm = proj_coord(18, 59, 0, 0);\n\n\n\n /* Forward */\n\n copenhagen = proj_trans(P, PJ_FWD, copenhagen);\n\n stockholm = proj_trans(P, PJ_FWD, stockholm);\n\n\n\n /* ... 
and back */\n\n copenhagen = proj_trans(P, PJ_INV, copenhagen);\n\n stockholm = proj_trans(P, PJ_INV, stockholm);\n\n\n\n proj_destroy(P);\n\n proj_context_destroy(C);\n\n}\n\n```\n\n\n\n### Document History\n\n\n\nMajor revisions and additions:\n\n\n\n- 2021-08-08: Added a section briefly describing GYS\n\n- 2021-08-26: Extended prologue\n", "file_path": "ruminations/000-rumination.md", "rank": 92, "score": 21610.92096668703 }, { "content": "### Operator `molodensky`\n\n\n\n**Purpose:** Transform between two geodetic datums using the full or abridged Molodensky formulas.\n\n\n\n**Description:**\n\nThe full and abridged Molodensky transformations for 2D and 3D data. Closely related to the 3-parameter Helmert transformation, but operating directly on geographical coordinates.\n\n\n\nThis implementation is based:\n\n\n\n- partially on the PROJ implementation by Kristian Evers,\n\n- partially on OGP Publication 373-7-2: *Geomatics Guidance Note\n\nnumber 7, part 2,* and\n\n- partially on [R.E.Deakin, 2004:](http://www.mygeodesy.id.au/documents/Molodensky%20V2.pdf) *The Standard\n\nand Abridged Molodensky Coordinate Transformation Formulae.*\n\n\n\n**Note:**\n\nWe may use `ellps, da, df`, to parameterize the operator,\n\nbut `left_ellps, right_ellps` is a more likely set of\n\nparameters to come across in real life.\n\n\n\n| Argument | Description |\n\n|----------|-------------|\n\n| `inv` | Inverse operation |\n\n| `ellps: name` | Use ellipsoid `name` for the conversion |\n\n| `dx` | offset along the first axis |\n\n| `dy` | offset along the second axis |\n\n| `dz` | offset along the third axis |\n\n| `da` | change in semimajor axis between the ellipsoids of the source and target datums |\n\n| `df` | change in flattening between the ellipsoids of the source and target datums |\n\n| `left_ellps` | Ellipsoid of the source datum |\n\n| `right_ellps` | Ellipsoid of the target datum |\n\n| `abridged` | Use the abridged version of the transformation, which ignores the source height |\n\n\n\n**Example**:\n\n\n\n```js\n\nmolodensky left_ellps:WGS84 right_ellps:intl dx:84.87 dy:96.49 dz:116.95 abridged:false\n\n```\n\n\n\n**See also:** [PROJ documentation](https://proj.org/operations/transformations/molodensky.html): *Molodensky*. The current implementations differ between PROJ and RG: RG implements some minor numerical improvements and the ability to parameterize using two ellipsoids, rather than differences between them.\n\n\n\n---\n\n\n", "file_path": "ruminations/002-rumination.md", "rank": 93, "score": 21610.534342044746 }, { "content": "### Operator `cart`\n\n\n\n**Purpose:** Convert from geographic coordinates + ellipsoidal height to geocentric cartesian coordinates\n\n\n\n**Description:**\n\n\n\n| Argument | Description |\n\n|----------|-------------|\n\n| `inv` | Inverse operation: cartesian-to-geographic |\n\n| `ellps: name` | Use ellipsoid `name` for the conversion|\n\n\n\n**Example**:\n\n\n\n```sh\n\ngeo | cart ellps:intl | helmert x:-87 y:-96 z:-120 | cart inv ellps:GRS80 | geo inv\n\n```\n\n\n\ncf. [Rumination no. 
001](/ruminations/001-rumination.md) for details about this perennial pipeline.\n\n\n\n---\n\n\n\n### Operator `helmert`\n\n\n\n**Purpose:**\n\nDatum shift using a 3, 6, 7 or 14 parameter similarity transformation.\n\n\n\n**Description:**\n\nIn strictly mathematical terms, the Helmert (or *similarity*) transformation transforms coordinates from their original coordinate system, *the source basis,* to a different system, *the target basis.* The target basis may be translated, rotated and/or scaled with respect to the source basis. The inter-axis angles are, however, fixed (hence, the *similarity* moniker).\n\n\n\nSo mathematically we may think of this as \"*transforming* the coordinates from one well defined basis to another\". But geodetically, it is more correct to think of the operation as *aligning* rather than *transforming,* since geodetic reference frames are very far from the absolute platonic ideals implied in the mathematical idea of bases.\n\n\n", "file_path": "ruminations/002-rumination.md", "rank": 94, "score": 21609.868702328327 }, { "content": "# Ruminations on Rust Geodesy\n\n\n\n## Rumination 001: A few words about an often-seen pipeline\n\n\n\nThomas Knudsen <knudsen.thomas@gmail.com>\n\n\n\n2021-08-11. Last [revision](#document-history) 2021-08-11\n\n\n\n### Abstract\n\n\n\n```js\n\norigin of | cart ellps:intl | helmert x:-87 y:-96 z:-120 | cart inv\n\n```\n\n\n\n\n\n---\n\n\n\n### Prologue\n\n\n\nIn the Rust Geodesy source code, test cases, and documentation, you will often encounter this transformation pipeline:\n\n\n\n```js\n\ncart ellps:intl | helmert x:-87 y:-96 z:-120 | cart inv ellps:GRS80\n\n```\n\n\n\nIt was selected as the *go to* example because it is only marginally more complex than the identity operator, `noop`, while still doing real geodetic work. So by implementing just two operators, `cart` and `helmert` we can already:\n\n\n\n- Provide instructive examples of useful geodetic work\n\n- Test the RG operator instantiation\n\n- Test the internal data flow architecture\n\n- Develop test- and documentation workflows\n\n- and in general get a good view of the RG *look and feel*\n\n\n\nFor these reasons, `cart` and `helmert` were the first two operators implemented in RG.\n\n\n\n### The operators\n\n\n\n**cart** converts from geographcal coordinates, to earth centered cartesian coordinates (and v.v. in the inverse case).\n\n\n\n**helmert** performs the Helmert transformation which in the simple 3-parameter case used here simply adds the parameters `[x, y, z]` to the input coordinate `[X, Y, Z]`, so the output becomes `[X+x, Y+y, Z+z]`, or in our case: `[X-87, Y-96, Z-120]` (due to the negative signs of `x, y, z`).\n\n\n\n### What happens?\n\n\n\nFrom end-to-end:\n\n\n\n1. The `cart` step takes geographical coordinates given on the *international ellipsoid* (`ellps:intl`) and converts them to earth-centered cartesian coordinates\n\n2. The `helmert` step shifts the cartesian coordinates to a new origin `[x,y,z]`\n\n3. Finally, the inverse `cart` step converts the cartesian coordinates back to geographical coordinates. This time on the *GRS80 ellipsoid* (`ellps:GRS80`)\n\n\n", "file_path": "ruminations/001-rumination.md", "rank": 95, "score": 21609.61473180099 }, { "content": "---\n\n\n\n```rust\n\n// [6] And go back, i.e. utm -> geo\n\nctx.inv(utm32, &mut data);\n\nCoord::geo_all(&mut data);\n\nprintln!({:?}, data);\n\n```\n\n\n\nAt comment `[6]`, we roundtrip back to geographical coordinates. 
Prior to print out, we let `Coord::geo_all(...)` convert from the internal coordinate representation, to the geodetic convention of \"latitude before longitude, and angles in degrees\".\n\n\n", "file_path": "ruminations/000-rumination.md", "rank": 96, "score": 21607.968802138294 }, { "content": "Also, we introduce the 3 common angular representations *degrees, gradians, radians*, conventionally abbreviated as `deg`, `gon` and `rad`.\n\n\n\nThe Rust Geodesy internal format of a four dimensional coordinate tuple is `e, n, u, t`, and the internal unit of measure for angular coordinates is radians. In `adapt`, terms, this is described as `enut_rad`.\n\n\n\n`adapt` covers much of the same ground as the `PROJ` operators [`axisswap`](https://proj.org/operations/conversions/axisswap.html) and [`unitconvert`](https://proj.org/operations/conversions/unitconvert.html), but using a declarative, rather than imperative, approach: You never tell `adapt` how you want things done, only what kind of result you want. You tell it where you want to go `from`, and where you want to go `to` (and in most cases actually only one of those). Then `adapt` figures out how to fulfill that wish.\n\n\n\n**Example:** Read data in degrees, (latitude, longitude, height, time)-order, write homologous data in radians, (longitude, latitude, height, time)-order, i.e. latitude and longitude swapped.\n\n\n\n```js\n\nadapt from: neut_deg to: enut_rad\n\n```\n\n\n\nBut since the target format is identical to the default internal format, it can be left out, and the operation be written simply as:\n\n\n\n```js\n\nadapt from: neut_deg\n\n```\n\n\n\n(end of example)\n\n\n\n**Usage:** Typically, `adapt` is used in one or both ends of a pipeline, to match data between the RG internal representation and the requirements of the embedding system:\n\n\n\n```sh\n\nadapt from: neut_deg | cart ... | helmert ... | cart inv ... | adapt to: neut_deg\n\n```\n\n\n\nNote that `adapt to: ...` and `adapt inv from: ...` are equivalent. The latter form is useful when using RG's predefined symbolic definitions, `geo` (latitude, longitude) and `gis` (longitude, latitude), as in:\n\n\n\n```sh\n\ngeo | cart ... | helmert ... | cart inv ... | geo inv\n\n```\n\n\n\n---\n\n\n", "file_path": "ruminations/002-rumination.md", "rank": 97, "score": 21605.797354216378 }, { "content": "### A brief `kp` HOWTO\n\n\n\nThe `kp` command line syntax is\n\n\n\n```sh\n\nkp \"operation\" file1 file2 ...\n\n```\n\n\n\nor, with input from `stdin`:\n\n\n\n```sh\n\necho coordinate | kp \"operation\"\n\n```\n\n\n\n**Example:**\n\nConvert the geographical coordinate tuple (55 N, 12 E) to utm, zone 32 coordinates:\n\n\n\n```sh\n\necho 55 12 0 0 | kp \"geo | utm zone:32\"\n\n> 691875.6321 6098907.8250 0.0000 0.0000\n\n```\n\n\n\nWhile RG coordinates are always 4D, `kp` will provide zero-values for any left-out postfix dimensions:\n\n\n\n```sh\n\necho 55 12 | kp \"geo | utm zone:32\"\n\n> 691875.6321 6098907.8250 0.0000 0.0000\n\n```\n\n\n\nIn the examples in the operator descriptions below, we will just give the [GYS](/ruminations/000-rumination.md#gys-the-geodetic-yaml-shorthand) representation, and imply the `echo ... 
| kp ...` part.\n\n\n\nIf in doubt, use `kp --help` or read [Rumination 003: `kp` - the RG Coordinate Processing program](/ruminations/003-rumination.md).\n\n\n\n---\n\n\n\n### Operator `adapt`\n\n\n\n**Purpose:** Adapt source coordinate order and angular units to target ditto, using a declarative approach.\n\n\n\n**Description:** Let us first introduce the **coordinate traits** *eastish, northish, upish, timish*, and their geometrical inverses *westish, southish, downish, reversed-timeish*, with mostly evident meaning:\n\n\n\nA coordinate is\n\n\n\n- **eastish** if you would typically draw it along an abscissa (e.g. longitude or easting),\n\n- **northish** if you would typically draw it along an ordinate (e.g. latitude or northing),\n\n- **upish** if you would need to draw it out of the paper (e.g. height or elevation), and\n\n- **timeish** if it represents ordinary, forward evolving time (e.g. time or time interval).\n\n\n\n*Westish, southish, downish*, and *reversed-timeish* are the axis-reverted versions of the former four. These 8 spatio-temporal directional designations have convenient short forms,\n\n`e, n, u, t` and `w, s, d, r`, respectively.\n\n\n", "file_path": "ruminations/002-rumination.md", "rank": 98, "score": 21605.546995108667 }, { "content": "### Prologue\n\n\n\nArchitecturally, the operators in Rust Geodesy (`cart`, `tmerc`, `helmert` etc.) live below the API surface. This means they are not (and should not be) described in the API documentation over at [docs.rs](https://docs.rs/geodesy). Rather, their use should be documented in a separate *Rust Geodesy User's Guide*, a book which may materialize some day, as time permits, interest demands, and RG has matured and stabilized sufficiently. Until then, this *Rumination* will serve as stop gap for operator documentation.\n\n\n\nA *Rust Geodesy Programmer's Guide* would probably also be useful, and wil definitely materialize before the next week with ten fridays. Until then, the [API documentation](https://docs.rs/geodesy), the [code examples](/examples), and the [architectural overview](/ruminations/000-rumination.md) may be useful. The RG transformation program `kp` will be described in an upcomming [RG Rumination](/ruminations/003-rumination.md). Its [source code](/src/bin/kp.rs) may also be of interest as study material for programmers. But since it is particularly useful for practical experimentation with RG operators, let's start with a *very* brief description of `kp`.\n\n\n", "file_path": "ruminations/002-rumination.md", "rank": 99, "score": 21605.233535550946 } ]
Rust
ecap/src/common/name.rs
Mark-Simulacrum/ecap-rs
842f70a5bded587308c329ffbf7eacf3289a380c
use std::borrow::Cow; use std::cell::Cell; use std::sync::atomic::{AtomicUsize, Ordering}; static LAST_ID: AtomicUsize = AtomicUsize::new(0); #[derive(Debug, Clone)] pub struct Name<'a> { image: Option<Cow<'a, [u8]>>, id: Id, host_id: Cell<Option<u32>>, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Id { Unknown, Unidentified, Id(u32), } impl<'a> Name<'a> { pub fn id(&self) -> Id { self.id } pub fn to_owned(self) -> Name<'static> { Name { id: self.id, host_id: self.host_id, image: match self.image { Some(cow) => Some(Cow::from(cow.into_owned())), None => None, }, } } pub fn from_raw<I: Into<Cow<'a, [u8]>>>(image: I, id: Id, host_id: Option<u32>) -> Self { let image = image.into(); Name { image: if image.is_empty() { None } else { Some(image) }, id, host_id: Cell::new(host_id), } } pub fn unknown() -> Name<'static> { Name { image: None, id: Id::Unknown, host_id: Cell::new(None), } } pub fn new_known<I: Into<Cow<'a, [u8]>>>(image: I) -> Name<'a> { Name { image: Some(image.into()), id: Id::Unidentified, host_id: Cell::new(None), } } pub fn new_identified<I: Into<Cow<'a, [u8]>>>(image: I) -> Name<'a> { Name { image: Some(image.into()), id: Id::Id(LAST_ID.fetch_add(1, Ordering::Relaxed) as u32), host_id: Cell::new(None), } } pub fn identified(&self) -> bool { if let Id::Id(_) = self.id { true } else { false } } pub fn known(&self) -> bool { if let Id::Unknown = self.id { false } else { true } } pub fn image(&self) -> Option<&[u8]> { self.image.as_ref().map(|s| s.as_ref()) } pub fn host_id(&self) -> Option<u32> { self.host_id.get() } pub fn assign_host_id(&self, id: u32) { assert_eq!(self.host_id.replace(Some(id)), None); } } impl<'a> PartialEq for Name<'a> { fn eq(&self, other: &Self) -> bool { self.known() && if self.identified() { self.id == other.id } else { self.image == other.image } } }
use std::borrow::Cow; use std::cell::Cell; use std::sync::atomic::{AtomicUsize, Ordering}; static LAST_ID: AtomicUsize = AtomicUsize::new(0); #[derive(Debug, Clone)] pub struct Name<'a> { image: Option
elf.id { false } else { true } } pub fn image(&self) -> Option<&[u8]> { self.image.as_ref().map(|s| s.as_ref()) } pub fn host_id(&self) -> Option<u32> { self.host_id.get() } pub fn assign_host_id(&self, id: u32) { assert_eq!(self.host_id.replace(Some(id)), None); } } impl<'a> PartialEq for Name<'a> { fn eq(&self, other: &Self) -> bool { self.known() && if self.identified() { self.id == other.id } else { self.image == other.image } } }
<Cow<'a, [u8]>>, id: Id, host_id: Cell<Option<u32>>, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Id { Unknown, Unidentified, Id(u32), } impl<'a> Name<'a> { pub fn id(&self) -> Id { self.id } pub fn to_owned(self) -> Name<'static> { Name { id: self.id, host_id: self.host_id, image: match self.image { Some(cow) => Some(Cow::from(cow.into_owned())), None => None, }, } } pub fn from_raw<I: Into<Cow<'a, [u8]>>>(image: I, id: Id, host_id: Option<u32>) -> Self { let image = image.into(); Name { image: if image.is_empty() { None } else { Some(image) }, id, host_id: Cell::new(host_id), } } pub fn unknown() -> Name<'static> { Name { image: None, id: Id::Unknown, host_id: Cell::new(None), } } pub fn new_known<I: Into<Cow<'a, [u8]>>>(image: I) -> Name<'a> { Name { image: Some(image.into()), id: Id::Unidentified, host_id: Cell::new(None), } } pub fn new_identified<I: Into<Cow<'a, [u8]>>>(image: I) -> Name<'a> { Name { image: Some(image.into()), id: Id::Id(LAST_ID.fetch_add(1, Ordering::Relaxed) as u32), host_id: Cell::new(None), } } pub fn identified(&self) -> bool { if let Id::Id(_) = self.id { true } else { false } } pub fn known(&self) -> bool { if let Id::Unknown = s
random
[ { "content": "/// Reading of `(Name, Area)` pairs across the adapter/host boundary.\n\n///\n\n/// This is used to share configuration information and transaction meta-information.\n\n///\n\n/// FIXME: \"Options objects and individual option values may be temporary. They must not\n\n/// be used beyond the method call that supplied or asked for them.\" -- what does this mean?\n\npub trait Options {\n\n /// Returns the value of the named option.\n\n ///\n\n /// `None` is returned if unknown or nonexistant.\n\n fn option(&self, name: &Name) -> Option<Area>;\n\n\n\n /// Calls visitor for each `(Name, Area)` pair.\n\n ///\n\n /// Accesses all options, including those whose `Name` is unknown.\n\n fn visit_each<V: NamedValueVisitor>(&self, visitor: V);\n\n}\n\n\n\nimpl<T> Options for Box<T>\n\nwhere\n\n T: Options + ?Sized,\n\n{\n\n fn option(&self, name: &Name) -> Option<Area> {\n\n (&**self).option(name)\n\n }\n\n fn visit_each<V: NamedValueVisitor>(&self, visitor: V) {\n\n (&**self).visit_each(visitor)\n\n }\n\n}\n", "file_path": "ecap/src/common/options.rs", "rank": 0, "score": 90864.61822748902 }, { "content": "pub trait Options {\n\n fn option(&self, name: &Name) -> Option<Area>;\n\n fn visit_each(&self, visitor: &mut dyn NamedValueVisitor);\n\n}\n\n\n\nimpl<U> Options for U\n\nwhere\n\n U: ecap::common::Options + ?Sized,\n\n{\n\n fn option(&self, name: &Name) -> Option<Area> {\n\n U::option(self, name)\n\n }\n\n\n\n fn visit_each(&self, visitor: &mut dyn NamedValueVisitor) {\n\n U::visit_each(self, visitor)\n\n }\n\n}\n\n\n\nimpl<'a> ecap::common::Options for &'a (dyn Options + 'a) {\n\n fn option(&self, name: &Name) -> Option<Area> {\n\n <(dyn Options) as Options>::option(&**self, name)\n\n }\n\n\n\n fn visit_each<V: NamedValueVisitor>(&self, mut visitor: V) {\n\n <(dyn Options) as Options>::visit_each(&**self, &mut visitor)\n\n }\n\n}\n", "file_path": "erased-ecap/src/common/options.rs", "rank": 1, "score": 88876.0726058527 }, { "content": "pub trait Service<H: 'static + host::Host + ?Sized> {\n\n fn uri(&self) -> String;\n\n fn tag(&self) -> String;\n\n fn describe(&self) -> String;\n\n fn configure(&mut self, options: &dyn common::Options);\n\n fn reconfigure(&mut self, options: &dyn common::Options);\n\n fn start(&self);\n\n fn stop(&self);\n\n fn retire(&self);\n\n fn wants_url(&self, url: &CStr) -> bool;\n\n fn make_transaction<'a>(\n\n &mut self,\n\n host: &'a mut (dyn ErasedTransaction<H> + 'static),\n\n ) -> Box<dyn adapter::Transaction>;\n\n fn is_async(&self) -> bool;\n\n fn suspend(&self, _timeout: &mut Duration);\n\n fn resume(&self);\n\n}\n\n\n\nimpl<S> Service<dyn ErasedHost> for S\n", "file_path": "erased-ecap/src/adapter/service.rs", "rank": 2, "score": 75281.45611007593 }, { "content": "pub trait Transaction: common::Options {\n\n fn start<'a>(&mut self, host: &'a mut (dyn ErasedTransaction<dyn ErasedHost> + 'static));\n\n fn stop<'a>(&mut self, host: &'a mut (dyn ErasedTransaction<dyn ErasedHost> + 'static));\n\n fn resume<'a>(&mut self, host: &'a mut (dyn ErasedTransaction<dyn ErasedHost> + 'static));\n\n fn adapted_body_discard<'a>(\n\n &mut self,\n\n host: &'a mut (dyn ErasedTransaction<dyn ErasedHost> + 'static),\n\n );\n\n fn adapted_body_make<'a>(\n\n &mut self,\n\n host: &'a mut (dyn ErasedTransaction<dyn ErasedHost> + 'static),\n\n );\n\n fn adapted_body_make_more<'a>(\n\n &mut self,\n\n host: &'a mut (dyn ErasedTransaction<dyn ErasedHost> + 'static),\n\n );\n\n fn adapted_body_stop_making<'a>(\n\n &mut self,\n\n host: &'a mut (dyn ErasedTransaction<dyn 
ErasedHost> + 'static),\n\n );\n", "file_path": "erased-ecap/src/adapter/transaction.rs", "rank": 3, "score": 71621.54549002042 }, { "content": "pub fn register_erased_translator<T: 'static + ErasedTranslator>(translator: T) {\n\n unsafe {\n\n let translator = ErasedTranslatorS::new(translator);\n\n register_translator(translator);\n\n }\n\n}\n", "file_path": "ecap-common-link/src/lib.rs", "rank": 4, "score": 71090.31236783657 }, { "content": "/// Equivalent of libecap/adapter/xaction.h\n\n///\n\n/// This trait describes the adapatation of a single message from the\n\n/// virgin state to the adapted state.\n\n///\n\n/// Implementations are created via Service::make_transaction and are\n\n/// dropped by the host either before the call to `start` or after\n\n/// `stop`.\n\n///\n\n/// All methods on this are intended only for calling by the host.\n\n///\n\n/// Transactions must also implement `Options` so that hosts can visit\n\n/// meta-information from them.\n\n///\n\n/// XXX: What is the meta information?\n\npub trait Transaction<H: ?Sized + host::Host>: Options {\n\n /// Called by the host to initiate processing of the virgin request.\n\n ///\n\n /// XXX: Confirm that options methods can't be called prior to\n\n /// start.\n\n ///\n\n /// This will be called prior to any other methods on Transaction by\n\n /// the host, after creation in\n\n /// [`Service::make_transaction`](`::adapter::Service::make_transaction`).\n\n fn start<'a>(&mut self, host: &'a mut H::TransactionRef)\n\n where\n\n H::TransactionRef: 'a;\n\n\n\n /// Processing has finished.\n\n ///\n\n /// No further calls to the host transaction should be made. The\n\n /// host transaction will also call no more methods on this adapter\n\n /// transaction.\n\n fn stop<'a>(&mut self, host: &'a mut H::TransactionRef)\n\n where\n", "file_path": "ecap/src/adapter/transaction.rs", "rank": 5, "score": 64618.32839021155 }, { "content": "struct pstr {\n\n size_t size;\n\n const char *buf;\n\n};\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 6, "score": 55264.28113625161 }, { "content": "struct body_size {\n\n bool known;\n\n uint64_t size;\n\n};\n\n\n\nextern \"C\" bool rust_shim_body_size(libecap::Body *body, body_size *out) noexcept {\n\n return call_cpp_catch_exception([&] () {\n\n auto size = body->bodySize();\n\n if (size.known()) {\n\n *out = body_size {\n\n known: true,\n\n size: size.value(),\n\n };\n\n } else {\n\n *out = body_size {\n\n known: false,\n\n size: 0,\n\n };\n\n }\n\n });\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 7, "score": 54108.08701200105 }, { "content": "// XXX This is intended as a one-to-one copy of the libecap::Name class to bypass `private`\n\nstruct cpp_name {\n\n std::string image_;\n\n int id_;\n\n int hostId_;\n\n};\n\n\n\nstatic_assert(sizeof(cpp_name) == sizeof(libecap::Name));\n\nstatic_assert(alignof(cpp_name) == alignof(libecap::Name));\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 8, "score": 54108.08701200105 }, { "content": "struct rust_panic {\n\n bool is_exception;\n\n rust_string message;\n\n panic_location location;\n\n};\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 9, "score": 54108.08701200105 }, { "content": "struct panic_location {\n\n rust_string file;\n\n int line;\n\n int column;\n\n};\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 10, "score": 54108.08701200105 }, { "content": "#[derive(Debug)]\n\nstruct PanicLocation {\n\n file: String,\n\n line: u32,\n\n column: u32,\n\n}\n\n\n", "file_path": "ecap-cpp/src/lib.rs", "rank": 11, "score": 
54108.08701200105 }, { "content": "struct rust_details__ {\n\n char details[DETAILS_SIZE];\n\n uint64_t __align[0];\n\n};\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 12, "score": 54108.08701200105 }, { "content": "struct CppTranslator;\n\n\n\nimpl Translator for CppTranslator {\n\n fn register_service<H, T>(&self, service: T)\n\n where\n\n H: Host + ?Sized,\n\n T: Service<H> + 'static,\n\n {\n\n // We only support this kind of service anyway, so just assert\n\n // that that's what we got. It's possible that Translator should\n\n // not be implemented for all services as it is today or some\n\n // other kind of work should go into this bit.\n\n {\n\n let service_any: &Any = &service;\n\n assert!(service_any.is::<Box<dyn ErasedService<dyn ErasedHost>>>());\n\n }\n\n let thin_ptr = Box::into_raw(Box::new(service));\n\n unsafe {\n\n assert!(call_ffi_maybe_panic(|raw| unsafe {\n\n ffi::rust_shim_register_service(thin_ptr as *mut *mut c_void, raw)\n\n }));\n\n }\n\n }\n\n}\n\n\n\nuse crossbeam::sync::TreiberStack;\n\nlazy_static! {\n\n static ref PANICS: TreiberStack<PanicPayload> = TreiberStack::new();\n\n}\n\n\n", "file_path": "ecap-cpp/src/lib.rs", "rank": 13, "score": 54108.08701200105 }, { "content": "struct rust_area {\n\n size_t size;\n\n const char *buf;\n\n rust_details__ details;\n\n};\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 14, "score": 54108.08701200105 }, { "content": "struct rust_name {\n\n pstr image;\n\n int id;\n\n int host_id;\n\n};\n\n\n\n// The returned rust_name contains pointers into the passed name and must not outlive name.\n\nrust_name to_rust_name(const libecap::Name &name) {\n\n const cpp_name &namef = reinterpret_cast<const cpp_name &>(name);\n\n return rust_name {\n\n image: pstr {\n\n size: namef.image_.size(),\n\n buf: namef.image_.data(),\n\n },\n\n id: namef.id_,\n\n host_id: namef.hostId_,\n\n };\n\n}\n\n\n\nlibecap::Name from_rust_name(const rust_name *name) {\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 15, "score": 54108.08701200105 }, { "content": "struct rust_version {\n\n int majr;\n\n int minr;\n\n int micr;\n\n};\n\n\n\n#define DETAILS_SIZE 16\n\n#define DETAILS_ALIGN 8\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 16, "score": 54108.08701200105 }, { "content": "struct rust_string {\n\n size_t size;\n\n const char *buf;\n\n size_t capacity;\n\n};\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 17, "score": 54108.08701200105 }, { "content": "// This is intended to signal in a panic that the error occurred in C++...\n\nstruct CppError;\n\n\n\n#[no_mangle]\n\n#[unwind(aborts)]\n\npub unsafe extern \"C\" fn rust_panic_pop(panic: *mut ffi::Panic) -> bool {\n\n // This code should be panic-free as they will not be properly handled by it.\n\n let next = match PANICS.try_pop() {\n\n Some(n) => n,\n\n None => return false,\n\n };\n\n\n\n ptr::write(panic, next.into_ffi());\n\n\n\n true\n\n}\n\n\n\n#[no_mangle]\n\n#[unwind(aborts)]\n\npub extern \"C\" fn rust_panic_free(panic: ffi::Panic) {\n\n // We are just dropping vectors here which should be panic-free; this is std\n", "file_path": "ecap-cpp/src/lib.rs", "rank": 18, "score": 54108.08701200105 }, { "content": "#[derive(Debug)]\n\nstruct PanicPayload {\n\n is_exception: bool,\n\n payload: String,\n\n location: Option<PanicLocation>,\n\n}\n\n\n\nuse std::panic::{self, PanicInfo};\n", "file_path": "ecap-cpp/src/lib.rs", "rank": 19, "score": 54108.08701200105 }, { "content": "struct rust_exception_ptr {\n\n void *ptr;\n\n};\n\n\n\n// We'll be memcpying the raw bytes in 
here to preserve them across the C boundary\n\nstatic_assert(sizeof(libecap::Area::Details) <= DETAILS_SIZE);\n\nstatic_assert(alignof(libecap::Area::Details) == DETAILS_ALIGN);\n\nstatic_assert(alignof(rust_details__) == DETAILS_ALIGN);\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 20, "score": 53030.95100126299 }, { "content": "struct RustLogVerbosity {\n\n size_t mask;\n\n};\n\n\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 21, "score": 53030.95100126299 }, { "content": "struct rust_shared_ptr_message {\n\n char value[SHARED_PTR_MESSAGE_SIZE];\n\n uint64_t __align[0];\n\n};\n\n\n\n// We'll be memcpying the raw bytes in here to preserve them across the C boundary\n\nstatic_assert(sizeof(libecap::shared_ptr<libecap::Message>) <= SHARED_PTR_MESSAGE_SIZE);\n\nstatic_assert(alignof(libecap::shared_ptr<libecap::Message>) == alignof(rust_shared_ptr_message));\n\nstatic_assert(alignof(libecap::shared_ptr<libecap::Message>) == 8);\n\n\n\nrust_shared_ptr_message to_rust_shared_ptr_message(libecap::shared_ptr<libecap::Message> msg) {\n\n rust_shared_ptr_message foo;\n\n auto ptr = new (&foo.value) libecap::shared_ptr<libecap::Message>;\n\n *ptr = msg;\n\n return foo;\n\n}\n\n\n\nextern \"C\" const libecap::Message *rust_shim_shared_ptr_message_ref(\n\n const libecap::shared_ptr<libecap::Message> *msg) noexcept {\n\n return msg->get();\n", "file_path": "ecap-sys/src/shim.cpp", "rank": 22, "score": 52025.03242810411 }, { "content": "/// The primary interface for talking to the host itself.\n\npub trait Host\n\nwhere\n\n <Self::Message as Message<Self>>::MessageClone: Message<Self>,\n\n{\n\n type DebugStream: DebugStream;\n\n type Message: Message<Self>;\n\n type MessageRef: Message<Self> + ?Sized;\n\n type Transaction: Transaction<Self>;\n\n type TransactionRef: Transaction<Self> + ?Sized;\n\n type Body: Body + ?Sized;\n\n type Header: Header + ?Sized;\n\n type FirstLine: FirstLine + ?Sized;\n\n type Trailer: Header + ?Sized;\n\n\n\n /// A unique identifer across all vendors.\n\n fn uri(&self) -> String;\n\n\n\n /// A description of the Host, free-format.\n\n fn describe(&self) -> String;\n\n\n", "file_path": "ecap/src/host/host.rs", "rank": 23, "score": 49987.420572082905 }, { "content": "/// This represents a header structure.\n\n///\n\n/// It contains many fields, and is essentially a map of Name to Area.\n\npub trait Header {\n\n /// Returns true if this header has at least one field with the specified Name.\n\n ///\n\n /// XXX: This should possibly do something like std's contains methods with Borrow<Self::Name>\n\n /// instead.\n\n fn contains_field(&self, field: &Name) -> bool;\n\n\n\n /// Get the specified field(s) by `Name`.\n\n ///\n\n /// If multiple headers with the specified field are present, will\n\n /// return a list of entries separated by ', '.\n\n ///\n\n /// This returns an owned `Area` because it may need to allocate in\n\n /// the list case.\n\n fn get(&self, field: &Name) -> Option<Area>;\n\n\n\n /// Insert a field, value pair into the header.\n\n fn insert(&mut self, field: Name, value: Area);\n\n\n\n /// Remove all entries matching the field name.\n", "file_path": "ecap/src/common/header.rs", "rank": 24, "score": 49987.420572082905 }, { "content": "/// Message body buffer, shared by producer and consumer.\n\n///\n\n/// Usually implemented in hosts.\n\n///\n\n/// This trait may be deprecated in favor of moving this information\n\n/// into Message itself in the future.\n\npub trait Body {\n\n /// Returns the size of the Body.\n\n ///\n\n /// Will return `None` if 
the size is not known.\n\n fn size(&self) -> Option<u64>;\n\n}\n", "file_path": "ecap/src/common/body.rs", "rank": 25, "score": 49987.420572082905 }, { "content": "/// Area content and reference-counting support.\n\npub trait Details {\n\n /// Get the underlying buffer.\n\n fn as_bytes(&self) -> &[u8];\n\n\n\n /// Increment the strong count.\n\n fn increment(&self);\n\n\n\n /// Decrement the strong count.\n\n ///\n\n /// If this is the last Details to be decremented,\n\n /// this should also deallocate the memory.\n\n fn decrement(&self);\n\n}\n\n\n", "file_path": "ecap/src/common/area.rs", "rank": 26, "score": 49987.420572082905 }, { "content": "pub trait Header {\n\n fn contains_field(&self, field: &Name) -> bool;\n\n fn get(&self, field: &Name) -> Option<Area>;\n\n fn remove_any(&mut self, field: &Name);\n\n fn insert(&mut self, field: Name, value: Area);\n\n fn visit_each(&self, visitor: &mut dyn NamedValueVisitor);\n\n fn image(&self) -> Area;\n\n fn parse(&mut self, buf: &Area) -> Result<(), ()>;\n\n}\n\n\n\nimpl<T: ?Sized> Header for T\n\nwhere\n\n T: ecap::common::header::Header,\n\n{\n\n fn contains_field(&self, field: &Name) -> bool {\n\n self.contains_field(field)\n\n }\n\n fn get(&self, field: &Name) -> Option<Area> {\n\n self.get(field)\n\n }\n", "file_path": "erased-ecap/src/common/header.rs", "rank": 27, "score": 48917.90800481045 }, { "content": "pub trait Host {\n\n fn uri(&self) -> String;\n\n fn describe(&self) -> String;\n\n //fn note_versioned_service(&mut self, ecap_version: &CStr, service: ErasedService);\n\n fn open_debug(&self, verbosity: LogVerbosity) -> Option<Box<dyn DebugStream>>;\n\n fn close_debug(&self, stream: Box<dyn DebugStream>);\n\n fn new_request(&self) -> Box<dyn Message>;\n\n fn new_response(&self) -> Box<dyn Message>;\n\n}\n\n\n\nimpl ecap::host::Host for dyn Host {\n\n type Message = Box<dyn Message>;\n\n type MessageRef = dyn Message;\n\n type DebugStream = Box<dyn DebugStream>;\n\n type Transaction = Box<dyn Transaction<dyn Host>>;\n\n type TransactionRef = dyn Transaction<dyn Host>;\n\n type Body = dyn Body;\n\n type Header = dyn Header;\n\n type FirstLine = dyn FirstLine;\n\n type Trailer = dyn Header;\n", "file_path": "erased-ecap/src/host/host.rs", "rank": 28, "score": 48917.90800481045 }, { "content": "pub fn main() {\n\n println!(\"cargo:rerun-if-changed=src/build.rs\");\n\n println!(\"cargo:rustc-link-lib=dylib=ecap_common\");\n\n}\n", "file_path": "ecap-common-link/src/build.rs", "rank": 29, "score": 48917.90800481045 }, { "content": "/// Conversion to a Details object.\n\n///\n\n/// This represents the conversion from some T to a type implementing\n\n/// the `Details` trait.\n\npub trait DetailsConstructor {\n\n /// Create the object that will be later used for increment/decrement.\n\n fn details(self) -> DetailsStack;\n\n}\n\n\n", "file_path": "ecap/src/common/area.rs", "rank": 30, "score": 48917.90800481045 }, { "content": "pub trait ErasedTranslator {\n\n fn register_service(&self, s: ErasedService);\n\n}\n\n\n\nimpl<T> ErasedTranslator for T\n\nwhere\n\n T: ecap::Translator,\n\n{\n\n fn register_service(&self, s: ErasedService) {\n\n let service = s.take::<dyn host::Host>();\n\n Self::register_service(self, service);\n\n }\n\n}\n\n\n\npub struct ErasedTranslatorS {\n\n service: *mut dyn ErasedTranslator, // lies\n\n}\n\n\n\nimpl ErasedTranslatorS {\n\n pub fn new<T: ErasedTranslator + 'static>(t: T) -> Self {\n", "file_path": "erased-ecap/src/lib.rs", "rank": 31, "score": 48917.90800481045 }, { "content": "/// The first line in a 
request/response, e.g. `GET / HTTP/1.1` or\n\n/// `HTTP/1.1 200 OK`.\n\n///\n\n/// See also the [`RequestLine`] and [`StatusLine`] traits.\n\n///\n\n/// XXX: This does not correlate directly in a HTTP 2 scenario?\n\npub trait FirstLine {\n\n fn version(&self) -> Version;\n\n fn set_version(&mut self, version: Version);\n\n\n\n fn protocol(&self) -> Name;\n\n fn set_protocol(&mut self, protocol: Name);\n\n}\n\n\n", "file_path": "ecap/src/common/header.rs", "rank": 32, "score": 48917.90800481045 }, { "content": "pub trait NamedValueVisitor {\n\n fn visit(&mut self, name: &Name, value: &Area);\n\n}\n\n\n\nimpl<'a, T: ?Sized + NamedValueVisitor> NamedValueVisitor for &'a mut T {\n\n fn visit(&mut self, name: &Name, value: &Area) {\n\n (&mut **self).visit(name, value);\n\n }\n\n}\n", "file_path": "ecap/src/common/named_values.rs", "rank": 33, "score": 46984.23440097702 }, { "content": "pub trait Message: Any {\n\n fn clone(&self) -> Box<dyn Message>;\n\n\n\n fn first_line_mut<'a>(&'a mut self) -> &'a mut (dyn FirstLine + 'static);\n\n fn first_line<'a>(&'a self) -> &'a (dyn FirstLine + 'static);\n\n\n\n fn header_mut<'a>(&'a mut self) -> &'a mut (dyn Header + 'static);\n\n fn header<'a>(&'a self) -> &'a (dyn Header + 'static);\n\n\n\n fn add_body(&mut self);\n\n fn body_mut<'a>(&'a mut self) -> Option<&'a mut (dyn Body + 'static)>;\n\n fn body<'a>(&'a self) -> Option<&'a (dyn Body + 'static)>;\n\n\n\n fn add_trailer(&mut self); // XXX: throws by default\n\n fn trailer_mut<'a>(&'a mut self) -> &'a mut (dyn Header + 'static);\n\n fn trailer<'a>(&'a self) -> &'a (dyn Header + 'static);\n\n}\n\n\n\nmopafy!(Message);\n\n\n", "file_path": "erased-ecap/src/common/message.rs", "rank": 34, "score": 46664.81678389288 }, { "content": "#[derive(Debug)]\n\nstruct RcPtr<T: ?Sized> {\n\n rc: NonNull<T>,\n\n}\n\n\n\nimpl<T: ?Sized + AsRef<[u8]>> Clone for RcPtr<T> {\n\n fn clone(&self) -> RcPtr<T> {\n\n self.increment();\n\n RcPtr { rc: self.rc }\n\n }\n\n}\n\n\n\nimpl<T: ?Sized + AsRef<[u8]>> Copy for RcPtr<T> {}\n\n\n\nimpl<T: ?Sized + AsRef<[u8]>> Details for RcPtr<T> {\n\n fn increment(&self) {\n\n unsafe {\n\n let v = Rc::from_raw(self.rc.as_ptr() as *const T);\n\n mem::forget(v.clone());\n\n mem::forget(v);\n\n }\n", "file_path": "ecap/src/common/area.rs", "rank": 35, "score": 46579.866992084804 }, { "content": "pub trait Translator: Send + Sync {\n\n fn register_service<H, T>(&self, service: T)\n\n where\n\n H: Host + ?Sized,\n\n T: Service<H> + 'static;\n\n}\n", "file_path": "ecap/src/lib.rs", "rank": 36, "score": 44896.160652801256 }, { "content": "/// The URI and method, e.g. \"GET /\".\n\n///\n\n/// This is only present in requests.\n\npub trait RequestLine: FirstLine {\n\n fn uri(&self) -> Area;\n\n fn set_uri(&mut self, area: Area);\n\n\n\n fn method(&self) -> Name;\n\n fn set_method(&mut self, name: Name);\n\n}\n\n\n", "file_path": "ecap/src/common/header.rs", "rank": 37, "score": 44731.143180059444 }, { "content": "/// The status code and reason phrase, e.g. 
\"200 OK\".\n\n///\n\n/// This is only present in responses.\n\npub trait StatusLine: FirstLine {\n\n fn status_code(&self) -> u16;\n\n fn set_status_code(&mut self, code: u16);\n\n\n\n fn reason_phrase(&self) -> Name;\n\n fn set_reason_phrase(&mut self, name: Name);\n\n}\n", "file_path": "ecap/src/common/header.rs", "rank": 38, "score": 44731.143180059444 }, { "content": "pub trait DebugStream: fmt::Write {}\n", "file_path": "ecap/src/common/log.rs", "rank": 39, "score": 42962.48704896782 }, { "content": "#[no_mangle]\n\n#[unwind(allowed)]\n\npub fn register_service(service: ErasedService) {\n\n let translator_slot = REGISTERED_TRANSLATORS.lock().unwrap();\n\n if let Some(translator) = &*translator_slot {\n\n translator.register_service(service);\n\n } else {\n\n let mut adapters = REGISTERED_ADAPTERS.lock().unwrap();\n\n adapters.push(service);\n\n }\n\n}\n\n\n", "file_path": "ecap-common/src/lib.rs", "rank": 40, "score": 42085.590726209324 }, { "content": "/// The host side of the eCAP transaction.\n\n///\n\n/// adapter::Transaction implementors use this interface to get virgin messages.\n\npub trait Transaction<H: ?Sized + Host> {\n\n /// Access to the request or the response.\n\n ///\n\n /// XXX: Signature will change to &self -> &Message\n\n fn virgin(&mut self) -> &mut H::MessageRef;\n\n\n\n /// Other side of the request/response pair, as compared to `virgin`.\n\n ///\n\n /// This will return `None` if the adapter is on the request side of\n\n /// a proxy, as there is no cause in that case.\n\n ///\n\n /// XXX: Signature will change to &self -> Option<&Message>\n\n fn cause(&mut self) -> &H::MessageRef;\n\n\n\n /// The message passed to `use_adapted`.\n\n ///\n\n /// This method will return `None` if the `use_adapted` method has\n\n /// not been called.\n\n ///\n\n /// XXX: Why does this return a reference to message and not the\n", "file_path": "ecap/src/host/transaction.rs", "rank": 41, "score": 41540.65195327437 }, { "content": "pub trait Message<H: ?Sized + Host> {\n\n // FIXME: Message bound here may be too limiting\n\n type MessageClone: Message<H> + 'static;\n\n\n\n fn clone(&self) -> Self::MessageClone;\n\n\n\n /// Always present, determines direction\n\n ///\n\n /// XXX: We cannot do FirstLine without additional code to subclass into the other traits\n\n /// XXX: Should this return an enum?\n\n fn first_line_mut(&mut self) -> &mut H::FirstLine;\n\n fn first_line(&self) -> &H::FirstLine;\n\n\n\n fn header_mut(&mut self) -> &mut H::Header;\n\n fn header(&self) -> &H::Header;\n\n\n\n fn add_body(&mut self);\n\n fn body_mut(&mut self) -> Option<&mut H::Body>;\n\n fn body(&self) -> Option<&H::Body>;\n\n\n", "file_path": "ecap/src/common/message.rs", "rank": 42, "score": 41537.19487486187 }, { "content": "struct CfgVisitor<'a>(&'a mut ModifyService);\n\n\n\nimpl<'a> NamedValueVisitor for CfgVisitor<'a> {\n\n fn visit(&mut self, name: &Name, value: &Area) {\n\n let value = value.as_bytes();\n\n match name.image() {\n\n Some(b\"victim\") => {\n\n if value.is_empty() {\n\n panic!(\"unsupported empty victim\");\n\n }\n\n self.0.victim = Some(Rc::new(value.into()));\n\n }\n\n Some(b\"replacement\") => {\n\n self.0.replacement = Some(Rc::new(value.into()));\n\n }\n\n _ if name.host_id().is_some() => {\n\n // skip host options\n\n return;\n\n }\n\n key => {\n", "file_path": "sample-adapters/modifying/src/lib.rs", "rank": 43, "score": 41424.247277091345 }, { "content": "#[no_mangle]\n\n#[unwind(allowed)]\n\npub fn register_translator(translator: ErasedTranslatorS) {\n\n let mut adapters 
= REGISTERED_ADAPTERS.lock().unwrap();\n\n for adapter in adapters.drain(..) {\n\n translator.register_service(adapter);\n\n }\n\n let mut translator_slot = REGISTERED_TRANSLATORS.lock().unwrap();\n\n assert!(translator_slot.is_none());\n\n *translator_slot = Some(translator);\n\n}\n", "file_path": "ecap-common/src/lib.rs", "rank": 44, "score": 41261.44128241746 }, { "content": "pub trait DebugStream: fmt::Write + Any {}\n\n\n\nmopafy!(DebugStream);\n\n\n\nimpl<U> DebugStream for U\n\nwhere\n\n U: ecap::common::log::DebugStream + 'static,\n\n{\n\n}\n\n\n\nimpl ecap::common::log::DebugStream for dyn DebugStream {}\n\nimpl ecap::common::log::DebugStream for Box<dyn DebugStream> {}\n\n\n\nimpl fmt::Write for Box<dyn DebugStream> {\n\n fn write_str(&mut self, s: &str) -> fmt::Result {\n\n (&mut **self).write_str(s)\n\n }\n\n}\n", "file_path": "erased-ecap/src/common/log.rs", "rank": 45, "score": 40660.29855210337 }, { "content": "/// This trait is the equivalent of libecap::adapter::Service.\n\npub trait Service<H: ?Sized + host::Host> {\n\n type Transaction: adapter::Transaction<H>;\n\n\n\n /// The returned string should be unique across vendors.\n\n fn uri(&self) -> String;\n\n\n\n /// Identifies this version and configuration of this adapter: the\n\n /// URI and tag should uniquely identify a given adapter.\n\n fn tag(&self) -> String;\n\n\n\n /// Free-format description of the adapter\n\n // FIXME: Migrate to fmt::Display impl?\n\n fn describe(&self) -> String;\n\n\n\n /// Determines whether this adapter requires async transactions.\n\n ///\n\n /// If false (as by default) the Host will not call suspend and resume.\n\n fn is_async(&self) -> bool {\n\n false\n\n }\n", "file_path": "ecap/src/adapter/service.rs", "rank": 46, "score": 39487.22734832435 }, { "content": "pub trait Transaction<H: ecap::host::Host + ?Sized> {\n\n fn virgin(&mut self) -> &mut dyn Message;\n\n fn cause(&mut self) -> &dyn Message;\n\n fn adapted(&mut self) -> &mut dyn Message;\n\n fn use_virgin(&mut self);\n\n fn use_adapted(&mut self, msg: Box<dyn Message>);\n\n fn block_virgin(&mut self);\n\n fn adaptation_delayed(&mut self, delay: &Delay);\n\n fn adaptation_aborted(&mut self);\n\n fn resume(&mut self);\n\n fn virgin_body_discard(&mut self);\n\n fn virgin_body_make(&mut self);\n\n fn virgin_body_make_more(&mut self);\n\n fn virgin_body_stop_making(&mut self);\n\n fn virgin_body_pause(&mut self);\n\n fn virgin_body_resume(&mut self);\n\n fn virgin_body_content(&mut self, offset: usize, size: usize) -> Area;\n\n fn virgin_body_content_shift(&mut self, size: usize);\n\n fn adapted_body_content_done(&mut self, at_end: bool);\n\n fn adapted_body_content_available(&mut self);\n", "file_path": "erased-ecap/src/host/transaction.rs", "rank": 47, "score": 36904.69718981435 }, { "content": "pub fn call_ffi_maybe_panic<F, R>(f: F) -> R\n\nwhere\n\n F: FnOnce(*mut R) -> bool,\n\n{\n\n unsafe {\n\n let mut raw: ManuallyDrop<R> = ManuallyDrop::new(mem::uninitialized());\n\n let res = f(&mut *raw);\n\n if res {\n\n ManuallyDrop::into_inner(raw)\n\n } else {\n\n panic!(::CppError);\n\n }\n\n }\n\n}\n\n\n\npub extern \"C\" fn on_load() {\n\n panic::set_hook(Box::new(panic_hook));\n\n ecap_common_link::register_erased_translator(CppTranslator);\n\n}\n\n\n\n#[link_section = \".ctors\"]\n\n#[used]\n\npub static __ON_LOAD_PTR: extern \"C\" fn() = on_load;\n", "file_path": "ecap-cpp/src/lib.rs", "rank": 48, "score": 36172.70499253027 }, { "content": "use common::{Area, Name, NamedValueVisitor};\n\n\n\n/// Reading of `(Name, Area)` pairs 
across the adapter/host boundary.\n\n///\n\n/// This is used to share configuration information and transaction meta-information.\n\n///\n\n/// FIXME: \"Options objects and individual option values may be temporary. They must not\n\n/// be used beyond the method call that supplied or asked for them.\" -- what does this mean?\n", "file_path": "ecap/src/common/options.rs", "rank": 49, "score": 34125.48194772893 }, { "content": "pub fn register_erased_service<T: Service<dyn Host>>(service: T)\n\nwhere\n\n <T as Service<dyn Host>>::Transaction: 'static,\n\n{\n\n unsafe {\n\n let service = ErasedService::new(service);\n\n register_service(service);\n\n }\n\n}\n\n\n", "file_path": "ecap-common-link/src/lib.rs", "rank": 50, "score": 33991.97149619308 }, { "content": "use ffi;\n\nuse libc::{c_char, c_int, c_void, size_t};\n\n\n\nuse common::{CppArea, CppName};\n\nuse ecap::common::{Area, Name, NamedValueVisitor, Options};\n\n\n\nuse call_ffi_maybe_panic;\n\n\n\nforeign_ref!(pub struct CppOptions(ffi::Options));\n\n\n\nimpl Options for CppOptions {\n\n fn option(&self, name: &Name) -> Option<Area> {\n\n let name = CppName::from_name(name);\n\n unsafe {\n\n let area = call_ffi_maybe_panic(|raw| unsafe {\n\n ffi::options_option(self.as_ptr(), name.as_ptr(), raw)\n\n });\n\n Some(CppArea::from_raw(area).into())\n\n }\n\n }\n", "file_path": "ecap-cpp/src/common/options.rs", "rank": 51, "score": 32876.43740031516 }, { "content": "use ecap;\n\nuse ecap::common::{Area, Name, NamedValueVisitor};\n\n\n", "file_path": "erased-ecap/src/common/options.rs", "rank": 52, "score": 32870.30935168703 }, { "content": "\n\n fn visit_each<V: NamedValueVisitor>(&self, mut visitor: V) {\n\n let visitor_ptr = &mut visitor;\n\n call_ffi_maybe_panic(|_: *mut ()| unsafe {\n\n ffi::options_visit(\n\n self.as_ptr(),\n\n visitor_callback,\n\n visitor_ptr as *mut _ as *mut c_void,\n\n )\n\n });\n\n }\n\n}\n\n\n\n// XXX: Must handle panics/exceptions\n\npub extern \"C\" fn visitor_callback(name: ffi::Name, area: ffi::Area, cb: *mut c_void) {\n\n assert!(!cb.is_null());\n\n unsafe {\n\n let visitor = &mut **(cb as *mut *mut dyn NamedValueVisitor);\n\n\n\n let name = CppName::from_raw(&name);\n\n visitor.visit(&name, &Area::new(CppArea::from_raw(area)));\n\n }\n\n}\n", "file_path": "ecap-cpp/src/common/options.rs", "rank": 53, "score": 32869.65179773297 }, { "content": "use std::borrow::Cow;\n\n\n\npub struct Delay {\n\n /// Completed work-fraction in (0, 1) range or `None` if unknown.\n\n pub progress: Option<f64>,\n\n\n\n /// User-friendly state description, if available\n\n pub description: Option<Cow<'static, str>>,\n\n}\n", "file_path": "ecap/src/common/delay.rs", "rank": 54, "score": 13.651434608440182 }, { "content": "mod options;\n\npub use self::options::Options;\n\n\n\nmod message;\n\npub use self::message::Message;\n\n\n\npub mod header;\n\npub mod log;\n", "file_path": "erased-ecap/src/common/mod.rs", "rank": 56, "score": 13.000548133548161 }, { "content": "pub mod body;\n\npub use self::body::Body;\n\n\n\npub mod area;\n\npub use self::area::Area;\n\n\n\nmod delay;\n\npub use self::delay::Delay;\n\n\n\npub mod header;\n\n\n\npub mod log;\n\n\n\nmod message;\n\npub use self::message::Message;\n\n\n\npub mod name;\n\npub use self::name::Name;\n\n\n\nmod named_values;\n\npub use self::named_values::NamedValueVisitor;\n\n\n\nmod options;\n\npub use self::options::Options;\n\n\n\nmod version;\n\npub use self::version::Version;\n", "file_path": "ecap/src/common/mod.rs", "rank": 57, "score": 12.7560812038067 }, { "content": 
"#![feature(unwind_attributes)]\n\n#![feature(alloc_system)]\n\n\n\nextern crate alloc_system;\n\n\n\n#[global_allocator]\n\nstatic ALLOC: alloc_system::System = alloc_system::System;\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\nextern crate ecap;\n\nextern crate erased_ecap;\n\n\n\nuse erased_ecap::adapter::ErasedService;\n\nuse erased_ecap::ErasedTranslatorS;\n\nuse std::sync::Mutex;\n\n\n\nlazy_static! {\n\n pub static ref REGISTERED_ADAPTERS: Mutex<Vec<ErasedService>> = Mutex::new(Vec::new());\n\n pub static ref REGISTERED_TRANSLATORS: Mutex<Option<ErasedTranslatorS>> = Mutex::new(None);\n\n}\n\n\n\n#[no_mangle]\n\n#[unwind(allowed)]\n", "file_path": "ecap-common/src/lib.rs", "rank": 60, "score": 11.873816666918735 }, { "content": " where\n\n H::TransactionRef: 'a,\n\n {\n\n }\n\n}\n\n\n\nimpl Options for MinimalTransaction {\n\n fn option(&self, _name: &Name) -> Option<Area> {\n\n // no meta-information to provide\n\n None\n\n }\n\n\n\n fn visit_each<V: NamedValueVisitor>(&self, _visitor: V) {\n\n // no meta-information to provide\n\n }\n\n}\n\n\n\npub extern \"C\" fn on_load() {\n\n ecap_common_link::register_erased_service(MinimalService);\n\n}\n\n\n\n#[link_section = \".ctors\"]\n\n#[used]\n\npub static ON_LOAD_PTR: extern \"C\" fn() = on_load;\n", "file_path": "sample-adapters/minimal/src/lib.rs", "rank": 61, "score": 11.472990149854128 }, { "content": "mod area;\n\npub mod body;\n\npub mod log;\n\npub mod message;\n\nmod name;\n\npub mod options;\n\npub use self::area::CppArea;\n\npub use self::name::CppName;\n\n\n\nmod version;\n\npub use self::version::CppVersion;\n", "file_path": "ecap-cpp/src/common/mod.rs", "rank": 62, "score": 11.378316431260448 }, { "content": "impl Options for PassthruTransaction {\n\n fn option(&self, _name: &Name) -> Option<Area> {\n\n // no meta-information to provide\n\n None\n\n }\n\n\n\n fn visit_each<V: NamedValueVisitor>(&self, _visitor: V) {\n\n // no meta-information to provide\n\n }\n\n}\n\n\n\nextern \"C\" fn on_load() {\n\n ecap_common_link::register_erased_service(PassthruService);\n\n}\n\n\n\n#[link_section = \".ctors\"]\n\n#[used]\n\npub static ON_LOAD_PTR: extern \"C\" fn() = on_load;\n", "file_path": "sample-adapters/passthru/src/lib.rs", "rank": 64, "score": 10.895772579176427 }, { "content": "#[derive(Debug, Copy, Clone, Eq)]\n\npub struct Version {\n\n pub major: Option<u32>,\n\n pub minor: Option<u32>,\n\n pub micro: Option<u32>,\n\n}\n\n\n\nimpl Version {\n\n pub fn known(&self) -> bool {\n\n self.major.is_some()\n\n }\n\n}\n\n\n\nimpl PartialEq for Version {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.known()\n\n && self.major == other.major\n\n && self.minor == other.minor\n\n && self.micro == other.micro\n\n }\n\n}\n", "file_path": "ecap/src/common/version.rs", "rank": 65, "score": 10.855603421388338 }, { "content": "#![feature(used)]\n\n\n\nextern crate ecap;\n\nextern crate ecap_common_link;\n\n\n\nuse std::ffi::CStr;\n\n\n\nuse ecap::adapter::{Service, Transaction};\n\nuse ecap::common::{Area, Name, NamedValueVisitor, Options};\n\nuse ecap::host::{self, Transaction as HostTransactionTrait};\n\n\n\n#[derive(Debug)]\n\npub struct MinimalService;\n\n\n\nimpl<H> Service<H> for MinimalService\n\nwhere\n\n H: host::Host + ?Sized,\n\n H::Transaction: 'static,\n\n{\n\n type Transaction = MinimalTransaction;\n", "file_path": "sample-adapters/minimal/src/lib.rs", "rank": 66, "score": 10.48914893916184 }, { "content": "#![feature(used)]\n\n\n\nextern crate ecap;\n\nextern crate ecap_common_link;\n\n\n\nuse 
std::ffi::CStr;\n\n\n\nuse ecap::adapter::{Service, Transaction};\n\nuse ecap::common::{Area, Message, Name, NamedValueVisitor, Options};\n\nuse ecap::host::{self, Transaction as HostTransactionTrait};\n\n\n\n#[derive(Debug)]\n\npub struct PassthruService;\n\n\n\nimpl<H> Service<H> for PassthruService\n\nwhere\n\n H: host::Host + ?Sized,\n\n H::Transaction: 'static,\n\n{\n\n type Transaction = PassthruTransaction;\n", "file_path": "sample-adapters/passthru/src/lib.rs", "rank": 67, "score": 10.420413655942687 }, { "content": "use ecap::common::name::{Id, Name};\n\nuse ffi;\n\nuse libc::{c_char, c_int};\n\nuse std::marker::PhantomData;\n\nuse std::{mem, ptr, slice, str};\n\n\n\npub struct CppName<'a: 'b, 'b> {\n\n cpp: ffi::Name,\n\n name: PhantomData<&'b Name<'a>>,\n\n}\n\n\n\nimpl<'a, 'b> CppName<'a, 'b> {\n\n pub fn from_name(name: &'a Name<'b>) -> CppName<'a, 'b> {\n\n unsafe {\n\n let cpp_name = ffi::Name {\n\n image: ffi::PStr {\n\n size: name.image().map(|i| i.len()).unwrap_or(0),\n\n buf: name.image()\n\n .map(|i| i.as_ptr() as *const c_char)\n\n .unwrap_or(ptr::null()),\n", "file_path": "ecap-cpp/src/common/name.rs", "rank": 68, "score": 10.390049788431197 }, { "content": "use ecap::common::log::LogVerbosity;\n\nuse ecap::host::Host;\n\nuse ffi;\n\n\n\nuse call_ffi_maybe_panic;\n\nuse std::panic;\n\n\n\nuse common::body::CppBody;\n\nuse common::log::DebugStream;\n\nuse common::message::{CppFirstLine, CppHeader, CppMessage, SharedPtrMessage};\n\nuse host::transaction::{CppTransaction, CppTransactionRef};\n\n\n\nuse std::mem;\n\n\n\nforeign_ref!(pub struct CppHost(ffi::Host));\n\n\n\nimpl CppHost {\n\n // XXX: Is this really &'static? Can the adapter rely on that?\n\n pub fn new() -> &'static CppHost {\n\n unsafe {\n", "file_path": "ecap-cpp/src/host/host.rs", "rank": 70, "score": 9.97701780119118 }, { "content": "#![feature(used)]\n\nextern crate ecap;\n\nextern crate ecap_common_link;\n\n\n\nuse std::ffi::CStr;\n\nuse std::mem;\n\nuse std::rc::Rc;\n\n\n\nuse ecap::adapter::{Service, Transaction};\n\nuse ecap::common::{header::Header, Area, Message, Name, NamedValueVisitor, Options};\n\nuse ecap::host::{self, Transaction as HostTransactionTrait};\n\n\n\n#[derive(Debug)]\n\npub struct ModifyService {\n\n victim: Option<Rc<Vec<u8>>>,\n\n replacement: Option<Rc<Vec<u8>>>,\n\n}\n\n\n\nimpl<H> Service<H> for ModifyService\n\nwhere\n", "file_path": "sample-adapters/modifying/src/lib.rs", "rank": 71, "score": 9.649775196677192 }, { "content": " /// shared_ptr that is given to `use_adapted`?\n\n ///\n\n /// XXX: Signature will change to &self -> Option<&Message>\n\n fn adapted(&mut self) -> &mut H::MessageRef;\n\n\n\n /// Use the virgin message for response/request.\n\n ///\n\n /// This may be called even if the adapter wishes to examine message\n\n /// body, but to do so `virgin_body_make` must have been called before hand.\n\n ///\n\n /// Host will not call `adapted_body` methods on `adapter::Transaction`.\n\n fn use_virgin(&mut self);\n\n\n\n /// Use the message passed for response/request.\n\n ///\n\n /// By calling this, the adapter indicates that the host should call\n\n /// the `adapted_body` methods on the `adapter::Transaction` in\n\n /// order to receive a message body.\n\n fn use_adapted<M: Message<H> + 'static>(&mut self, msg: M);\n\n\n", "file_path": "ecap/src/host/transaction.rs", "rank": 72, "score": 9.5037259809808 }, { "content": " }\n\n fn get(&self, field: &Name) -> Option<Area> {\n\n Self::get(self, field)\n\n }\n\n fn insert(&mut self, field: Name, value: Area) {\n\n 
Self::insert(self, field, value)\n\n }\n\n fn remove_any(&mut self, field: &Name) {\n\n Self::remove_any(self, field)\n\n }\n\n fn visit_each<V: NamedValueVisitor>(&self, visitor: &mut V) {\n\n Self::visit_each(self, visitor)\n\n }\n\n fn image(&self) -> Area {\n\n Self::image(self)\n\n }\n\n fn parse(&mut self, buf: &Area) -> Result<(), ()> {\n\n Self::parse(self, buf)\n\n }\n\n}\n\n\n\npub use ecap::common::header::{FirstLine, RequestLine, StatusLine};\n", "file_path": "erased-ecap/src/common/header.rs", "rank": 73, "score": 9.329681853000714 }, { "content": "use ecap::common::log::{self, LogVerbosity};\n\nuse ffi;\n\nuse host::CppHost;\n\nuse libc::c_char;\n\nuse std::fmt;\n\nuse std::ptr::NonNull;\n\n\n\nuse call_ffi_maybe_panic;\n\n\n\nimpl log::DebugStream for DebugStream {}\n\n\n\npub struct DebugStream {\n\n stream: *mut ffi::Ostream,\n\n host: *const ffi::Host,\n\n}\n\n\n\nimpl DebugStream {\n\n // XXX: This should technically take &'a CppHost and bind that lifetime to itself,\n\n // but we need GATs for that.\n\n pub fn from_host(host: &CppHost, verbosity: LogVerbosity) -> Option<Self> {\n", "file_path": "ecap-cpp/src/common/log.rs", "rank": 74, "score": 9.09147345470008 }, { "content": " panic!(\"unsupported configuration parameter: {:?}\", key);\n\n }\n\n }\n\n }\n\n}\n\n\n\npub extern \"C\" fn on_load() {\n\n ecap_common_link::register_erased_service(ModifyService {\n\n victim: None,\n\n replacement: None,\n\n });\n\n}\n\n\n\n#[link_section = \".ctors\"]\n\n#[used]\n\npub static _ON_LOAD_PTR: extern \"C\" fn() = on_load;\n", "file_path": "sample-adapters/modifying/src/lib.rs", "rank": 75, "score": 9.00094514504388 }, { "content": "use ffi;\n\nuse libc::{c_char, c_void, timeval};\n\nuse std::ffi::CStr;\n\nuse std::panic;\n\nuse std::time::Duration;\n\nuse std::{fmt::Write, mem};\n\n\n\nuse common::log::Ostream;\n\nuse common::options::CppOptions;\n\nuse ffi_unwind;\n\n\n\nuse erased_ecap::adapter::Service as ErasedService;\n\nuse erased_ecap::host::Host;\n\n\n\npub type ServicePtr = *mut *mut c_void;\n\n\n\nunsafe fn to_service<'a>(service: &'a ServicePtr) -> &'a dyn ErasedService<dyn Host> {\n\n assert!(!service.is_null());\n\n let service: *mut *mut dyn ErasedService<dyn Host> = mem::transmute(*service);\n\n let service = *service;\n", "file_path": "ecap-cpp/src/adapter/service.rs", "rank": 76, "score": 8.968130902433156 }, { "content": "mod service;\n\npub use self::service::ErasedService;\n\npub use self::service::Service;\n\n\n\nmod transaction;\n\npub use self::transaction::Transaction;\n", "file_path": "erased-ecap/src/adapter/mod.rs", "rank": 77, "score": 8.786669879555753 }, { "content": "use ffi;\n\nuse libc::{c_char, c_void};\n\nuse std::ops;\n\n\n\nuse call_ffi_maybe_panic;\n\nuse common::body::CppBody;\n\nuse common::{options, CppArea, CppName, CppVersion};\n\nuse ecap::common::header::{FirstLine, Header};\n\nuse ecap::common::{Area, Body, Message as ConcreteMessage, Name, NamedValueVisitor, Version};\n\nuse host::CppHost;\n\n\n\nuse erased_ecap::common::header::FirstLine as ErasedFirstLine;\n\nuse erased_ecap::common::header::Header as ErasedHeader;\n\n\n\nuse erased_ecap::host::Host as ErasedHost;\n\n\n\nforeign_ref!(pub struct CppMessage(ffi::Message));\n\n\n\nforeign_ref!(pub struct CppHeader(ffi::Header));\n\n\n", "file_path": "ecap-cpp/src/common/message.rs", "rank": 78, "score": 8.65264405221818 }, { "content": "pub mod adapter;\n\npub mod common;\n\npub mod host;\n\n\n\nuse adapter::Service;\n\nuse host::Host;\n\n\n", "file_path": "ecap/src/lib.rs", "rank": 
79, "score": 8.535278926383707 }, { "content": "use call_ffi_maybe_panic;\n\nuse ffi;\n\n\n\nuse ecap::common::Body;\n\n\n\nforeign_ref!(pub struct CppBody(ffi::Body));\n\n\n\nimpl Body for CppBody {\n\n fn size(&self) -> Option<u64> {\n\n unsafe {\n\n let size =\n\n call_ffi_maybe_panic(|raw| unsafe { ffi::rust_shim_body_size(self.as_ptr(), raw) });\n\n if size.known {\n\n Some(size.size)\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n", "file_path": "ecap-cpp/src/common/body.rs", "rank": 80, "score": 8.474006454426295 }, { "content": "#![feature(unwind_attributes, used)]\n\n#![allow(unused)]\n\nextern crate crossbeam;\n\nextern crate ecap;\n\nextern crate ecap_common_link;\n\nextern crate ecap_sys as ffi;\n\nextern crate erased_ecap;\n\nextern crate libc;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\nmacro_rules! foreign_ref {\n\n (pub struct $name:ident($cname:path)) => {\n\n pub struct $name(::std::marker::PhantomData<*mut ()>);\n\n\n\n impl $name {\n\n #[inline]\n\n pub unsafe fn from_ptr<'a>(p: *const $cname) -> &'a Self {\n\n assert!(!p.is_null());\n\n &*(p as *mut _)\n", "file_path": "ecap-cpp/src/lib.rs", "rank": 81, "score": 8.441410585935351 }, { "content": "mod service;\n\npub use self::service::Service;\n\n\n\nmod transaction;\n\npub use self::transaction::Transaction;\n", "file_path": "ecap/src/adapter/mod.rs", "rank": 82, "score": 8.39356744638092 }, { "content": "mod transaction;\n\n\n\npub use self::transaction::Transaction;\n\n\n\nmod host;\n\npub use self::host::Host;\n", "file_path": "ecap/src/host/mod.rs", "rank": 83, "score": 8.39356744638092 }, { "content": "mod transaction;\n\npub use self::transaction::Transaction;\n\n\n\nmod host;\n\npub use self::host::Host;\n", "file_path": "erased-ecap/src/host/mod.rs", "rank": 84, "score": 8.39356744638092 }, { "content": " fn body(&self) -> Option<&(dyn Body + 'static)> {\n\n match <Self as ConcreteMessage<CppHost>>::body(self) {\n\n Some(body) => Some(body),\n\n None => None,\n\n }\n\n }\n\n fn add_trailer(&mut self) {\n\n <Self as ConcreteMessage<CppHost>>::add_trailer(self)\n\n }\n\n fn trailer_mut(&mut self) -> &mut (dyn ErasedHeader + 'static) {\n\n <Self as ConcreteMessage<CppHost>>::trailer_mut(self)\n\n }\n\n fn trailer(&self) -> &(dyn ErasedHeader + 'static) {\n\n <Self as ConcreteMessage<CppHost>>::trailer(self)\n\n }\n\n}\n\n\n\npub struct SharedPtrMessage(pub ffi::SharedPtrMessage);\n\n\n\nimpl SharedPtrMessage {\n", "file_path": "ecap-cpp/src/common/message.rs", "rank": 85, "score": 8.349182581592096 }, { "content": " self as *mut Self as *mut $cname\n\n }\n\n }\n\n };\n\n}\n\n\n\npub mod adapter;\n\npub mod common;\n\npub mod host;\n\n\n\nuse ecap::adapter::Service;\n\nuse ecap::host::Host;\n\nuse libc::{c_int, c_void};\n\nuse std::any::Any;\n\nuse std::ptr;\n\n\n\nuse ecap::Translator;\n\nuse erased_ecap::adapter::Service as ErasedService;\n\nuse erased_ecap::host::Host as ErasedHost;\n\n\n", "file_path": "ecap-cpp/src/lib.rs", "rank": 86, "score": 8.332785232963488 }, { "content": "}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct DetailsStack {\n\n value: [usize; 2],\n\n increment: fn(*const ()),\n\n decrement: fn(*const ()),\n\n as_bytes: fn(*const ()) -> &'static [u8],\n\n}\n\n\n\nimpl DetailsStack {\n\n /// Creates a stack-allocated details trait object and stores the\n\n /// passed T into it.\n\n ///\n\n /// The passed T must fit into `[usize; 2]` and have the same\n\n /// alignment. 
This restriction may be relaxed in the future.\n\n pub fn from<T: Details + Copy + 'static>(v: T) -> DetailsStack {\n\n let mut data = [0; 2];\n\n assert!(mem::size_of::<T>() <= mem::size_of::<[usize; 2]>());\n\n assert_eq!(mem::align_of::<T>(), mem::align_of::<[usize; 2]>());\n", "file_path": "ecap/src/common/area.rs", "rank": 87, "score": 8.229218355165829 }, { "content": "#![feature(unwind_attributes, extern_types)]\n\n\n\nextern crate libc;\n\n\n\nuse std::marker::PhantomData;\n\nuse std::{mem, ptr};\n\n\n\nuse libc::{c_char, c_int, c_void, size_t};\n\n\n\n#[repr(C)]\n\npub struct Panic {\n\n pub is_exception: bool,\n\n pub message: CVec,\n\n pub location: PanicLocation,\n\n}\n\n\n\n#[repr(C)]\n\npub struct PanicLocation {\n\n pub file: CVec,\n\n pub line: c_int,\n", "file_path": "ecap-sys/src/lib.rs", "rank": 89, "score": 8.137118356255835 }, { "content": "#![feature(crate_in_paths, core_intrinsics)]\n\n\n\nextern crate ecap;\n\n#[macro_use]\n\nextern crate mopa;\n\n#[macro_use]\n\nextern crate parse_generics_shim;\n\n\n\npub mod adapter;\n\npub mod common;\n\npub mod host;\n\n\n\nuse adapter::ErasedService;\n\n\n", "file_path": "erased-ecap/src/lib.rs", "rank": 90, "score": 8.009013670879341 }, { "content": "\n\n pub fn options_option(options: *const Options, name: *const Name, out: *mut Area) -> bool;\n\n pub fn options_visit(options: *const Options, cb: VisitorCallback, extra: *mut c_void) -> bool;\n\n\n\n pub fn rust_host(out: *mut *const Host) -> bool;\n\n pub fn rust_shim_host_uri(host: *const Host, out: *mut CVec) -> bool;\n\n pub fn rust_shim_host_describe(host: *const Host, out: *mut CVec) -> bool;\n\n pub fn rust_shim_host_open_debug(\n\n host: *const Host,\n\n verbosity: LogVerbosity,\n\n out: *mut *mut Ostream,\n\n ) -> bool;\n\n pub fn rust_shim_host_close_debug(host: *const Host, stream: *mut Ostream) -> bool;\n\n pub fn rust_shim_host_new_request(host: *const Host, out: *mut SharedPtrMessage) -> bool;\n\n pub fn rust_shim_host_new_response(host: *const Host, out: *mut SharedPtrMessage) -> bool;\n\n pub fn rust_shim_ostream_write(stream: *mut Ostream, buf: *const c_char, len: size_t) -> bool;\n\n\n\n pub fn rust_shim_register_service(service: *mut *mut c_void, out: *mut bool) -> bool;\n\n}\n", "file_path": "ecap-sys/src/lib.rs", "rank": 91, "score": 7.929759510850744 }, { "content": "use std::fmt;\n\n\n\n/// Importance of the logged message to the host application admin\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum ImportanceLevel {\n\n /// Debugging information. Not normally logged.\n\n Debug = 0,\n\n\n\n /// General information. 
Seen and logged by default.\n\n Normal = 1,\n\n\n\n /// Information logged and seen in \"quiet\" mode.\n\n Critical = 2,\n\n}\n\n\n\n/// Quantity of messages expected under normal conditions\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum FrequencyLevel {\n\n /// Many times in transaction lifetime\n\n Operation = 0,\n", "file_path": "ecap/src/common/log.rs", "rank": 92, "score": 7.865572936021575 }, { "content": "use ecap::common::area::{Area, Details, DetailsConstructor, DetailsStack};\n\nuse ffi;\n\nuse libc::c_char;\n\nuse std::mem;\n\nuse std::ptr::{self, NonNull};\n\nuse std::rc::Rc;\n\nuse std::slice;\n\n\n\nuse call_ffi_maybe_panic;\n\n\n\npub struct CppArea(ffi::Area);\n\n\n\nimpl CppArea {\n\n pub fn from_raw(raw: ffi::Area) -> CppArea {\n\n CppArea(raw)\n\n }\n\n\n\n pub fn from_bytes(v: &[u8]) -> CppArea {\n\n unsafe {\n\n let raw = call_ffi_maybe_panic(|out| unsafe {\n", "file_path": "ecap-cpp/src/common/area.rs", "rank": 93, "score": 7.829065635168061 }, { "content": "impl<U, MC> Message for U\n\nwhere\n\n U: ecap::common::Message<dyn ErasedHost, MessageClone = MC> + 'static,\n\n MC: ecap::common::Message<dyn ErasedHost> + 'static,\n\n{\n\n fn clone(&self) -> Box<dyn Message> {\n\n Box::new(self.clone())\n\n }\n\n\n\n fn first_line_mut<'a>(&'a mut self) -> &'a mut (dyn FirstLine + 'static) {\n\n self.first_line_mut()\n\n }\n\n\n\n fn first_line<'a>(&'a self) -> &'a (dyn FirstLine + 'static) {\n\n self.first_line()\n\n }\n\n\n\n fn header_mut<'a>(&'a mut self) -> &'a mut (dyn Header + 'static) {\n\n self.header_mut()\n\n }\n", "file_path": "erased-ecap/src/common/message.rs", "rank": 94, "score": 7.806236466010361 }, { "content": "mod host;\n\npub mod transaction;\n\npub use self::host::CppHost;\n", "file_path": "ecap-cpp/src/host/mod.rs", "rank": 95, "score": 7.804151758228816 }, { "content": " fn body_mut(&mut self) -> Option<&mut (dyn Body + 'static)> {\n\n Self::body_mut(self)\n\n }\n\n fn body(&self) -> Option<&(dyn Body + 'static)> {\n\n <Self as Message>::body(self)\n\n }\n\n\n\n fn add_trailer(&mut self) {\n\n Self::add_trailer(self)\n\n }\n\n fn trailer_mut(&mut self) -> &mut (dyn Header + 'static) {\n\n Self::trailer_mut(self)\n\n }\n\n fn trailer(&self) -> &(dyn Header + 'static) {\n\n Self::trailer(self)\n\n }\n\n}\n", "file_path": "erased-ecap/src/common/message.rs", "rank": 96, "score": 7.6598684677816955 }, { "content": " }\n\n }\n\n\n\n fn add_trailer(&mut self) {\n\n self.add_trailer()\n\n }\n\n\n\n fn trailer_mut<'a>(&'a mut self) -> &'a mut (dyn Header + 'static) {\n\n self.trailer_mut()\n\n }\n\n\n\n fn trailer<'a>(&'a self) -> &'a (dyn Header + 'static) {\n\n self.trailer()\n\n }\n\n}\n\n\n\nimpl ecap::common::Message<dyn ErasedHost> for dyn Message {\n\n type MessageClone = Box<dyn Message>;\n\n\n\n fn clone(&self) -> Self::MessageClone {\n", "file_path": "erased-ecap/src/common/message.rs", "rank": 97, "score": 7.628338437500565 }, { "content": "\n\n fn header<'a>(&'a self) -> &'a (dyn Header + 'static) {\n\n self.header()\n\n }\n\n\n\n fn add_body<'a>(&'a mut self) {\n\n self.add_body()\n\n }\n\n\n\n fn body_mut<'a>(&'a mut self) -> Option<&'a mut (dyn Body + 'static)> {\n\n match self.body_mut() {\n\n Some(body) => Some(body),\n\n None => None,\n\n }\n\n }\n\n\n\n fn body<'a>(&'a self) -> Option<&'a (dyn Body + 'static)> {\n\n match self.body() {\n\n Some(body) => Some(body),\n\n None => None,\n", "file_path": "erased-ecap/src/common/message.rs", "rank": 98, "score": 7.582717831947255 }, { "content": "use 
ecap::common::Version;\n\nuse ffi;\n\nuse libc::c_int;\n\n\n\npub struct CppVersion;\n\n\n\nimpl CppVersion {\n\n pub fn from_raw(v: ffi::Version) -> Version {\n\n let major = if v.major == -1 {\n\n None\n\n } else {\n\n Some(v.major as u32)\n\n };\n\n let minor = if v.minor == -1 {\n\n None\n\n } else {\n\n Some(v.minor as u32)\n\n };\n\n let micro = if v.micro == -1 {\n\n None\n", "file_path": "ecap-cpp/src/common/version.rs", "rank": 99, "score": 7.5559622332878025 } ]
Rust
choseong-pullup/src/lib.rs
y15un/korean-stuff
e31c92ac364ca1cff88e3f5561fbbbc4cb9b7b49
use std::convert::TryFrom; use unicode_korean_multitool::{Choseong, Jongseong, Syllable}; const RULESET: [(Option<Jongseong>, Choseong, Jongseong, bool); 28] = [ (None, Choseong::Kiyeok, Jongseong::Kiyeok, false), (None, Choseong::SsangKiyeok, Jongseong::SsangKiyeok, false), (None, Choseong::Nieun, Jongseong::Nieun, false), (None, Choseong::Tikeut, Jongseong::Tikeut, false), (None, Choseong::Rieul, Jongseong::Rieul, false), (None, Choseong::Mieum, Jongseong::Mieum, false), (None, Choseong::Pieup, Jongseong::Pieup, false), (None, Choseong::Sios, Jongseong::Sios, false), (None, Choseong::SsangSios, Jongseong::SsangSios, false), (None, Choseong::Cieuc, Jongseong::Cieuc, false), (None, Choseong::Chieuch, Jongseong::Chieuch, false), (None, Choseong::Khieukh, Jongseong::Khieukh, false), (None, Choseong::Thieuth, Jongseong::Thieuth, false), (None, Choseong::Phieuph, Jongseong::Phieuph, false), (None, Choseong::Hieuh, Jongseong::Hieuh, true), ( Some(Jongseong::Kiyeok), Choseong::Kiyeok, Jongseong::SsangKiyeok, true, ), ( Some(Jongseong::Kiyeok), Choseong::Sios, Jongseong::KiyeokSios, false, ), ( Some(Jongseong::Nieun), Choseong::Cieuc, Jongseong::NieunCieuc, false, ), ( Some(Jongseong::Nieun), Choseong::Hieuh, Jongseong::NieunHieuh, true, ), ( Some(Jongseong::Rieul), Choseong::Kiyeok, Jongseong::RieulKiyeok, false, ), ( Some(Jongseong::Rieul), Choseong::Mieum, Jongseong::RieulMieum, false, ), ( Some(Jongseong::Rieul), Choseong::Pieup, Jongseong::RieulPieup, false, ), ( Some(Jongseong::Rieul), Choseong::Sios, Jongseong::RieulSios, false, ), ( Some(Jongseong::Rieul), Choseong::Thieuth, Jongseong::RieulThieuth, false, ), ( Some(Jongseong::Rieul), Choseong::Phieuph, Jongseong::RieulPhieuph, false, ), ( Some(Jongseong::Rieul), Choseong::Hieuh, Jongseong::RieulHieuh, true, ), ( Some(Jongseong::Pieup), Choseong::Sios, Jongseong::PieupSios, false, ), ( Some(Jongseong::Sios), Choseong::Sios, Jongseong::SsangSios, true, ), ]; pub fn pullup_choseong(source: &str) -> String { pullup_choseong_config(source, false) } pub fn pullup_choseong_config(source: &str, extended_flag: bool) -> String { let mut destination = String::with_capacity(source.len()); let mut characters = source.chars().peekable(); let mut choseong_pulled = false; while let Some(current) = characters.next() { if !Syllable::is_one_of_us(current) { destination.push(current); continue; } let mut current_syllable = Syllable::try_from(current).unwrap(); if choseong_pulled { current_syllable.choseong = Choseong::Ieung; choseong_pulled = false; } if let Some(&next) = characters.peek() { if !Syllable::is_one_of_us(next) { destination.push(char::from(current_syllable)); continue; } let next_syllable = Syllable::try_from(next).unwrap(); for &( current_jongseong_match, next_choseong_match, current_jongseong_to_be, is_extended, ) in RULESET.iter() { if current_jongseong_match == current_syllable.jongseong && next_choseong_match == next_syllable.choseong && (is_extended <= extended_flag) { current_syllable.jongseong = Some(current_jongseong_to_be); choseong_pulled = true; break; } } } destination.push(char::from(current_syllable)); } destination } #[cfg(test)] mod tests { #[test] fn test_pullup_choseong() { assert_eq!( super::pullup_choseong("초성 올려 쓰기"), "촛엉 올려 쓱이".to_owned() ); assert_eq!( super::pullup_choseong("이불 밖은 위험해!"), "입울 밖은 위험해!".to_owned() ); assert_eq!( super::pullup_choseong("버터치킨 최고야!"), "벝엋잌인 쵝오야!".to_owned() ); assert_eq!( super::pullup_choseong("이 얼마나 무시무시한 생각이니"), "이 얾안아 뭇임웃이한 생각인이".to_owned() ); assert_eq!( 
super::pullup_choseong_config("이불 밖은 위험해!", true), "입울 밖은 윟엄해!".to_owned() ); assert_eq!( super::pullup_choseong_config("이 얼마나 무시무시한 생각이니", true), "이 얾안아 뭇임웃잏안 생각인이".to_owned() ); } }
use std::convert::TryFrom; use unicode_korean_multitool::{Choseong, Jongseong, Syllable}; const RULESET: [(Option<Jongseong>, Choseong, Jongseong, bool); 28] = [ (None, Choseong::Kiyeok, Jongseong::Kiyeok, false), (None, Choseong::SsangKiyeok, Jongseong::SsangKiyeok, false), (None, Choseong::Nieun, Jongseong::Nieun, false), (None, Choseong::Tikeut, Jongseong::Tikeut, false), (None, Choseong::Rieul, Jongseong::Rieul, false), (None, Choseong::Mieum, Jongseong::Mieum, false), (None, Choseong::Pieup, Jongseong::Pieup, false), (None, Choseong::Sios, Jongseong::Sios, false), (None, Choseong::SsangSios, Jongseong::SsangSios, false), (None, Choseong::Cieuc, Jongseong::Cieuc, false), (None, Choseong::Chieuch, Jongseong::Chieuch, false), (None, Choseong::Khieukh, Jongseong::Khieukh, false), (None, Choseong::Thieuth, Jongseong::Thieuth, false), (None, Choseong::Phieuph, Jongseong::Phieuph, false), (None, Choseong::Hieuh, Jongseong::Hieuh, true), ( Some(Jongseong::Kiyeok), Choseong::Kiyeok, Jongseong::SsangKiyeok, true, ), ( Some(Jongseong::Kiyeok), Choseong::Sios, Jongseong::KiyeokSios, false, ), ( Some(Jongseong::Nieun), Choseong::Cieuc, Jongseong::NieunCieuc, false, ), ( Some(Jongseong::Nieun), Choseong::Hieuh, Jongseong::NieunHieuh, true, ), ( Some(Jongseong::Rieul), Choseong::Kiyeok, Jongseong::RieulKiyeok, false, ), ( Some(Jongseong::Rieul), Choseong::Mieum, Jongseong::RieulMieum, false, ), ( Some(Jongseong::Rieul), Choseong::Pieup, Jongseong::RieulPieup, false, ), ( Some(Jongseong::Rieul), Choseong::Sios, Jongseong::RieulSios, false, ), ( Some(Jongseong::Rieul), Choseong::Thieuth, Jongseong::RieulThieuth, false, ), ( Some(Jongseong::Rieul), Choseong::Phieuph, Jongseong::RieulPhieuph, false, ), ( Some(Jongseong::Rieul), Choseong::Hieuh, Jongseong::RieulHieuh, true, ), ( Some(Jongseong::Pieup), Choseong::Sios, Jongseong::PieupSios, false, ), ( Some(Jongseong::Sios), Choseong::Sios, Jongseong::SsangSios, true, ), ]; pub fn pullup_choseong(source: &str) -> String { pullup_choseong_config(source, false) }
#[cfg(test)] mod tests { #[test] fn test_pullup_choseong() { assert_eq!( super::pullup_choseong("초성 올려 쓰기"), "촛엉 올려 쓱이".to_owned() ); assert_eq!( super::pullup_choseong("이불 밖은 위험해!"), "입울 밖은 위험해!".to_owned() ); assert_eq!( super::pullup_choseong("버터치킨 최고야!"), "벝엋잌인 쵝오야!".to_owned() ); assert_eq!( super::pullup_choseong("이 얼마나 무시무시한 생각이니"), "이 얾안아 뭇임웃이한 생각인이".to_owned() ); assert_eq!( super::pullup_choseong_config("이불 밖은 위험해!", true), "입울 밖은 윟엄해!".to_owned() ); assert_eq!( super::pullup_choseong_config("이 얼마나 무시무시한 생각이니", true), "이 얾안아 뭇임웃잏안 생각인이".to_owned() ); } }
pub fn pullup_choseong_config(source: &str, extended_flag: bool) -> String { let mut destination = String::with_capacity(source.len()); let mut characters = source.chars().peekable(); let mut choseong_pulled = false; while let Some(current) = characters.next() { if !Syllable::is_one_of_us(current) { destination.push(current); continue; } let mut current_syllable = Syllable::try_from(current).unwrap(); if choseong_pulled { current_syllable.choseong = Choseong::Ieung; choseong_pulled = false; } if let Some(&next) = characters.peek() { if !Syllable::is_one_of_us(next) { destination.push(char::from(current_syllable)); continue; } let next_syllable = Syllable::try_from(next).unwrap(); for &( current_jongseong_match, next_choseong_match, current_jongseong_to_be, is_extended, ) in RULESET.iter() { if current_jongseong_match == current_syllable.jongseong && next_choseong_match == next_syllable.choseong && (is_extended <= extended_flag) { current_syllable.jongseong = Some(current_jongseong_to_be); choseong_pulled = true; break; } } } destination.push(char::from(current_syllable)); } destination }
function_block-full_function
[ { "content": "pub fn pushdown_jongseong_config(source: &str, extended_flag: bool) -> String {\n\n let mut destination = String::with_capacity(source.len());\n\n\n\n let mut buffer: [u8; 4] = [0, 0, 0, 0];\n\n let mut characters = source.chars().peekable();\n\n let mut new_choseong = None;\n\n\n\n while let Some(current) = characters.next() {\n\n if !Syllable::is_one_of_us(current) {\n\n destination.push_str(current.encode_utf8(&mut buffer));\n\n\n\n continue;\n\n }\n\n let mut current_syllable = Syllable::try_from(current).unwrap();\n\n if new_choseong.is_some() {\n\n current_syllable.choseong = new_choseong.take().unwrap();\n\n }\n\n\n\n if let Some(&next) = characters.peek() {\n\n if !Syllable::is_one_of_us(next) {\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 1, "score": 118416.84179378176 }, { "content": "pub fn pushdown_jongseong(source: &str) -> String {\n\n pushdown_jongseong_config(source, false)\n\n}\n\n\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 3, "score": 109758.67674634994 }, { "content": "pub fn flip_chojongseong_horizontally(source: &str) -> String {\n\n let mut destination = String::with_capacity(source.len());\n\n\n\n let mut chojongseong = Vec::new();\n\n let mut jungseong = Vec::new();\n\n for character in source.chars() {\n\n if Syllable::is_one_of_us(character) {\n\n let syllable = Syllable::try_from(character).unwrap();\n\n chojongseong.push((syllable.choseong, syllable.jongseong));\n\n jungseong.push(syllable.jungseong);\n\n }\n\n }\n\n\n\n let mut flipped = chojongseong.into_iter().rev().zip(jungseong.into_iter());\n\n for character in source.chars() {\n\n if Syllable::is_one_of_us(character) {\n\n let ((choseong, jongseong), jungseong) = flipped.next().unwrap();\n\n destination.push(char::from(Syllable {\n\n choseong,\n\n jungseong,\n", "file_path": "horizontal-chojongseong-flip/src/lib.rs", "rank": 4, "score": 82504.64522185814 }, { "content": "use std::convert::TryFrom;\n\nuse unicode_korean_multitool::{Choseong, Jongseong, Jungseong, Syllable};\n\n\n\n// how to interpret (jongseong_a, jongseong_b, choseong_c, extended)\n\n// => when the current syllable has `jongseong_a` and the next syllable has `Choseong::Ieung`,\n\n// replace the current syllable's jongseong with `jongseong_b`\n\n// and replace the next syllable's choseong with `choseong_c`.\n\n// => when `extended` is true, it's part of the extended ruleset, which violates\n\n// phonetic equivalence.\n\n//\n\n// additional extended ruleset:\n\n// if the current syllable has either `Jongseong::Tikeut` or `Jongseong::Thikeuth`,\n\n// and the next syllable has any of the following vowels as jungseong:\n\n// * Jungseong::Ya,\n\n// * Jungseong::Yae,\n\n// * Jungseong::Yeo,\n\n// * Jungseong::Ye,\n\n// * Jungseong::Yo,\n\n// * Jungseong::Yu,\n\n// * Jungseong::I,\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 13, "score": 20664.097815038065 }, { "content": " ),\n\n (\n\n Jongseong::RieulHieuh,\n\n Some(Jongseong::Rieul),\n\n Choseong::Hieuh,\n\n true,\n\n ),\n\n (Jongseong::Mieum, None, Choseong::Mieum, false),\n\n (Jongseong::Pieup, None, Choseong::Pieup, false),\n\n (\n\n Jongseong::PieupSios,\n\n Some(Jongseong::Pieup),\n\n Choseong::Sios,\n\n false,\n\n ),\n\n (Jongseong::Sios, None, Choseong::Sios, false),\n\n (Jongseong::SsangSios, None, Choseong::SsangSios, false),\n\n (Jongseong::Cieuc, None, Choseong::Cieuc, false),\n\n (Jongseong::Chieuch, None, Choseong::Chieuch, false),\n\n (Jongseong::Khieukh, None, Choseong::Khieukh, false),\n\n (Jongseong::Thieuth, None, 
Choseong::Thieuth, false),\n\n (Jongseong::Phieuph, None, Choseong::Phieuph, false),\n\n (Jongseong::Hieuh, None, Choseong::Hieuh, true),\n\n];\n\n\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 14, "score": 20662.760116665464 }, { "content": "// then only apply jongseong pushdown if and only if extended rulset is active.\n\nconst RULESET: [(Jongseong, Option<Jongseong>, Choseong, bool); 26] = [\n\n (Jongseong::Kiyeok, None, Choseong::Kiyeok, false),\n\n (Jongseong::SsangKiyeok, None, Choseong::SsangKiyeok, false),\n\n (\n\n Jongseong::KiyeokSios,\n\n Some(Jongseong::Kiyeok),\n\n Choseong::Sios,\n\n false,\n\n ),\n\n (Jongseong::Nieun, None, Choseong::Nieun, false),\n\n (\n\n Jongseong::NieunCieuc,\n\n Some(Jongseong::Nieun),\n\n Choseong::Cieuc,\n\n false,\n\n ),\n\n (\n\n Jongseong::NieunHieuh,\n\n Some(Jongseong::Nieun),\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 15, "score": 20662.744637449337 }, { "content": " {\n\n for &(\n\n current_jongseong_match,\n\n current_jongseong_to_be,\n\n next_choseong_to_be,\n\n is_extended,\n\n ) in RULESET.iter()\n\n {\n\n if Some(current_jongseong_match) == current_syllable.jongseong\n\n && Choseong::Ieung == next_syllable.choseong\n\n && (is_extended <= extended_flag)\n\n {\n\n current_syllable.jongseong = current_jongseong_to_be;\n\n new_choseong = Some(next_choseong_to_be);\n\n\n\n break;\n\n }\n\n }\n\n }\n\n }\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 16, "score": 20662.078914637026 }, { "content": " Choseong::Hieuh,\n\n true,\n\n ),\n\n (Jongseong::Tikeut, None, Choseong::Tikeut, false),\n\n (Jongseong::Rieul, None, Choseong::Rieul, false),\n\n (\n\n Jongseong::RieulKiyeok,\n\n Some(Jongseong::Rieul),\n\n Choseong::Kiyeok,\n\n false,\n\n ),\n\n (\n\n Jongseong::RieulMieum,\n\n Some(Jongseong::Rieul),\n\n Choseong::Mieum,\n\n false,\n\n ),\n\n (\n\n Jongseong::RieulPieup,\n\n Some(Jongseong::Rieul),\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 17, "score": 20661.85269152699 }, { "content": " destination.push_str(char::from(current_syllable).encode_utf8(&mut buffer));\n\n\n\n continue;\n\n }\n\n let next_syllable = Syllable::try_from(next).unwrap();\n\n\n\n // additional extended ruleset check\n\n if !([Some(Jongseong::Tikeut), Some(Jongseong::Thieuth)]\n\n .contains(&current_syllable.jongseong)\n\n && [\n\n Jungseong::Ya,\n\n Jungseong::Yae,\n\n Jungseong::Yeo,\n\n Jungseong::Ye,\n\n Jungseong::Yo,\n\n Jungseong::Yu,\n\n Jungseong::I,\n\n ]\n\n .contains(&next_syllable.jungseong))\n\n || extended_flag\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 18, "score": 20660.176670616645 }, { "content": " Choseong::Pieup,\n\n false,\n\n ),\n\n (\n\n Jongseong::RieulSios,\n\n Some(Jongseong::Rieul),\n\n Choseong::Sios,\n\n false,\n\n ),\n\n (\n\n Jongseong::RieulThieuth,\n\n Some(Jongseong::Rieul),\n\n Choseong::Thieuth,\n\n false,\n\n ),\n\n (\n\n Jongseong::RieulPhieuph,\n\n Some(Jongseong::Rieul),\n\n Choseong::Phieuph,\n\n false,\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 19, "score": 20658.38090395332 }, { "content": " assert_eq!(\n\n super::pushdown_jongseong_config(\"입울 밖은 윟엄해!\", true),\n\n \"이불 바끈 위험해!\".to_owned()\n\n );\n\n assert_eq!(\n\n super::pushdown_jongseong_config(\"이 얾안아 뭇임웃잏안 생각인이\", true),\n\n \"이 얼마나 무시무시한 생가기니\".to_owned()\n\n );\n\n assert_eq!(\n\n super::pushdown_jongseong_config(\"해돋이 돋아 다같이 같아\", true),\n\n \"해도디 도다 다가티 가타\".to_owned()\n\n );\n\n }\n\n}\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 20, "score": 20658.336749059326 }, { 
"content": "\n\n destination.push_str(char::from(current_syllable).encode_utf8(&mut buffer));\n\n }\n\n\n\n destination\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_pushdown_jongseong() {\n\n assert_eq!(\n\n super::pushdown_jongseong(\"종성 내려 쓰기\"),\n\n \"종성 내려 쓰기\".to_owned()\n\n );\n\n assert_eq!(\n\n super::pushdown_jongseong(\"입울 밖은 위험해!\"),\n\n \"이불 바끈 위험해!\".to_owned()\n\n );\n\n assert_eq!(\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 21, "score": 20658.304709054362 }, { "content": " super::pushdown_jongseong(\"입울 밖은 윟엄해!\"),\n\n \"이불 바끈 윟엄해!\".to_owned()\n\n );\n\n assert_eq!(\n\n super::pushdown_jongseong(\"벝엋잌인 쵝오야!\"),\n\n \"버터치킨 최고야!\".to_owned()\n\n );\n\n assert_eq!(\n\n super::pushdown_jongseong(\"이 얾안아 뭇임웃이한 생각인이\"),\n\n \"이 얼마나 무시무시한 생가기니\".to_owned()\n\n );\n\n assert_eq!(\n\n super::pushdown_jongseong(\"이 얾안아 뭇임웃잏안 생각인이\"),\n\n \"이 얼마나 무시무싷안 생가기니\".to_owned()\n\n );\n\n assert_eq!(\n\n super::pushdown_jongseong(\"해돋이 돋아 다같이 같아\"),\n\n \"해돋이 도다 다같이 가타\".to_owned()\n\n );\n\n\n", "file_path": "jongseong-pushdown/src/lib.rs", "rank": 22, "score": 20655.84660150454 }, { "content": "#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct Syllable {\n\n pub choseong: Choseong,\n\n pub jungseong: Jungseong,\n\n pub jongseong: Option<Jongseong>,\n\n}\n\nimpl From<(Choseong, Jungseong)> for Syllable {\n\n fn from((choseong, jungseong): (Choseong, Jungseong)) -> Self {\n\n Self {\n\n choseong,\n\n jungseong,\n\n jongseong: None,\n\n }\n\n }\n\n}\n\nimpl From<(Choseong, Jungseong, Option<Jongseong>)> for Syllable {\n\n fn from((choseong, jungseong, jongseong): (Choseong, Jungseong, Option<Jongseong>)) -> Self {\n\n Self {\n\n choseong,\n\n jungseong,\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 23, "score": 10.316938505227903 }, { "content": " Syllable::try_from('고'),\n\n Ok(Syllable {\n\n choseong: Choseong::Kiyeok,\n\n jungseong: Jungseong::O,\n\n jongseong: None,\n\n })\n\n );\n\n assert_eq!(\n\n Syllable::try_from('양'),\n\n Ok(Syllable {\n\n choseong: Choseong::Ieung,\n\n jungseong: Jungseong::Ya,\n\n jongseong: Some(Jongseong::Ieung),\n\n })\n\n );\n\n assert_eq!(\n\n Syllable::try_from('이'),\n\n Ok(Syllable {\n\n choseong: Choseong::Ieung,\n\n jungseong: Jungseong::I,\n\n jongseong: None,\n\n })\n\n );\n\n }\n\n}\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 24, "score": 9.448541101026308 }, { "content": "/// Represents a Korean syllable.\n\n///\n\n/// Specifically, those residing in Precomposed Hangul Syllables range (U+AC00 '가' -- U+D7A3 '힣').\n\n///\n\n/// Most of the time, all you need to do is calling [`Syllable::try_from`] with [`char`]\n\n/// (that contains a valid Korean syllable) as its argument:\n\n/// ```\n\n/// use crate::{Choseong, Jungseong, Jongseong};\n\n/// use std::convert::TryFrom;\n\n///\n\n/// let syllable = Syllable::try_from('잌').unwrap();\n\n/// assert_eq!(syllable.choseong, Choseong::Ieung);\n\n/// assert_eq!(syllable.jungseong, Jungseong::I);\n\n/// assert_eq!(syllable.jongseong, Some(Jongseong::Khieukh));\n\n///\n\n/// let syllable = Syllable::try_from('뭐').unwrap();\n\n/// assert_eq!(syllable.choseong, Choseong::Mieum);\n\n/// assert_eq!(syllable.jungseong, Jungseong::Weo);\n\n/// assert_eq!(syllable.jongseong, None);\n\n/// ```\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 25, "score": 8.952408988209507 }, { "content": "\n\n let jongseong = unified_syllable % 28;\n\n unified_syllable -= jongseong;\n\n let jungseong = (unified_syllable / 28) % 
21;\n\n unified_syllable -= jungseong * 28;\n\n let choseong = unified_syllable / (21 * 28);\n\n\n\n Ok(Self {\n\n choseong: Choseong::try_from(choseong as u8).unwrap(),\n\n jungseong: Jungseong::try_from(jungseong as u8).unwrap(),\n\n jongseong: Jongseong::try_from(jongseong as u8).ok(),\n\n })\n\n }\n\n}\n\nimpl Syllable {\n\n /// Determines if a given [`char`] is one of the 11,172 valid modern Korean syllables.\n\n pub fn is_one_of_us(character: char) -> bool {\n\n // all precomposed korean syllables are within BMP, so in this context, it is safe to\n\n // assume:\n\n // Unicode Scalar Value == Unicode Code Point\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 26, "score": 8.580042293304587 }, { "content": " jongseong,\n\n }\n\n }\n\n}\n\nimpl From<Syllable> for (Choseong, Jungseong, Option<Jongseong>) {\n\n fn from(syllable: Syllable) -> Self {\n\n (syllable.choseong, syllable.jungseong, syllable.jongseong)\n\n }\n\n}\n\nimpl From<Syllable> for char {\n\n fn from(syllable: Syllable) -> Self {\n\n // all precomposed korean syllables are within BMP, so in this context, it is safe to\n\n // assume:\n\n // Unicode Scalar Value == Unicode Code Point\n\n // and thus, `char::from_u32()` never fails\n\n Self::from_u32(\n\n 0xAC00\n\n + (syllable.choseong as u32 * 21 * 28)\n\n + (syllable.jungseong as u32 * 28)\n\n + if let Some(jongseong) = syllable.jongseong {\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 27, "score": 6.8880016818325664 }, { "content": " assert_eq!(Jongseong::try_from('ㅎ'), Ok(Jongseong::Hieuh));\n\n assert_eq!(Jaeum::try_from('ㅎ'), Ok(Jaeum::Hieuh));\n\n }\n\n\n\n #[test]\n\n fn test_from_syllable_for_char() {\n\n assert_eq!(\n\n char::from(Syllable {\n\n choseong: Choseong::Ieung,\n\n jungseong: Jungseong::I,\n\n jongseong: Some(Jongseong::Rieul),\n\n }),\n\n '일'\n\n );\n\n assert_eq!(\n\n char::from(Syllable {\n\n choseong: Choseong::Sios,\n\n jungseong: Jungseong::Eo,\n\n jongseong: Some(Jongseong::Nieun),\n\n }),\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 28, "score": 6.673369089610524 }, { "content": "/// ```\n\n/// use crate::Jaeum;\n\n/// use std::convert::TryFrom;\n\n///\n\n/// let jaeum = Jaeum::try_from('ㄳ').unwrap();\n\n/// assert_eq!(jaeum, Jaeum::KiyeokSios);\n\n/// ```\n\n///\n\n/// Also, you can convert [`Choseong`] and [`Jongseong`] from and into `Jaeum`:\n\n/// ```\n\n/// use crate::{Choseong, Error, Jaeum, Jongseong};\n\n/// use std::convert::TryFrom;\n\n///\n\n/// let choseong = Choseong::Rieul; // ㄹ\n\n/// assert_eq!(Jaeum::from(choseong), Jaeum::Rieul);\n\n///\n\n/// let jongseong = Jongseong::PieupSios; // ㅄ\n\n/// assert_eq!(Jaeum::from(jongseong), Jaeum::PieupSios);\n\n///\n\n/// let jaeum = Jaeum::SsangSios; // ㅆ\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 29, "score": 6.5485790042489445 }, { "content": " '선'\n\n );\n\n\n\n assert_eq!(\n\n char::from(Syllable {\n\n choseong: Choseong::Kiyeok,\n\n jungseong: Jungseong::Ae,\n\n jongseong: None,\n\n }),\n\n '개'\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_tryfrom_char_for_syllable() {\n\n assert_eq!(Syllable::try_from('@'), Err(Error::NonKorean('@')));\n\n assert_eq!(Syllable::try_from('E'), Err(Error::NonKorean('E')));\n\n assert_eq!(Syllable::try_from('𝄞'), Err(Error::NonKorean('𝄞')));\n\n\n\n assert_eq!(\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 30, "score": 6.4872623651332955 }, { "content": " let character = character as u32;\n\n\n\n (0xAC00..=0xD7A3).contains(&character)\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{Choseong, Error, Jaeum, Jongseong, Jungseong, Moeum, Syllable};\n\n use std::convert::TryFrom;\n\n\n\n #[test]\n\n fn test_from_jamo_for_char() {\n\n let choseong = Choseong::Pieup;\n\n assert_eq!(char::from(choseong), 'ㅂ');\n\n let jaeum = Jaeum::from(choseong);\n\n assert_eq!(jaeum, Jaeum::Pieup);\n\n assert_eq!(char::from(jaeum), 'ㅂ');\n\n\n\n let jungseong = Jungseong::We;\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 31, "score": 6.000418484699165 }, { "content": "use std::convert::TryFrom;\n\nuse unicode_korean_multitool::Syllable;\n\n\n", "file_path": "horizontal-chojongseong-flip/src/lib.rs", "rank": 32, "score": 5.118973680698153 }, { "content": "/// assert_eq!(Choseong::try_from(jaeum), Ok(Choseong::SsangSios));\n\n/// assert_eq!(Jongseong::try_from(jaeum), Ok(Jongseong::SsangSios));\n\n///\n\n/// let jaeum = Jaeum::SsangTikeut; // ㄸ\n\n/// assert_eq!(Choseong::try_from(jaeum), Ok(Choseong::SsangTikeut));\n\n/// assert_eq!(\n\n/// Jongseong::try_from(jaeum),\n\n/// Err(Error::NotApplicableToJongseong(Jaeum::SsangTikeut))\n\n/// );\n\n///\n\n/// let jaeum = Jaeum::NieunCieuc; // ㄵ\n\n/// assert_eq!(\n\n/// Choseong::try_from(jaeum),\n\n/// Err(Error::NotApplicableToChoseong(Jaeum::NieunCieuc))\n\n/// );\n\n/// assert_eq!(Jongseong::try_from(jaeum), Ok(Jongseong::NieunCieuc));\n\n/// ```\n\n#[derive(Clone, Copy, Debug, Eq, IntoPrimitive, Ord, PartialEq, PartialOrd, TryFromPrimitive)]\n\n#[repr(u8)]\n\npub enum Jaeum {\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 33, "score": 4.8388801189311526 }, { "content": "}\n\n\n\n/// Contains all the possible error conditions that can arise within this crate.\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum Error {\n\n /// Denotes that a [`char`] outside the Hangul Compatibility Jamo range (U+3131 'ㄱ' -- U+3163\n\n /// 'ㅣ') was tried converting into either a [`Jaeum`] or [`Moeum`].\n\n NonJamo(char),\n\n /// Denotes that a [`char`] outside the Precomposed Korean Syllables range (U+AC00 '가' --\n\n /// U+D7A3 '힣') was tried converting into a [`Syllable`].\n\n NonKorean(char),\n\n /// Denotes that a consonant (자음, [`Jaeum`]) cannot be placed in the initial consonant (초성,\n\n /// [`Choseong`]) position.\n\n NotApplicableToChoseong(Jaeum),\n\n /// Denotes that a consonant (자음, [`Jaeum`]) cannot be placed in the final consonant (종성,\n\n /// [`Jongseong`]) position.\n\n NotApplicableToJongseong(Jaeum),\n\n}\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter) -> FmtResult {\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 34, "score": 4.321400667948307 }, { "content": " match self {\n\n Self::NonJamo(coi) => write!(f, \"'{}' is not a Hangul Compatibility Jamo\", coi),\n\n Self::NonKorean(coi) => write!(f, \"'{}' is not a Precomposed Korean Sylable\", coi),\n\n Self::NotApplicableToChoseong(jaeum) => {\n\n write!(f, \"{:?} cannot be used as an initial consonant\", jaeum)\n\n }\n\n Self::NotApplicableToJongseong(jaeum) => {\n\n write!(f, \"{:?} cannot be used as a final consonant\", jaeum)\n\n }\n\n }\n\n }\n\n}\n\nimpl StdError for Error {}\n\n\n\n/// Groups all the Korean consonants (자음, Jaeum).\n\n///\n\n/// Specifically, those residing in Hangul Compatibility Jamo range (U+3131 'ㄱ' -- U+314E 'ㅎ').\n\n///\n\n/// Most of the time, all you need to do is calling [`Jaeum::try_from`] with [`char`]\n\n/// (that contains a valid Korean syllable) as its argument:\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 
35, "score": 4.0962508807964895 }, { "content": " Choseong::Thieuth => Self::Thieuth,\n\n Choseong::Phieuph => Self::Phieuph,\n\n Choseong::Hieuh => Self::Hieuh,\n\n }\n\n }\n\n}\n\nimpl From<Jaeum> for char {\n\n fn from(jaeum: Jaeum) -> Self {\n\n Self::from_u32(0x3131 + jaeum as u32).unwrap()\n\n }\n\n}\n\nimpl From<Jongseong> for Jaeum {\n\n fn from(jongseong: Jongseong) -> Self {\n\n match jongseong {\n\n Jongseong::Kiyeok => Self::Kiyeok,\n\n Jongseong::SsangKiyeok => Self::SsangKiyeok,\n\n Jongseong::KiyeokSios => Self::KiyeokSios,\n\n Jongseong::Nieun => Self::Nieun,\n\n Jongseong::NieunCieuc => Self::NieunCieuc,\n\n Jongseong::NieunHieuh => Self::NieunHieuh,\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 36, "score": 4.0306182004327935 }, { "content": "//! This crate provides ways to manipulate modern Korean alphabets (현대한글, Hyeondae Hangeul).\n\n//!\n\n//! More specifically, you can:\n\n//! * Decompose a Precomposed Korean [`Syllable`] into individual 'consonants and vowels' (자모,\n\n//! Jamo), and\n\n//! * Do the reverse of above action, i.e., compose a set of individual consonants and vowels\n\n//! into a Precomposed Korean Syllable.\n\nuse num_enum::{IntoPrimitive, TryFromPrimitive};\n\nuse std::{\n\n convert::TryFrom,\n\n error::Error as StdError,\n\n fmt::{Display, Formatter, Result as FmtResult},\n\n};\n\n\n\n/// Groups all the consonants applicable to the 'initial consonant' (초성, Choseong) position of a\n\n/// Korean syllable.\n\n///\n\n/// These consonants do reside by themselves as an individual Unicode characters, but not in this\n\n/// particular order; for that, see [`Jaeum`].\n\n#[derive(Clone, Copy, Debug, Eq, IntoPrimitive, Ord, PartialEq, PartialOrd, TryFromPrimitive)]\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 37, "score": 3.7405905453854826 }, { "content": " assert_eq!(char::from(jungseong), 'ㅞ');\n\n assert_eq!(jungseong, Moeum::We);\n\n\n\n let jongseong = Jongseong::RieulKiyeok;\n\n assert_eq!(char::from(jongseong), 'ㄺ');\n\n let jaeum = Jaeum::from(jongseong);\n\n assert_eq!(jaeum, Jaeum::RieulKiyeok);\n\n assert_eq!(char::from(jaeum), 'ㄺ');\n\n }\n\n\n\n #[test]\n\n fn test_tryfrom_char_for_jamo() {\n\n assert_eq!(Choseong::try_from('@'), Err(Error::NonJamo('@')));\n\n assert_eq!(Jungseong::try_from('E'), Err(Error::NonJamo('E')));\n\n assert_eq!(Jongseong::try_from('𝄞'), Err(Error::NonJamo('𝄞')));\n\n\n\n assert_eq!(Choseong::try_from('ㄸ'), Ok(Choseong::SsangTikeut));\n\n assert_eq!(Jaeum::try_from('ㄸ'), Ok(Jaeum::SsangTikeut));\n\n assert_eq!(Jungseong::try_from('ㅖ'), Ok(Jungseong::Ye));\n\n assert_eq!(Moeum::try_from('ㅖ'), Ok(Moeum::Ye));\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 38, "score": 3.215226254056775 }, { "content": " jongseong as u32\n\n } else {\n\n 0\n\n },\n\n )\n\n .unwrap()\n\n }\n\n}\n\nimpl TryFrom<char> for Syllable {\n\n type Error = Error;\n\n\n\n fn try_from(character: char) -> Result<Self, Self::Error> {\n\n if !Self::is_one_of_us(character) {\n\n return Err(Error::NonKorean(character));\n\n }\n\n\n\n // all precomposed korean syllables are within BMP, so in this context, it is safe to\n\n // assume:\n\n // Unicode Scalar Value == Unicode Code Point\n\n let mut unified_syllable = character as u32 - 0xAC00;\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 39, "score": 3.1008793339096132 }, { "content": "#[repr(u8)]\n\npub enum Choseong {\n\n /// Represents 'ㄱ'.\n\n Kiyeok,\n\n /// Represents 'ㄲ'.\n\n SsangKiyeok,\n\n /// Represents 'ㄴ'.\n\n Nieun,\n\n /// 
Represents 'ㄷ'.\n\n Tikeut,\n\n /// Represents 'ㄸ'.\n\n SsangTikeut,\n\n /// Represents 'ㄹ'.\n\n Rieul,\n\n /// Represents 'ㅁ'.\n\n Mieum,\n\n /// Represents 'ㅂ'.\n\n Pieup,\n\n /// Represents 'ㅃ'.\n\n SsangPieup,\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 40, "score": 3.0831779776222277 }, { "content": " Jaeum::Thieuth => Ok(Self::Thieuth),\n\n Jaeum::Phieuph => Ok(Self::Phieuph),\n\n Jaeum::Hieuh => Ok(Self::Hieuh),\n\n anything_else => Err(Error::NotApplicableToJongseong(anything_else)),\n\n }\n\n }\n\n}\n\n\n\n/// Groups all the vowels applicable to the 'medial vowel' (중성, Jungseong) position of a Korean\n\n/// syllable.\n\n#[derive(Clone, Copy, Debug, Eq, IntoPrimitive, Ord, PartialEq, PartialOrd, TryFromPrimitive)]\n\n#[repr(u8)]\n\npub enum Jungseong {\n\n /// Represents 'ㅏ'.\n\n A,\n\n /// Represents 'ㅐ'.\n\n Ae,\n\n /// Represents 'ㅑ'.\n\n Ya,\n\n /// Represents 'ㅒ'.\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 41, "score": 2.9571257263003066 }, { "content": "}\n\nimpl From<Choseong> for Jaeum {\n\n fn from(choseong: Choseong) -> Self {\n\n match choseong {\n\n Choseong::Kiyeok => Self::Kiyeok,\n\n Choseong::SsangKiyeok => Self::SsangKiyeok,\n\n Choseong::Nieun => Self::Nieun,\n\n Choseong::Tikeut => Self::Tikeut,\n\n Choseong::SsangTikeut => Self::SsangTikeut,\n\n Choseong::Rieul => Self::Rieul,\n\n Choseong::Mieum => Self::Mieum,\n\n Choseong::Pieup => Self::Pieup,\n\n Choseong::SsangPieup => Self::SsangPieup,\n\n Choseong::Sios => Self::Sios,\n\n Choseong::SsangSios => Self::SsangSios,\n\n Choseong::Ieung => Self::Ieung,\n\n Choseong::Cieuc => Self::Cieuc,\n\n Choseong::SsangCieuc => Self::SsangCieuc,\n\n Choseong::Chieuch => Self::Chieuch,\n\n Choseong::Khieukh => Self::Khieukh,\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 42, "score": 2.7761984887520312 }, { "content": " Jongseong::Tikeut => Self::Tikeut,\n\n Jongseong::Rieul => Self::Rieul,\n\n Jongseong::RieulKiyeok => Self::RieulKiyeok,\n\n Jongseong::RieulMieum => Self::RieulMieum,\n\n Jongseong::RieulPieup => Self::RieulPieup,\n\n Jongseong::RieulSios => Self::RieulSios,\n\n Jongseong::RieulThieuth => Self::RieulThieuth,\n\n Jongseong::RieulPhieuph => Self::RieulPhieuph,\n\n Jongseong::RieulHieuh => Self::RieulHieuh,\n\n Jongseong::Mieum => Self::Mieum,\n\n Jongseong::Pieup => Self::Pieup,\n\n Jongseong::PieupSios => Self::PieupSios,\n\n Jongseong::Sios => Self::Sios,\n\n Jongseong::SsangSios => Self::SsangSios,\n\n Jongseong::Ieung => Self::Ieung,\n\n Jongseong::Cieuc => Self::Cieuc,\n\n Jongseong::Chieuch => Self::Chieuch,\n\n Jongseong::Khieukh => Self::Khieukh,\n\n Jongseong::Thieuth => Self::Thieuth,\n\n Jongseong::Phieuph => Self::Phieuph,\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 43, "score": 2.532029926838166 }, { "content": " Jongseong::Hieuh => Self::Hieuh,\n\n }\n\n }\n\n}\n\nimpl TryFrom<char> for Jaeum {\n\n type Error = Error;\n\n\n\n fn try_from(character: char) -> Result<Self, Self::Error> {\n\n if !(0x3131..=0x314E).contains(&(character as u32)) {\n\n return Err(Error::NonJamo(character));\n\n }\n\n\n\n Ok(Self::try_from((character as u32 - 0x3131) as u8).unwrap())\n\n }\n\n}\n\n\n\n/// Groups all the consonants (including clustered consonants) applicable to the 'final consonant'\n\n/// (종성, Jongseong) position of a Korean syllable.\n\n///\n\n/// These consonants do reside by themselves as an individual Unicode characters, but not in this\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 44, "score": 
2.447641357918932 }, { "content": "}\n\nimpl From<Choseong> for char {\n\n fn from(choseong: Choseong) -> Self {\n\n Jaeum::from(choseong).into()\n\n }\n\n}\n\nimpl TryFrom<char> for Choseong {\n\n type Error = Error;\n\n\n\n fn try_from(character: char) -> Result<Self, Self::Error> {\n\n Self::try_from(Jaeum::try_from(character)?)\n\n }\n\n}\n\nimpl TryFrom<Jaeum> for Choseong {\n\n type Error = Error;\n\n\n\n fn try_from(jaeum: Jaeum) -> Result<Self, Self::Error> {\n\n match jaeum {\n\n Jaeum::Kiyeok => Ok(Self::Kiyeok),\n\n Jaeum::SsangKiyeok => Ok(Self::SsangKiyeok),\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 45, "score": 2.365802900698618 }, { "content": "/// particular order; for that, see [`Jaeum`].\n\n#[derive(Clone, Copy, Debug, Eq, IntoPrimitive, Ord, PartialEq, PartialOrd, TryFromPrimitive)]\n\n#[repr(u8)]\n\npub enum Jongseong {\n\n /// Represents 'ㄱ'.\n\n Kiyeok = 1,\n\n /// Represents 'ㄲ'.\n\n SsangKiyeok,\n\n /// Represents 'ㄳ'.\n\n KiyeokSios,\n\n /// Represents 'ㄴ'.\n\n Nieun,\n\n /// Represents 'ㄵ'.\n\n NieunCieuc,\n\n /// Represents 'ㄶ'.\n\n NieunHieuh,\n\n /// Represents 'ㄷ'.\n\n Tikeut,\n\n /// Represents 'ㄹ'.\n\n Rieul,\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 46, "score": 2.329557662562605 }, { "content": " fn from(jongseong: Jongseong) -> Self {\n\n Jaeum::from(jongseong).into()\n\n }\n\n}\n\nimpl TryFrom<char> for Jongseong {\n\n type Error = Error;\n\n\n\n fn try_from(character: char) -> Result<Self, Self::Error> {\n\n Self::try_from(Jaeum::try_from(character)?)\n\n }\n\n}\n\nimpl TryFrom<Jaeum> for Jongseong {\n\n type Error = Error;\n\n\n\n fn try_from(jaeum: Jaeum) -> Result<Self, Self::Error> {\n\n match jaeum {\n\n Jaeum::Kiyeok => Ok(Self::Kiyeok),\n\n Jaeum::SsangKiyeok => Ok(Self::SsangKiyeok),\n\n Jaeum::KiyeokSios => Ok(Self::KiyeokSios),\n\n Jaeum::Nieun => Ok(Self::Nieun),\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 47, "score": 1.9975710177438302 }, { "content": " /// Represents 'ㅅ'.\n\n Sios,\n\n /// Represents 'ㅆ'.\n\n SsangSios,\n\n /// Represents 'ㅇ'.\n\n Ieung,\n\n /// Represents 'ㅈ'.\n\n Cieuc,\n\n /// Represents 'ㅊ'.\n\n Chieuch,\n\n /// Represents 'ㅋ'.\n\n Khieukh,\n\n /// Represents 'ㅌ'.\n\n Thieuth,\n\n /// Represents 'ㅍ'.\n\n Phieuph,\n\n /// Represents 'ㅎ'.\n\n Hieuh,\n\n}\n\nimpl From<Jongseong> for char {\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 48, "score": 1.3895348817567255 }, { "content": " jongseong,\n\n }));\n\n } else {\n\n destination.push(character);\n\n }\n\n }\n\n\n\n destination\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_flip_chojongseong_horizontally() {\n\n assert_eq!(\n\n super::flip_chojongseong_horizontally(\"사람이 사람답게 살아야\"),\n\n \"아아실 가답람세 아람샤\".to_owned()\n\n );\n\n assert_eq!(\n\n super::flip_chojongseong_horizontally(\"아무말 대잔치\"),\n", "file_path": "horizontal-chojongseong-flip/src/lib.rs", "rank": 49, "score": 1.2695838116870122 }, { "content": "}\n\nimpl TryFrom<char> for Jungseong {\n\n type Error = Error;\n\n\n\n fn try_from(character: char) -> Result<Self, Self::Error> {\n\n if !(0x314F..=0x3163).contains(&(character as u32)) {\n\n return Err(Error::NonJamo(character));\n\n }\n\n\n\n Ok(Self::try_from((character as u32 - 0x314F) as u8).unwrap())\n\n }\n\n}\n\n\n\n/// Groups all the Korean vowels (모음, Moeum).\n\n///\n\n/// Specifically, those residing in 'Hangul Compatibility Jamo' range (U+314F 'ㅏ' -- U+3163 'ㅣ').\n\n///\n\n/// Since [`Jungseong`] already contains the entire vowel set, this is mere a type 
alias to it.\n\npub type Moeum = Jungseong;\n\n\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 50, "score": 1.0815401889507124 }, { "content": " Jaeum::Nieun => Ok(Self::Nieun),\n\n Jaeum::Tikeut => Ok(Self::Tikeut),\n\n Jaeum::SsangTikeut => Ok(Self::SsangTikeut),\n\n Jaeum::Rieul => Ok(Self::Rieul),\n\n Jaeum::Mieum => Ok(Self::Mieum),\n\n Jaeum::Pieup => Ok(Self::Pieup),\n\n Jaeum::SsangPieup => Ok(Self::SsangPieup),\n\n Jaeum::Sios => Ok(Self::Sios),\n\n Jaeum::SsangSios => Ok(Self::SsangSios),\n\n Jaeum::Ieung => Ok(Self::Ieung),\n\n Jaeum::Cieuc => Ok(Self::Cieuc),\n\n Jaeum::SsangCieuc => Ok(Self::SsangCieuc),\n\n Jaeum::Chieuch => Ok(Self::Chieuch),\n\n Jaeum::Khieukh => Ok(Self::Khieukh),\n\n Jaeum::Thieuth => Ok(Self::Thieuth),\n\n Jaeum::Phieuph => Ok(Self::Phieuph),\n\n Jaeum::Hieuh => Ok(Self::Hieuh),\n\n anything_else => Err(Error::NotApplicableToChoseong(anything_else)),\n\n }\n\n }\n", "file_path": "unicode-korean-multitool/src/lib.rs", "rank": 51, "score": 0.5943383218628733 } ]
Rust
gui/draw-cube/src/007-synchronization/main.rs
Shub1427/rustschool
fe45848a2101ac0cf48311e9926fe26f509059bd
use gfx_hal::{ command, format::{self as hal_format, Aspects, Swizzle}, image::{Layout, SubresourceRange, ViewKind}, pass::{Attachment, AttachmentOps, SubpassDesc}, pool::CommandPoolCreateFlags, prelude::*, window as hal_window, Backend, Features, Instance, }; use std::mem::ManuallyDrop; use std::ptr; use winit::{ dpi::{LogicalSize, PhysicalSize}, event, event_loop, window, }; #[cfg(feature = "dx12")] use gfx_backend_dx12 as back; #[cfg(feature = "metal")] use gfx_backend_metal as back; #[cfg(feature = "vulkan")] use gfx_backend_vulkan as back; use log::debug; use log4rs; const APP_NAME: &'static str = "Show Window"; const WINDOW_SIZE: [u32; 2] = [1280, 768]; pub struct Renderer<B: Backend> { instance: B::Instance, surface: ManuallyDrop<B::Surface>, device: B::Device, command_pool: Option<B::CommandPool>, swapchain: Option<B::Swapchain>, image_views: Vec<B::ImageView>, render_pass: Option<B::RenderPass>, framebuffers: Vec<B::Framebuffer>, image_available_semaphores: Vec<B::Semaphore>, render_complete_semaphores: Vec<B::Semaphore>, submission_complete_fence: Vec<B::Fence>, } impl<B: Backend> Renderer<B> { pub fn new( instance: B::Instance, mut surface: B::Surface, init_extent: hal_window::Extent2D, ) -> Result<Self, &'static str> { let mut adapters = instance.enumerate_adapters(); let (memory_types, limits, adapter) = { let adapter = adapters.remove(0); ( adapter.physical_device.memory_properties().memory_types, adapter.physical_device.limits(), adapter, ) }; let (device, queues, supported_family) = { let supported_family = adapter .queue_families .iter() .find(|family| { surface.supports_queue_family(family) && family.queue_type().supports_graphics() }) .unwrap(); let mut gpu = unsafe { adapter .physical_device .open(&[(supported_family, &[1.0])], Features::empty()) .unwrap() }; ( gpu.device, gpu.queue_groups.pop().unwrap(), supported_family, ) }; let (command_pool, mut command_buffer) = unsafe { let mut command_pool = device .create_command_pool(queues.family, CommandPoolCreateFlags::empty()) .expect("Out of memory"); let command_buffer = command_pool.allocate_one(command::Level::Primary); (command_pool, command_buffer) }; let (swapchain, backbuffer, image_extent, format) = { let caps = surface.capabilities(&adapter.physical_device); let supported_formats = surface.supported_formats(&adapter.physical_device); let format = supported_formats.map_or(hal_format::Format::Rgba8Srgb, |formats| { formats .iter() .find(|format| format.base_format().1 == hal_format::ChannelType::Srgb) .map(|format| *format) .unwrap_or(formats[0]) }); let swap_config = hal_window::SwapchainConfig::from_caps(&caps, format, init_extent); let image_extent = swap_config.extent.to_extent(); let (swapchain, backbuffer) = unsafe { device .create_swapchain(&mut surface, swap_config, None) .expect("Can't create swapchain") }; (swapchain, backbuffer, image_extent, format) }; let image_views = backbuffer .into_iter() .map(|image| unsafe { device .create_image_view( &image, ViewKind::D2, format, Swizzle::NO, SubresourceRange { aspects: Aspects::COLOR, levels: 0..1, layers: 0..1, }, ) .map_err(|_| "Couldn't create the image_view for the image!") }) .collect::<Result<Vec<B::ImageView>, &str>>()?; let render_pass = { let color_attachment = Attachment { format: Some(format), samples: 1, ops: AttachmentOps::INIT, stencil_ops: AttachmentOps::DONT_CARE, layouts: Layout::Undefined..Layout::Present, }; let subpass = SubpassDesc { colors: &[(0, Layout::ColorAttachmentOptimal)], depth_stencil: None, inputs: &[], resolves: &[], preserves: 
&[], }; unsafe { device .create_render_pass(&[color_attachment], &[subpass], &[]) .expect("Out of memory") } }; let framebuffers = image_views .iter() .map(|image_view| unsafe { device .create_framebuffer(&render_pass, vec![image_view], image_extent) .map_err(|_| "Couldn't create the framebuffer for the image_view!") }) .collect::<Result<Vec<B::Framebuffer>, &str>>()?; let (image_available_semaphores, render_complete_semaphores, submission_complete_fence) = { let mut image_available_semaphores: Vec<B::Semaphore> = vec![]; let mut render_finished_semaphores: Vec<B::Semaphore> = vec![]; let mut submission_complete_fence: Vec<B::Fence> = vec![]; for _ in 0..image_views.len() { image_available_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create image_available_semaphores semaphore!")?, ); render_finished_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create render_finished_semaphores semaphore!")?, ); submission_complete_fence.push( device .create_fence(true) .map_err(|_| "Could not create submission_complete_fence fence!")?, ); } ( image_available_semaphores, render_finished_semaphores, submission_complete_fence, ) }; Ok(Renderer { instance, surface: ManuallyDrop::new(surface), device, command_pool: Some(command_pool), swapchain: Some(swapchain), image_views, render_pass: Some(render_pass), framebuffers, image_available_semaphores, render_complete_semaphores, submission_complete_fence, }) } } impl<B: Backend> Drop for Renderer<B> { fn drop(&mut self) { unsafe { for image_available in self.image_available_semaphores.drain(..) { self.device.destroy_semaphore(image_available); } for render_complete in self.render_complete_semaphores.drain(..) { self.device.destroy_semaphore(render_complete); } for submission_complete in self.submission_complete_fence.drain(..) { self.device.destroy_fence(submission_complete); } for framebuffer in self.framebuffers.drain(..) { self.device.destroy_framebuffer(framebuffer); } for image_view in self.image_views.drain(..) 
{ self.device.destroy_image_view(image_view); } self.device .destroy_render_pass(self.render_pass.take().unwrap()); self.device .destroy_swapchain(self.swapchain.take().unwrap()); self.device .destroy_command_pool(self.command_pool.take().unwrap()); let surface = ManuallyDrop::into_inner(ptr::read(&self.surface)); self.instance.destroy_surface(surface); } } } fn create_backend( wb: window::WindowBuilder, ev_loop: &event_loop::EventLoop<()>, ) -> (back::Instance, back::Surface, window::Window) { let window = wb.build(ev_loop).unwrap(); let instance = back::Instance::create(APP_NAME, 1).expect("Failed to create an instance!"); let surface = unsafe { instance .create_surface(&window) .expect("Failed to create a surface!") }; (instance, surface, window) } fn build_window( ev_loop: &event_loop::EventLoop<()>, ) -> (window::WindowBuilder, hal_window::Extent2D) { let (logical_window_size, physical_window_size) = { let dpi = ev_loop.primary_monitor().scale_factor(); let logical: LogicalSize<u32> = WINDOW_SIZE.into(); let physical: PhysicalSize<u32> = logical.to_physical(dpi); (logical, physical) }; let window_builder = window::WindowBuilder::new() .with_title(APP_NAME) .with_inner_size(logical_window_size); ( window_builder, hal_window::Extent2D { width: physical_window_size.width, height: physical_window_size.height, }, ) } fn main() { log4rs::init_file("log4rs.yml", Default::default()).unwrap(); let ev_loop = event_loop::EventLoop::new(); let (window_builder, extent) = build_window(&ev_loop); let (instance, surface, window) = create_backend(window_builder, &ev_loop); let renderer = Renderer::<back::Backend>::new(instance, surface, extent); ev_loop.run(move |event, _, control_flow| { *control_flow = event_loop::ControlFlow::Wait; match event { event::Event::WindowEvent { event, .. } => { #[allow(unused_variables)] match event { event::WindowEvent::CloseRequested => { *control_flow = event_loop::ControlFlow::Exit } event::WindowEvent::Resized(dims) => { debug!("RESIZE EVENT"); } event::WindowEvent::ScaleFactorChanged { new_inner_size, .. } => { debug!("Scale Factor Change"); } _ => (), } } event::Event::MainEventsCleared => { debug!("MainEventsCleared"); } event::Event::RedrawRequested(_) => { debug!("RedrawRequested"); } event::Event::RedrawEventsCleared => { debug!("RedrawEventsCleared"); } _ => (), } }); }
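One detail of the file_code above worth spelling out: the surface is stored as ManuallyDrop<B::Surface> so that Drop for Renderer can move it back out with ManuallyDrop::into_inner(ptr::read(&self.surface)) and hand an owned value to instance.destroy_surface. The sketch below isolates that take-ownership-inside-Drop pattern with a dummy resource; Resource, Holder and destroy are made-up stand-ins for illustration, not gfx-hal items.

use std::mem::ManuallyDrop;
use std::ptr;

struct Resource(String);                 // stands in for B::Surface

fn destroy(res: Resource) {              // stands in for Instance::destroy_surface
    println!("destroying {}", res.0);
}

struct Holder {
    res: ManuallyDrop<Resource>,
}

impl Drop for Holder {
    fn drop(&mut self) {
        unsafe {
            // ptr::read copies the field out without running its destructor;
            // ManuallyDrop::into_inner then yields an owned value that can be
            // destroyed explicitly, exactly once, by the API that created it.
            let res = ManuallyDrop::into_inner(ptr::read(&self.res));
            destroy(res);
        }
    }
}

fn main() {
    let _holder = Holder { res: ManuallyDrop::new(Resource("surface".into())) };
    // dropping _holder prints: destroying surface
}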
use gfx_hal::{ command, format::{self as hal_format, Aspects, Swizzle}, image::{Layout, SubresourceRange, ViewKind}, pass::{Attachment, AttachmentOps, SubpassDesc}, pool::CommandPoolCreateFlags, prelude::*, window as hal_window, Backend, Features, Instance, }; use std::mem::ManuallyDrop; use std::ptr; use winit::{ dpi::{LogicalSize, PhysicalSize}, event, event_loop, window, }; #[cfg(feature = "dx12")] use gfx_backend_dx12 as back; #[cfg(feature = "metal")] use gfx_backend_metal as back; #[cfg(feature = "vulkan")] use gfx_backend_vulkan as back; use log::debug; use log4rs; const APP_NAME: &'static str = "Show Window"; const WINDOW_SIZE: [u32; 2] = [1280, 768]; pub struct Renderer<B: Backend> { instance: B::Instance, surface: ManuallyDrop<B::Surface>, device: B::Device, command_pool: Option<B::CommandPool>, swapchain: Option<B::Swapchain>, image_views: Vec<B::ImageView>, render_pass: Option<B::RenderPass>, framebuffers: Vec<B::Framebuffer>, image_available_semaphores: Vec<B::Semaphore>, render_complete_semaphores: Vec<B::Semaphore>, submission_complete_fence: Vec<B::Fence>, } impl<B: Backend> Renderer<B> { pub fn new( instance: B::Instance, mut surface: B::Surface, init_extent: hal_window::Extent2D, ) -> Result<Self, &'static str> { let mut adapters = instance.enumerate_adapters(); let (memory_types, limits, adapter) = { let adapter = adapters.remove(0); ( adapter.physical_device.memory_properties().memory_types, adapter.physical_device.limits(), adapter, ) }; let (device, queues, supported_family) = { let supported_family = adapter .queue_families .iter() .find(|family| { surface.supports_queue_family(family) && family.queue_type().supports_graphics() }) .unwrap(); let mut gpu = unsafe { adapter .physical_device .open(&[(supported_family, &[1.0])], Features::empty()) .unwrap() }; ( gpu.device, gpu.queue_groups.pop().unwrap(), supported_family, ) }; let (command_pool, mut command_buffer) = unsafe { let mut command_pool = device .create_command_pool(queues.family, CommandPoolCreateFlags::empty()) .expect("Out of memory"); let command_buffer = command_pool.allocate_one(command::Level::Primary); (command_pool, command_buffer) }; let (swapchain, backbuffer, image_extent, format) = { let caps = surface.capabilities(&adapter.physical_device); let supported_formats = surface.supported_formats(&adapter.physical_device); let format = supported_formats.map_or(hal_format::Format::Rgba8Srgb, |formats| { formats .iter() .find(|format| format.base_format().1 == hal_format::ChannelType::Srgb) .map(|format| *format) .unwrap_or(formats[0]) }); let swap_config = hal_window::SwapchainConfig::from_caps(&caps, format, init_extent); let image_extent = swap_config.extent.to_extent(); let (swapchain, backbuffer) = unsafe { device .create_swapchain(&mut surface, swap_config, None) .expect("Can't create swapchain") }; (swapchain, backbuffer, image_extent, format) }; let image_views = backbuffer .into_iter() .map(|image| unsafe { device .create_image_view( &image, ViewKind::D2, format, Swizzle::NO, SubresourceRange { aspects: Aspects::COLOR, levels: 0..1, layers: 0..1, }, ) .map_err(|_| "Couldn't create the image_view for the image!") }) .collect::<Result<Vec<B::ImageView>, &str>>()?; let render_pass = { let color_attachment = Attachment { format: Some(format), samples: 1, ops: AttachmentOps::INIT, stencil_ops: AttachmentOps::DONT_CARE, layouts: Layout::Undefined..Layout::Present, }; let subpass = SubpassDesc { colors: &[(0, Layout::ColorAttachmentOptimal)], depth_stencil: None, inputs: &[], resolves: &[], preserves: 
&[], }; unsafe { device .create_render_pass(&[color_attachment], &[subpass], &[]) .expect("Out of memory") } }; let framebuffers = image_views .iter() .map(|image_view| unsafe { device .create_framebuffer(&render_pass, vec![image_view], image_extent) .map_err(|_| "Couldn't create the framebuffer for the image_view!") }) .collect::<Result<Vec<B::Framebuffer>, &str>>()?; let (image_available_semaphores, render_complete_semaphores, submission_complete_fence) = { let mut image_available_semaphores: Vec<B::Semaphore> = vec![]; let mut render_finished_semaphores: Vec<B::Semaphore> = vec![]; let mut submission_complete_fence: Vec<B::Fence> = vec![]; for _ in 0..image_views.len() { image_available_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create image_available_semaphores semaphore!")?, ); render_finished_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create render_finished_semaphores semaphore!")?, ); submission_complete_fence.push( device .create_fence(true) .map_err(|_| "Could not create submission_complete_fence fence!")?, ); } ( image_available_semaphores, render_finished_semaphores, submission_complete_fence, ) };
} } impl<B: Backend> Drop for Renderer<B> { fn drop(&mut self) { unsafe { for image_available in self.image_available_semaphores.drain(..) { self.device.destroy_semaphore(image_available); } for render_complete in self.render_complete_semaphores.drain(..) { self.device.destroy_semaphore(render_complete); } for submission_complete in self.submission_complete_fence.drain(..) { self.device.destroy_fence(submission_complete); } for framebuffer in self.framebuffers.drain(..) { self.device.destroy_framebuffer(framebuffer); } for image_view in self.image_views.drain(..) { self.device.destroy_image_view(image_view); } self.device .destroy_render_pass(self.render_pass.take().unwrap()); self.device .destroy_swapchain(self.swapchain.take().unwrap()); self.device .destroy_command_pool(self.command_pool.take().unwrap()); let surface = ManuallyDrop::into_inner(ptr::read(&self.surface)); self.instance.destroy_surface(surface); } } } fn create_backend( wb: window::WindowBuilder, ev_loop: &event_loop::EventLoop<()>, ) -> (back::Instance, back::Surface, window::Window) { let window = wb.build(ev_loop).unwrap(); let instance = back::Instance::create(APP_NAME, 1).expect("Failed to create an instance!"); let surface = unsafe { instance .create_surface(&window) .expect("Failed to create a surface!") }; (instance, surface, window) } fn build_window( ev_loop: &event_loop::EventLoop<()>, ) -> (window::WindowBuilder, hal_window::Extent2D) { let (logical_window_size, physical_window_size) = { let dpi = ev_loop.primary_monitor().scale_factor(); let logical: LogicalSize<u32> = WINDOW_SIZE.into(); let physical: PhysicalSize<u32> = logical.to_physical(dpi); (logical, physical) }; let window_builder = window::WindowBuilder::new() .with_title(APP_NAME) .with_inner_size(logical_window_size); ( window_builder, hal_window::Extent2D { width: physical_window_size.width, height: physical_window_size.height, }, ) } fn main() { log4rs::init_file("log4rs.yml", Default::default()).unwrap(); let ev_loop = event_loop::EventLoop::new(); let (window_builder, extent) = build_window(&ev_loop); let (instance, surface, window) = create_backend(window_builder, &ev_loop); let renderer = Renderer::<back::Backend>::new(instance, surface, extent); ev_loop.run(move |event, _, control_flow| { *control_flow = event_loop::ControlFlow::Wait; match event { event::Event::WindowEvent { event, .. } => { #[allow(unused_variables)] match event { event::WindowEvent::CloseRequested => { *control_flow = event_loop::ControlFlow::Exit } event::WindowEvent::Resized(dims) => { debug!("RESIZE EVENT"); } event::WindowEvent::ScaleFactorChanged { new_inner_size, .. } => { debug!("Scale Factor Change"); } _ => (), } } event::Event::MainEventsCleared => { debug!("MainEventsCleared"); } event::Event::RedrawRequested(_) => { debug!("RedrawRequested"); } event::Event::RedrawEventsCleared => { debug!("RedrawEventsCleared"); } _ => (), } }); }
Ok(Renderer { instance, surface: ManuallyDrop::new(surface), device, command_pool: Some(command_pool), swapchain: Some(swapchain), image_views, render_pass: Some(render_pass), framebuffers, image_available_semaphores, render_complete_semaphores, submission_complete_fence, })
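For orientation, the middle value above is the Ok(Renderer { .. }) literal that closes the constructor shown in the prefix, and it carries one image_available semaphore, one render_complete semaphore and one submission_complete fence per swapchain image. This 007-synchronization file stops at creating those objects; typically a render loop cycles an index over the slots and waits on that slot's fence before reusing it. The tiny sketch below shows only that cycling, with made-up names standing in for the GPU sync objects; it is not gfx-hal code.

// GPU-free illustration of per-frame sync-slot cycling; SyncSlot and its
// string fields are placeholders, not gfx-hal types.
struct SyncSlot {
    image_available: &'static str, // would be a Semaphore
    render_complete: &'static str, // would be a Semaphore
    submission_done: &'static str, // would be a Fence
}

fn main() {
    let slots = [
        SyncSlot { image_available: "ia0", render_complete: "rc0", submission_done: "f0" },
        SyncSlot { image_available: "ia1", render_complete: "rc1", submission_done: "f1" },
        SyncSlot { image_available: "ia2", render_complete: "rc2", submission_done: "f2" },
    ];

    let mut slot_index = 0usize;
    for frame in 0..6 {
        let slot = &slots[slot_index];
        // A real loop would wait on and reset slot.submission_done here, acquire an
        // image signalling slot.image_available, submit work that waits on it and
        // signals slot.render_complete, then present after render_complete.
        println!("frame {}: slot {} / {} / {}", frame,
                 slot.image_available, slot.render_complete, slot.submission_done);
        slot_index = (slot_index + 1) % slots.len();
    }
}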
call_expression
[ { "content": "// The &'static here means the return type has a static lifetime.\n\n// This is a Rust feature that you don't need to worry about now.\n\npub fn hello() -> &'static str {\n\n \"Hello, World!\"\n\n}\n", "file_path": "exercism/rust/hello-world/src/lib.rs", "rank": 0, "score": 250871.10906581604 }, { "content": "pub fn reverse(input: &str) -> String {\n\n reversed::reverse(input)\n\n}\n", "file_path": "exercism/rust/reverse-string/src/lib.rs", "rank": 1, "score": 239693.73870298266 }, { "content": "pub fn search<'a>(query: &str, content: &'a str) -> Vec<&'a str> {\n\n content\n\n .lines()\n\n .filter(|line| line.contains(query))\n\n .collect()\n\n}\n\n\n", "file_path": "rust-book/minigrep/src/search.rs", "rank": 2, "score": 234904.62167838257 }, { "content": "pub fn search_case_insensitive<'a>(query: &str, content: &'a str) -> Vec<&'a str> {\n\n content\n\n .lines()\n\n .filter(|line| line.to_lowercase().contains(&(query.to_lowercase())))\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n #[test]\n\n fn test_search_case_sensitive() {\n\n let search_str = \"selv\";\n\n // parah string is a list of lines, which acts like pre-formatted text, thus don't use\n\n // unnecessary spaces or tabs, which might give unexpected results.\n\n let parah = \"\\\n\nAim for your goal,\n\nand things will come to\n\ntheir place by themselves...\n\n \";\n", "file_path": "rust-book/minigrep/src/search.rs", "rank": 3, "score": 229577.0311753151 }, { "content": "fn main() -> Result<(), &'static str> {\n\n log4rs::init_file(\"log4rs.yml\", Default::default()).unwrap();\n\n\n\n let ev_loop = event_loop::EventLoop::new();\n\n let (window_builder, extent) = build_window(&ev_loop);\n\n let (instance, surface, window) = create_backend(window_builder, &ev_loop);\n\n\n\n let mut renderer = Renderer::<back::Backend>::new(instance, surface, extent)?;\n\n let mut current_pos = PhysicalPosition::new(0.0, 0.0);\n\n let mut red = 1.0;\n\n let mut green = 0.5;\n\n let mut blue = 0.2;\n\n let mut alpha = 1.0; // Alpha channel if set to 1.0 makes the color opaque...\n\n\n\n renderer.draw([red, green, blue, alpha]);\n\n\n\n ev_loop.run(move |event, _, control_flow| {\n\n *control_flow = event_loop::ControlFlow::Wait;\n\n match event {\n\n event::Event::WindowEvent { event, .. 
} => {\n", "file_path": "gui/draw-cube/src/008-color_background/main.rs", "rank": 4, "score": 220900.44361120617 }, { "content": "fn create_backend(\n\n wb: window::WindowBuilder,\n\n ev_loop: &event_loop::EventLoop<()>,\n\n extent: hal_window::Extent2D,\n\n) -> (back::Instance, back::Surface, window::Window) {\n\n let window = wb.build(ev_loop).unwrap();\n\n\n\n let instance = back::Instance::create(APP_NAME, 1).expect(\"Failed to create an instance!\");\n\n let surface = unsafe {\n\n instance\n\n .create_surface(&window)\n\n .expect(\"Failed to create a surface!\")\n\n };\n\n\n\n (instance, surface, window)\n\n}\n\n\n", "file_path": "gui/draw-cube/src/001-show_window/main.rs", "rank": 5, "score": 219821.51976780698 }, { "content": "fn create_user_with_shorthand(name: String, email: &'static str, age: u8) -> User {\n\n User {\n\n name, email, age,\n\n active: false\n\n }\n\n}\n\n\n\n/**\n\n * Breakdown #2\n\n * - Immutability concept for Structs work similar to any other variable assignment in Rust.\n\n * - If a variable is marked as `mut`, then all it's member variables are also mutable...\n\n */\n", "file_path": "rust-book/structs/src/main.rs", "rank": 6, "score": 215947.2180601178 }, { "content": "struct Renderer<B: Backend> {\n\n // Vulkan backend instance object\n\n instance: B::Instance,\n\n // Vulkan backend surface object\n\n surface: ManuallyDrop<B::Surface>,\n\n}\n\n\n\nimpl<B: Backend> Renderer<B> {\n\n fn new(instance: B::Instance, surface: B::Surface) -> Self {\n\n Renderer {\n\n instance,\n\n surface: ManuallyDrop::new(surface),\n\n }\n\n }\n\n}\n\n\n\nimpl<B: Backend> Drop for Renderer<B> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n // up here ManuallyDrop gives us the inner resource with ownership\n\n // where `ptr::read` doesn't do anything just reads the resource\n\n // without manipulating the actual memory\n\n let surface = ManuallyDrop::into_inner(ptr::read(&self.surface));\n\n self.instance.destroy_surface(surface);\n\n }\n\n }\n\n}\n\n\n", "file_path": "gui/draw-cube/src/001-show_window/main.rs", "rank": 7, "score": 205030.34811708017 }, { "content": "pub fn nth(n: u32) -> u32 {\n\n *(PrimeTime::new(n as usize).find_prime_at(n))\n\n}\n", "file_path": "exercism/rust/nth-prime/src/lib.rs", "rank": 8, "score": 202165.64036155658 }, { "content": "/// Process a single test case for the property `reverse`\n\nfn process_reverse_case(input: &str, expected: &str) {\n\n assert_eq!(&reverse(input), expected)\n\n}\n\n\n\n#[test]\n", "file_path": "exercism/rust/reverse-string/tests/reverse-string.rs", "rank": 9, "score": 193458.18892247876 }, { "content": "pub fn raindrops(n: u32) -> String {\n\n let mut rain_str = String::with_capacity(15); // with max capacity\n\n if n % 3 == 0 {\n\n rain_str.push_str(\"Pling\");\n\n }\n\n if n % 5 == 0 {\n\n rain_str.push_str(\"Plang\");\n\n }\n\n if n % 7 == 0 {\n\n rain_str.push_str(\"Plong\");\n\n }\n\n\n\n if rain_str.is_empty() {\n\n return n.to_string();\n\n }\n\n\n\n rain_str\n\n}\n", "file_path": "exercism/rust/raindrops/src/lib.rs", "rank": 10, "score": 184091.35142544442 }, { "content": "fn main() -> Result<(), &'static str> {\n\n log4rs::init_file(\"log4rs.yml\", Default::default()).unwrap();\n\n\n\n let ev_loop = event_loop::EventLoop::new();\n\n let (window_builder, extent) = build_window(&ev_loop);\n\n let (instance, surface, window) = create_backend(window_builder, &ev_loop);\n\n\n\n let mut renderer = Renderer::<back::Backend>::new(instance, surface, extent)?;\n\n let mut current_pos = PhysicalPosition::new(0.0, 0.0);\n\n let 
mut red = 1.0;\n\n let mut green = 0.5;\n\n let mut blue = 0.2;\n\n let mut alpha = 1.0; // Alpha channel if set to 1.0 makes the color opaque...\n\n\n\n renderer.draw([red, green, blue, alpha]);\n\n\n\n ev_loop.run(move |event, _, control_flow| {\n\n *control_flow = event_loop::ControlFlow::Wait;\n\n match event {\n\n event::Event::WindowEvent { event, .. } => {\n", "file_path": "gui/draw-cube/src/final-draw-cube/main.rs", "rank": 11, "score": 182684.80020722892 }, { "content": "fn create_backend(\n\n wb: window::WindowBuilder,\n\n ev_loop: &event_loop::EventLoop<()>,\n\n extent: hal_window::Extent2D,\n\n) -> (back::Instance, back::Surface, window::Window) {\n\n let window = wb.build(ev_loop).unwrap();\n\n\n\n let instance = back::Instance::create(APP_NAME, 1).expect(\"Failed to create an instance!\");\n\n let surface = unsafe {\n\n instance\n\n .create_surface(&window)\n\n .expect(\"Failed to create a surface!\")\n\n };\n\n\n\n (instance, surface, window)\n\n}\n\n\n", "file_path": "gui/draw-cube/src/006-command_buffers/main.rs", "rank": 12, "score": 182141.66328646586 }, { "content": "fn create_backend(\n\n wb: window::WindowBuilder,\n\n ev_loop: &event_loop::EventLoop<()>,\n\n extent: hal_window::Extent2D,\n\n) -> (back::Instance, back::Surface, window::Window) {\n\n let window = wb.build(ev_loop).unwrap();\n\n\n\n let instance = back::Instance::create(APP_NAME, 1).expect(\"Failed to create an instance!\");\n\n let surface = unsafe {\n\n instance\n\n .create_surface(&window)\n\n .expect(\"Failed to create a surface!\")\n\n };\n\n\n\n (instance, surface, window)\n\n}\n\n\n", "file_path": "gui/draw-cube/src/002-enumerate_devices/main.rs", "rank": 13, "score": 182126.8327514796 }, { "content": "fn create_backend(\n\n wb: window::WindowBuilder,\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (back::Instance, back::Surface, window::Window) {\n\n let window = wb.build(ev_loop).unwrap();\n\n\n\n let instance = back::Instance::create(APP_NAME, 1).expect(\"Failed to create an instance!\");\n\n let surface = unsafe {\n\n instance\n\n .create_surface(&window)\n\n .expect(\"Failed to create a surface!\")\n\n };\n\n\n\n (instance, surface, window)\n\n}\n\n\n", "file_path": "gui/draw-cube/src/008-color_background/main.rs", "rank": 14, "score": 181984.34085542636 }, { "content": "pub fn reverse(original: &str) -> String {\n\n original.graphemes(true).rev().collect::<String>()\n\n}\n", "file_path": "exercism/rust/reverse-string/src/reversed.rs", "rank": 15, "score": 178801.0362655123 }, { "content": "// This function will only work for ASCII chars\n\n// and also will solve the problem in O(n)\n\n// Not sure for now, if there is any better solution\n\n// Can make the loop to run for n/2, but that is still O(n) complexity\n\npub fn reverse(original: &str) -> String {\n\n let size = original.len();\n\n let mut reversed = String::with_capacity(size);\n\n let original_bytes = original.as_bytes();\n\n\n\n for index in 1..=size {\n\n reversed.push(original_bytes[size - index] as char);\n\n }\n\n\n\n return reversed;\n\n}\n\n\n", "file_path": "exercism/rust/reverse-string/src/reverse_string.rs", "rank": 16, "score": 176423.56101018924 }, { "content": "pub fn grapheme_reverse(original: &str) -> String {\n\n let original_uchars = UnicodeSegmentation::graphemes(original, true).collect::<Vec<&str>>();\n\n let size = original_uchars.len();\n\n let mut reversed = String::with_capacity(size);\n\n\n\n for index in 1..=size {\n\n reversed.push_str(original_uchars[size - index]);\n\n }\n\n\n\n return reversed;\n\n}\n", 
"file_path": "exercism/rust/reverse-string/src/reverse_string.rs", "rank": 17, "score": 174149.64265108437 }, { "content": "pub fn simple_reverse(original: &str) -> String {\n\n return original.chars().rev().collect::<String>();\n\n}\n\n\n", "file_path": "exercism/rust/reverse-string/src/reverse_string.rs", "rank": 18, "score": 174149.64265108437 }, { "content": "pub fn odd_ones_out(list: &[isize]) -> Vec<isize> {\n\n\tlet mut odds: Vec<isize> = Vec::new();\n\n\tfor element in list {\n\n\t\tif element & 1 == 1 {\n\n\t\t\todds.push(*element);\n\n\t\t}\n\n\t}\n\n\todds\n\n}\n\n\n", "file_path": "rust-book/testing/src/lib.rs", "rank": 19, "score": 171547.066091494 }, { "content": "struct Renderer<B: Backend> {\n\n // Vulkan backend instance object\n\n instance: B::Instance,\n\n // Vulkan backend surface object\n\n surface: ManuallyDrop<B::Surface>,\n\n // Logical Device object\n\n device: B::Device,\n\n // CommandPool instance\n\n command_pool: Option<B::CommandPool>,\n\n}\n\n\n\nimpl<B: Backend> Renderer<B> {\n\n fn new(instance: B::Instance, surface: B::Surface) -> Self {\n\n let mut adapters = instance.enumerate_adapters();\n\n let (memory_types, limits, adapter) = {\n\n let adapter = adapters.remove(0);\n\n (\n\n adapter.physical_device.memory_properties().memory_types,\n\n adapter.physical_device.limits(),\n\n adapter,\n", "file_path": "gui/draw-cube/src/006-command_buffers/main.rs", "rank": 20, "score": 168228.4761784424 }, { "content": "struct Renderer<B: Backend> {\n\n // Vulkan backend instance object\n\n instance: B::Instance,\n\n // Vulkan backend surface object\n\n surface: ManuallyDrop<B::Surface>,\n\n // Device Adpter, containing Physical and Queue details\n\n adapter: Adapter<B>,\n\n // Logical Device object\n\n device: B::Device,\n\n // Queue Group for rendering reference\n\n queue_group: family::QueueGroup<B>,\n\n}\n\n\n\nimpl<B: Backend> Renderer<B> {\n\n fn new(instance: B::Instance, surface: B::Surface) -> Self {\n\n let mut adapters = instance.enumerate_adapters();\n\n let (memory_types, limits, adapter) = {\n\n let adapter = adapters.remove(0);\n\n (\n\n adapter.physical_device.memory_properties().memory_types,\n", "file_path": "gui/draw-cube/src/002-enumerate_devices/main.rs", "rank": 21, "score": 168213.9912121948 }, { "content": "fn build_window(\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (window::WindowBuilder, hal_window::Extent2D) {\n\n // We need to first get Logical and Physical Size of the screen\n\n let (logical_window_size, physical_window_size) = {\n\n let dpi = ev_loop.primary_monitor().scale_factor();\n\n let logical: LogicalSize<u32> = WINDOW_SIZE.into();\n\n\n\n // Phsical Size is the actual internal screen size, a factor of DPI\n\n let physical: PhysicalSize<u32> = logical.to_physical(dpi);\n\n\n\n (logical, physical)\n\n };\n\n\n\n let window_builder = window::WindowBuilder::new()\n\n .with_title(APP_NAME)\n\n .with_inner_size(logical_window_size);\n\n\n\n (\n\n window_builder,\n\n hal_window::Extent2D {\n\n width: physical_window_size.width,\n\n height: physical_window_size.height,\n\n },\n\n )\n\n}\n\n\n", "file_path": "gui/draw-cube/src/001-show_window/main.rs", "rank": 22, "score": 154850.91430233337 }, { "content": "fn main() {\n\n log4rs::init_file(\"log4rs.yml\", Default::default()).unwrap();\n\n\n\n let ev_loop = event_loop::EventLoop::new();\n\n let (window_builder, extent) = build_window(&ev_loop);\n\n let (instance, surface, window) = create_backend(window_builder, &ev_loop, extent);\n\n\n\n let renderer = 
Renderer::<back::Backend>::new(instance, surface);\n\n\n\n ev_loop.run(move |event, _, control_flow| {\n\n *control_flow = event_loop::ControlFlow::Wait;\n\n match event {\n\n event::Event::WindowEvent { event, .. } => {\n\n #[allow(unused_variables)]\n\n match event {\n\n event::WindowEvent::CloseRequested => {\n\n *control_flow = event_loop::ControlFlow::Exit\n\n }\n\n event::WindowEvent::Resized(dims) => {\n\n debug!(\"RESIZE EVENT\");\n", "file_path": "gui/draw-cube/src/001-show_window/main.rs", "rank": 23, "score": 147191.09414093458 }, { "content": "fn create_backend(\n\n wb: window::WindowBuilder,\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (back::Instance, back::Surface, window::Window) {\n\n let window = wb.build(ev_loop).unwrap();\n\n\n\n let instance = back::Instance::create(APP_NAME, 1).expect(\"Failed to create an instance!\");\n\n let surface = unsafe {\n\n instance\n\n .create_surface(&window)\n\n .expect(\"Failed to create a surface!\")\n\n };\n\n\n\n (instance, surface, window)\n\n}\n\n\n", "file_path": "gui/draw-cube/src/003-swap_chain/main.rs", "rank": 25, "score": 144439.50315599728 }, { "content": "fn build_window(\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (window::WindowBuilder, hal_window::Extent2D) {\n\n // We need to first get Logical and Physical Size of the screen\n\n let (logical_window_size, physical_window_size) = {\n\n let dpi = ev_loop.primary_monitor().scale_factor();\n\n let logical: LogicalSize<u32> = WINDOW_SIZE.into();\n\n\n\n // Phsical Size is the actual internal screen size, a factor of DPI\n\n let physical: PhysicalSize<u32> = logical.to_physical(dpi);\n\n\n\n (logical, physical)\n\n };\n\n\n\n let window_builder = window::WindowBuilder::new()\n\n .with_title(APP_NAME)\n\n .with_inner_size(logical_window_size);\n\n\n\n (\n\n window_builder,\n\n hal_window::Extent2D {\n\n width: physical_window_size.width,\n\n height: physical_window_size.height,\n\n },\n\n )\n\n}\n\n\n", "file_path": "gui/draw-cube/src/006-command_buffers/main.rs", "rank": 26, "score": 144439.47847355873 }, { "content": "fn build_window(\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (window::WindowBuilder, hal_window::Extent2D) {\n\n // We need to first get Logical and Physical Size of the screen\n\n let (logical_window_size, physical_window_size) = {\n\n let dpi = ev_loop.primary_monitor().scale_factor();\n\n let logical: LogicalSize<u32> = WINDOW_SIZE.into();\n\n\n\n // Phsical Size is the actual internal screen size, a factor of DPI\n\n let physical: PhysicalSize<u32> = logical.to_physical(dpi);\n\n\n\n (logical, physical)\n\n };\n\n\n\n let window_builder = window::WindowBuilder::new()\n\n .with_title(APP_NAME)\n\n .with_inner_size(logical_window_size);\n\n\n\n (\n\n window_builder,\n\n hal_window::Extent2D {\n\n width: physical_window_size.width,\n\n height: physical_window_size.height,\n\n },\n\n )\n\n}\n\n\n", "file_path": "gui/draw-cube/src/002-enumerate_devices/main.rs", "rank": 27, "score": 144424.64793857248 }, { "content": "fn build_window(\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (window::WindowBuilder, hal_window::Extent2D) {\n\n // We need to first get Logical and Physical Size of the screen\n\n let (logical_window_size, physical_window_size) = {\n\n let dpi = ev_loop.primary_monitor().scale_factor();\n\n let logical: LogicalSize<u32> = WINDOW_SIZE.into();\n\n\n\n // Phsical Size is the actual internal screen size, a factor of DPI\n\n let physical: PhysicalSize<u32> = logical.to_physical(dpi);\n\n\n\n (logical, physical)\n\n };\n\n\n\n let 
window_builder = window::WindowBuilder::new()\n\n .with_title(APP_NAME)\n\n .with_inner_size(logical_window_size);\n\n\n\n (\n\n window_builder,\n\n hal_window::Extent2D {\n\n width: physical_window_size.width,\n\n height: physical_window_size.height,\n\n },\n\n )\n\n}\n\n\n", "file_path": "gui/draw-cube/src/008-color_background/main.rs", "rank": 28, "score": 144282.15604251923 }, { "content": "fn create_user() -> User {\n\n // Create a new user and return it. No `new` keyword required...\n\n User {\n\n name: String::from(\"foo\"),\n\n email: \"bar\",\n\n age: 24,\n\n active: true\n\n }\n\n}\n\n\n", "file_path": "rust-book/structs/src/main.rs", "rank": 29, "score": 143810.9990263015 }, { "content": "fn create_backend(\n\n wb: window::WindowBuilder,\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (back::Instance, back::Surface, window::Window) {\n\n let window = wb.build(ev_loop).unwrap();\n\n\n\n let instance = back::Instance::create(APP_NAME, 1).expect(\"Failed to create an instance!\");\n\n let surface = unsafe {\n\n instance\n\n .create_surface(&window)\n\n .expect(\"Failed to create a surface!\")\n\n };\n\n\n\n (instance, surface, window)\n\n}\n\n\n", "file_path": "gui/draw-cube/src/004-render_pass_init/main.rs", "rank": 30, "score": 141830.59158324683 }, { "content": "fn create_backend(\n\n wb: window::WindowBuilder,\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (back::Instance, back::Surface, window::Window) {\n\n let window = wb.build(ev_loop).unwrap();\n\n\n\n let instance = back::Instance::create(APP_NAME, 1).expect(\"Failed to create an instance!\");\n\n let surface = unsafe {\n\n instance\n\n .create_surface(&window)\n\n .expect(\"Failed to create a surface!\")\n\n };\n\n\n\n (instance, surface, window)\n\n}\n\n\n", "file_path": "gui/draw-cube/src/final-draw-cube/main.rs", "rank": 31, "score": 141830.59158324683 }, { "content": "pub fn setup () {\n\n\t// any particular setup that is needed.\n\n\tprintln!(\"It works...\");\n\n}", "file_path": "rust-book/testing/tests/common.rs", "rank": 32, "score": 138981.6933529971 }, { "content": "fn create_from(user: User) -> User {\n\n User {\n\n email: \"foo@bar.com\",\n\n ..user\n\n }\n\n}\n\n\n\n/**\n\n * Breakdown #4:\n\n * - Defining a Tuple Struct.\n\n * - This type of struct can be useful for similar list of data, that means something.\n\n * -- Like Color(u8, u8, u8);\n\n */\n", "file_path": "rust-book/structs/src/main.rs", "rank": 33, "score": 136308.42040724616 }, { "content": "struct Renderer<B: Backend> {\n\n}\n\n\n\nimpl<B: Backend> Renderer<B> {\n\n fn new() -> Self {\n\n Renderer {}\n\n }\n\n}\n\n\n", "file_path": "gui/draw-cube/src/000-setup/main.rs", "rank": 34, "score": 133155.69132561525 }, { "content": "struct Renderer<B: Backend> {\n\n window_dims: hal_window::Extent2D,\n\n viewport: Viewport,\n\n // Vulkan backend instance object\n\n instance: B::Instance,\n\n // Vulkan backend surface object\n\n surface: ManuallyDrop<B::Surface>,\n\n // Device Adpter, containing Physical and Queue details\n\n adapter: Adapter<B>,\n\n // Logical Device object\n\n device: B::Device,\n\n // Queue Group for rendering reference\n\n queue_group: family::QueueGroup<B>,\n\n // Collection Swapchain Image, Empty buffer initially\n\n frame_count: usize,\n\n // Desired Format / Selected Format\n\n format: hal_format::Format,\n\n}\n\n\n\nimpl<B: Backend> Renderer<B> {\n", "file_path": "gui/draw-cube/src/003-swap_chain/main.rs", "rank": 35, "score": 131404.8202916794 }, { "content": "struct Renderer<B: Backend> {\n\n window_dims: 
hal_window::Extent2D,\n\n viewport: Viewport,\n\n // Vulkan backend instance object\n\n instance: B::Instance,\n\n // Vulkan backend surface object\n\n surface: ManuallyDrop<B::Surface>,\n\n // Device Adpter, containing Physical and Queue details\n\n adapter: Adapter<B>,\n\n // Logical Device object\n\n device: B::Device,\n\n // Queue Group for rendering reference\n\n queue_group: family::QueueGroup<B>,\n\n // Collection Swapchain Image, Empty buffer initially\n\n frame_count: usize,\n\n // Desired Format / Selected Format\n\n format: hal_format::Format,\n\n // Render Pass instance\n\n render_pass: ManuallyDrop<B::RenderPass>,\n\n}\n", "file_path": "gui/draw-cube/src/004-render_pass_init/main.rs", "rank": 36, "score": 129733.68596826654 }, { "content": "#[derive(Debug)]\n\nstruct Color(u8, u8, u8); // (Red, Green, Blue)\n\n\n\n/**\n\n * Breakdown #5:\n\n * - Implementing a Method (function bound to an instance) for Struct\n\n * - Type of self is Color due to this method being inside the impl Color context\n\n * - Methods can take ownership of self, borrow self immutably (as done below),\n\n * or borrow self mutably, just as they can any other parameter.\n\n * - Also, impl blocks can have one or more methods in it defined\n\n */\n\nimpl Color {\n\n /// @param {u8} percent: represent amount to be lighten from color (0 - 100)\n\n fn red(&self) -> u8 {\n\n self.0\n\n }\n\n\n\n fn green(&self) -> u8 {\n\n self.1\n\n }\n\n\n\n fn blue(&self) -> u8 {\n\n self.2\n\n }\n\n}\n\n\n", "file_path": "rust-book/structs/src/main.rs", "rank": 37, "score": 123597.76284892223 }, { "content": "fn handle_connection(mut stream: TcpStream) {\n\n let mut buffer = [0; 1024];\n\n stream.read(&mut buffer).unwrap();\n\n\n\n let get = b\"GET / HTTP/1.1\\r\\n\";\n\n\n\n // let response = \"HTTP/1.1 200 OK\\r\\n\\r\\n\";\n\n // println!(\"Request: {}\", String::from_utf8_lossy(&buffer[..]));\n\n\n\n if buffer.starts_with(get) {\n\n let contents = fs::read_to_string(\"index.html\").unwrap();\n\n\n\n let response = format!(\n\n \"HTTP/1.1 200 OK\\nContent-Length: {}\\n\\n{}\",\n\n contents.len(),\n\n contents\n\n );\n\n\n\n stream.write(response.as_bytes()).unwrap();\n\n stream.flush().unwrap();\n\n } else {\n\n // TODO: update\n\n }\n\n}\n", "file_path": "small-projects/web-server/src/main.rs", "rank": 38, "score": 120732.55671838453 }, { "content": "pub fn verse(n: i32) -> String {\n\n let nth_statements = map_to_verse(n);\n\n format!(\n\n \"{} of beer on the wall, {} of beer.\\n{}, {} of beer on the wall.\\n\",\n\n nth_statements.0, nth_statements.1, nth_statements.2, nth_statements.3\n\n )\n\n}\n\n\n", "file_path": "exercism/rust/beer-song/src/lib.rs", "rank": 39, "score": 119750.08585253386 }, { "content": "/// This Function takes any argument immutable/mutable, and converts it to a new\n\n/// variable that is mutable, and has it's own ownership...\n\nfn return_ownership(mut msg: String) -> String {\n\n msg.push_str(\" #modified\");\n\n msg\n\n}\n\n\n", "file_path": "rust-book/ownership/src/main.rs", "rank": 40, "score": 118451.00722099308 }, { "content": "pub fn is_leap_year(year: u64) -> bool {\n\n (year % 4 == 0 && year % 100 != 0) || year % 400 == 0\n\n}\n", "file_path": "exercism/rust/leap/src/lib.rs", "rank": 41, "score": 118099.98101601246 }, { "content": "fn test_struct_members_immutability() {\n\n let mut user = create_user_with_shorthand(String::from(\"Dumb\"), \"foo\", 12);\n\n user.name = String::from(\"Dumber\");\n\n println!(\"=== USER Modified: {:?}\\n\", user);\n\n}\n\n\n\n/**\n\n * Breakdown #3:\n\n 
* - `user`.[property_name] is acceptable to access `user` instance members. This can also be used\n\n * to create other user instance from existing user.\n\n * - using struct update syntax (similar to JS Object Spread, but with varius catches to keep in mind).\n\n * -- This example is using Object Spread.\n\n * -- Catch 1: Overrides only those member, which are not explicitly modified/created.\n\n * - Check this `rustc --explain E0507`, references doesn't work for update syntax\n\n * - Normal pass by value works, thus to maintain the ownership, we need to send a clone of the struct instance instead.\n\n */\n", "file_path": "rust-book/structs/src/main.rs", "rank": 42, "score": 116883.08074228115 }, { "content": "fn mutable_pass_by_reference (line: &mut String) -> String {\n\n /// * std::str::replace, std is the default namespace...\n\n /// * `line` is not modified, while replace returns a new String instance with the replaced data...\n\n str::replace(line, \" \", \"::\")\n\n // OR\n\n // line.replace(\" \", \"::\");\n\n}\n\n\n", "file_path": "rust-book/ownership/src/main.rs", "rank": 43, "score": 116867.43784235587 }, { "content": "fn modify_mutable_reference (line: &mut String) -> String {\n\n line.push_str(\", added sugar\");\n\n line.to_string()\n\n}\n\n\n\n/**\n\n * Breakdown #6:\n\n * - Immutable references can be made as many times as we want.\n\n * - Mutable references can only be created once in present scope...\n\n * Why? To prevent Data Races (Race Condition), as Mutable Reference can write to memory as well\n\n * compared to Immutable Reference...\n\n * Details - https://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html#mutable-references\n\n */\n", "file_path": "rust-book/ownership/src/main.rs", "rank": 44, "score": 116867.43784235587 }, { "content": "fn print_which_item_was_selected(event: CallbackInfo<List>) -> UpdateScreen {\n\n let selected = event.target_index_in_parent();\n\n let mut should_redraw = DontRedraw;\n\n\n\n if selected != event.state.data.selected {\n\n event.state.data.selected = selected;\n\n should_redraw = Redraw;\n\n }\n\n\n\n println!(\"selected item: {:?}\", event.state.data.selected);\n\n\n\n should_redraw\n\n}\n\n\n", "file_path": "gui/components/src/main.rs", "rank": 45, "score": 115453.51467137288 }, { "content": "fn draw_fps(ctx: &mut Context) -> GameResult {\n\n let mut text = CustomText::new(ctx, format!(\"FPS: {}\", (timer::fps(ctx) as u32).to_string()), None);\n\n text.printf(ctx, 10.0, 10.0, None, None)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cs50-2020-game-dev/pong/pong-6-7/src/main.rs", "rank": 46, "score": 113910.47409705276 }, { "content": "fn draw_fps(ctx: &mut Context) -> GameResult {\n\n let mut text = CustomText::new(ctx, format!(\"FPS: {}\", (timer::fps(ctx) as u32).to_string()), None);\n\n text.printf(ctx, 10.0, 10.0, None, None)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cs50-2020-game-dev/pong/pong-8/src/main.rs", "rank": 47, "score": 113910.47409705276 }, { "content": "fn main() {\n\n loop {\n\n let mut choice = String::new();\n\n println!(\"Make a choice:\");\n\n println!(\"\\t1. Usual Struct Creation\");\n\n println!(\"\\t2. Struct Creation using (field init shorthand), similar to ES6 Object Property Shorthand\");\n\n println!(\"\\t3. Is inner member of struct immutable as well?\");\n\n println!(\"\\t4. From other user instance\");\n\n println!(\"\\t5. Instantiate Color Tuple\");\n\n println!(\"\\t6. Color Struct Method example\");\n\n println!(\"\\t7. 
Test Color Impls after method call statement is already done, with associated methods\");\n\n\n\n std::io::stdin().read_line(&mut choice)\n\n .expect(\"Input Save failed...\");\n\n match choice.trim() {\n\n \"1\" => {\n\n let user = create_user();\n\n println!(\"=== USER created: {:?}\\n\", user);\n\n },\n\n \"2\" => {\n", "file_path": "rust-book/structs/src/main.rs", "rank": 48, "score": 112250.37098395891 }, { "content": "// 1-D Array Peak Finder\n\npub fn peak_finder_1d_greedy(arr: &[usize]) -> Option<usize> {\n\n for (i, value) in arr.iter().enumerate() {\n\n if arr.len() == 1\n\n || (i == 0 && *value >= arr[i + 1])\n\n || (i > 0 && i < (arr.len() - 1) && (arr[i - 1] <= *value && *value >= arr[i + 1])\n\n || (i == arr.len() - 1 && *value >= arr[i - 1]))\n\n {\n\n return Some(*value);\n\n } else {\n\n continue;\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "dsa/src/searching/peak_finder.rs", "rank": 49, "score": 111706.55018866571 }, { "content": "pub fn peak_finder_1d_divide_conquer(arr: &[usize]) -> Option<usize> {\n\n let mut _arr = vec![0; arr.len()];\n\n _arr.clone_from_slice(arr);\n\n let mid_index = arr.len() / 2;\n\n if arr.is_empty() {\n\n return None;\n\n }\n\n if mid_index > 0 && _arr[mid_index] <= _arr[mid_index - 1] {\n\n peak_finder_1d_divide_conquer(_arr.split_at(mid_index).0)\n\n } else if mid_index < (_arr.len() - 1) && _arr[mid_index] <= _arr[mid_index + 1] {\n\n peak_finder_1d_divide_conquer(_arr.split_at(mid_index).1)\n\n } else {\n\n Some(_arr[mid_index])\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "dsa/src/searching/peak_finder.rs", "rank": 50, "score": 110262.87688384598 }, { "content": "pub fn sing(start: i32, end: i32) -> String {\n\n (end..start + 1)\n\n .rev()\n\n .map(|n| verse(n))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\")\n\n}\n", "file_path": "exercism/rust/beer-song/src/lib.rs", "rank": 51, "score": 108830.51568740493 }, { "content": "// Returns a Utc DateTime one billion seconds after start.\n\npub fn after(start: DateTime<Utc>) -> DateTime<Utc> {\n\n start + Duration::seconds(10i64.pow(9))\n\n}\n", "file_path": "exercism/rust/gigasecond/src/lib.rs", "rank": 52, "score": 108830.51568740493 }, { "content": "fn main() {\n\n log4rs::init_file(\"log4rs.yml\", Default::default()).unwrap();\n\n\n\n let ev_loop = event_loop::EventLoop::new();\n\n let (window_builder, extent) = build_window(&ev_loop);\n\n let (instance, surface, window) = create_backend(window_builder, &ev_loop, extent);\n\n\n\n let renderer = Renderer::<back::Backend>::new(instance, surface);\n\n\n\n ev_loop.run(move |event, _, control_flow| {\n\n *control_flow = event_loop::ControlFlow::Wait;\n\n match event {\n\n event::Event::WindowEvent { event, .. 
} => {\n\n #[allow(unused_variables)]\n\n match event {\n\n event::WindowEvent::CloseRequested => {\n\n *control_flow = event_loop::ControlFlow::Exit\n\n }\n\n event::WindowEvent::Resized(dims) => {\n\n debug!(\"RESIZE EVENT\");\n", "file_path": "gui/draw-cube/src/006-command_buffers/main.rs", "rank": 53, "score": 108590.33700100712 }, { "content": "fn main() {\n\n log4rs::init_file(\"log4rs.yml\", Default::default()).unwrap();\n\n\n\n let ev_loop = event_loop::EventLoop::new();\n\n let (window_builder, extent) = build_window(&ev_loop);\n\n let (instance, surface, window) = create_backend(window_builder, &ev_loop, extent);\n\n\n\n let renderer = Renderer::<back::Backend>::new(instance, surface);\n\n\n\n ev_loop.run(move |event, _, control_flow| {\n\n *control_flow = event_loop::ControlFlow::Wait;\n\n match event {\n\n event::Event::WindowEvent { event, .. } => {\n\n #[allow(unused_variables)]\n\n match event {\n\n event::WindowEvent::CloseRequested => {\n\n *control_flow = event_loop::ControlFlow::Exit\n\n }\n\n event::WindowEvent::Resized(dims) => {\n\n debug!(\"RESIZE EVENT\");\n", "file_path": "gui/draw-cube/src/002-enumerate_devices/main.rs", "rank": 54, "score": 108575.14400579114 }, { "content": "fn build_window(\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (window::WindowBuilder, hal_window::Extent2D) {\n\n // We need to first get Logical and Physical Size of the screen\n\n let (logical_window_size, physical_window_size) = {\n\n let dpi = ev_loop.primary_monitor().scale_factor();\n\n let logical: LogicalSize<u32> = WINDOW_SIZE.into();\n\n\n\n // Phsical Size is the actual internal screen size, a factor of DPI\n\n let physical: PhysicalSize<u32> = logical.to_physical(dpi);\n\n\n\n (logical, physical)\n\n };\n\n\n\n let window_builder = window::WindowBuilder::new()\n\n .with_title(APP_NAME)\n\n .with_inner_size(logical_window_size);\n\n\n\n (\n\n window_builder,\n\n hal_window::Extent2D {\n\n width: physical_window_size.width,\n\n height: physical_window_size.height,\n\n },\n\n )\n\n}\n\n\n", "file_path": "gui/draw-cube/src/003-swap_chain/main.rs", "rank": 56, "score": 106737.31834309014 }, { "content": "///\n\n/// This method is the main cycle, which runs the logical part of the application.\n\n/// It can throw error, but nothing is returned when it passes.\n\n///\n\n/// @returns Result<Ok, Err>\n\n///\n\npub fn run(config: Config) -> Result<(), Box<dyn Error>> {\n\n let file_data = fs::read_to_string(config.get_filename())?; // Propagating Error to caller.\n\n let found: Vec<&str>;\n\n if *config.is_sensitive() {\n\n found = search::search(config.get_query(), &file_data);\n\n } else {\n\n found = search::search_case_insensitive(config.get_query(), &file_data);\n\n }\n\n println!(\"Found: {:?}\", found);\n\n Ok(())\n\n}\n", "file_path": "rust-book/minigrep/src/lib.rs", "rank": 57, "score": 106307.58428584109 }, { "content": "fn build_window(\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (window::WindowBuilder, hal_window::Extent2D) {\n\n // We need to first get Logical and Physical Size of the screen\n\n let (logical_window_size, physical_window_size) = {\n\n let dpi = ev_loop.primary_monitor().scale_factor();\n\n let logical: LogicalSize<u32> = WINDOW_SIZE.into();\n\n\n\n // Phsical Size is the actual internal screen size, a factor of DPI\n\n let physical: PhysicalSize<u32> = logical.to_physical(dpi);\n\n\n\n (logical, physical)\n\n };\n\n\n\n let window_builder = window::WindowBuilder::new()\n\n .with_title(APP_NAME)\n\n .with_inner_size(logical_window_size);\n\n\n\n (\n\n 
window_builder,\n\n hal_window::Extent2D {\n\n width: physical_window_size.width,\n\n height: physical_window_size.height,\n\n },\n\n )\n\n}\n\n\n", "file_path": "gui/draw-cube/src/004-render_pass_init/main.rs", "rank": 58, "score": 105006.91158917488 }, { "content": "fn build_window(\n\n ev_loop: &event_loop::EventLoop<()>,\n\n) -> (window::WindowBuilder, hal_window::Extent2D) {\n\n // We need to first get Logical and Physical Size of the screen\n\n let (logical_window_size, physical_window_size) = {\n\n let dpi = ev_loop.primary_monitor().scale_factor();\n\n let logical: LogicalSize<u32> = WINDOW_SIZE.into();\n\n\n\n // Phsical Size is the actual internal screen size, a factor of DPI\n\n let physical: PhysicalSize<u32> = logical.to_physical(dpi);\n\n\n\n (logical, physical)\n\n };\n\n\n\n let window_builder = window::WindowBuilder::new()\n\n .with_title(APP_NAME)\n\n .with_inner_size(logical_window_size);\n\n\n\n (\n\n window_builder,\n\n hal_window::Extent2D {\n\n width: physical_window_size.width,\n\n height: physical_window_size.height,\n\n },\n\n )\n\n}\n\n\n", "file_path": "gui/draw-cube/src/final-draw-cube/main.rs", "rank": 59, "score": 105006.91158917488 }, { "content": "pub fn map_to_verse(n: i32) -> (String, String, String, String) {\n\n match n {\n\n 0 => (\n\n String::from(\"No more bottles\"),\n\n String::from(\"no more bottles\"),\n\n String::from(\"Go to the store and buy some more\"),\n\n String::from(\"99 bottles\"),\n\n ),\n\n 1 => (\n\n String::from(\"1 bottle\"),\n\n String::from(\"1 bottle\"),\n\n String::from(\"Take it down and pass it around\"),\n\n String::from(\"no more bottles\"),\n\n ),\n\n n if n < 0 || n > 99 => (\n\n String::from(\"Unknown bottles\"),\n\n \"unknown bottles\".to_string(),\n\n String::from(\"Go to the store and buy some more\"),\n\n String::from(\"unknown bottles\"),\n\n ),\n\n n => (\n\n format!(\"{} bottles\", n),\n\n format!(\"{} bottles\", n),\n\n String::from(\"Take one down and pass it around\"),\n\n format!(\"{} bottle{}\", n - 1, if n - 1 == 1 { \"\" } else { \"s\" }),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "exercism/rust/beer-song/src/lib.rs", "rank": 60, "score": 104863.91098102136 }, { "content": "#[derive(Clone, Debug)] // https://doc.rust-lang.org/rust-by-example/trait/clone.html\n\nstruct User {\n\n name: String,\n\n // Why the Type is `&'static str`, instead of `&str`, Check this https://doc.rust-lang.org/book/ch10-03-lifetime-syntax.html#lifetime-elision\n\n // And according to this following should be `&'a str`, with struct also having a lifetime of it's own.\n\n // For now as the app is just for example purpose, it's fine to use static lifetime...\n\n email: &'static str, // https://www.reddit.com/r/rust/comments/2o8r94/why_is_a_string_literal_not_always_of_type_str/\n\n age: u8,\n\n active: bool\n\n}\n\n\n", "file_path": "rust-book/structs/src/main.rs", "rank": 61, "score": 85121.94359884708 }, { "content": "#[derive(Debug)]\n\nstruct MessageStruct {\n\n msg: String,\n\n}\n\n\n\n/**\n\n * Breakdown #1:\n\n * - I was thinking Enum as C++ Enums, but it's not. It's more than a store of named integer values here,\n\n * more like JAVA's Enums, much better than even that. 
Consider it as a Conditional Map Data Type,\n\n * which changes it's behaviour respective to the type of instance created from Enum.\n\n * - Just my observation till now, might be I am wrong here, but I think Enums are miniature CLASS.\n\n * - Following is the way to create an Enum.\n\n * - Figured out `'static` is some kind of Lifetime Speciifier, which we will learn later.\n\n */\n", "file_path": "rust-book/enums/src/main.rs", "rank": 62, "score": 84335.4447963009 }, { "content": "use std::mem::ManuallyDrop;\n\nuse std::ptr;\n\n\n\nuse gfx_hal::{window as hal_window, Backend, Instance};\n\nuse winit::{\n\n dpi::{LogicalSize, PhysicalSize, Size},\n\n event, event_loop, window,\n\n};\n\n\n\n#[cfg(feature = \"dx12\")]\n\nuse gfx_backend_dx12 as back;\n\n#[cfg(feature = \"metal\")]\n\nuse gfx_backend_metal as back;\n\n#[cfg(feature = \"vulkan\")]\n\nuse gfx_backend_vulkan as back;\n\n\n\nuse log::debug;\n\nuse log4rs;\n\n\n\nconst APP_NAME: &'static str = \"Show Window\";\n\nconst WINDOW_SIZE: [u32; 2] = [1280, 768];\n\n\n", "file_path": "gui/draw-cube/src/001-show_window/main.rs", "rank": 63, "score": 79218.12638147731 }, { "content": " }\n\n event::WindowEvent::ScaleFactorChanged { new_inner_size, .. } => {\n\n // Will get called whenever the screen scale factor (DPI) changes,\n\n // like when user move the Window from one less DPI monitor\n\n // to other high scaled DPI Monitor.\n\n debug!(\"Scale Factor Change\");\n\n }\n\n _ => (),\n\n }\n\n }\n\n event::Event::MainEventsCleared => {\n\n debug!(\"MainEventsCleared\");\n\n window.request_redraw();\n\n }\n\n event::Event::RedrawRequested(_) => {\n\n debug!(\"RedrawRequested\");\n\n }\n\n event::Event::RedrawEventsCleared => {\n\n debug!(\"RedrawEventsCleared\");\n\n }\n\n _ => (),\n\n }\n\n });\n\n}\n", "file_path": "gui/draw-cube/src/001-show_window/main.rs", "rank": 64, "score": 79171.33159380386 }, { "content": "// main.rs\n\nstruct State {\n\n surface: wgpu::Surface,\n\n device: wgpu::Device,\n\n queue: wgpu::Queue,\n\n sc_desc: wgpu::SwapChainDescriptor,\n\n swap_chain: wgpu::SwapChain,\n\n size: winit::dpi::PhysicalSize<u32>,\n\n}\n\n\n\nimpl State {\n\n // Creating some of the wgpu types requires async code\n\n async fn new(window: &Window) -> Self {\n\n let size = window.inner_size();\n\n\n\n // The instance is a handle to our GPU\n\n // BackendBit::PRIMARY => Vulkan + Metal + DX12 + Browser WebGPU\n\n let instance = wgpu::Instance::new(wgpu::BackendBit::PRIMARY);\n\n let surface = unsafe { instance.create_surface(window) };\n\n let adapter = instance.request_adapter(\n\n &wgpu::RequestAdapterOptions {\n", "file_path": "gui/wgpu/src/main.rs", "rank": 65, "score": 76444.67523217274 }, { "content": "struct List {\n\n items: Vec<&'static str>,\n\n selected: Option<usize>,\n\n}\n\n\n\nconst CUSTOM_CSS: &str = \".selected { background-color: black; color: white; }\";\n\n\n\nimpl Layout for List {\n\n fn layout(&self, _: LayoutInfo<Self>) -> Dom<Self> {\n\n self.items\n\n .iter()\n\n .enumerate()\n\n .map(|(idx, item)| {\n\n Dom::label(*item)\n\n .with_class(if self.selected == Some(idx) {\n\n \"selected\".into()\n\n } else {\n\n \"\"\n\n })\n\n .with_callback(On::MouseDown, print_which_item_was_selected)\n\n })\n\n .collect::<Dom<Self>>()\n\n }\n\n}\n\n\n", "file_path": "gui/components/src/main.rs", "rank": 66, "score": 76444.67523217274 }, { "content": "struct Draft {}\n\n\n\nimpl State for Draft {\n\n fn request_review(self: Box<Self>) -> Box<dyn State> {\n\n Box::new(PendingReview {})\n\n }\n\n\n\n fn approve(self: Box<Self>) -> 
Box<dyn State> {\n\n self // Since draft cannot be approved\n\n }\n\n\n\n fn content<'a>(&self, post: &'a Post) -> &'a str {\n\n \"\"\n\n }\n\n}\n\n\n", "file_path": "rust-book/oops/src/lib.rs", "rank": 67, "score": 75380.3091450385 }, { "content": "struct Published {}\n\n\n\nimpl State for Published {\n\n fn request_review(self: Box<Self>) -> Box<dyn State> {\n\n self\n\n }\n\n\n\n fn approve(self: Box<Self>) -> Box<dyn State> {\n\n self\n\n }\n\n\n\n fn content<'a>(&self, post: &'a Post) -> &'a str {\n\n &post.content // because we are return a reference to string tuple\n\n }\n\n}\n", "file_path": "rust-book/oops/src/lib.rs", "rank": 68, "score": 75380.3091450385 }, { "content": "struct MyDataModel {}\n\n\n\nimpl Layout for MyDataModel {\n\n fn layout(&self, _: LayoutInfo<Self>) -> Dom<Self> {\n\n Dom::div()\n\n }\n\n}\n\n\n", "file_path": "gui/first-gui/src/main.rs", "rank": 69, "score": 74369.27294221027 }, { "content": "struct SelectBox {\n\n width: u32,\n\n height: u32,\n\n options: Vec<String>,\n\n}\n\n\n\nimpl Draw for SelectBox {\n\n fn draw(&self) {\n\n println!(\n\n \"SelectBox: width: {}, height: {}, options: {:#?}\",\n\n self.width, self.height, self.options\n\n );\n\n }\n\n}\n\n\n", "file_path": "rust-book/oops/src/main.rs", "rank": 70, "score": 74369.27294221027 }, { "content": "struct PendingReview {}\n\n\n\nimpl State for PendingReview {\n\n fn request_review(self: Box<Self>) -> Box<dyn State> {\n\n self\n\n }\n\n\n\n fn approve(self: Box<Self>) -> Box<dyn State> {\n\n Box::new(Published {})\n\n }\n\n\n\n fn content<'a>(&self, post: &'a Post) -> &'a str {\n\n \"\"\n\n }\n\n}\n\n\n", "file_path": "rust-book/oops/src/lib.rs", "rank": 71, "score": 74369.27294221027 }, { "content": "struct PrimeTime {\n\n primes: Vec<u32>,\n\n}\n\n\n\nimpl PrimeTime {\n\n pub fn new(size: usize) -> Self {\n\n let mut primes = Vec::with_capacity(size);\n\n primes.push(2);\n\n PrimeTime { primes }\n\n }\n\n\n\n /**\n\n * take_wile and filter together reduces the amount of loops run\n\n *\n\n * The logic behind `p * p <= n` is that, a number is already either factored out by some smaller prime\n\n * Or if it's a multiple of `p * non-prime` it's a not a prime anyways...\n\n *\n\n * For eg. 
13 is a prime, and filtering it with 10 % 5 is usesless, as 10 % 2 === 0,\n\n * which rejects the number even before coming to check with prime number 5.\n\n *\n", "file_path": "exercism/rust/nth-prime/src/lib.rs", "rank": 72, "score": 73407.65645548422 }, { "content": "#[derive(Debug)]\n\nstruct BNode {\n\n id: usize,\n\n name: String,\n\n next: Box<Option<BNode>>,\n\n}\n\n\n\nimpl BNode {\n\n fn new(id: usize, name: &str, next: Option<BNode>) -> Self {\n\n BNode{\n\n id,\n\n name: name.to_owned(),\n\n next: Box::new(next)\n\n }\n\n }\n\n}\n\n/** Box<T> example */\n\n\n\n/** Rc<T> example */\n", "file_path": "rust-book/smart-pointers/src/main.rs", "rank": 73, "score": 73407.65645548422 }, { "content": "#[derive(Debug)]\n\nstruct RList {\n\n head: Rc<RNodeOpt>,\n\n}\n\n\n", "file_path": "rust-book/smart-pointers/src/main.rs", "rank": 74, "score": 73407.65645548422 }, { "content": "#[derive(Debug)]\n\nstruct RNode {\n\n id: usize,\n\n next: Rc<RNodeOpt>,\n\n}\n\n\n\nimpl RNode {\n\n fn new(id: usize, next: Rc<RNodeOpt>) -> Self {\n\n RNode{\n\n id,\n\n next,\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust-book/smart-pointers/src/main.rs", "rank": 75, "score": 73407.65645548422 }, { "content": "fn main() {\n\n let data = List {\n\n items: vec![\"Hello\", \"World\", \"my\", \"name\", \"is\", \"Lorem\", \"Ipsum\"],\n\n selected: None,\n\n };\n\n\n\n let mut app = App::new(data, AppConfig::default()).unwrap();\n\n let css = css::override_native(CUSTOM_CSS).unwrap();\n\n let window = app\n\n .create_window(WindowCreateOptions::default(), css)\n\n .unwrap();\n\n app.run(window).unwrap();\n\n}\n", "file_path": "gui/components/src/main.rs", "rank": 76, "score": 72933.1657501523 }, { "content": "fn main() {\n\n env_logger::init();\n\n let event_loop = EventLoop::new();\n\n let window = WindowBuilder::new()\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n let mut state = block_on(State::new(&window));\n\n\n\n event_loop.run(move |event, _, control_flow| {\n\n match event {\n\n Event::WindowEvent {\n\n ref event,\n\n window_id,\n\n } if window_id == window.id() => if !state.input(event) {\n\n match event {\n\n WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,\n\n WindowEvent::KeyboardInput {\n\n input,\n\n ..\n", "file_path": "gui/wgpu/src/main.rs", "rank": 77, "score": 72933.1657501523 }, { "content": "function push:setBorderColor(color, g, b)\n\n self._borderColor = g and {color, g, b} or color\n\nend\n\n\n", "file_path": "cs50-2020-game-dev/pong-lua/pong1/push.lua", "rank": 78, "score": 72636.20545542153 }, { "content": "function push:setBorderColor(color, g, b)\n\n self._borderColor = g and {color, g, b} or color\n\nend\n\n\n", "file_path": "cs50-2020-game-dev/pong-lua/pong2/push.lua", "rank": 79, "score": 72636.20545542153 }, { "content": "function push:setBorderColor(color, g, b)\n\n self._borderColor = g and {color, g, b} or color\n\nend\n\n\n", "file_path": "cs50-2020-game-dev/pong-lua/pong6/push.lua", "rank": 80, "score": 72636.20545542153 }, { "content": "function push:setBorderColor(color, g, b)\n\n self._borderColor = g and {color, g, b} or color\n\nend\n\n\n", "file_path": "cs50-2020-game-dev/pong-lua/pong8_9/push.lua", "rank": 81, "score": 72636.20545542153 }, { "content": "function push:setBorderColor(color, g, b)\n\n self._borderColor = g and {color, g, b} or color\n\nend\n\n\n", "file_path": "cs50-2020-game-dev/fifty-bird/f0/push.lua", "rank": 82, "score": 72636.20545542153 }, { "content": "/// Ggez doesn't work without state, we will be needing an initial state\n\n/// that 
contains our Text mesh.\n\nstruct GameState {\n\n text: CustomText,\n\n}\n\n\n\nimpl GameState {\n\n fn new(ctx: &mut Context) -> Self {\n\n let text = CustomText::new(ctx, String::from(\"Hello Pong!\"));\n\n GameState { text }\n\n }\n\n}\n\n\n\nimpl EventHandler for GameState {\n\n fn update(&mut self, _ctx: &mut Context) -> GameResult {\n\n Ok(())\n\n }\n\n\n\n fn draw(&mut self, ctx: &mut Context) -> GameResult {\n\n graphics::clear(ctx, Color::new(0.3, 0.3, 0.3, 1.0));\n\n\n\n self.text.printf(\n", "file_path": "cs50-2020-game-dev/pong/pong-0/src/main.rs", "rank": 83, "score": 72491.92265715846 }, { "content": "struct CustomText {\n\n mesh: Text,\n\n width: u32,\n\n height: u32,\n\n dpi_factor: f32,\n\n}\n\n\n\nimpl CustomText {\n\n fn new(ctx: &mut Context, string: String) -> Self {\n\n // Getting crisp text using this logic: https://github.com/ggez/ggez/issues/561\n\n let dpi_factor = graphics::window(ctx).get_hidpi_factor() as f32;\n\n\n\n // Scale can be considered as Font Size, as scale takes a value in pixels.\n\n let fragment = TextFragment::new(string)\n\n .font(Font::new(ctx, \"/fonts/font.ttf\").unwrap())\n\n .scale(Scale::uniform(24.0 * dpi_factor)); // Take a high resolution Font, and will later scale it down,\n\n // as for now, text does not have any interpolation built in.\n\n let mesh = Text::new(fragment);\n\n let (width, height) = mesh.dimensions(ctx);\n\n CustomText {\n", "file_path": "cs50-2020-game-dev/pong/pong-2/src/main.rs", "rank": 84, "score": 72491.92265715846 }, { "content": "/// Ggez doesn't work without state, we will be needing an initial state\n\n/// that contains our Text mesh.\n\nstruct GameState {\n\n text: CustomText,\n\n push: push::Push,\n\n}\n\n\n\nimpl GameState {\n\n fn new(ctx: &mut Context) -> GameResult<Self> {\n\n let text = CustomText::new(ctx, String::from(\"Hello Pong!\"));\n\n let push = push::Push::new(\n\n ctx,\n\n VIRTUAL_WIDTH,\n\n VIRTUAL_HEIGHT,\n\n WINDOW_WIDTH,\n\n WINDOW_HEIGHT,\n\n )?;\n\n Ok(GameState { text, push })\n\n }\n\n}\n\n\n\nimpl EventHandler for GameState {\n", "file_path": "cs50-2020-game-dev/pong/pong-1/src/main.rs", "rank": 85, "score": 72491.92265715846 }, { "content": "struct CustomText {\n\n mesh: Text,\n\n width: u32,\n\n height: u32,\n\n dpi_factor: f32,\n\n}\n\n\n\nimpl CustomText {\n\n fn new(ctx: &mut Context, string: String) -> Self {\n\n // Getting crisp text using this logic: https://github.com/ggez/ggez/issues/561\n\n let dpi_factor = graphics::window(ctx).get_hidpi_factor() as f32;\n\n\n\n // Scale can be considered as Font Size, as scale takes a value in pixels.\n\n let fragment = TextFragment::new(string)\n\n .font(Font::new(ctx, \"/fonts/font.ttf\").unwrap())\n\n .scale(Scale::uniform(24.0 * dpi_factor)); // Take a high resolution Font, and will later scale it down,\n\n // as for now, text does not have any interpolation built in.\n\n let mesh = Text::new(fragment);\n\n let (width, height) = mesh.dimensions(ctx);\n\n CustomText {\n", "file_path": "cs50-2020-game-dev/pong/pong-1/src/main.rs", "rank": 86, "score": 72491.92265715846 }, { "content": "/// Ggez doesn't work without state, we will be needing an initial state\n\n/// that contains our Text mesh.\n\nstruct GameState {\n\n text: CustomText,\n\n player1_score_text: CustomText,\n\n player2_score_text: CustomText,\n\n push: push::Push,\n\n player1: Player,\n\n player2: Player,\n\n border: graphics::Mesh,\n\n ball: Ball,\n\n dpi_factor: f32,\n\n}\n\n\n\nimpl GameState {\n\n fn new(ctx: &mut Context) -> GameResult<Self> {\n\n let dpi_factor = 
graphics::window(ctx).get_hidpi_factor() as f32;\n\n let text = CustomText::new(ctx, String::from(\"Hello Pong!\"), None);\n\n let push = push::Push::new(\n\n ctx,\n\n VIRTUAL_WIDTH,\n\n VIRTUAL_HEIGHT,\n", "file_path": "cs50-2020-game-dev/pong/pong-3/src/main.rs", "rank": 87, "score": 72491.92265715846 }, { "content": "/// Ggez doesn't work without state, we will be needing an initial state\n\n/// that contains our Text mesh.\n\nstruct GameState {\n\n text: CustomText,\n\n push: push::Push,\n\n player1: Player,\n\n player2: Player,\n\n border: graphics::Mesh,\n\n ball: Ball,\n\n state: GameStates,\n\n dpi_factor: f32,\n\n}\n\n\n\nimpl GameState {\n\n fn new(ctx: &mut Context) -> GameResult<Self> {\n\n let dpi_factor = graphics::window(ctx).get_hidpi_factor() as f32;\n\n let text = CustomText::new(ctx, String::from(\"Hello Pong!\"), None);\n\n let push = push::Push::new(\n\n ctx,\n\n VIRTUAL_WIDTH,\n\n VIRTUAL_HEIGHT,\n\n WINDOW_WIDTH,\n", "file_path": "cs50-2020-game-dev/pong/pong-8/src/main.rs", "rank": 88, "score": 72491.92265715846 }, { "content": "struct World {\n\n stage: usize, // Nothing else for now.\n\n player: Player,\n\n}\n\n\n\nimpl World {\n\n fn new(ctx: &mut Context) -> GameResult<Self> {\n\n Ok(World {\n\n stage: 0,\n\n player: Player::new(ctx)?,\n\n })\n\n }\n\n}\n\n\n\nimpl event::EventHandler for World {\n\n fn update(&mut self, ctx: &mut Context) -> GameResult {\n\n self.player.update(ctx)?;\n\n Ok(())\n\n }\n\n\n\n fn draw(&mut self, ctx: &mut Context) -> GameResult {\n\n graphics::clear(ctx, graphics::BLACK); // Clear with Black Background\n\n self.player.draw(ctx)?;\n\n graphics::present(ctx)?; // It's important to present the buffer on Screen\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "gui/ggez_basics/astro_blaster_v2/src/main.rs", "rank": 89, "score": 72491.92265715846 }, { "content": "/// Ggez doesn't work without state, we will be needing an initial state\n\n/// that contains our Text mesh.\n\nstruct GameState {\n\n text: CustomText,\n\n player1_score_text: CustomText,\n\n player2_score_text: CustomText,\n\n push: push::Push,\n\n player1: Player,\n\n player2: Player,\n\n border: graphics::Mesh,\n\n ball: Ball,\n\n state: GameStates,\n\n dpi_factor: f32,\n\n}\n\n\n\nimpl GameState {\n\n fn new(ctx: &mut Context) -> GameResult<Self> {\n\n let dpi_factor = graphics::window(ctx).get_hidpi_factor() as f32;\n\n let text = CustomText::new(ctx, String::from(\"Hello Pong!\"), None);\n\n let push = push::Push::new(\n\n ctx,\n\n VIRTUAL_WIDTH,\n", "file_path": "cs50-2020-game-dev/pong/pong-6-7/src/main.rs", "rank": 90, "score": 72491.92265715846 }, { "content": "struct CustomText {\n\n mesh: Text,\n\n width: u32,\n\n height: u32,\n\n}\n\n\n\nimpl CustomText {\n\n fn new(ctx: &mut Context, string: String) -> Self {\n\n // Scale can be considered as Font Size, as scale takes a value in pixels.\n\n let fragment = TextFragment::new(string).scale(Scale::uniform(18.0));\n\n let mesh = Text::new(fragment);\n\n let (width, height) = mesh.dimensions(ctx);\n\n CustomText {\n\n mesh,\n\n width,\n\n height,\n\n }\n\n }\n\n\n\n // This is a simple replication of Love2D example shown for this Class.\n", "file_path": "cs50-2020-game-dev/pong/pong-0/src/main.rs", "rank": 91, "score": 72491.92265715846 }, { "content": "/// Ggez doesn't work without state, we will be needing an initial state\n\n/// that contains our Text mesh.\n\nstruct GameState {\n\n text: CustomText,\n\n push: push::Push,\n\n border: graphics::Mesh,\n\n paddle_rect: graphics::Mesh,\n\n ball_circle: graphics::Mesh,\n\n 
dpi_factor: f32,\n\n}\n\n\n\nimpl GameState {\n\n fn new(ctx: &mut Context) -> GameResult<Self> {\n\n let dpi_factor = graphics::window(ctx).get_hidpi_factor() as f32;\n\n let text = CustomText::new(ctx, String::from(\"Hello Pong!\"));\n\n let push = push::Push::new(\n\n ctx,\n\n VIRTUAL_WIDTH,\n\n VIRTUAL_HEIGHT,\n\n WINDOW_WIDTH,\n\n WINDOW_HEIGHT,\n\n )?;\n", "file_path": "cs50-2020-game-dev/pong/pong-2/src/main.rs", "rank": 92, "score": 72491.92265715846 }, { "content": "/// Ggez doesn't work without state, we will be needing an initial state\n\n/// that contains our Text mesh.\n\nstruct GameState {\n\n text: CustomText,\n\n player1_score_text: CustomText,\n\n player2_score_text: CustomText,\n\n push: push::Push,\n\n player1: Player,\n\n player2: Player,\n\n border: graphics::Mesh,\n\n ball: Ball,\n\n state: GameStates,\n\n dpi_factor: f32,\n\n}\n\n\n\nimpl GameState {\n\n fn new(ctx: &mut Context) -> GameResult<Self> {\n\n let dpi_factor = graphics::window(ctx).get_hidpi_factor() as f32;\n\n let text = CustomText::new(ctx, String::from(\"Hello Pong!\"), None);\n\n let push = push::Push::new(\n\n ctx,\n\n VIRTUAL_WIDTH,\n", "file_path": "cs50-2020-game-dev/pong/pong-4/src/main.rs", "rank": 93, "score": 72491.92265715846 }, { "content": "fn main() {\n\n let screen = Screen {\n\n components: vec![\n\n Box::new(SelectBox {\n\n width: 100,\n\n height: 50,\n\n options: vec![\n\n String::from(\"Yes\"),\n\n String::from(\"Maybe\"),\n\n String::from(\"No\")\n\n ]\n\n }),\n\n Box::new(Button {\n\n width: 100,\n\n height: 50,\n\n label: String::from(\"Click Me\"),\n\n })\n\n ]\n\n };\n\n\n\n screen.run();\n\n}\n", "file_path": "rust-book/oops/src/main.rs", "rank": 94, "score": 71893.53686895288 }, { "content": "fn main() {\n\n let event_loop = EventLoop::new();\n\n let window = winit::window::Window::new(&event_loop).unwrap();\n\n {\n\n // Temporarily avoid srgb formats for the swapchain on the web\n\n futures::executor::block_on(\n\n run(event_loop, window, wgpu::TextureFormat::Bgra8UnormSrgb)\n\n );\n\n }\n\n}\n", "file_path": "wgpu_learn/first/src/main.rs", "rank": 95, "score": 71893.53686895288 }, { "content": "fn main() {\n\n let args = arguments::Arguments::new(env::args());\n\n match args {\n\n Ok(args) => run(args),\n\n Err(e) => println!(\"{}\", e.kind().as_str()),\n\n }\n\n}\n\n\n", "file_path": "small-projects/dictionary/src/main.rs", "rank": 96, "score": 71893.53686895288 }, { "content": "fn main() {\n\n let x = Human {\n\n age: 50,\n\n name: String::from(\"Fudal Lord\"),\n\n };\n\n x.age_category();\n\n\n\n // I don't know HOW/WHY, Shinobi Trait is required to be in scope,\\\n\n // to access this method, I guess if methods are implemented with a trait\n\n // that trait is needed to be present in scope, else compilation fails...\n\n x.chakra_type();\n\n\n\n let _map = HashTable::new(String::from(\"subroto\"), \"biswas\");\n\n _map.printMe();\n\n}\n", "file_path": "rust-book/traits/src/main.rs", "rank": 97, "score": 71893.53686895288 }, { "content": "fn main() {\n\n loop {\n\n println!(\"\\n\\nChose an Option:\");\n\n println!(\"1. Simple Ownership Example\");\n\n println!(\"2. Lose Ownership on Function Call\");\n\n println!(\"3. Return Ownership after Function Call\");\n\n println!(\"4. Return Multiple values after Function Call\");\n\n println!(\"5. Pass by reference (Immutable)\");\n\n println!(\"6. Pass by reference (Mutable)\");\n\n println!(\"7. Immutable Reference vs Mutable Reference\");\n\n println!(\"8. 
Dangling Reference\");\n\n // Slicing in Ownership Chapter can be looked into https://doc.rust-lang.org/book/ch04-03-slices.html\n\n // as the details that presents is based on above knowledge of Ownership...\n\n\n\n let mut option = String::new();\n\n io::stdin().read_line(&mut option)\n\n .expect(\"Error saving stdin\");\n\n\n\n match option.trim() {\n\n \"1\" => simple_ownership_example(),\n", "file_path": "rust-book/ownership/src/main.rs", "rank": 98, "score": 71893.53686895288 }, { "content": "#[test]\n\nfn test_1() {\n\n assert_eq!(\"1\", raindrops::raindrops(1));\n\n}\n\n\n", "file_path": "exercism/rust/raindrops/tests/raindrops.rs", "rank": 99, "score": 71893.53686895288 } ]
Rust
rmqtt/src/settings/mod.rs
phial3/rmqtt
8c29529e273007178fd0af73dccb6b0bf6729339
use std::fmt; use std::net::SocketAddr; use std::ops::{Deref, DerefMut}; use std::sync::Arc; use std::time::Duration; use config::{Config, ConfigError, File}; use parking_lot::RwLock; use serde::de::{Deserialize, Deserializer}; use crate::{NodeId, Result}; use self::listener::Listeners; use self::log::Log; pub mod listener; pub mod log; #[derive(Clone)] pub struct Settings(Arc<Inner>); #[derive(Debug, Clone, Deserialize)] pub struct Inner { #[serde(default = "inner_api_addr_default", deserialize_with = "deserialize_addr")] pub inner_api_addr: SocketAddr, #[serde(default)] pub node: Node, #[serde(default)] pub rpc: Rpc, #[serde(default)] pub log: Log, #[serde(rename = "listener")] #[serde(default)] pub listeners: Listeners, #[serde(default)] pub plugins: Plugins, #[serde(default)] pub mqtt: Mqtt, } fn inner_api_addr_default() -> SocketAddr { ([0, 0, 0, 0], 6063).into() } impl Deref for Settings { type Target = Inner; fn deref(&self) -> &Self::Target { self.0.as_ref() } } impl Settings { pub fn new() -> Result<Self, ConfigError> { let mut s = Config::new(); if let Ok(cfg_filename) = std::env::var("RMQTT-CONFIG-FILENAME") { s.merge(File::with_name(&cfg_filename).required(false))?; } s.merge(File::with_name("/etc/rmqtt/rmqtt").required(false))?; s.merge(File::with_name("/etc/rmqtt").required(false))?; s.merge(File::with_name("rmqtt").required(false))?; let mut inner: Inner = match s.try_into() { Ok(c) => c, Err(e) => { return Err(e); } }; inner.listeners.init(); if inner.listeners.tcps.is_empty() && inner.listeners.tlss.is_empty() { return Err(ConfigError::Message( "Settings::new() error, listener.tcp or listener.tls is not exist".into(), )); } Ok(Self(Arc::new(inner))) } } impl fmt::Debug for Settings { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Settings ...")?; Ok(()) } } #[derive(Default, Debug, Clone, Deserialize)] pub struct Node { #[serde(default)] pub id: NodeId, #[serde(default = "Node::cookie_default")] pub cookie: String, #[serde(default = "Node::crash_dump_default")] pub crash_dump: String, } impl Node { fn cookie_default() -> String { "rmqttsecretcookie".into() } fn crash_dump_default() -> String { "/var/log/rmqtt/crash.dump".into() } } #[derive(Debug, Clone, Deserialize)] pub struct Rpc { #[serde(default = "Rpc::mode_default")] pub mode: String, #[serde(default = "Rpc::server_addr_default", deserialize_with = "deserialize_addr")] pub server_addr: SocketAddr, #[serde(default = "Rpc::server_workers_default")] pub server_workers: usize, #[serde(default = "Rpc::client_concurrency_limit_default")] pub client_concurrency_limit: usize, #[serde(default = "Rpc::client_timeout_default", deserialize_with = "deserialize_duration")] pub client_timeout: Duration, #[serde(default = "Rpc::batch_size_default")] pub batch_size: usize, } impl Default for Rpc { #[inline] fn default() -> Self { Self { mode: Self::mode_default(), batch_size: Self::batch_size_default(), server_addr: Self::server_addr_default(), server_workers: Self::server_workers_default(), client_concurrency_limit: Self::client_concurrency_limit_default(), client_timeout: Self::client_timeout_default(), } } } impl Rpc { fn mode_default() -> String { "async".into() } fn batch_size_default() -> usize { 128 } fn server_addr_default() -> SocketAddr { ([0, 0, 0, 0], 5363).into() } fn server_workers_default() -> usize { 4 } fn client_concurrency_limit_default() -> usize { 128 } fn client_timeout_default() -> Duration { Duration::from_secs(5) } } #[derive(Default, Debug, Clone, Deserialize)] pub struct Plugins { 
#[serde(default = "Plugins::dir_default")] pub dir: String, #[serde(default)] pub default_startups: Vec<String>, } impl Plugins { fn dir_default() -> String { "./plugins/".into() } pub fn load_config<'de, T: serde::Deserialize<'de>>(&self, name: &str) -> Result<T, ConfigError> { let dir = self.dir.trim_end_matches(|c| c == '/' || c == '\\'); let mut s = Config::new(); s.merge(File::with_name(&format!("{}/{}", dir, name)).required(true))?; s.try_into::<T>() } } #[derive(Debug, Clone, Default, Deserialize)] pub struct Mqtt {} #[derive(Debug, Clone)] pub struct ValueMut<T>(Arc<RwLock<T>>); impl<T> ValueMut<T> where T: Copy, { #[inline] pub fn new(v: T) -> Self { Self(Arc::new(RwLock::new(v))) } #[inline] pub fn get(&self) -> T { *self.0.read() } #[inline] pub fn set(&self, v: T) { *self.0.write() = v; } } impl<'de, T: serde::Deserialize<'de> + Copy> Deserialize<'de> for ValueMut<T> { #[inline] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let v = T::deserialize(deserializer)?; Ok(ValueMut::new(v)) } } #[derive(Debug, Clone)] pub struct Bytesize(usize); impl Bytesize { #[inline] pub fn as_u32(&self) -> u32 { self.0 as u32 } } impl Deref for Bytesize { type Target = usize; fn deref(&self) -> &Self::Target { &self.0 } } impl DerefMut for Bytesize { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl<'de> Deserialize<'de> for Bytesize { #[inline] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let v = to_bytesize(&String::deserialize(deserializer)?); Ok(Bytesize(v)) } } #[inline] pub fn to_bytesize(text: &str) -> usize { let text = text.to_uppercase().replace("GB", "G").replace("MB", "M").replace("KB", "K"); text.split_inclusive(|x| x == 'G' || x == 'M' || x == 'K' || x == 'B') .map(|x| { let mut chars = x.chars(); let u = match chars.nth_back(0) { None => return 0, Some(u) => u, }; let v = match chars.as_str().parse::<usize>() { Err(_e) => return 0, Ok(v) => v, }; match u { 'B' => v, 'K' => v * 1024, 'M' => v * 1048576, 'G' => v * 1073741824, _ => 0, } }) .sum() } #[inline] pub fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error> where D: Deserializer<'de>, { let v = String::deserialize(deserializer)?; Ok(to_duration(&v)) } #[inline] pub fn to_duration(text: &str) -> Duration { let text = text.to_lowercase().replace("ms", "Y"); let ms: u64 = text .split_inclusive(|x| x == 's' || x == 'm' || x == 'h' || x == 'd' || x == 'w' || x == 'f' || x == 'Y') .map(|x| { let mut chars = x.chars(); let u = match chars.nth_back(0) { None => return 0, Some(u) => u, }; let v = match chars.as_str().parse::<u64>() { Err(_e) => return 0, Ok(v) => v, }; match u { 'Y' => v, 's' => v * 1000, 'm' => v * 60000, 'h' => v * 3600000, 'd' => v * 86400000, 'w' => v * 604800000, 'f' => v * 1209600000, _ => 0, } }) .sum(); Duration::from_millis(ms) } #[inline] pub fn deserialize_addr<'de, D>(deserializer: D) -> Result<SocketAddr, D::Error> where D: Deserializer<'de>, { let addr = String::deserialize(deserializer)? .parse::<std::net::SocketAddr>() .map_err(serde::de::Error::custom)?; Ok(addr) }
use std::fmt; use std::net::SocketAddr; use std::ops::{Deref, DerefMut}; use std::sync::Arc; use std::time::Duration; use config::{Config, ConfigError, File}; use parking_lot::RwLock; use serde::de::{Deserialize, Deserializer}; use crate::{NodeId, Result}; use self::listener::Listeners; use self::log::Log; pub mod listener; pub mod log; #[derive(Clone)] pub struct Settings(Arc<Inner>); #[derive(Debug, Clone, Deserialize)] pub struct Inner { #[serde(default = "inner_api_addr_default", deserialize_with = "deserialize_addr")] pub inner_api_addr: SocketAddr, #[serde(default)] pub node: Node, #[serde(default)] pub rpc: Rpc, #[serde(default)] pub log: Log, #[serde(rename = "listener")] #[serde(default)] pub listeners: Listeners, #[serde(default)] pub plugins: Plugins, #[serde(default)] pub mqtt: Mqtt, } fn inner_api_addr_default() -> SocketAddr { ([0, 0, 0, 0], 6063).into() } impl Deref for Settings { type Target = Inner; fn deref(&self) -> &Self::Target { self.0.as_ref() } } impl Settings { pub fn new() -> Result<Self, ConfigError> { let mut s = Config::new(); if let Ok(cfg_filename) = std::env::var("RMQTT-CONFIG-FILENAME") { s.merge(File::with_name(&cfg_filename).required(false))?; } s.merge(File::with_name("/etc/rmqtt/rmqtt").required(false))?; s.merge(File::with_name("/etc/rmqtt").required(false))?; s.merge(File::with_name("rmqtt").required(false))?; let mut inner: Inner = match s.try_into() { Ok(c) => c, Err(e) => { return Err(e); } }; inner.listeners.init(); if inner.listeners.tcps.is_empty() && inner.listeners.tlss.is_empty() { return Err(ConfigError::Message( "Settings::new() error, listener.tcp or listener.tls is not exist".into(), )); } Ok(Self(Arc::new(inner))) } } impl fmt::Debug for Settings { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Settings ...")?; Ok(()) } } #[derive(Default, Debug, Clone, Deserialize)] pub struct Node { #[serde(default)] pub id: NodeId, #[serde(default = "Node::cookie_default")] pub cookie: String, #[serde(default = "Node::crash_dump_default")] pub crash_dump: String, } impl Node { fn cookie_default() -> String { "rmqttsecretcookie".into() } fn crash_dump_default() -> String { "/var/log/rmqtt/crash.dump".into() } } #[derive(Debug, Clone, Deserialize)] pub struct Rpc { #[serde(default = "Rpc::mode_default")] pub mode: String, #[serde(default = "Rpc::server_addr_default", deserialize_with = "deserialize_addr")] pub server_addr: SocketAddr, #[serde(default = "Rpc::server_workers_default")] pub server_workers: usize, #[serde(default = "Rpc::client_concurrency_limit_default")] pub client_concurrency_limit: usize, #[serde(default = "Rpc::client_timeout_default", deserialize_with = "deserialize_duration")] pub client_timeout: Duration, #[serde(default = "Rpc::batch_size_default")] pub batch_size: usize, } impl Default for Rpc { #[inline] fn default() -> Self { Self { mode: Self::mode_default(), batch_size: Self::batch_size_default(), server_addr: Self::server_addr_default(), server_workers: Self::server_workers_default(), client_concurrency_limit: Self::client_concurrency_limit_default(), client_timeout: Self::client_timeout_default(), } } } impl Rpc { fn mode_default() -> String { "async".into() } fn batch_size_default() -> usize { 128 } fn server_addr_default() -> SocketAddr { ([0, 0, 0, 0], 5363).into() } fn server_workers_default() -> usize { 4 } fn client_concurrency_limit_default() -> usize { 128 } fn client_timeout_default() -> Duration { Duration::from_secs(5) } } #[derive(Default, Debug, Clone, Deserialize)] pub struct Plugins { 
#[serde(default = "Plugins::dir_default")] pub dir: String, #[serde(default)] pub default_startups: Vec<String>, } impl Plugins { fn dir_default() -> String { "./plugins/".into() } pub fn load_config<'de, T: serde::Deserialize<'de>>(&self, name: &str) -> Result<T, ConfigError> { let dir = self.dir.trim_end_matches(|c| c == '/' || c == '\\'); let mut s = Config::new(); s.merge(File::with_name(&format!("{}/{}", dir, name)).required(true))?; s.try_into::<T>() } } #[derive(Debug, Clone, Default, Deserialize)] pub struct Mqtt {} #[derive(Debug, Clone)] pub struct ValueMut<T>(Arc<RwLock<T>>); impl<T> ValueMut<T> where T: Copy, { #[inline] pub fn new(v: T) -> Self { Self(Arc::new(RwLock::new(v))) } #[inline] pub fn get(&self) -> T { *self.0.read() } #[inline] pub fn set(&self, v: T) { *self.0.write() = v; } } impl<'de, T: serde::Deserialize<'de> + Copy> Deserialize<'de> for ValueMut<T> { #[inline] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let v = T::deserialize(deserializer)?; Ok(ValueMut::new(v)) } } #[derive(Debug, Clone)] pub struct Bytesize(usize); impl Bytesize { #[inline] pub fn as_u32(&self) -> u32 { self.0 as u32 } } impl Deref for Bytesize { type Target = usize; fn deref(&self) -> &Self::Target { &self.0 } } impl DerefMut for Bytesize { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl<'de> Deserialize<'de> for Bytesize { #[inline] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let v = to_bytesize(&String::deserialize(deserializer)?); Ok(Bytesize(v)) } } #[inline] pub fn to_bytesize(text: &str) -> usize { let text = text.to_uppercase().replace("GB", "G").replace("MB", "M").replace("KB", "K"); text.split_inclusive(|x| x == 'G' || x == 'M' || x == 'K' || x == 'B') .map(|x| { let mut chars = x.chars(); let u = match chars.nth_back(0) { None => return 0, Some(u) => u, }; let v = match chars.as_str().parse::<usize>() { Err(_e) => return 0, Ok(v) => v, };
}) .sum() } #[inline] pub fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error> where D: Deserializer<'de>, { let v = String::deserialize(deserializer)?; Ok(to_duration(&v)) } #[inline] pub fn to_duration(text: &str) -> Duration { let text = text.to_lowercase().replace("ms", "Y"); let ms: u64 = text .split_inclusive(|x| x == 's' || x == 'm' || x == 'h' || x == 'd' || x == 'w' || x == 'f' || x == 'Y') .map(|x| { let mut chars = x.chars(); let u = match chars.nth_back(0) { None => return 0, Some(u) => u, }; let v = match chars.as_str().parse::<u64>() { Err(_e) => return 0, Ok(v) => v, }; match u { 'Y' => v, 's' => v * 1000, 'm' => v * 60000, 'h' => v * 3600000, 'd' => v * 86400000, 'w' => v * 604800000, 'f' => v * 1209600000, _ => 0, } }) .sum(); Duration::from_millis(ms) } #[inline] pub fn deserialize_addr<'de, D>(deserializer: D) -> Result<SocketAddr, D::Error> where D: Deserializer<'de>, { let addr = String::deserialize(deserializer)? .parse::<std::net::SocketAddr>() .map_err(serde::de::Error::custom)?; Ok(addr) }
match u { 'B' => v, 'K' => v * 1024, 'M' => v * 1048576, 'G' => v * 1073741824, _ => 0, }
if_condition
[ { "content": "fn open_file(filename: &str) -> Result<File> {\n\n OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .append(true)\n\n .open(filename)\n\n .map_err(|e| MqttError::from(format!(\"logger file config error, filename: {}, {:?}\", filename, e)))\n\n}\n", "file_path": "rmqtt/src/logger.rs", "rank": 8, "score": 243025.16126642236 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n", "file_path": "rmqtt/src/settings/listener.rs", "rank": 10, "score": 235176.23769533925 }, { "content": "#[inline]\n\npub fn to_uptime(uptime: i64) -> String {\n\n let uptime_secs = uptime % 60;\n\n let uptime = uptime / 60;\n\n let uptime_minus = uptime % 60;\n\n let uptime = uptime / 60;\n\n let uptime_hours = uptime % 24;\n\n let uptime_days = uptime / 24;\n\n format!(\"{} days {} hours, {} minutes, {} seconds\", uptime_days, uptime_hours, uptime_minus, uptime_secs)\n\n}\n", "file_path": "rmqtt/src/node.rs", "rank": 11, "score": 214333.49263920978 }, { "content": "type DashMap<K, V> = dashmap::DashMap<K, V, ahash::RandomState>;\n\npub type EntryRef<'a> = Ref<'a, String, Entry, ahash::RandomState>;\n\npub type EntryRefMut<'a> = RefMut<'a, String, Entry, ahash::RandomState>;\n\npub type EntryIter<'a> = Iter<'a, String, Entry, ahash::RandomState, DashMap<String, Entry>>;\n\n\n", "file_path": "rmqtt/src/plugin.rs", "rank": 12, "score": 211612.03463102758 }, { "content": "type HandlerId = String;\n\n\n\n//#[derive(Clone)]\n\npub struct DefaultHookManager {\n\n #[allow(clippy::type_complexity)]\n\n handlers: Arc<DashMap<Type, Arc<sync::RwLock<BTreeMap<(Priority, HandlerId), HookEntry>>>>>,\n\n}\n\n\n\nimpl DefaultHookManager {\n\n #[inline]\n\n pub fn instance() -> &'static DefaultHookManager {\n\n static INSTANCE: OnceCell<DefaultHookManager> = OnceCell::new();\n\n INSTANCE.get_or_init(|| Self { handlers: Arc::new(DashMap::default()) })\n\n }\n\n\n\n #[inline]\n\n async fn add(&self, typ: Type, priority: Priority, handler: Box<dyn Handler>) -> Result<HandlerId> {\n\n let id = Uuid::new_v4().as_simple().encode_lower(&mut Uuid::encode_buffer()).to_string();\n\n let type_handlers =\n\n self.handlers.entry(typ).or_insert(Arc::new(sync::RwLock::new(BTreeMap::default())));\n", "file_path": "rmqtt/src/broker/default.rs", "rank": 13, "score": 211410.52810444575 }, { "content": "type DashMap<K, V> = dashmap::DashMap<K, V, ahash::RandomState>;\n", "file_path": "rmqtt/src/broker/default.rs", "rank": 14, "score": 210487.315045189 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n\npub mod default;\n\npub mod error;\n\npub mod executor;\n\npub mod fitter;\n\npub mod hook;\n\npub mod inflight;\n\npub mod metrics;\n\npub mod queue;\n\npub mod retain;\n\npub mod session;\n\npub mod stats;\n\npub mod topic;\n\npub mod types;\n\npub mod v3;\n\npub mod v5;\n\n\n", "file_path": "rmqtt/src/broker/mod.rs", "rank": 15, "score": 204757.9470767993 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n\npub struct LockEntry {\n\n id: Id,\n\n shared: &'static DefaultShared,\n\n _locker: Option<OwnedMutexGuard<()>>,\n\n}\n\n\n\nimpl Drop for LockEntry {\n\n #[inline]\n\n fn drop(&mut self) {\n\n if self._locker.is_some() {\n\n let _t = self.shared.lockers.remove(&self.id.client_id);\n\n log::debug!(\"{:?} LockEntry Drop ..., {}\", self.id, _t.is_some());\n\n }\n\n }\n\n}\n\n\n\nimpl LockEntry {\n\n #[inline]\n", "file_path": "rmqtt/src/broker/default.rs", "rank": 16, "score": 204646.19192121035 
}, { "content": "type ValueSet<K> = std::collections::BTreeSet<K>;\n\n\n\npub type Level = ntex_mqtt::TopicLevel;\n\npub type Topic = ntex_mqtt::Topic;\n\npub type TopicTree<V> = Node<V>;\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Node<V: Ord> {\n\n values: ValueSet<V>,\n\n branches: HashMap<Level, Node<V>>,\n\n}\n\n\n\nimpl<V> Default for Node<V>\n\nwhere\n\n V: Hash + Ord + Eq + Clone + Debug,\n\n{\n\n #[inline]\n\n fn default() -> Node<V> {\n\n Self { values: ValueSet::default(), branches: HashMap::default() }\n\n }\n", "file_path": "rmqtt/src/broker/topic.rs", "rank": 17, "score": 203617.80931255224 }, { "content": "type DashMap<K, V> = dashmap::DashMap<K, V, ahash::RandomState>;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub(crate) struct ClientStatus {\n\n pub id: Id,\n\n pub online: IsOnline,\n\n pub handshaking: bool,\n\n pub handshak_duration: TimestampMillis,\n\n}\n\n\n\nimpl ClientStatus {\n\n fn new(id: Id, online: IsOnline, handshaking: bool) -> Self {\n\n Self { id, online, handshaking, handshak_duration: chrono::Local::now().timestamp_millis() }\n\n }\n\n\n\n pub fn handshaking(&self, try_lock_timeout: Duration) -> bool {\n\n self.handshaking\n\n && (chrono::Local::now().timestamp_millis()\n\n < (self.handshak_duration + try_lock_timeout.as_millis() as TimestampMillis))\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/router.rs", "rank": 18, "score": 200484.609522583 }, { "content": "pub trait PluginFn: 'static + Sync + Send + Fn() -> BoxFuture<Result<DynPlugin>> {}\n\n\n\nimpl<T> PluginFn for T where T: 'static + Sync + Send + ?Sized + Fn() -> BoxFuture<Result<DynPlugin>> {}\n\n\n\npub type DynPluginResult = BoxFuture<Result<DynPlugin>>;\n\npub type DynPlugin = Box<dyn Plugin>;\n\npub type DynPluginFn = Box<dyn PluginFn>;\n\n\n\npub struct Entry {\n\n inited: bool,\n\n active: bool,\n\n //will reject start, stop, and load config operations\n\n immutable: bool,\n\n plugin: Option<DynPlugin>,\n\n plugin_f: Option<DynPluginFn>,\n\n}\n\n\n\nimpl Entry {\n\n #[inline]\n\n pub fn inited(&self) -> bool {\n", "file_path": "rmqtt/src/plugin.rs", "rank": 19, "score": 200483.19140979173 }, { "content": "type DashSet<V> = dashmap::DashSet<V, ahash::RandomState>;\n", "file_path": "rmqtt/src/broker/default.rs", "rank": 20, "score": 197268.71949474138 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n\n#[inline]\n\npub async fn register(\n\n runtime: &'static Runtime,\n\n name: &'static str,\n\n descr: &'static str,\n\n default_startup: bool,\n\n immutable: bool,\n\n) -> Result<()> {\n\n runtime\n\n .plugins\n\n .register(name, default_startup, immutable, move || -> DynPluginResult {\n\n Box::pin(async move {\n\n ClusterPlugin::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) })\n\n })\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/lib.rs", "rank": 21, "score": 194999.14240468966 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n\nconst CACHEABLE: &str = \"X-Cache\";\n\nconst SUPERUSER: &str = \"X-Superuser\";\n\n\n\nconst CACHE_KEY: &str = \"ACL-CACHE-MAP\";\n\n\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/lib.rs", "rank": 22, "score": 194999.14240468966 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/router.rs", "rank": 23, "score": 194999.14240468966 }, { "content": "type HashMap<K, V> = 
std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct PluginConfig {\n\n #[serde(default = \"PluginConfig::worker_threads_default\")]\n\n pub worker_threads: usize,\n\n #[serde(default = \"PluginConfig::queue_capacity_default\")]\n\n pub queue_capacity: usize,\n\n #[serde(default = \"PluginConfig::concurrency_limit_default\")]\n\n pub concurrency_limit: usize,\n\n #[serde(default)]\n\n pub urls: Vec<Url>,\n\n #[serde(default)]\n\n #[deprecated]\n\n http_urls: Vec<Url>,\n\n #[serde(default = \"PluginConfig::http_timeout_default\", deserialize_with = \"deserialize_duration\")]\n\n pub http_timeout: Duration,\n\n #[serde(rename = \"rule\")]\n\n #[serde(default, deserialize_with = \"PluginConfig::deserialize_rules\")]\n\n pub rules: HashMap<Type, Vec<Rule>>,\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/config.rs", "rank": 24, "score": 194999.14240468966 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n\npub struct ClusterLockEntry {\n\n inner: Box<dyn Entry>,\n\n cluster_shared: &'static ClusterShared,\n\n}\n\n\n\nimpl ClusterLockEntry {\n\n #[inline]\n\n pub fn new(inner: Box<dyn Entry>, cluster_shared: &'static ClusterShared) -> Self {\n\n Self { inner, cluster_shared }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Entry for ClusterLockEntry {\n\n #[inline]\n\n async fn try_lock(&self) -> Result<Box<dyn Entry>> {\n\n Ok(Box::new(ClusterLockEntry::new(self.inner.try_lock().await?, self.cluster_shared)))\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/shared.rs", "rank": 25, "score": 194999.14240468966 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct PluginConfig {\n\n ///Disconnect if publishing is rejected\n\n #[serde(default = \"PluginConfig::disconnect_if_pub_rejected_default\")]\n\n pub disconnect_if_pub_rejected: bool,\n\n\n\n ///Hook priority\n\n #[serde(default = \"PluginConfig::priority_default\")]\n\n pub priority: Priority,\n\n\n\n ///#Return 'Deny' if http request error otherwise 'Ignore'\n\n #[serde(default = \"PluginConfig::deny_if_error_default\")]\n\n pub deny_if_error: bool,\n\n\n\n #[serde(default = \"PluginConfig::http_timeout_default\", deserialize_with = \"deserialize_duration\")]\n\n pub http_timeout: Duration,\n\n #[serde(\n\n default,\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/config.rs", "rank": 26, "score": 194999.14240468966 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n\n\n\n#[inline]\n\npub async fn register(\n\n runtime: &'static Runtime,\n\n name: &'static str,\n\n descr: &'static str,\n\n default_startup: bool,\n\n immutable: bool,\n\n) -> Result<()> {\n\n runtime\n\n .plugins\n\n .register(name, default_startup, immutable, move || -> DynPluginResult {\n\n Box::pin(async move {\n\n ClusterPlugin::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) })\n\n })\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/lib.rs", "rank": 27, "score": 194999.14240468966 }, { "content": "type DashSet<V> = dashmap::DashSet<V, ahash::RandomState>;\n\n\n\npub const PH_C: &str = \"%c\";\n\npub const PH_U: &str = \"%u\";\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct PluginConfig {\n\n ///Disconnect if publishing is rejected\n\n #[serde(default = \"PluginConfig::disconnect_if_pub_rejected_default\")]\n\n 
pub disconnect_if_pub_rejected: bool,\n\n\n\n ///Hook priority\n\n #[serde(default = \"PluginConfig::priority_default\")]\n\n pub priority: Priority,\n\n\n\n #[serde(\n\n default,\n\n serialize_with = \"PluginConfig::serialize_rules\",\n\n deserialize_with = \"PluginConfig::deserialize_rules\"\n\n )]\n", "file_path": "rmqtt-plugins/rmqtt-acl/src/config.rs", "rank": 28, "score": 189331.16515318866 }, { "content": "fn on_handshake(req: &Request, mut response: Response) -> std::result::Result<Response, ErrorResponse> {\n\n const PROTOCOL_ERROR: &str = \"No \\\"Sec-WebSocket-Protocol: mqtt\\\" in client request\";\n\n let mqtt_protocol = req\n\n .headers()\n\n .get(\"Sec-WebSocket-Protocol\")\n\n .ok_or_else(|| ErrorResponse::new(Some(PROTOCOL_ERROR.into())))?;\n\n if mqtt_protocol != \"mqtt\" {\n\n return Err(ErrorResponse::new(Some(PROTOCOL_ERROR.into())));\n\n }\n\n response.headers_mut().append(\"Sec-WebSocket-Protocol\", HeaderValue::from_static(\"mqtt\"));\n\n Ok(response)\n\n}\n", "file_path": "rmqtt-bin/src/ws.rs", "rank": 29, "score": 185912.46718575823 }, { "content": "#[inline]\n\nfn format_timestamp(t: i64) -> String {\n\n if t <= 0 {\n\n \"\".into()\n\n } else {\n\n use chrono::TimeZone;\n\n if let LocalResult::Single(t) = chrono::Local.timestamp_opt(t, 0) {\n\n t.format(\"%Y-%m-%d %H:%M:%S\").to_string()\n\n } else {\n\n \"\".into()\n\n }\n\n }\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/types.rs", "rank": 30, "score": 182513.25362611347 }, { "content": "#[inline]\n\nfn set_active_count(name: Port, c: isize, handshaking_busy_limit: Option<usize>) {\n\n let active_counts = ACTIVE_COUNTS.get_or_init(DashMap::default);\n\n let mut entry = active_counts.entry((name, std::thread::current().id())).or_default();\n\n let (count, busy_limit) = entry.value_mut();\n\n *count = c;\n\n if let Some(handshaking_busy_limit) = handshaking_busy_limit {\n\n *busy_limit = handshaking_busy_limit as isize;\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 31, "score": 176701.98136179784 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n", "file_path": "rmqtt/src/broker/topic.rs", "rank": 32, "score": 174399.65068201697 }, { "content": "type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;\n", "file_path": "rmqtt/src/broker/retain.rs", "rank": 33, "score": 174399.65068201697 }, { "content": "#[inline]\n\npub fn parse_topic_filter(\n\n topic_filter: &ByteString,\n\n shared_subscription_supported: bool,\n\n) -> Result<(TopicFilter, Option<SharedGroup>)> {\n\n let mut shared_group = None;\n\n let err = MqttError::TopicError(\"Illegal topic filter\".into());\n\n //$share/abc/\n\n let topic = if shared_subscription_supported {\n\n let mut levels = topic_filter.splitn(3, '/').collect::<Vec<_>>();\n\n let is_share = levels.first().map(|f| *f == \"$share\").unwrap_or(false);\n\n if is_share {\n\n if levels.len() < 3 {\n\n return Err(err);\n\n }\n\n levels.remove(0);\n\n shared_group = Some(SharedGroup::from(levels.remove(0)));\n\n ByteString::from(levels.remove(0))\n\n } else {\n\n topic_filter.clone()\n\n }\n", "file_path": "rmqtt/src/broker/types.rs", "rank": 34, "score": 167056.0620499918 }, { "content": "type Headers = (Option<ContentType>, HeaderMap, HashMap<String, String>);\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct Req {\n\n #[serde(serialize_with = \"Req::serialize_url\", deserialize_with = \"Req::deserialize_url\")]\n\n pub url: Url,\n\n #[serde(serialize_with = 
\"Req::serialize_method\", deserialize_with = \"Req::deserialize_method\")]\n\n pub method: Method,\n\n #[serde(\n\n default,\n\n serialize_with = \"Req::serialize_headers\",\n\n deserialize_with = \"Req::deserialize_headers\"\n\n )]\n\n pub headers: Headers,\n\n pub params: HashMap<String, String>,\n\n}\n\n\n\nimpl Req {\n\n pub fn is_get(&self) -> bool {\n\n self.method == Method::GET\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/config.rs", "rank": 35, "score": 156476.5326916406 }, { "content": "#[inline]\n\nfn set_rate(name: Port, rate: f64) {\n\n let rates = RATES.get_or_init(DashMap::default);\n\n let mut entry = rates.entry((name, std::thread::current().id())).or_default();\n\n *entry.value_mut() = rate;\n\n}\n\n\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 36, "score": 149353.72124838782 }, { "content": "type Port = u16;\n\n\n\n#[derive(Debug, Clone, Deserialize, Default)]\n\npub struct Listeners {\n\n #[serde(rename = \"tcp\")]\n\n #[serde(default)]\n\n _tcps: HashMap<String, ListenerInner>,\n\n\n\n #[serde(rename = \"tls\")]\n\n #[serde(default)]\n\n _tlss: HashMap<String, ListenerInner>,\n\n\n\n #[serde(rename = \"ws\")]\n\n #[serde(default)]\n\n _wss: HashMap<String, ListenerInner>,\n\n\n\n #[serde(rename = \"wss\")]\n\n #[serde(default)]\n\n _wsss: HashMap<String, ListenerInner>,\n\n\n", "file_path": "rmqtt/src/settings/listener.rs", "rank": 37, "score": 148227.34589696286 }, { "content": "/// Creates a new `slog::Logger` with two `Drain`s: one for printing to the console and another for\n\n/// printing to a file.\n\n///\n\n/// This function takes three arguments: `filename`, which specifies the name of the file to print\n\n/// to; `to`, which specifies where to print the logs (either the console or a file); and `level`,\n\n/// which specifies the minimum log level to print. The function sets the format for the logs and\n\n/// creates the two `Drain`s using the provided parameters. 
It then combines the two `Drain`s using a\n\n/// `Tee` and returns the resulting `Logger`.\n\npub fn config_logger(filename: String, to: To, level: Level) -> slog::Logger {\n\n let custom_timestamp =\n\n |io: &mut dyn io::Write| write!(io, \"{}\", chrono::Local::now().format(\"%Y-%m-%d %H:%M:%S%.3f\"));\n\n\n\n let print_msg_header = |fn_timestamp: &dyn ThreadSafeTimestampFn<Output = io::Result<()>>,\n\n mut rd: &mut dyn RecordDecorator,\n\n record: &Record,\n\n _use_file_location: bool|\n\n -> io::Result<bool> {\n\n rd.start_timestamp()?;\n\n fn_timestamp(&mut rd)?;\n\n\n\n rd.start_whitespace()?;\n\n write!(rd, \" \")?;\n\n\n\n rd.start_level()?;\n\n write!(rd, \"{}\", record.level().as_short_str())?;\n\n\n\n rd.start_location()?;\n\n if record.function().is_empty() {\n", "file_path": "rmqtt/src/logger.rs", "rank": 38, "score": 146166.87326498656 }, { "content": "type TopicsType = Option<(Arc<TopicTree<()>>, Vec<String>)>;\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct Rule {\n\n pub action: String,\n\n #[serde(default)]\n\n pub urls: Vec<Url>,\n\n #[serde(\n\n default,\n\n deserialize_with = \"Rule::deserialize_topics\",\n\n serialize_with = \"Rule::serialize_topics\"\n\n )]\n\n pub topics: TopicsType,\n\n}\n\n\n\nimpl Rule {\n\n fn serialize_topics<S>(topics: &TopicsType, s: S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: ser::Serializer,\n\n {\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/config.rs", "rank": 39, "score": 142456.50172465493 }, { "content": "fn to_error(e: WSError) -> io::Error {\n\n match e {\n\n WSError::ConnectionClosed => io::Error::from(ErrorKind::ConnectionAborted),\n\n WSError::AlreadyClosed => io::Error::from(ErrorKind::NotConnected),\n\n WSError::Io(io_e) => io_e,\n\n _ => io::Error::new(ErrorKind::Other, e.to_string()),\n\n }\n\n}\n\n\n", "file_path": "rmqtt-bin/src/ws.rs", "rank": 40, "score": 132470.34932266714 }, { "content": "#[async_trait]\n\npub trait Plugin: Send + Sync {\n\n #[inline]\n\n async fn init(&mut self) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn name(&self) -> &str {\n\n \"\"\n\n }\n\n\n\n #[inline]\n\n async fn get_config(&self) -> Result<serde_json::Value> {\n\n Ok(json!({}))\n\n }\n\n\n\n #[inline]\n\n async fn load_config(&mut self) -> Result<()> {\n\n Err(MqttError::from(\"unimplemented!\"))\n\n }\n", "file_path": "rmqtt/src/plugin.rs", "rank": 41, "score": 131426.21889131767 }, { "content": "type HookWriters = Arc<DashMap<ByteString, Arc<RwLock<HookWriter>>>>;\n\n\n\n#[inline]\n\npub async fn register(\n\n runtime: &'static Runtime,\n\n name: &'static str,\n\n descr: &'static str,\n\n default_startup: bool,\n\n immutable: bool,\n\n) -> Result<()> {\n\n runtime\n\n .plugins\n\n .register(name, default_startup, immutable, move || -> DynPluginResult {\n\n Box::pin(async move {\n\n WebHookPlugin::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) })\n\n })\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 42, "score": 131060.19096615736 }, { "content": "fn route(cfg: PluginConfigType) -> Router {\n\n Router::with_path(\"api/v1\")\n\n .hoop(affix::inject(cfg))\n\n .hoop(api_logger)\n\n .get(list_apis)\n\n .push(Router::with_path(\"brokers\").get(get_brokers).push(Router::with_path(\"<id>\").get(get_brokers)))\n\n .push(Router::with_path(\"nodes\").get(get_nodes).push(Router::with_path(\"<id>\").get(get_nodes)))\n\n .push(Router::with_path(\"health/check\").get(check_health))\n\n .push(\n\n 
Router::with_path(\"clients\").get(search_clients).push(\n\n Router::with_path(\"<clientid>\")\n\n .get(get_client)\n\n .delete(kick_client)\n\n .push(Router::with_path(\"online\").get(check_online)),\n\n ),\n\n )\n\n .push(\n\n Router::with_path(\"subscriptions\")\n\n .get(query_subscriptions)\n\n .push(Router::with_path(\"<clientid>\").get(get_client_subscriptions)),\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/api.rs", "rank": 43, "score": 130661.4550280038 }, { "content": "#[inline]\n\npub fn is_busy() -> bool {\n\n #[inline]\n\n fn _is_busy() -> bool {\n\n let busies = ACTIVE_COUNTS\n\n .get()\n\n .map(|m| {\n\n m.iter()\n\n .group_by(|item| (item.key().0, item.value().1))\n\n .into_iter()\n\n .map(|(k, g)| {\n\n (\n\n k,\n\n g.map(|item| {\n\n let (c, _) = item.value();\n\n *c\n\n })\n\n .sum::<isize>(),\n\n )\n\n })\n\n .filter_map(|((_, busy_limit), c)| if c > busy_limit { Some(1) } else { None })\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 44, "score": 126437.0374769649 }, { "content": "#[inline]\n\npub fn get_rate() -> f64 {\n\n RATES.get().map(|m| m.iter().map(|entry| *entry.value()).sum::<f64>()).unwrap_or_default()\n\n}\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 45, "score": 124072.61653414075 }, { "content": "#[inline]\n\nfn init_task_exec_queue(workers: usize, queue_max: usize) {\n\n let (exec, task_runner) = Builder::default().workers(workers).queue_max(queue_max).build();\n\n\n\n tokio::spawn(async move {\n\n task_runner.await;\n\n });\n\n\n\n TASK_EXEC_QUEUE.set(exec).ok().expect(\"Failed to initialize task execution queue\")\n\n}\n\n\n\n#[inline]\n\npub(crate) fn task_exec_queue() -> &'static TaskExecQueue {\n\n TASK_EXEC_QUEUE.get().expect(\"TaskExecQueue not initialized\")\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/lib.rs", "rank": 46, "score": 123427.88773316974 }, { "content": "type Item<'a, V> = (Vec<&'a Level>, Vec<&'a V>);\n\n\n\npub struct Matcher<'a, V: Ord> {\n\n node: &'a Node<V>,\n\n path: &'a [Level],\n\n}\n\n\n\nimpl<'a, V> Matcher<'a, V>\n\nwhere\n\n V: Hash + Eq + Ord + Clone + Debug + Serialize + Deserialize<'static>,\n\n{\n\n #[inline]\n\n pub fn iter(&self) -> MatchedIter<'a, V> {\n\n MatchedIter::new(self.node, self.path, Vec::new())\n\n }\n\n\n\n #[inline]\n\n pub fn first(&self) -> Option<Item<'a, V>> {\n\n self.iter().next()\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/broker/topic.rs", "rank": 47, "score": 122504.29725521879 }, { "content": "type WebSocketStreamType<T> = Pin<Box<dyn Future<Output = Result<WebSocketStream<T>, WSError>>>>;\n\n\n\npin_project_lite::pin_project! {\n\n pub struct WSServiceFut<T>\n\n where\n\n T: AsyncRead,\n\n T: AsyncWrite,\n\n T: Unpin,\n\n {\n\n fut: WebSocketStreamType<T>,\n\n #[pin]\n\n delay: Option<Sleep>,\n\n }\n\n}\n\n\n\nimpl<T: AsyncRead + AsyncWrite + Unpin> Future for WSServiceFut<T> {\n\n type Output = Result<WsStream<T>, ntex_mqtt::MqttError<MqttError>>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let mut this = self.project();\n", "file_path": "rmqtt-bin/src/ws.rs", "rank": 48, "score": 122165.62748634975 }, { "content": "/// Initializes a logger using `slog` and `slog_scope`.\n\n///\n\n/// This function creates a `GlobalLoggerGuard` and sets the global logger to the `logger` passed\n\n/// in the `Runtime` instance. 
It also initializes `slog_stdlog` with the log level specified in\n\n/// the `Runtime` settings.\n\npub fn logger_init() -> GlobalLoggerGuard {\n\n let level = slog_log_to_level(Runtime::instance().settings.log.level.inner());\n\n let logger = Runtime::instance().logger.clone();\n\n // Make sure to save the guard, see documentation for more information\n\n let guard = slog_scope::set_global_logger(logger.clone());\n\n // register slog_stdlog as the log handler with the log crate\n\n slog_stdlog::init_with_level(level).unwrap();\n\n guard\n\n}\n\n\n", "file_path": "rmqtt/src/logger.rs", "rank": 49, "score": 121853.1074604337 }, { "content": "#[inline]\n\npub fn get_active_count() -> isize {\n\n ACTIVE_COUNTS\n\n .get()\n\n .map(|m| {\n\n m.iter()\n\n .map(|item| {\n\n let (c, _) = item.value();\n\n *c\n\n })\n\n .sum()\n\n })\n\n .unwrap_or_default()\n\n}\n\n\n\nstatic RATES: OnceCell<DashMap<(Port, ThreadId), f64>> = OnceCell::new();\n\n\n", "file_path": "rmqtt/src/broker/executor.rs", "rank": 50, "score": 121844.76584805558 }, { "content": "pub fn active_grpc_requests() -> isize {\n\n ACTIVE_REQUEST_COUNT.load(Ordering::SeqCst)\n\n}\n", "file_path": "rmqtt/src/grpc/server.rs", "rank": 51, "score": 121839.71879754188 }, { "content": "fn slog_log_to_level(level: slog::Level) -> log::Level {\n\n match level {\n\n slog::Level::Trace => log::Level::Trace,\n\n slog::Level::Debug => log::Level::Debug,\n\n slog::Level::Info => log::Level::Info,\n\n slog::Level::Warning => log::Level::Warn,\n\n slog::Level::Error => log::Level::Error,\n\n slog::Level::Critical => log::Level::Error,\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/logger.rs", "rank": 52, "score": 120069.83938925587 }, { "content": "pub trait OnEventFn: 'static + Sync + Send + Fn() {}\n\nimpl<T> OnEventFn for T where T: 'static + Sync + Send + Clone + ?Sized + Fn() {}\n\n\n\n#[derive(Clone)]\n\npub struct Sender<T> {\n\n tx: mpsc::Sender<()>,\n\n queue: Arc<Queue<T>>,\n\n policy_fn: Rc<dyn PolicyFn<T>>,\n\n}\n\n\n\nimpl<T> Sender<T> {\n\n #[inline]\n\n pub async fn close(&mut self) -> Result<()> {\n\n self.tx.close().await?;\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n pub fn len(&self) -> usize {\n\n self.queue.len()\n", "file_path": "rmqtt/src/broker/queue.rs", "rank": 53, "score": 119146.71161883499 }, { "content": "#[inline]\n\nfn init_task_exec_queue(workers: usize, queue_max: usize) -> TaskExecQueue {\n\n let (exec, task_runner) = Builder::default().workers(workers).queue_max(queue_max).build();\n\n\n\n tokio::spawn(async move {\n\n task_runner.await;\n\n });\n\n\n\n exec\n\n}\n\n\n\n//Failure count\n\n#[inline]\n\npub(crate) fn fails() -> &'static Counter {\n\n static INSTANCE: OnceCell<Counter> = OnceCell::new();\n\n INSTANCE.get_or_init(Counter::new)\n\n}\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 54, "score": 115830.52589345758 }, { "content": "type NodeServiceClientType = NodeServiceClient<Channel>;\n\n\n\n#[derive(Clone)]\n\npub struct NodeGrpcClient {\n\n grpc_client: Arc<RwLock<Option<NodeServiceClientType>>>,\n\n active_tasks: Arc<AtomicUsize>,\n\n channel_tasks: Arc<AtomicUsize>,\n\n endpoint: Endpoint,\n\n tx: Sender<(MessageType, Message, OneshotSender<Result<MessageReply>>)>,\n\n}\n\n\n\nimpl NodeGrpcClient {\n\n //server_addr - ip:port, 127.0.0.1:6666\n\n #[inline]\n\n pub async fn new(server_addr: &str) -> Result<Self> {\n\n log::debug!(\"rpc.client_timeout: {:?}\", Runtime::instance().settings.rpc.client_timeout);\n\n let concurrency_limit = Runtime::instance().settings.rpc.client_concurrency_limit 
+ 1;\n\n let endpoint = Channel::from_shared(format!(\"http://{}\", server_addr))\n\n .map(|endpoint| {\n\n endpoint\n", "file_path": "rmqtt/src/grpc/client.rs", "rank": 55, "score": 115811.33683822677 }, { "content": "struct Template {\n\n _runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n}\n\n\n\nimpl Template {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let register = runtime.extends.hook_mgr().await.register();\n\n Ok(Self { _runtime: runtime, name: name.into(), descr: descr.into(), register })\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Plugin for Template {\n\n #[inline]\n\n async fn init(&mut self) -> Result<()> {\n\n log::debug!(\"{} init\", self.name);\n", "file_path": "rmqtt-plugins/rmqtt-plugin-template/src/lib.rs", "rank": 56, "score": 115250.56362782502 }, { "content": "struct AclPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n}\n\n\n\nimpl AclPlugin {\n\n #[inline]\n\n async fn new<N: Into<String>, D: Into<String>>(\n\n runtime: &'static Runtime,\n\n name: N,\n\n descr: D,\n\n ) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(runtime.settings.plugins.load_config::<PluginConfig>(&name)?));\n\n log::debug!(\"{} AclPlugin cfg: {:?}\", name, cfg.read().await);\n\n let register = runtime.extends.hook_mgr().await.register();\n\n Ok(Self { runtime, name, descr: descr.into(), register, cfg })\n", "file_path": "rmqtt-plugins/rmqtt-acl/src/lib.rs", "rank": 57, "score": 115250.56362782502 }, { "content": "struct RetainerPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n retainer: &'static Retainer,\n\n}\n\n\n\nimpl RetainerPlugin {\n\n #[inline]\n\n async fn new<N: Into<String>, D: Into<String>>(\n\n runtime: &'static Runtime,\n\n name: N,\n\n descr: D,\n\n ) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = runtime.settings.plugins.load_config::<PluginConfig>(&name)?;\n\n log::info!(\"{} RetainerPlugin cfg: {:?}\", name, cfg);\n\n let register = runtime.extends.hook_mgr().await.register();\n", "file_path": "rmqtt-plugins/rmqtt-retainer/src/lib.rs", "rank": 58, "score": 115250.56362782502 }, { "content": "struct CounterPlugin {\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n}\n\n\n\nimpl CounterPlugin {\n\n #[inline]\n\n async fn new<N: Into<String>, D: Into<String>>(\n\n runtime: &'static Runtime,\n\n name: N,\n\n descr: D,\n\n ) -> Result<Self> {\n\n let name = name.into();\n\n let register = runtime.extends.hook_mgr().await.register();\n\n Ok(Self { name, descr: descr.into(), register })\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "rmqtt-plugins/rmqtt-counter/src/lib.rs", "rank": 59, "score": 115250.56362782502 }, { "content": "pub trait PolicyFn<P>: 'static + Fn(&P) -> Policy {}\n\n\n\nimpl<T, P> PolicyFn<P> for T where T: 'static + Clone + ?Sized + Fn(&P) -> Policy {}\n\n\n", "file_path": "rmqtt/src/broker/queue.rs", "rank": 60, "score": 114825.54770002824 }, { "content": "struct ClusterPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<PluginConfig>,\n\n grpc_clients: GrpcClients,\n\n shared: &'static ClusterShared,\n\n retainer: &'static ClusterRetainer,\n\n\n\n router: &'static ClusterRouter,\n\n raft_mailbox: 
Option<Mailbox>,\n\n}\n\n\n\nimpl ClusterPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let env_list_keys = [\"node_grpc_addrs\", \"raft_peer_addrs\"];\n\n let mut cfg = runtime.settings.plugins.load_config_with::<PluginConfig>(&name, &env_list_keys)?;\n", "file_path": "rmqtt-plugins/rmqtt-cluster-raft/src/lib.rs", "rank": 61, "score": 113247.2124682081 }, { "content": "struct ClusterPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n grpc_clients: GrpcClients,\n\n shared: &'static ClusterShared,\n\n retainer: &'static ClusterRetainer,\n\n router: &'static ClusterRouter,\n\n}\n\n\n\nimpl ClusterPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(\n\n runtime.settings.plugins.load_config_with::<PluginConfig>(&name, &[\"node_grpc_addrs\"])?,\n\n ));\n\n log::debug!(\"{} ClusterPlugin cfg: {:?}\", name, cfg.read().await);\n", "file_path": "rmqtt-plugins/rmqtt-cluster-broadcast/src/lib.rs", "rank": 62, "score": 113247.2124682081 }, { "content": "struct HookHandler {}\n\n\n\nimpl HookHandler {\n\n fn new() -> Self {\n\n Self {}\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for HookHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::ClientConnack(connect_info, r) => {\n\n log::debug!(\"client connack, {:?}, {:?}\", connect_info, r);\n\n }\n\n Parameter::ClientSubscribe(_session, c, subscribe) => {\n\n log::debug!(\"{:?} client subscribe, {:?}\", c.id, subscribe);\n\n //let mut topic_filter = subscribe.topic_filter.clone();\n\n //topic_filter.insert(0, Level::Normal(\"PPP\".into()));\n\n //return (true, Some(HookResult::TopicFilter(Some(topic_filter))))\n", "file_path": "rmqtt-plugins/rmqtt-plugin-template/src/lib.rs", "rank": 63, "score": 113247.2124682081 }, { "content": "type PluginConfigType = Arc<RwLock<PluginConfig>>;\n\n\n\n#[inline]\n\npub async fn register(\n\n runtime: &'static Runtime,\n\n name: &'static str,\n\n descr: &'static str,\n\n default_startup: bool,\n\n immutable: bool,\n\n) -> Result<()> {\n\n runtime\n\n .plugins\n\n .register(name, default_startup, immutable, move || -> DynPluginResult {\n\n Box::pin(async move {\n\n HttpApiPlugin::new(runtime, name, descr).await.map(|p| -> DynPlugin { Box::new(p) })\n\n })\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/lib.rs", "rank": 64, "score": 112097.68639439947 }, { "content": "#[derive(Clone)]\n\nstruct EntryItem {\n\n s: Session,\n\n tx: Tx,\n\n c: ClientInfo,\n\n}\n\n\n\npub struct DefaultShared {\n\n lockers: DashMap<ClientId, Arc<Mutex<()>>>,\n\n peers: DashMap<ClientId, EntryItem>,\n\n}\n\n\n\nimpl DefaultShared {\n\n #[inline]\n\n pub fn instance() -> &'static DefaultShared {\n\n static INSTANCE: OnceCell<DefaultShared> = OnceCell::new();\n\n INSTANCE.get_or_init(|| Self { lockers: DashMap::default(), peers: DashMap::default() })\n\n }\n\n\n\n #[inline]\n\n pub fn tx(&self, client_id: &str) -> Option<(Tx, To)> {\n", "file_path": "rmqtt/src/broker/default.rs", "rank": 65, "score": 111622.73815030353 }, { "content": "struct HookEntry {\n\n handler: Box<dyn Handler>,\n\n enabled: bool,\n\n}\n\n\n\nimpl HookEntry {\n\n fn new(handler: Box<dyn Handler>) -> Self {\n\n Self { 
handler, enabled: false }\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/broker/default.rs", "rank": 66, "score": 111617.63849247161 }, { "content": "struct WebHookPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n chan_queue_count: Arc<AtomicIsize>,\n\n tx: Arc<RwLock<Sender<Message>>>,\n\n writers: HookWriters,\n\n exec: TaskExecQueue,\n\n}\n\n\n\nimpl WebHookPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(Self::load_config(runtime, &name)?));\n\n log::debug!(\"{} WebHookPlugin cfg: {:?}\", name, cfg.read().await);\n\n let writers = Arc::new(DashMap::default());\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 67, "score": 111339.50291442694 }, { "content": "struct HttpApiPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: PluginConfigType,\n\n shutdown_tx: Option<ShutdownTX>,\n\n}\n\n\n\nimpl HttpApiPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(runtime.settings.plugins.load_config::<PluginConfig>(&name)?));\n\n log::debug!(\"{} HttpApiPlugin cfg: {:?}\", name, cfg.read().await);\n\n let register = runtime.extends.hook_mgr().await.register();\n\n let shutdown_tx = Some(Self::start(runtime, cfg.clone()).await);\n\n Ok(Self { runtime, name, descr: descr.into(), register, cfg, shutdown_tx })\n\n }\n\n\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/lib.rs", "rank": 68, "score": 111339.50291442694 }, { "content": "struct AuthHttpPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n}\n\n\n\nimpl AuthHttpPlugin {\n\n #[inline]\n\n async fn new<S: Into<String>>(runtime: &'static Runtime, name: S, descr: S) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = Arc::new(RwLock::new(runtime.settings.plugins.load_config::<PluginConfig>(&name)?));\n\n log::debug!(\"{} AuthHttpPlugin cfg: {:?}\", name, cfg.read().await);\n\n let register = runtime.extends.hook_mgr().await.register();\n\n Ok(Self { runtime, name, descr: descr.into(), register, cfg })\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/lib.rs", "rank": 69, "score": 111339.50291442694 }, { "content": "struct SystemTopicPlugin {\n\n runtime: &'static Runtime,\n\n name: String,\n\n descr: String,\n\n register: Box<dyn Register>,\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n running: Arc<AtomicBool>,\n\n}\n\n\n\nimpl SystemTopicPlugin {\n\n #[inline]\n\n async fn new<N: Into<String>, D: Into<String>>(\n\n runtime: &'static Runtime,\n\n name: N,\n\n descr: D,\n\n ) -> Result<Self> {\n\n let name = name.into();\n\n let cfg = runtime.settings.plugins.load_config_default::<PluginConfig>(&name)?;\n\n log::debug!(\"{} SystemTopicPlugin cfg: {:?}\", name, cfg);\n\n let register = runtime.extends.hook_mgr().await.register();\n", "file_path": "rmqtt-plugins/rmqtt-sys-topic/src/lib.rs", "rank": 70, "score": 111339.50291442694 }, { "content": "#[test]\n\nfn test_reason() {\n\n assert_eq!(Reason::ConnectKicked(false).is_kicked(false), true);\n\n assert_eq!(Reason::ConnectKicked(false).is_kicked(true), false);\n\n assert_eq!(Reason::ConnectKicked(true).is_kicked(true), 
true);\n\n assert_eq!(Reason::ConnectKicked(true).is_kicked(false), false);\n\n assert_eq!(Reason::ConnectKicked(true).is_kicked_by_admin(), true);\n\n assert_eq!(Reason::ConnectKicked(false).is_kicked_by_admin(), false);\n\n assert_eq!(Reason::ConnectDisconnect(None).is_kicked(false), false);\n\n assert_eq!(Reason::ConnectDisconnect(None).is_kicked_by_admin(), false);\n\n\n\n let reasons = Reason::Reasons(vec![\n\n Reason::PublishRefused,\n\n Reason::ConnectKicked(false),\n\n Reason::MessageExpiration,\n\n ]);\n\n assert_eq!(reasons.to_string(), \"PublishRefused,Kicked,MessageExpiration\");\n\n}\n", "file_path": "rmqtt/src/broker/types.rs", "rank": 71, "score": 110318.70242111704 }, { "content": "pub trait VecToString {\n\n fn to_string(&self) -> String;\n\n}\n\n\n\nimpl<'a> VecToString for Vec<&'a Level> {\n\n #[inline]\n\n fn to_string(&self) -> String {\n\n self.iter().map(|l| l.to_string()).collect::<Vec<String>>().join(\"/\")\n\n }\n\n}\n\n\n\nimpl<'a> VecToString for &'a [Level] {\n\n #[inline]\n\n fn to_string(&self) -> String {\n\n self.iter().map(|l| l.to_string()).collect::<Vec<String>>().join(\"/\")\n\n }\n\n}\n\n\n", "file_path": "rmqtt/src/broker/topic.rs", "rank": 72, "score": 105724.87829268172 }, { "content": "struct CounterHandler {\n\n metrics: &'static Metrics,\n\n}\n\n\n\nimpl CounterHandler {\n\n fn new() -> Self {\n\n Self { metrics: Metrics::instance() }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for CounterHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::ClientConnect(connect_info) => {\n\n self.metrics.client_connect_inc();\n\n if connect_info.username().is_none() {\n\n self.metrics.client_auth_anonymous_inc();\n\n }\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-counter/src/lib.rs", "rank": 73, "score": 104487.44958870106 }, { "content": "struct RetainHandler {\n\n retainer: &'static Retainer,\n\n _cfg: Arc<RwLock<PluginConfig>>,\n\n message_type: MessageType,\n\n}\n\n\n\nimpl RetainHandler {\n\n fn new(retainer: &'static Retainer, cfg: &Arc<RwLock<PluginConfig>>, message_type: MessageType) -> Self {\n\n Self { retainer, _cfg: cfg.clone(), message_type }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for RetainHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::GrpcMessageReceived(typ, msg) => {\n\n log::debug!(\"GrpcMessageReceived, type: {}, msg: {:?}\", typ, msg);\n\n if self.message_type != *typ {\n\n return (true, acc);\n", "file_path": "rmqtt-plugins/rmqtt-retainer/src/lib.rs", "rank": 74, "score": 104487.44958870106 }, { "content": "struct AclHandler {\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n}\n\n\n\nimpl AclHandler {\n\n fn new(cfg: &Arc<RwLock<PluginConfig>>) -> Self {\n\n Self { cfg: cfg.clone() }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for AclHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n match param {\n\n Parameter::ClientConnected(_session, client) => {\n\n let cfg = self.cfg.clone();\n\n let client_id = client.id.client_id.clone();\n\n let username = client.connect_info.username().cloned();\n\n let build_placeholders = async move {\n\n for rule in cfg.read().await.rules() {\n", "file_path": "rmqtt-plugins/rmqtt-acl/src/lib.rs", "rank": 75, "score": 104487.44958870106 }, { "content": "fn get_fields_named(data: &Data) -> &FieldsNamed {\n\n match *data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => 
fields,\n\n Fields::Unnamed(ref _fields) => {\n\n unreachable!()\n\n }\n\n Fields::Unit => {\n\n unreachable!()\n\n }\n\n },\n\n Data::Enum(_) | Data::Union(_) => unreachable!(),\n\n }\n\n}\n", "file_path": "rmqtt-macros/src/metrics.rs", "rank": 76, "score": 104227.95260730443 }, { "content": "pub trait QoSEx {\n\n fn value(&self) -> u8;\n\n fn less_value(&self, qos: QoS) -> QoS;\n\n}\n\n\n\nimpl QoSEx for QoS {\n\n #[inline]\n\n fn value(&self) -> u8 {\n\n match self {\n\n QoS::AtMostOnce => 0,\n\n QoS::AtLeastOnce => 1,\n\n QoS::ExactlyOnce => 2,\n\n }\n\n }\n\n\n\n #[inline]\n\n fn less_value(&self, qos: QoS) -> QoS {\n\n if self.value() < qos.value() {\n\n *self\n\n } else {\n", "file_path": "rmqtt/src/broker/types.rs", "rank": 77, "score": 102638.4188210099 }, { "content": "struct HookWriter {\n\n file_name: String,\n\n file: Option<File>,\n\n}\n\n\n\nimpl HookWriter {\n\n fn new(file: ByteString) -> Self {\n\n Self { file_name: file.to_string(), file: None }\n\n }\n\n\n\n #[inline]\n\n pub async fn log(&mut self, msg: &[u8]) -> Result<(), Box<dyn std::error::Error>> {\n\n if let Some(file) = self.file.as_mut() {\n\n file.write_all(msg).await?;\n\n file.write_all(b\"\\n\").await?;\n\n } else {\n\n Self::create_dirs(Path::new(&self.file_name)).await?;\n\n let mut file = OpenOptions::new().create(true).append(true).open(&self.file_name).await?;\n\n file.write_all(msg).await?;\n\n file.write_all(b\"\\n\").await?;\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 78, "score": 102329.2825728922 }, { "content": "struct AuthHandler {\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n}\n\n\n\nimpl AuthHandler {\n\n fn new(cfg: &Arc<RwLock<PluginConfig>>) -> Self {\n\n Self { cfg: cfg.clone() }\n\n }\n\n\n\n async fn response_result(resp: Response) -> Result<(ResponseResult, Superuser, Cacheable)> {\n\n if resp.status().is_success() {\n\n let superuser = resp.headers().contains_key(SUPERUSER);\n\n let cache_timeout = if let Some(tm) = resp.headers().get(CACHEABLE).and_then(|v| v.to_str().ok())\n\n {\n\n match tm.parse::<i64>() {\n\n Ok(tm) => Some(tm),\n\n Err(e) => {\n\n log::warn!(\"Parse X-Cache error, {:?}\", e);\n\n None\n\n }\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/lib.rs", "rank": 79, "score": 102329.2825728922 }, { "content": "struct WebHookHandler {\n\n tx: Arc<RwLock<Sender<Message>>>,\n\n chan_queue_count: Arc<AtomicIsize>,\n\n}\n\n\n\nimpl WebHookHandler {\n\n async fn handle(\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n writers: HookWriters,\n\n backoff_strategy: Arc<ExponentialBackoff>,\n\n typ: hook::Type,\n\n topic: Option<TopicFilter>,\n\n body: serde_json::Value,\n\n ) -> Result<()> {\n\n let topic = if let Some(topic) = topic { Some(Topic::from_str(&topic)?) 
} else { None };\n\n let hook_writes = {\n\n let cfg = cfg.read().await;\n\n if let Some(rules) = cfg.rules.get(&typ) {\n\n //get action and urls\n\n let action_urls = rules.iter().filter_map(|r| {\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 80, "score": 100292.27340139708 }, { "content": "struct SystemTopicHandler {\n\n cfg: Arc<RwLock<PluginConfig>>,\n\n // message_type: MessageType,\n\n nodeid: NodeId,\n\n}\n\n\n\nimpl SystemTopicHandler {\n\n fn new(cfg: &Arc<RwLock<PluginConfig>>) -> Self {\n\n let nodeid = Runtime::instance().node.id();\n\n Self { cfg: cfg.clone(), nodeid }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Handler for SystemTopicHandler {\n\n async fn hook(&self, param: &Parameter, acc: Option<HookResult>) -> ReturnType {\n\n log::debug!(\"param: {:?}, acc: {:?}\", param, acc);\n\n let now = chrono::Local::now();\n\n let now_time = now.format(\"%Y-%m-%d %H:%M:%S%.3f\").to_string();\n\n if let Some((topic, payload)) = match param {\n", "file_path": "rmqtt-plugins/rmqtt-sys-topic/src/lib.rs", "rank": 81, "score": 100292.27340139708 }, { "content": "type Topic = ntex_mqtt::Topic;\n\n\n\npub type RetainTree<V> = Node<V>;\n\n\n\npub struct Node<V> {\n\n value: Option<V>,\n\n branches: HashMap<Level, Node<V>>,\n\n}\n\n\n\nimpl<V> Default for Node<V> {\n\n #[inline]\n\n fn default() -> Node<V> {\n\n Self { value: None, branches: HashMap::default() }\n\n }\n\n}\n\n\n\nimpl<V> Node<V>\n\nwhere\n\n V: std::fmt::Debug + Clone,\n\n{\n", "file_path": "rmqtt/src/broker/retain.rs", "rank": 82, "score": 100135.45048908482 }, { "content": "#[async_trait]\n\npub trait Entry: Sync + Send {\n\n async fn try_lock(&self) -> Result<Box<dyn Entry>>;\n\n fn id(&self) -> Id;\n\n fn id_same(&self) -> Option<bool>;\n\n async fn set(&mut self, session: Session, tx: Tx, conn: ClientInfo) -> Result<()>;\n\n async fn remove(&mut self) -> Result<Option<(Session, Tx, ClientInfo)>>;\n\n async fn remove_with(&mut self, id: &Id) -> Result<Option<(Session, Tx, ClientInfo)>>;\n\n async fn kick(\n\n &mut self,\n\n clean_start: bool,\n\n clear_subscriptions: bool,\n\n is_admin: IsAdmin,\n\n ) -> Result<Option<SessionOfflineInfo>>;\n\n async fn online(&self) -> bool;\n\n fn is_connected(&self) -> bool;\n\n fn session(&self) -> Option<Session>;\n\n fn client(&self) -> Option<ClientInfo>;\n\n fn exist(&self) -> bool;\n\n fn tx(&self) -> Option<Tx>;\n\n async fn subscribe(&self, subscribe: &Subscribe) -> Result<SubscribeReturn>;\n\n async fn unsubscribe(&self, unsubscribe: &Unsubscribe) -> Result<bool>;\n\n async fn publish(&self, from: From, p: Publish) -> Result<(), (From, Publish, Reason)>;\n\n async fn subscriptions(&self) -> Option<Vec<SubsSearchResult>>;\n\n}\n\n\n", "file_path": "rmqtt/src/broker/mod.rs", "rank": 83, "score": 98559.08937056706 }, { "content": "#[async_trait]\n\npub trait Router: Sync + Send {\n\n ///\n\n async fn add(&self, topic_filter: &str, id: Id, opts: SubscriptionOptions) -> Result<()>;\n\n\n\n ///\n\n async fn remove(&self, topic_filter: &str, id: Id) -> Result<bool>;\n\n\n\n ///\n\n async fn matches(&self, id: Id, topic: &TopicName) -> Result<SubRelationsMap>;\n\n\n\n ///Check online or offline\n\n #[inline]\n\n async fn is_online(&self, node_id: NodeId, client_id: &str) -> bool {\n\n Runtime::instance()\n\n .extends\n\n .shared()\n\n .await\n\n .entry(Id::from(node_id, ClientId::from(client_id)))\n\n .is_connected()\n\n }\n", "file_path": "rmqtt/src/broker/mod.rs", "rank": 84, "score": 98559.08937056706 }, { "content": "#[async_trait]\n\npub trait Shared: Sync + 
Send {\n\n ///\n\n fn entry(&self, id: Id) -> Box<dyn Entry>;\n\n\n\n ///\n\n fn exist(&self, client_id: &str) -> bool;\n\n\n\n ///Route and dispense publish message\n\n async fn forwards(\n\n &self,\n\n from: From,\n\n publish: Publish,\n\n ) -> Result<SubscriptionSize, Vec<(To, From, Publish, Reason)>>;\n\n\n\n ///Route and dispense publish message and return shared subscription relations\n\n async fn forwards_and_get_shareds(\n\n &self,\n\n from: From,\n\n publish: Publish,\n\n ) -> Result<(SubRelationsMap, SubscriptionSize), Vec<(To, From, Publish, Reason)>>;\n", "file_path": "rmqtt/src/broker/mod.rs", "rank": 85, "score": 98559.08937056706 }, { "content": "// This function extracts data from the decoded Cargo.toml file and uses it to generate Rust code\n\nfn plugins(decoded: &toml::Value) {\n\n let mut inits = Vec::new();\n\n // Extract the data from the \"package.metadata.plugins\" field of the Cargo.toml file\n\n if let Some(plugins) = decoded\n\n .get(\"package\")\n\n .and_then(|package| package.get(\"metadata\"))\n\n .and_then(|metadata| metadata.get(\"plugins\"))\n\n .and_then(|plugins| plugins.as_table())\n\n {\n\n // Iterate over the plugins and extract the relevant data\n\n for (id, cfg) in plugins {\n\n let plugin_id = id.replace('-', \"_\");\n\n let name = cfg.get(\"name\").and_then(|v| v.as_str()).unwrap_or(id);\n\n let descr = cfg.get(\"description\").and_then(|v| v.as_str()).unwrap_or_default();\n\n let default_startup = cfg.get(\"default_startup\").and_then(|v| v.as_bool()).unwrap_or(false);\n\n let immutable = cfg.get(\"immutable\").and_then(|v| v.as_bool()).unwrap_or(false);\n\n println!(\n\n \"plugin_id: {}, default_startup: {}, immutable: {}, name: {}, descr: {}\",\n\n plugin_id, default_startup, immutable, name, descr\n\n );\n", "file_path": "rmqtt-bin/build.rs", "rank": 86, "score": 98246.57755121333 }, { "content": "type Level = ntex_mqtt::TopicLevel;\n", "file_path": "rmqtt/src/broker/retain.rs", "rank": 87, "score": 97920.21009951626 }, { "content": "type Message = (hook::Type, Option<TopicFilter>, serde_json::Value);\n\n\n", "file_path": "rmqtt-plugins/rmqtt-web-hook/src/lib.rs", "rank": 88, "score": 97079.58235996259 }, { "content": "#[async_trait]\n\npub trait SharedSubscription: Sync + Send {\n\n ///Whether shared subscriptions are supported\n\n #[inline]\n\n fn is_supported(&self, listen_cfg: &Listener) -> bool {\n\n listen_cfg.shared_subscription\n\n }\n\n\n\n ///Shared subscription strategy, select a subscriber, default is \"random\"\n\n #[inline]\n\n async fn choice(\n\n &self,\n\n ncs: &[(\n\n NodeId,\n\n ClientId,\n\n SubscriptionOptions,\n\n Option<Vec<SubscriptionIdentifier>>,\n\n Option<IsOnline>,\n\n )],\n\n ) -> Option<(usize, IsOnline)> {\n\n if ncs.is_empty() {\n", "file_path": "rmqtt/src/broker/mod.rs", "rank": 89, "score": 96333.3696143463 }, { "content": "#[async_trait]\n\npub trait RetainStorage: Sync + Send {\n\n ///Whether retain is supported\n\n #[inline]\n\n fn is_supported(&self, listen_cfg: &Listener) -> bool {\n\n listen_cfg.retain_available\n\n }\n\n\n\n ///topic - concrete topic\n\n async fn set(&self, topic: &TopicName, retain: Retain) -> Result<()>;\n\n\n\n ///topic_filter - Topic filter\n\n async fn get(&self, topic_filter: &TopicFilter) -> Result<Vec<(TopicName, Retain)>>;\n\n\n\n ///\n\n fn count(&self) -> isize;\n\n\n\n ///\n\n fn max(&self) -> isize;\n\n}\n", "file_path": "rmqtt/src/broker/mod.rs", "rank": 90, "score": 96333.3696143463 }, { "content": "#[proc_macro_derive(Metrics)]\n\npub fn derive_metrics(input: 
proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n metrics::build(input)\n\n}\n", "file_path": "rmqtt-macros/src/lib.rs", "rank": 91, "score": 96188.29204007774 }, { "content": "type Cacheable = Option<i64>;\n\n\n", "file_path": "rmqtt-plugins/rmqtt-auth-http/src/lib.rs", "rank": 92, "score": 93334.55451990815 }, { "content": "type Queues = DequeMap<PacketId, InflightMessage>;\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]\n\npub enum MomentStatus {\n\n UnAck,\n\n UnReceived,\n\n UnComplete,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct InflightMessage {\n\n pub publish: Publish,\n\n pub from: From,\n\n pub status: MomentStatus,\n\n pub update_time: TimestampMillis,\n\n}\n\n\n\nimpl InflightMessage {\n\n #[inline]\n\n pub fn new(status: MomentStatus, from: From, publish: Publish) -> Self {\n", "file_path": "rmqtt/src/broker/inflight.rs", "rank": 93, "score": 91793.582413306 }, { "content": "type ShutdownTX = oneshot::Sender<()>;\n", "file_path": "rmqtt-plugins/rmqtt-http-api/src/lib.rs", "rank": 94, "score": 91425.96694989323 }, { "content": "use std::ops::{Deref, DerefMut};\n\nuse std::str::FromStr;\n\n\n\nuse serde::de::{self, Deserialize, Deserializer};\n\n\n\n#[derive(Debug, Clone, Deserialize)]\n\npub struct Log {\n\n #[serde(default = \"Log::to_default\")]\n\n pub to: To,\n\n #[serde(default = \"Log::level_default\")]\n\n pub level: Level,\n\n #[serde(default = \"Log::dir_default\")]\n\n pub dir: String,\n\n #[serde(default = \"Log::file_default\")]\n\n pub file: String,\n\n}\n\n\n\nimpl Default for Log {\n\n #[inline]\n\n fn default() -> Self {\n", "file_path": "rmqtt/src/settings/log.rs", "rank": 95, "score": 87587.84315395913 }, { "content": "\n\nimpl DerefMut for Level {\n\n #[inline]\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.inner\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Level {\n\n #[inline]\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let level = String::deserialize(deserializer)?;\n\n let level = slog::Level::from_str(&level).map_err(|_e| de::Error::missing_field(\"level\"))?;\n\n Ok(Level { inner: level })\n\n }\n\n}\n", "file_path": "rmqtt/src/settings/log.rs", "rank": 96, "score": 87583.4805016792 }, { "content": " }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for To {\n\n #[inline]\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let to = match (String::deserialize(deserializer)?).to_ascii_lowercase().as_str() {\n\n \"off\" => To::Off,\n\n \"file\" => To::File,\n\n \"console\" => To::Console,\n\n \"both\" => To::Both,\n\n _ => To::Both,\n\n };\n\n\n\n Ok(to)\n\n }\n\n}\n", "file_path": "rmqtt/src/settings/log.rs", "rank": 97, "score": 87570.7389756067 }, { "content": "\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Level {\n\n inner: slog::Level,\n\n}\n\n\n\nimpl Level {\n\n #[inline]\n\n pub fn inner(&self) -> slog::Level {\n\n self.inner\n\n }\n\n}\n\n\n\nimpl Deref for Level {\n\n type Target = slog::Level;\n\n #[inline]\n\n fn deref(&self) -> &Self::Target {\n\n &self.inner\n\n }\n\n}\n", "file_path": "rmqtt/src/settings/log.rs", "rank": 98, "score": 87565.74312244084 }, { "content": " Self {\n\n to: Self::to_default(),\n\n level: Self::level_default(),\n\n dir: Self::dir_default(),\n\n file: Self::file_default(),\n\n }\n\n }\n\n}\n\n\n\nimpl Log {\n\n #[inline]\n\n fn to_default() -> To {\n\n To::Console\n\n }\n\n #[inline]\n\n fn level_default() -> 
Level {\n\n Level { inner: slog::Level::Debug }\n\n }\n\n #[inline]\n\n fn dir_default() -> String {\n", "file_path": "rmqtt/src/settings/log.rs", "rank": 99, "score": 87560.60865194217 } ]
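A pattern worth noting in the retrieved snippets above: nearly every rmqtt module re-aliases HashMap, DashMap, and DashSet over ahash::RandomState. The sketch below is not taken from rmqtt; it is a minimal, self-contained illustration (only the crate names ahash and dashmap are taken from the snippets, everything else is invented) of why the aliases are written that way and why the snippets build maps with default() rather than new().

// Illustrative only -- not rmqtt code. Shows the ahash-backed alias pattern
// that recurs in the snippets above.
type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;
type DashMap<K, V> = dashmap::DashMap<K, V, ahash::RandomState>;

fn main() {
    // `new()` is only provided for the std default hasher, so a map with a
    // custom BuildHasher is created through `default()` (RandomState: Default).
    let mut subs: HashMap<String, usize> = HashMap::default();
    subs.insert("sensor/temperature".into(), 1);

    // DashMap offers a concurrent map with the same hasher type parameter.
    let online: DashMap<String, bool> = DashMap::default();
    online.insert("client-1".into(), true);

    println!("{:?} {:?}", subs.get("sensor/temperature"), online.get("client-1").map(|r| *r));
}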
Rust
geo/src/algorithm/relate/geomgraph/topology_position.rs
phreeheeler/geo
ab0fe46cec04ebe358d66455a6b73415c925b18d
use super::{CoordPos, Direction};
use std::fmt;

#[derive(Copy, Clone)]
pub(crate) enum TopologyPosition {
    Area {
        on: Option<CoordPos>,
        left: Option<CoordPos>,
        right: Option<CoordPos>,
    },
    LineOrPoint {
        on: Option<CoordPos>,
    },
}

impl fmt::Debug for TopologyPosition {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fn fmt_position(position: &Option<CoordPos>, f: &mut fmt::Formatter) -> fmt::Result {
            match position {
                Some(CoordPos::Inside) => write!(f, "i"),
                Some(CoordPos::OnBoundary) => write!(f, "b"),
                Some(CoordPos::Outside) => write!(f, "e"),
                None => write!(f, "_"),
            }
        }
        match self {
            Self::LineOrPoint { on } => fmt_position(on, f)?,
            Self::Area { on, left, right } => {
                fmt_position(left, f)?;
                fmt_position(on, f)?;
                fmt_position(right, f)?;
            }
        }
        Ok(())
    }
}

impl TopologyPosition {
    pub fn area(on: CoordPos, left: CoordPos, right: CoordPos) -> Self {
        Self::Area {
            on: Some(on),
            left: Some(left),
            right: Some(right),
        }
    }

    pub fn empty_area() -> Self {
        Self::Area {
            on: None,
            left: None,
            right: None,
        }
    }

    pub fn line_or_point(on: CoordPos) -> Self {
        Self::LineOrPoint { on: Some(on) }
    }

    pub fn empty_line_or_point() -> Self {
        Self::LineOrPoint { on: None }
    }

    pub fn get(&self, direction: Direction) -> Option<CoordPos> {
        match (direction, self) {
            (Direction::Left, Self::Area { left, .. }) => *left,
            (Direction::Right, Self::Area { right, .. }) => *right,
            (Direction::On, Self::LineOrPoint { on }) | (Direction::On, Self::Area { on, .. }) => {
                *on
            }
            (_, Self::LineOrPoint { .. }) => {
                panic!("LineOrPoint only has a position for `Direction::On`")
            }
        }
    }

    pub fn is_empty(&self) -> bool {
        matches!(
            self,
            Self::LineOrPoint { on: None }
                | Self::Area {
                    on: None,
                    left: None,
                    right: None,
                }
        )
    }

    pub fn is_any_empty(&self) -> bool {
        !matches!(
            self,
            Self::LineOrPoint { on: Some(_) }
                | Self::Area {
                    on: Some(_),
                    left: Some(_),
                    right: Some(_),
                }
        )
    }

    pub fn is_area(&self) -> bool {
        matches!(self, Self::Area { .. })
    }

    pub fn is_line(&self) -> bool {
        matches!(self, Self::LineOrPoint { .. })
    }

    pub fn flip(&mut self) {
        match self {
            Self::LineOrPoint { .. } => {}
            Self::Area { left, right, .. } => {
                std::mem::swap(left, right);
            }
        }
    }

    pub fn set_all_positions(&mut self, position: CoordPos) {
        match self {
            Self::LineOrPoint { on } => {
                *on = Some(position);
            }
            Self::Area { on, left, right } => {
                *on = Some(position);
                *left = Some(position);
                *right = Some(position);
            }
        }
    }

    pub fn set_all_positions_if_empty(&mut self, position: CoordPos) {
        match self {
            Self::LineOrPoint { on } => {
                if on.is_none() {
                    *on = Some(position);
                }
            }
            Self::Area { on, left, right } => {
                if on.is_none() {
                    *on = Some(position);
                }
                if left.is_none() {
                    *left = Some(position);
                }
                if right.is_none() {
                    *right = Some(position);
                }
            }
        }
    }

    pub fn set_position(&mut self, direction: Direction, position: CoordPos) {
        match (direction, self) {
            (Direction::On, Self::LineOrPoint { on }) => *on = Some(position),
            (_, Self::LineOrPoint { .. }) => {
                panic!("invalid assignment dimensions for Self::Line")
            }
            (Direction::On, Self::Area { on, .. }) => *on = Some(position),
            (Direction::Left, Self::Area { left, .. }) => *left = Some(position),
            (Direction::Right, Self::Area { right, .. }) => *right = Some(position),
        }
    }

    pub fn set_on_position(&mut self, position: CoordPos) {
        match self {
            Self::LineOrPoint { on } | Self::Area { on, .. } => {
                *on = Some(position);
            }
        }
    }

    pub fn set_locations(&mut self, new_on: CoordPos, new_left: CoordPos, new_right: CoordPos) {
        match self {
            Self::LineOrPoint { .. } => {
                error!("invalid assignment dimensions for {:?}", self);
                debug_assert!(false, "invalid assignment dimensions for {:?}", self);
            }
            Self::Area { on, left, right } => {
                *on = Some(new_on);
                *left = Some(new_left);
                *right = Some(new_right);
            }
        }
    }
}
use super::{CoordPos, Direction}; use std::fmt; #[derive(Copy, Clone)] pub(crate) enum TopologyPosition { Area { on: Option<CoordPos>, left: Option<CoordPos>, right: Option<CoordPos>, }, LineOrPoint { on: Option<CoordPos>, }, } impl fmt::Debug for TopologyPosition { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt_position(position: &Option<CoordPos>, f: &mut fmt::Formatter) -> fmt::Result { match position { Some(CoordPos::Inside) => write!(f, "i"), Some(CoordPos::OnBoundary) => write!(f, "b"), Some(CoordPos::Outside) => write!(f, "e"), None => write!(f, "_"), } } match self { Self::LineOrPoint { on } => fmt_position(on, f)?, Self::Area { on, left, right } => { fmt_position(left, f)?; fmt_position(on, f)?; fmt_position(right, f)?; } } Ok(()) } } impl TopologyPosition { pub fn area(on: CoordPos, left: CoordPos, right: CoordPos) -> Self { Self::Area { on: Some(on), left: Some(left), right: Some(right), } } pub fn empty_area() -> Self { Self::Area { on: None, left: None, right: None, } } pub fn line_or_point(on: CoordPos) -> Self { Self::LineOrPoint { on: Some(on) } } pub fn empty_line_or_point() -> Self { Self::LineOrPoint { on: None } } pub fn get(&self, direction: Direction) -> Option<CoordPos> { match (direction, self) { (Direction::Left, Self::Area { left, .. }) => *left, (Direction::Right, Self::Area { right, .. }) => *right, (Direction::On, Self::LineOrPoint { on }) | (Direction::On, Self::Area { on, .. }) => { *on } (_, Self::LineOrPoint { .. }) => { panic!("LineOrPoint only has a position for `Direction::On`") } } } pub fn is_empty(&self) -> bool { matches!( self, Self::LineOrPoint { on: None } | Self::Area { on: None, left: None, right: None, } ) } pub fn is_any_empty(&self) -> bool { !matches!( self, Self::LineOrPoint { on: Some(_) } | Self::Area { on: Some(_), left: Some(_), right: Some(_), } ) } pub fn is_area(&self) -> bool { matches!(self, Self::Area { .. }) } pub fn is_line(&self) -> bool { matches!(self, Self::LineOrPoint { .. }) } pub fn flip(&mut self) { match self { Self::LineOrPoint { .. } => {} Self::Area { left, right, .. } => { std::mem::swap(left, right); } } }
pub fn set_all_positions_if_empty(&mut self, position: CoordPos) { match self { Self::LineOrPoint { on } => { if on.is_none() { *on = Some(position); } } Self::Area { on, left, right } => { if on.is_none() { *on = Some(position); } if left.is_none() { *left = Some(position); } if right.is_none() { *right = Some(position); } } } } pub fn set_position(&mut self, direction: Direction, position: CoordPos) { match (direction, self) { (Direction::On, Self::LineOrPoint { on }) => *on = Some(position), (_, Self::LineOrPoint { .. }) => { panic!("invalid assignment dimensions for Self::Line") } (Direction::On, Self::Area { on, .. }) => *on = Some(position), (Direction::Left, Self::Area { left, .. }) => *left = Some(position), (Direction::Right, Self::Area { right, .. }) => *right = Some(position), } } pub fn set_on_position(&mut self, position: CoordPos) { match self { Self::LineOrPoint { on } | Self::Area { on, .. } => { *on = Some(position); } } } pub fn set_locations(&mut self, new_on: CoordPos, new_left: CoordPos, new_right: CoordPos) { match self { Self::LineOrPoint { .. } => { error!("invalid assignment dimensions for {:?}", self); debug_assert!(false, "invalid assignment dimensions for {:?}", self); } Self::Area { on, left, right } => { *on = Some(new_on); *left = Some(new_left); *right = Some(new_right); } } } }
pub fn set_all_positions(&mut self, position: CoordPos) {
    match self {
        Self::LineOrPoint { on } => {
            *on = Some(position);
        }
        Self::Area { on, left, right } => {
            *on = Some(position);
            *left = Some(position);
            *right = Some(position);
        }
    }
}
function_block-full_function
[]
Rust
crates/tor-chanmgr/src/event.rs
Mohsen7s/arti
1dbb23982fa36d431f2dd2b501cfc043438d9de5
#![allow(dead_code, unreachable_pub)] use futures::{Stream, StreamExt}; use postage::watch; use std::{ fmt, time::{Duration, Instant}, }; #[derive(Default, Debug, Clone)] pub struct ConnStatus { online: Option<bool>, tls_works: Option<bool>, } #[derive(Debug, Clone, Eq, PartialEq, derive_more::Display)] #[non_exhaustive] pub enum ConnBlockage { #[display(fmt = "unable to connect to the internet")] NoTcp, #[display(fmt = "our internet connection seems to be filtered")] NoHandshake, } impl ConnStatus { fn eq(&self, other: &ConnStatus) -> bool { self.online == other.online && self.tls_works == other.tls_works } pub fn usable(&self) -> bool { self.online == Some(true) && self.tls_works == Some(true) } pub fn frac(&self) -> f32 { match self { Self { online: Some(true), tls_works: Some(true), } => 1.0, Self { online: Some(true), .. } => 0.5, _ => 0.0, } } pub fn blockage(&self) -> Option<ConnBlockage> { match self { Self { online: Some(false), .. } => Some(ConnBlockage::NoTcp), Self { tls_works: Some(false), .. } => Some(ConnBlockage::NoHandshake), _ => None, } } } impl fmt::Display for ConnStatus { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ConnStatus { online: None, .. } => write!(f, "connecting to the internet"), ConnStatus { online: Some(false), .. } => write!(f, "unable to connect to the internet"), ConnStatus { tls_works: None, .. } => write!(f, "handshaking with Tor relays"), ConnStatus { tls_works: Some(false), .. } => write!(f, "unable to handshake with Tor relays"), ConnStatus { online: Some(true), tls_works: Some(true), } => write!(f, "connecting successfully"), } } } #[derive(Clone)] pub struct ConnStatusEvents { inner: watch::Receiver<ConnStatus>, } impl fmt::Debug for ConnStatusEvents { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("ConnStatusEvents").finish_non_exhaustive() } } impl Stream for ConnStatusEvents { type Item = ConnStatus; fn poll_next( mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Option<Self::Item>> { self.inner.poll_next_unpin(cx) } } #[derive(Debug, Clone)] struct ChanMgrStatus { startup: Instant, n_attempts: usize, last_tcp_success: Option<Instant>, last_tls_success: Option<Instant>, last_chan_success: Option<Instant>, } impl ChanMgrStatus { fn new_at(now: Instant) -> ChanMgrStatus { ChanMgrStatus { startup: now, n_attempts: 0, last_tcp_success: None, last_tls_success: None, last_chan_success: None, } } fn conn_status_at(&self, now: Instant) -> ConnStatus { const MIN_DURATION: Duration = Duration::from_secs(60); const MIN_ATTEMPTS: usize = 6; let early = now < self.startup + MIN_DURATION || self.n_attempts < MIN_ATTEMPTS; let online = match (self.last_tcp_success.is_some(), early) { (true, _) => Some(true), (_, true) => None, (false, false) => Some(false), }; let tls_works = match (self.last_chan_success.is_some(), early) { (true, _) => Some(true), (_, true) => None, (false, false) => Some(false), }; ConnStatus { online, tls_works } } fn record_attempt(&mut self) { self.n_attempts += 1; } fn record_tcp_success(&mut self, now: Instant) { self.last_tcp_success = Some(now); } fn record_tls_finished(&mut self, now: Instant) { self.last_tls_success = Some(now); } fn record_handshake_done(&mut self, now: Instant) { self.last_chan_success = Some(now); } } pub(crate) struct ChanMgrEventSender { last_conn_status: ConnStatus, mgr_status: ChanMgrStatus, sender: watch::Sender<ConnStatus>, } impl ChanMgrEventSender { fn push_at(&mut self, now: Instant) { let status = 
self.mgr_status.conn_status_at(now); if !status.eq(&self.last_conn_status) { self.last_conn_status = status.clone(); let mut b = self.sender.borrow_mut(); *b = status; } } pub(crate) fn record_attempt(&mut self) { self.mgr_status.record_attempt(); self.push_at(Instant::now()); } pub(crate) fn record_tcp_success(&mut self) { let now = Instant::now(); self.mgr_status.record_tcp_success(now); self.push_at(now); } pub(crate) fn record_tls_finished(&mut self) { let now = Instant::now(); self.mgr_status.record_tls_finished(now); self.push_at(now); } pub(crate) fn record_handshake_done(&mut self) { let now = Instant::now(); self.mgr_status.record_handshake_done(now); self.push_at(now); } } pub(crate) fn channel() -> (ChanMgrEventSender, ConnStatusEvents) { let (sender, receiver) = watch::channel(); let receiver = ConnStatusEvents { inner: receiver }; let sender = ChanMgrEventSender { last_conn_status: ConnStatus::default(), mgr_status: ChanMgrStatus::new_at(Instant::now()), sender, }; (sender, receiver) } #[cfg(test)] #[allow(clippy::unwrap_used, clippy::cognitive_complexity)] mod test { use super::*; use float_eq::assert_float_eq; const TOL: f32 = 0.00001; #[test] fn status_basics() { let s1 = ConnStatus::default(); assert_eq!(s1.to_string(), "connecting to the internet"); assert_float_eq!(s1.frac(), 0.0, abs <= TOL); assert!(s1.eq(&s1)); assert!(s1.blockage().is_none()); let s2 = ConnStatus { online: Some(false), tls_works: None, }; assert_eq!(s2.to_string(), "unable to connect to the internet"); assert_float_eq!(s2.frac(), 0.0, abs <= TOL); assert!(s2.eq(&s2)); assert!(!s2.eq(&s1)); assert_eq!(s2.blockage(), Some(ConnBlockage::NoTcp)); assert_eq!( s2.blockage().unwrap().to_string(), "unable to connect to the internet" ); let s3 = ConnStatus { online: Some(true), tls_works: None, }; assert_eq!(s3.to_string(), "handshaking with Tor relays"); assert_float_eq!(s3.frac(), 0.5, abs <= TOL); assert_eq!(s3.blockage(), None); assert!(!s3.eq(&s1)); let s4 = ConnStatus { online: Some(true), tls_works: Some(false), }; assert_eq!(s4.to_string(), "unable to handshake with Tor relays"); assert_float_eq!(s4.frac(), 0.5, abs <= TOL); assert_eq!(s4.blockage(), Some(ConnBlockage::NoHandshake)); assert_eq!( s4.blockage().unwrap().to_string(), "our internet connection seems to be filtered" ); assert!(!s4.eq(&s1)); assert!(!s4.eq(&s2)); assert!(!s4.eq(&s3)); assert!(s4.eq(&s4)); let s5 = ConnStatus { online: Some(true), tls_works: Some(true), }; assert_eq!(s5.to_string(), "connecting successfully"); assert_float_eq!(s5.frac(), 1.0, abs <= TOL); assert!(s5.blockage().is_none()); assert!(s5.eq(&s5)); assert!(!s5.eq(&s4)); } #[test] fn derive_status() { let start = Instant::now(); let sec = Duration::from_secs(1); let hour = Duration::from_secs(3600); let mut ms = ChanMgrStatus::new_at(start); let s0 = ms.conn_status_at(start); assert!(s0.online.is_none()); assert!(s0.tls_works.is_none()); let s = ms.conn_status_at(start + hour); assert!(s.eq(&s0)); for _ in 0..10 { ms.record_attempt(); } let s = ms.conn_status_at(start); assert!(s.eq(&s0)); let s = ms.conn_status_at(start + hour); assert_eq!(s.online, Some(false)); assert_eq!(s.tls_works, Some(false)); ms.record_tcp_success(start + sec); let s = ms.conn_status_at(start + sec * 2); assert_eq!(s.online, Some(true)); assert!(s.tls_works.is_none()); let s = ms.conn_status_at(start + hour); assert_eq!(s.online, Some(true)); assert_eq!(s.tls_works, Some(false)); ms.record_handshake_done(start + sec * 2); let s = ms.conn_status_at(start + sec * 3); assert_eq!(s.online, 
Some(true)); assert_eq!(s.tls_works, Some(true)); } #[test] fn sender() { let (mut snd, rcv) = channel(); { let s = rcv.inner.borrow().clone(); assert_float_eq!(s.frac(), 0.0, abs <= TOL); } snd.record_attempt(); snd.record_tcp_success(); snd.record_tls_finished(); snd.record_handshake_done(); { let s = rcv.inner.borrow().clone(); assert_float_eq!(s.frac(), 1.0, abs <= TOL); } } }
#![allow(dead_code, unreachable_pub)] use futures::{Stream, StreamExt}; use postage::watch; use std::{ fmt, time::{Duration, Instant}, }; #[derive(Default, Debug, Clone)] pub struct ConnStatus { online: Option<bool>, tls_works: Option<bool>, } #[derive(Debug, Clone, Eq, PartialEq, derive_more::Display)] #[non_exhaustive] pub enum ConnBlockage { #[display(fmt = "unable to connect to the internet")] NoTcp, #[display(fmt = "our internet connection seems to be filtered")] NoHandshake, } impl ConnStatus { fn eq(&self, other: &ConnStatus) -> bool { self.online == other.online && self.tls_works == other.tls_works } pub fn usable(&self) -> bool { self.online == Some(true) && self.tls_works == Some(true) } pub fn frac(&self) -> f32 { match self { Self { online: Some(true), tls_works: Some(true), } => 1.0, Self { online: Some(true), .. } => 0.5, _ => 0.0, } } pub fn blockage(&self) -> Option<ConnBlockage> { match self { Self { online: Some(false), .. } => Some(ConnBlockage::NoTcp), Self { tls_works: Some(false), .. } => Some(ConnBlockage::NoHandshake), _ => None, } } } impl fmt::Display for ConnStatus { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ConnStatus { online: None, .. } => write!(f, "connecting to the internet"), ConnStatus { online: Some(false), .. } => write!(f, "unable to connect to the internet"), ConnStatus { tls_works: None, .. } => write!(f, "handshaking with Tor relays"), ConnStatus { tls_works: Some(false), .. } => write!(f, "unable to handshake with Tor relays"), ConnStatus { online: Some(true), tls_works: Some(true), } => write!(f, "connecting successfully"), } } } #[derive(Clone)] pub struct ConnStatusEvents { inner: watch::Receiver<ConnStatus>, } impl fmt::Debug for ConnStatusEvents { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("ConnStatusEvents").finish_non_exhaustive() } } impl Stream for ConnStatusEvents { type Item = ConnStatus; fn poll_next( mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Option<Self::Item>> { self.inner.poll_next_unpin(cx) } } #[derive(Debug, Clone)] struct ChanMgrStatus { startup: Instant, n_attempts: usize, last_tcp_success: Option<Instant>, last_tls_success: Option<Instant>, last_chan_success: Option<Instant>, } impl ChanMgrStatus { fn new_at(now: Instant) -> ChanMgrStatus { ChanMgrStatus { startup: now, n_attempts: 0, last_tcp_success: None, last_tls_success: None, last_chan_success: None, } } fn conn_status_at(&self, now: Instant) -> ConnStatus { const MIN_DURATION: Duration = Duration::from_secs(60); const MIN_ATTEMPTS: usize = 6; let early = now < self.startup + MIN_DURATION || self.n_attempts < MIN_ATTEMPTS; let online = match (self.last_tcp_success.is_some(), early) { (true, _) => Some(true), (_, true) => None, (false, false) => Some(false), }; let tls_works =
; ConnStatus { online, tls_works } } fn record_attempt(&mut self) { self.n_attempts += 1; } fn record_tcp_success(&mut self, now: Instant) { self.last_tcp_success = Some(now); } fn record_tls_finished(&mut self, now: Instant) { self.last_tls_success = Some(now); } fn record_handshake_done(&mut self, now: Instant) { self.last_chan_success = Some(now); } } pub(crate) struct ChanMgrEventSender { last_conn_status: ConnStatus, mgr_status: ChanMgrStatus, sender: watch::Sender<ConnStatus>, } impl ChanMgrEventSender { fn push_at(&mut self, now: Instant) { let status = self.mgr_status.conn_status_at(now); if !status.eq(&self.last_conn_status) { self.last_conn_status = status.clone(); let mut b = self.sender.borrow_mut(); *b = status; } } pub(crate) fn record_attempt(&mut self) { self.mgr_status.record_attempt(); self.push_at(Instant::now()); } pub(crate) fn record_tcp_success(&mut self) { let now = Instant::now(); self.mgr_status.record_tcp_success(now); self.push_at(now); } pub(crate) fn record_tls_finished(&mut self) { let now = Instant::now(); self.mgr_status.record_tls_finished(now); self.push_at(now); } pub(crate) fn record_handshake_done(&mut self) { let now = Instant::now(); self.mgr_status.record_handshake_done(now); self.push_at(now); } } pub(crate) fn channel() -> (ChanMgrEventSender, ConnStatusEvents) { let (sender, receiver) = watch::channel(); let receiver = ConnStatusEvents { inner: receiver }; let sender = ChanMgrEventSender { last_conn_status: ConnStatus::default(), mgr_status: ChanMgrStatus::new_at(Instant::now()), sender, }; (sender, receiver) } #[cfg(test)] #[allow(clippy::unwrap_used, clippy::cognitive_complexity)] mod test { use super::*; use float_eq::assert_float_eq; const TOL: f32 = 0.00001; #[test] fn status_basics() { let s1 = ConnStatus::default(); assert_eq!(s1.to_string(), "connecting to the internet"); assert_float_eq!(s1.frac(), 0.0, abs <= TOL); assert!(s1.eq(&s1)); assert!(s1.blockage().is_none()); let s2 = ConnStatus { online: Some(false), tls_works: None, }; assert_eq!(s2.to_string(), "unable to connect to the internet"); assert_float_eq!(s2.frac(), 0.0, abs <= TOL); assert!(s2.eq(&s2)); assert!(!s2.eq(&s1)); assert_eq!(s2.blockage(), Some(ConnBlockage::NoTcp)); assert_eq!( s2.blockage().unwrap().to_string(), "unable to connect to the internet" ); let s3 = ConnStatus { online: Some(true), tls_works: None, }; assert_eq!(s3.to_string(), "handshaking with Tor relays"); assert_float_eq!(s3.frac(), 0.5, abs <= TOL); assert_eq!(s3.blockage(), None); assert!(!s3.eq(&s1)); let s4 = ConnStatus { online: Some(true), tls_works: Some(false), }; assert_eq!(s4.to_string(), "unable to handshake with Tor relays"); assert_float_eq!(s4.frac(), 0.5, abs <= TOL); assert_eq!(s4.blockage(), Some(ConnBlockage::NoHandshake)); assert_eq!( s4.blockage().unwrap().to_string(), "our internet connection seems to be filtered" ); assert!(!s4.eq(&s1)); assert!(!s4.eq(&s2)); assert!(!s4.eq(&s3)); assert!(s4.eq(&s4)); let s5 = ConnStatus { online: Some(true), tls_works: Some(true), }; assert_eq!(s5.to_string(), "connecting successfully"); assert_float_eq!(s5.frac(), 1.0, abs <= TOL); assert!(s5.blockage().is_none()); assert!(s5.eq(&s5)); assert!(!s5.eq(&s4)); } #[test] fn derive_status() { let start = Instant::now(); let sec = Duration::from_secs(1); let hour = Duration::from_secs(3600); let mut ms = ChanMgrStatus::new_at(start); let s0 = ms.conn_status_at(start); assert!(s0.online.is_none()); assert!(s0.tls_works.is_none()); let s = ms.conn_status_at(start + hour); assert!(s.eq(&s0)); for _ in 
0..10 { ms.record_attempt(); } let s = ms.conn_status_at(start); assert!(s.eq(&s0)); let s = ms.conn_status_at(start + hour); assert_eq!(s.online, Some(false)); assert_eq!(s.tls_works, Some(false)); ms.record_tcp_success(start + sec); let s = ms.conn_status_at(start + sec * 2); assert_eq!(s.online, Some(true)); assert!(s.tls_works.is_none()); let s = ms.conn_status_at(start + hour); assert_eq!(s.online, Some(true)); assert_eq!(s.tls_works, Some(false)); ms.record_handshake_done(start + sec * 2); let s = ms.conn_status_at(start + sec * 3); assert_eq!(s.online, Some(true)); assert_eq!(s.tls_works, Some(true)); } #[test] fn sender() { let (mut snd, rcv) = channel(); { let s = rcv.inner.borrow().clone(); assert_float_eq!(s.frac(), 0.0, abs <= TOL); } snd.record_attempt(); snd.record_tcp_success(); snd.record_tls_finished(); snd.record_handshake_done(); { let s = rcv.inner.borrow().clone(); assert_float_eq!(s.frac(), 1.0, abs <= TOL); } } }
match (self.last_chan_success.is_some(), early) {
    (true, _) => Some(true),
    (_, true) => None,
    (false, false) => Some(false),
}
if_condition
[ { "content": "pub fn create_runtime() -> std::io::Result<impl Runtime> {\n\n PreferredRuntime::create()\n\n}\n\n\n\n/// Helpers for test_with_all_runtimes\n\npub mod testing__ {\n", "file_path": "crates/tor-rtcompat/src/lib.rs", "rank": 0, "score": 283516.07034823194 }, { "content": "/// Shared structure to implement [`FlagPublisher`] and [`FlagListener`].\n\nstruct Inner<F> {\n\n /// An event that we use to broadcast whenever a new [`FlagEvent`] event has occurred.\n\n event: event_listener::Event,\n\n /// How many times has each event occurred, ever.\n\n ///\n\n /// (It is safe for this to wrap around.)\n\n // TODO(nickm): I wish this could be an array, but const generics don't\n\n // quite support that yet.\n\n counts: Vec<AtomicUsize>, // I wish this could be an array.\n\n /// How many publishers remain?\n\n n_publishers: AtomicUsize,\n\n /// Phantom member to provide correct covariance.\n\n ///\n\n /// The `fn` business is a covariance trick to include `F` without affecting\n\n /// this object's Send/Sync status.\n\n _phantom: PhantomData<fn(F) -> F>,\n\n}\n\n\n\n/// A [`Stream`] that returns a series of event [`FlagEvent`]s broadcast by a\n\n/// [`FlagPublisher`].\n", "file_path": "crates/tor-dirmgr/src/event.rs", "rank": 1, "score": 280294.30294495204 }, { "content": "pub fn current_user_runtime() -> std::io::Result<impl Runtime> {\n\n PreferredRuntime::current()\n\n}\n\n\n\n/// Return a new instance of the default [`Runtime`].\n\n///\n\n/// Generally you should call this function at most once, and then use\n\n/// [`Clone::clone()`] to create additional references to that runtime.\n\n///\n\n/// Tokio users may want to avoid this function and instead make a runtime using\n\n/// [`current_user_runtime()`] or [`tokio::PreferredRuntime::current()`]: this\n\n/// function always _builds_ a runtime, and if you already have a runtime, that\n\n/// isn't what you want with Tokio.\n\n///\n\n/// If you need more fine-grained control over a runtime, you can create it\n\n/// using an appropriate builder type or function.\n\n///\n\n/// This function returns a type-erased `impl Runtime` rather than a specific\n\n/// runtime implementation, so that you can be sure that your code doesn't\n\n/// depend on any runtime-specific features. If that's not what you want, you\n\n/// can call [`PreferredRuntime::create`], or the `create` function on some\n\n/// specific runtime in the `tokio` or `async_std` modules.\n\n#[cfg(all(\n\n any(feature = \"native-tls\", feature = \"rustls\"),\n\n any(feature = \"async-std\", feature = \"tokio\")\n\n))]\n", "file_path": "crates/tor-rtcompat/src/lib.rs", "rank": 2, "score": 279124.8613828515 }, { "content": "/// Build a fake network with enough information to enable some basic\n\n/// tests.\n\n///\n\n/// By default, the constructed network will contain 40 relays,\n\n/// numbered 0 through 39. They will have with RSA and Ed25519\n\n/// identity fingerprints set to 0x0000...00 through 0x2727...27.\n\n/// Each pair of relays is in a family with one another: 0x00..00 with\n\n/// 0x01..01, and so on.\n\n///\n\n/// All relays are marked as usable. The first ten are marked with no\n\n/// additional flags. The next ten are marked with the exit flag.\n\n/// The next ten are marked with the guard flag. The last ten are\n\n/// marked with the exit _and_ guard flags.\n\n///\n\n/// TAP and Ntor onion keys are present, but unusable.\n\n///\n\n/// Odd-numbered exit relays are set to allow ports 80 and 443 on\n\n/// IPv4. 
Even-numbered exit relays are set to allow ports 1-65535\n\n/// on IPv4. No exit relays are marked to support IPv6.\n\n///\n\n/// Even-numbered relays support the `DirCache=2` protocol.\n\n///\n\n/// Every relay is given a measured weight based on its position\n\n/// within its group of ten. The weights for the ten relays in each\n\n/// group are: 1000, 2000, 3000, ... 10000. There is no additional\n\n/// flag-based bandwidth weighting.\n\n///\n\n/// The consensus is declared as using method 34, and as being valid for\n\n/// one day (in realtime) after the current `SystemTime`.\n\n///\n\n/// # Customization\n\n///\n\n/// Before each relay is added to the consensus or the network, it is\n\n/// passed through the provided filtering function. This function\n\n/// receives as its arguments the current index (in range 0..40), a\n\n/// [`RouterStatusBuilder`], and a [`MicrodescBuilder`]. If it\n\n/// returns a `RouterStatusBuilder`, the corresponding router status\n\n/// is added to the consensus. If it returns a `MicrodescBuilder`,\n\n/// the corresponding microdescriptor is added to the vector of\n\n/// microdescriptor.\n\n///\n\n/// # Notes for future expansion\n\n///\n\n/// _Resist the temptation to make unconditional changes to this\n\n/// function._ If the network generated by this function gets more and\n\n/// more complex, then it will become harder and harder over time to\n\n/// make it support new test cases and new behavior, and eventually\n\n/// we'll have to throw the whole thing away. (We ran into this\n\n/// problem with Tor's unit tests.)\n\n///\n\n/// Instead, refactor this function so that it takes a\n\n/// description of what kind of network to build, and then builds it from\n\n/// that description.\n\npub fn construct_custom_network<F>(mut func: F) -> Result<(MdConsensus, Vec<Microdesc>)>\n\nwhere\n\n F: FnMut(usize, &mut NodeBuilders),\n\n{\n\n let f = RelayFlags::RUNNING | RelayFlags::VALID | RelayFlags::V2DIR;\n\n // define 4 groups of flags\n\n let flags = [\n\n f,\n\n f | RelayFlags::EXIT,\n\n f | RelayFlags::GUARD,\n\n f | RelayFlags::EXIT | RelayFlags::GUARD,\n\n ];\n\n\n\n let now = SystemTime::now();\n\n let one_day = Duration::new(86400, 0);\n\n let mut bld = MdConsensus::builder();\n\n bld.consensus_method(34)\n\n .lifetime(Lifetime::new(now, now + one_day / 2, now + one_day)?)\n\n .param(\"bwweightscale\", 1)\n\n .weights(\"\".parse()?);\n", "file_path": "crates/tor-netdir/src/testnet.rs", "rank": 3, "score": 273037.59601717885 }, { "content": "#[cfg(feature = \"traffic-timestamp\")]\n\npub fn time_since_last_incoming_traffic() -> std::time::Duration {\n\n LAST_INCOMING_TRAFFIC.time_since_update().into()\n\n}\n", "file_path": "crates/tor-proto/src/lib.rs", "rank": 4, "score": 266746.9792653633 }, { "content": "/// Create and return a new `async_std` runtime.\n\npub fn create_runtime() -> async_executors::AsyncStd {\n\n async_executors::AsyncStd::new()\n\n}\n\n\n\nimpl SleepProvider for async_executors::AsyncStd {\n\n type SleepFuture = Pin<Box<dyn Future<Output = ()> + Send + 'static>>;\n\n fn sleep(&self, duration: Duration) -> Self::SleepFuture {\n\n Box::pin(async_io::Timer::after(duration).map(|_| ()))\n\n }\n\n}\n\n\n\nimpl BlockOn for async_executors::AsyncStd {\n\n fn block_on<F: Future>(&self, f: F) -> F::Output {\n\n async_executors::AsyncStd::block_on(f)\n\n }\n\n}\n", "file_path": "crates/tor-rtcompat/src/impls/async_std.rs", "rank": 5, "score": 258660.719111151 }, { "content": "/// Returns true if both relays can appear together in the same 
circuit.\n\nfn relays_can_share_circuit(a: &Relay<'_>, b: &Relay<'_>, subnet_config: SubnetConfig) -> bool {\n\n !a.in_same_family(b) && !a.in_same_subnet(b, &subnet_config)\n\n}\n\n\n", "file_path": "crates/tor-circmgr/src/path/exitpath.rs", "rank": 6, "score": 256222.06327991566 }, { "content": "/// A Keyword identifies the possible types of a keyword for an Item.\n\n///\n\n/// These do not map one-to-one to Item strings: several Item strings\n\n/// may be placed in a single Keyword -- for example, when their order\n\n/// is significant with respect to one another, like \"accept\" and\n\n/// \"reject\" in router descriptors.\n\n///\n\n/// Every keyword has an \"index\", which is a small number suitable for\n\n/// indexing an array. These are used in Section and SectionRules.\n\n///\n\n/// Turning a string into a keyword cannot fail: there is always an\n\n/// \"UNRECOGNIZED\" keyword.\n\n///\n\n/// See macro::decl_keyword! for help defining a Keyword type for a\n\n/// network document.\n\n///\n\n/// TODO: I'd rather have this be pub(crate), but I haven't figured out\n\n/// how to make that work; there is a cascading change of other stuff that\n\n/// would need to be more hidden.\n\npub trait Keyword: Hash + Eq + PartialEq + Copy + Clone {\n\n /// Find a Keyword corresponding to a string that appears in a\n\n /// network document.\n\n fn from_str(s: &str) -> Self;\n\n /// Try to find the keyword corresponding to a given index value,\n\n /// as used in Section and SectionRules.\n\n fn from_idx(i: usize) -> Option<Self>;\n\n /// Find a string corresponding to this keyword. This may not be the\n\n /// actual string from the document; it is intended for reporting errors.\n\n fn to_str(self) -> &'static str;\n\n /// Return the index for this keyword.\n\n fn idx(self) -> usize;\n\n /// Return the number of indices for this keyword.\n\n fn n_vals() -> usize;\n\n /// Return the \"UNRECOGNIZED\" keyword.\n\n fn unrecognized() -> Self;\n\n /// Return the \"ANN_UNRECOGNIZED\" keyword.\n\n fn ann_unrecognized() -> Self;\n\n /// Return true iff this keyword denotes an annotation.\n\n fn is_annotation(self) -> bool;\n", "file_path": "crates/tor-netdoc/src/parse/keyword.rs", "rank": 7, "score": 254959.50412162463 }, { "content": "/// Helper: wraps relays_can_share_circuit but takes an option.\n\nfn relays_can_share_circuit_opt(r1: &Relay<'_>, r2: Option<&Relay<'_>>, c: SubnetConfig) -> bool {\n\n match r2 {\n\n Some(r2) => relays_can_share_circuit(r1, r2, c),\n\n None => true,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #![allow(clippy::unwrap_used)]\n\n #![allow(clippy::clone_on_copy)]\n\n use super::*;\n\n use crate::path::{assert_same_path_when_owned, OwnedPath, TorPathInner};\n\n use crate::test::OptDummyGuardMgr;\n\n use std::collections::HashSet;\n\n use std::convert::TryInto;\n\n use tor_linkspec::ChanTarget;\n\n use tor_netdir::testnet;\n\n\n\n fn assert_exit_path_ok(relays: &[Relay<'_>]) {\n", "file_path": "crates/tor-circmgr/src/path/exitpath.rs", "rank": 8, "score": 246545.45227592532 }, { "content": "/// Return true iff 's' is a valid keyword or annotation.\n\n///\n\n/// (Only allow annotations if `anno_ok` is true.`\n\nfn keyword_ok(mut s: &str, anno_ok: bool) -> bool {\n\n /// Helper: return true if this character can appear in keywords.\n\n fn kwd_char_ok(c: char) -> bool {\n\n matches!(c,'A'..='Z' | 'a'..='z' |'0'..='9' | '-')\n\n }\n\n\n\n if s.is_empty() {\n\n return false;\n\n }\n\n if anno_ok && s.starts_with('@') {\n\n s = &s[1..];\n\n }\n\n if s.starts_with('-') {\n\n return 
false;\n\n }\n\n s.chars().all(kwd_char_ok)\n\n}\n\n\n", "file_path": "crates/tor-netdoc/src/parse/tokenize.rs", "rank": 9, "score": 240657.36151603237 }, { "content": "/// Returns a boolean indicating whether the event `kind` has any subscribers (as in,\n\n/// whether `TorEventReceiver::subscribe` has been called with that event kind).\n\n///\n\n/// This is useful to avoid doing work to generate events that might be computationally expensive\n\n/// to generate.\n\npub fn event_has_subscribers(kind: TorEventKind) -> bool {\n\n EVENT_SUBSCRIBERS[kind as usize].load(Ordering::SeqCst) > 0\n\n}\n\n\n", "file_path": "crates/tor-events/src/lib.rs", "rank": 10, "score": 239410.4668608447 }, { "content": "/// Return true if `s` looks more like a consensus diff than some other kind\n\n/// of document.\n\npub fn looks_like_diff(s: &str) -> bool {\n\n s.starts_with(\"network-status-diff-version\")\n\n}\n\n\n\n/// Apply a given diff to an input text, and return the result from applying\n\n/// that diff.\n\n///\n\n/// This is a slow version, for testing and correctness checking. It uses\n\n/// an O(n) operation to apply diffs, and therefore runs in O(n^2) time.\n", "file_path": "crates/tor-consdiff/src/lib.rs", "rank": 11, "score": 236835.40180106502 }, { "content": "/// As [`construct_custom_network()`], but return a [`PartialNetDir`].\n\npub fn construct_custom_netdir<F>(func: F) -> Result<PartialNetDir>\n\nwhere\n\n F: FnMut(usize, &mut NodeBuilders),\n\n{\n\n let (consensus, microdescs) = construct_custom_network(func)?;\n\n let mut dir = PartialNetDir::new(consensus, None);\n\n for md in microdescs {\n\n dir.add_microdesc(md);\n\n }\n\n\n\n Ok(dir)\n\n}\n\n\n", "file_path": "crates/tor-netdir/src/testnet.rs", "rank": 12, "score": 234226.46023752657 }, { "content": "/// Construct a new pair of linked LocalStream objects.\n\n///\n\n/// Any bytes written to one will be readable on the other, and vice\n\n/// versa. These streams will behave more or less like a socketpair,\n\n/// except without actually going through the operating system.\n\n///\n\n/// Note that this implementation is intended for testing only, and\n\n/// isn't optimized.\n\npub fn stream_pair() -> (LocalStream, LocalStream) {\n\n let (w1, r2) = mpsc::channel(CAPACITY);\n\n let (w2, r1) = mpsc::channel(CAPACITY);\n\n let s1 = LocalStream {\n\n w: w1,\n\n r: r1,\n\n pending_bytes: Vec::new(),\n\n tls_cert: None,\n\n };\n\n let s2 = LocalStream {\n\n w: w2,\n\n r: r2,\n\n pending_bytes: Vec::new(),\n\n tls_cert: None,\n\n };\n\n (s1, s2)\n\n}\n\n\n\n/// One half of a pair of linked streams returned by [`stream_pair`].\n\n//\n", "file_path": "crates/tor-rtmock/src/io.rs", "rank": 13, "score": 231503.95711307018 }, { "content": "/// Perform a batch verification operation on the provided signatures\n\n///\n\n/// Return `true` if _every_ signature is valid; otherwise return `false`.\n\n///\n\n/// Note that the mathematics for batch validation are slightly\n\n/// different than those for normal one-signature validation. Because\n\n/// of this, it is possible for an ostensible signature that passes\n\n/// one validation algorithm might fail the other. 
(Well-formed\n\n/// signatures generated by a correct Ed25519 implementation will\n\n/// always pass both kinds of validation, and an attacker should not\n\n/// be able to forge a signature that passes either kind.)\n\npub fn validate_batch(sigs: &[&ValidatableEd25519Signature]) -> bool {\n\n use crate::pk::ValidatableSignature;\n\n if sigs.is_empty() {\n\n // ed25519_dalek has nonzero cost for a batch-verification of\n\n // zero sigs.\n\n true\n\n } else if sigs.len() == 1 {\n\n // Validating one signature in the traditional way is faster.\n\n sigs[0].is_valid()\n\n } else {\n\n let mut ed_msgs = Vec::new();\n\n let mut ed_sigs = Vec::new();\n\n let mut ed_pks = Vec::new();\n\n for ed_sig in sigs {\n\n let (pk, sig, msg) = ed_sig.as_parts();\n\n ed_sigs.push(*sig);\n\n ed_pks.push(*pk);\n\n ed_msgs.push(msg);\n\n }\n\n ed25519_dalek::verify_batch(&ed_msgs[..], &ed_sigs[..], &ed_pks[..]).is_ok()\n\n }\n\n}\n", "file_path": "crates/tor-llcrypto/src/pk/ed25519.rs", "rank": 14, "score": 225947.83463355392 }, { "content": "/// Skip this reader forward until the next thing it reads looks like the\n\n/// start of a router descriptor.\n\n///\n\n/// Used to recover from errors.\n\nfn advance_to_next_routerdesc(reader: &mut NetDocReader<'_, RouterKwd>, annotated: bool) {\n\n use RouterKwd::*;\n\n let iter = reader.iter();\n\n loop {\n\n let item = iter.peek();\n\n match item {\n\n Some(Ok(t)) => {\n\n let kwd = t.kwd();\n\n if (annotated && kwd.is_annotation()) || kwd == ROUTER {\n\n return;\n\n }\n\n }\n\n Some(Err(_)) => {\n\n // Skip over broken tokens.\n\n }\n\n None => {\n\n return;\n\n }\n\n }\n\n let _ = iter.next();\n", "file_path": "crates/tor-netdoc/src/doc/routerdesc.rs", "rank": 15, "score": 220851.9204182594 }, { "content": "/// Consume tokens from 'reader' until the next token is the beginning\n\n/// of a microdescriptor: an annotation or an ONION_KEY. 
If no such\n\n/// token exists, advance to the end of the reader.\n\nfn advance_to_next_microdesc(reader: &mut NetDocReader<'_, MicrodescKwd>, annotated: bool) {\n\n use MicrodescKwd::*;\n\n let iter = reader.iter();\n\n loop {\n\n let item = iter.peek();\n\n match item {\n\n Some(Ok(t)) => {\n\n let kwd = t.kwd();\n\n if (annotated && kwd.is_annotation()) || kwd == ONION_KEY {\n\n return;\n\n }\n\n }\n\n Some(Err(_)) => {\n\n // We skip over broken tokens here.\n\n }\n\n None => {\n\n return;\n\n }\n\n };\n\n let _ = iter.next();\n", "file_path": "crates/tor-netdoc/src/doc/microdesc.rs", "rank": 16, "score": 220846.68401452256 }, { "content": "/// Check whether all of the signatures in this Vec are valid.\n\n///\n\n/// Return `true` if every signature is valid; return `false` if even\n\n/// one is invalid.\n\n///\n\n/// This function should typically give the same result as just\n\n/// calling `v.iter().all(ValidatableSignature::is_valid))`, while taking\n\n/// advantage of batch verification to whatever extent possible.\n\n///\n\n/// (See [`ed25519::validate_batch`] for caveats.)\n\npub fn validate_all_sigs(v: &[Box<dyn ValidatableSignature>]) -> bool {\n\n // First we break out the ed25519 signatures (if any) so we can do\n\n // a batch-verification on them.\n\n let mut ed_sigs = Vec::new();\n\n let mut non_ed_sigs = Vec::new();\n\n for sig in v.iter() {\n\n match sig.as_ed25519() {\n\n Some(ed_sig) => ed_sigs.push(ed_sig),\n\n None => non_ed_sigs.push(sig),\n\n }\n\n }\n\n\n\n // Find out if the ed25519 batch is valid.\n\n let ed_batch_is_valid = crate::pk::ed25519::validate_batch(&ed_sigs[..]);\n\n\n\n // if so, verify the rest.\n\n ed_batch_is_valid && non_ed_sigs.iter().all(|b| b.is_valid())\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "crates/tor-llcrypto/src/pk.rs", "rank": 17, "score": 219461.53346113238 }, { "content": "#[cfg(feature = \"rustls\")]\n\ntype RustlsInner = CompoundRuntime<AsyncStd, AsyncStd, AsyncStd, RustlsProvider<TcpStream>>;\n\n\n\n#[cfg(feature = \"rustls\")]\n\ncrate::opaque::implement_opaque_runtime! 
{\n\n AsyncStdRustlsRuntime { inner: RustlsInner }\n\n}\n\n\n\n#[cfg(all(feature = \"native-tls\"))]\n\nimpl AsyncStdNativeTlsRuntime {\n\n /// Return a new [`AsyncStdNativeTlsRuntime`]\n\n ///\n\n /// Generally you should call this function only once, and then use\n\n /// [`Clone::clone()`] to create additional references to that\n\n /// runtime.\n\n pub fn create() -> IoResult<Self> {\n\n let rt = create_runtime_impl();\n\n Ok(AsyncStdNativeTlsRuntime {\n\n inner: CompoundRuntime::new(rt, rt, rt, NativeTlsProvider::default()),\n\n })\n\n }\n", "file_path": "crates/tor-rtcompat/src/async_std.rs", "rank": 18, "score": 218110.29860655533 }, { "content": "/// Return the interval after which we should retry a guard that has\n\n/// been failing for the last `failing`.\n\n///\n\n/// If the guard `is_primary`, we use a more aggressive retry schedule.\n\nfn retry_interval(is_primary: bool, failing: Duration) -> Duration {\n\n /// One minute.\n\n const MIN: Duration = Duration::from_secs(60);\n\n /// One hour.\n\n const HOUR: Duration = Duration::from_secs(60 * 60);\n\n /// One (normal) day.\n\n const DAY: Duration = Duration::from_secs(24 * 60 * 60);\n\n\n\n // TODO-SPEC: This matches tor, not guardspec.\n\n // TODO: Hardcoding this feels ugly.\n\n #[allow(clippy::collapsible_else_if)]\n\n if is_primary {\n\n if failing < 6 * HOUR {\n\n 10 * MIN\n\n } else if failing < 4 * DAY {\n\n 90 * MIN\n\n } else if failing < 7 * DAY {\n\n 4 * HOUR\n\n } else {\n\n 9 * HOUR\n", "file_path": "crates/tor-guardmgr/src/guard.rs", "rank": 19, "score": 217717.2472440701 }, { "content": "/// The client is about to make an INTRODUCE1 cell. Perform the first part of\n\n/// the client handshake.\n\n///\n\n/// Return a state object containing the current progress of the handshake, and\n\n/// the data that should be written in the INTRODUCE1 cell. The data that is\n\n/// written is:\n\n///\n\n/// CLIENT_PK [PK_PUBKEY_LEN bytes]\n\n/// ENCRYPTED_DATA [Padded to length of plaintext]\n\n/// MAC [MAC_LEN bytes]\n\npub fn client_send_intro<R>(\n\n rng: &mut R,\n\n proto_input: &HsNtorClientInput,\n\n) -> Result<(HsNtorClientState, Vec<u8>)>\n\nwhere\n\n R: RngCore + CryptoRng,\n\n{\n\n // Create client's ephemeral keys to be used for this handshake\n\n let x = curve25519::StaticSecret::new(rng.rng_compat());\n\n let X = curve25519::PublicKey::from(&x);\n\n\n\n // Get EXP(B,x)\n\n let bx = x.diffie_hellman(&proto_input.B);\n\n\n\n // Compile our state structure\n\n let state = HsNtorClientState {\n\n proto_input: proto_input.clone(),\n\n x,\n\n X,\n\n };\n", "file_path": "crates/tor-proto/src/crypto/handshake/hs_ntor.rs", "rank": 20, "score": 212354.8183642826 }, { "content": "#[cfg(all(feature = \"native-tls\"))]\n\ntype NativeTlsInner = CompoundRuntime<AsyncStd, AsyncStd, AsyncStd, NativeTlsProvider<TcpStream>>;\n\n\n\n#[cfg(all(feature = \"native-tls\"))]\n\ncrate::opaque::implement_opaque_runtime! 
{\n\n AsyncStdNativeTlsRuntime { inner : NativeTlsInner }\n\n}\n\n\n\n#[cfg(feature = \"rustls\")]\n\n/// A [`Runtime`](crate::Runtime) powered by `async_std` and `rustls`.\n\n#[derive(Clone)]\n\npub struct AsyncStdRustlsRuntime {\n\n /// The actual runtime object.\n\n inner: RustlsInner,\n\n}\n\n\n\n/// Implementation type for AsyncStdRustlsRuntime.\n", "file_path": "crates/tor-rtcompat/src/async_std.rs", "rank": 21, "score": 211995.41096447385 }, { "content": "/// Internal: the write part of a DataStream\n\nstruct DataWriterImpl {\n\n /// The underlying StreamTarget object.\n\n s: StreamTarget,\n\n\n\n /// Buffered data to send over the connection.\n\n // TODO: this buffer is probably smaller than we want, but it's good\n\n // enough for now. If we _do_ make it bigger, we'll have to change\n\n // our use of Data::split_from to handle the case where we can't fit\n\n // all the data.\n\n buf: Box<[u8; Data::MAXLEN]>,\n\n\n\n /// Number of unflushed bytes in buf.\n\n n_pending: usize,\n\n}\n\n\n\nimpl DataWriter {\n\n /// Helper for poll_flush() and poll_close(): Performs a flush, then\n\n /// closes the stream if should_close is true.\n\n fn poll_flush_impl(\n\n mut self: Pin<&mut Self>,\n", "file_path": "crates/tor-proto/src/stream/data.rs", "rank": 22, "score": 207354.4963309348 }, { "content": "/// Wrapper for the read part of a DataStream\n\nstruct DataReaderImpl {\n\n /// The underlying StreamReader object.\n\n s: StreamReader,\n\n\n\n /// If present, data that we received on this stream but have not\n\n /// been able to send to the caller yet.\n\n // TODO: This data structure is probably not what we want, but\n\n // it's good enough for now.\n\n pending: Vec<u8>,\n\n\n\n /// Index into pending to show what we've already read.\n\n offset: usize,\n\n\n\n /// If true, we have received a CONNECTED cell on this stream.\n\n connected: bool,\n\n}\n\n\n\nimpl AsyncRead for DataReader {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n", "file_path": "crates/tor-proto/src/stream/data.rs", "rank": 23, "score": 207354.4963309348 }, { "content": "/// Helper: pull bytes off the front of `pending_bytes` and put them\n\n/// onto `buf. 
Return the number of bytes moved.\n\nfn drain_helper(buf: &mut [u8], pending_bytes: &mut Vec<u8>) -> usize {\n\n let n_to_drain = std::cmp::min(buf.len(), pending_bytes.len());\n\n buf[..n_to_drain].copy_from_slice(&pending_bytes[..n_to_drain]);\n\n pending_bytes.drain(..n_to_drain);\n\n n_to_drain\n\n}\n\n\n\nimpl AsyncRead for LocalStream {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<IoResult<usize>> {\n\n if buf.is_empty() {\n\n return Poll::Ready(Ok(0));\n\n }\n\n if self.tls_cert.is_some() {\n\n return Poll::Ready(Err(std::io::Error::new(\n\n std::io::ErrorKind::Other,\n\n \"attempted to treat a TLS stream as non-TLS!\",\n", "file_path": "crates/tor-rtmock/src/io.rs", "rank": 24, "score": 206740.50723061376 }, { "content": "/// Conduct the HS Ntor handshake as the service.\n\n///\n\n/// Return a key generator which is the result of the key exchange, the\n\n/// RENDEZVOUS1 response to the client, and the introduction plaintext that we decrypted.\n\n///\n\n/// The response to the client is:\n\n/// SERVER_PK Y [PK_PUBKEY_LEN bytes]\n\n/// AUTH AUTH_INPUT_MAC [MAC_LEN bytes]\n\npub fn server_receive_intro<R, T>(\n\n rng: &mut R,\n\n proto_input: &HsNtorServiceInput,\n\n msg: T,\n\n) -> Result<(HsNtorHkdfKeyGenerator, Vec<u8>, Vec<u8>)>\n\nwhere\n\n R: RngCore + CryptoRng,\n\n T: AsRef<[u8]>,\n\n{\n\n // Extract all the useful pieces from the message\n\n let mut cur = Reader::from_slice(msg.as_ref());\n\n let X: curve25519::PublicKey = cur.extract()?;\n\n let remaining_bytes = cur.remaining();\n\n let ciphertext = &mut cur.take(remaining_bytes - 32)?.to_vec();\n\n let mac_tag: MacTag = cur.extract()?;\n\n\n\n // Now derive keys needed for handling the INTRO1 cell\n\n let bx = proto_input.b.diffie_hellman(&X);\n\n let (enc_key, mac_key) = get_introduce1_key_material(\n\n &bx,\n", "file_path": "crates/tor-proto/src/crypto/handshake/hs_ntor.rs", "rank": 25, "score": 204081.40714680136 }, { "content": "/// If `ptr` is within `s`, return its byte offset.\n\nfn offset_in(ptr: *const u8, s: &str) -> Option<usize> {\n\n // We need to confirm that 'ptr' falls within 's' in order\n\n // to subtract it meaningfully and find its offset.\n\n // Otherwise, we'll get a bogus result.\n\n //\n\n // Fortunately, we _only_ get a bogus result: we don't\n\n // hit unsafe behavior.\n\n let ptr_u = ptr as usize;\n\n let start_u = s.as_ptr() as usize;\n\n let end_u = (s.as_ptr() as usize) + s.len();\n\n if start_u <= ptr_u && ptr_u < end_u {\n\n Some(ptr_u - start_u)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl fmt::Display for Pos {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use Pos::*;\n", "file_path": "crates/tor-netdoc/src/err.rs", "rank": 26, "score": 198644.6922501939 }, { "content": "/// Helper: a customization function that does nothing.\n\nfn simple_net_func(_idx: usize, _nb: &mut NodeBuilders) {}\n\n\n", "file_path": "crates/tor-netdir/src/testnet.rs", "rank": 27, "score": 197120.3986746702 }, { "content": "/// Return the amount of time we should wait next, when running\n\n/// sleep_until_wallclock(). 
Also return a boolean indicating whether we\n\n/// expect this to be the final delay.\n\n///\n\n/// (This is a separate function for testing.)\n\nfn calc_next_delay(now: SystemTime, when: SystemTime) -> (bool, Duration) {\n\n let remainder = when\n\n .duration_since(now)\n\n .unwrap_or_else(|_| Duration::from_secs(0));\n\n if remainder > MAX_SLEEP {\n\n (false, MAX_SLEEP)\n\n } else {\n\n (true, remainder)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #![allow(clippy::erasing_op)]\n\n use super::*;\n\n\n\n #[test]\n\n fn sleep_delay() {\n\n fn calc(now: SystemTime, when: SystemTime) -> Duration {\n\n calc_next_delay(now, when).1\n", "file_path": "crates/tor-rtcompat/src/timer.rs", "rank": 28, "score": 190711.91766921864 }, { "content": "/// Try to extract a TorCert from the reader `r`.\n\nfn take_one_tor_cert(r: &mut Reader<'_>) -> Result<TorCert> {\n\n let certtype = r.take_u8()?;\n\n let certlen = r.take_u16()?;\n\n let cert = r.take(certlen as usize)?;\n\n Ok(TorCert {\n\n certtype,\n\n cert: cert.into(),\n\n })\n\n}\n\n/// A Certs message is used as part of the channel handshake to send\n\n/// additional certificates.\n\n///\n\n/// These certificates are not presented as part of the TLS handshake.\n\n/// Originally this was meant to make Tor TLS handshakes look \"normal\", but\n\n/// nowadays it serves less purpose, especially now that we have TLS 1.3.\n\n///\n\n/// Every relay sends this message as part of channel negotiation;\n\n/// clients do not send them.\n\n#[derive(Clone, Debug)]\n\npub struct Certs {\n", "file_path": "crates/tor-cell/src/chancell/msg.rs", "rank": 29, "score": 188643.06111756904 }, { "content": "/// Helper: try to parse a plain ipv4 address, or an IPv6 address\n\n/// wrapped in brackets.\n\nfn parse_addr(mut s: &str) -> Result<IpAddr, PolicyError> {\n\n let bracketed = s.starts_with('[') && s.ends_with(']');\n\n if bracketed {\n\n s = &s[1..s.len() - 1];\n\n }\n\n let addr: IpAddr = s.parse().map_err(|_| PolicyError::InvalidAddress)?;\n\n if addr.is_ipv6() != bracketed {\n\n return Err(PolicyError::InvalidAddress);\n\n }\n\n Ok(addr)\n\n}\n\n\n\nimpl FromStr for IpPattern {\n\n type Err = PolicyError;\n\n fn from_str(s: &str) -> Result<Self, PolicyError> {\n\n let (ip_s, mask_s) = match s.find('/') {\n\n Some(slash_idx) => (&s[..slash_idx], Some(&s[slash_idx + 1..])),\n\n None => (s, None),\n\n };\n\n match (ip_s, mask_s) {\n", "file_path": "crates/tor-netdoc/src/types/policy/addrpolicy.rs", "rank": 30, "score": 187456.69182377332 }, { "content": "/// An object that can manage persistent state.\n\n///\n\n/// State is implemented as a simple key-value store, where the values\n\n/// are objects that can be serialized and deserialized.\n\n///\n\n/// # Warnings\n\n///\n\n/// Current implementations may place additional limits on the types\n\n/// of objects that can be stored. 
This is not a great example of OO\n\n/// design: eventually we should probably clarify that more.\n\npub trait StateMgr: Clone {\n\n /// Try to load the object with key `key` from the store.\n\n ///\n\n /// Return None if no such object exists.\n\n fn load<D>(&self, key: &str) -> Result<Option<D>>\n\n where\n\n D: DeserializeOwned;\n\n /// Try to save `val` with key `key` in the store.\n\n ///\n\n /// Replaces any previous value associated with `key`.\n\n fn store<S>(&self, key: &str, val: &S) -> Result<()>\n\n where\n\n S: Serialize;\n\n /// Return true if this is a read-write state manager.\n\n ///\n\n /// If it returns false, then attempts to `store` will fail with\n\n /// [`Error::NoLock`]\n\n fn can_store(&self) -> bool;\n\n\n\n /// Try to become a read-write state manager if possible, without\n", "file_path": "crates/tor-persist/src/lib.rs", "rank": 31, "score": 185717.77401693328 }, { "content": "/// Apply a given diff to an input text, and return the result from applying\n\n/// that diff.\n\n///\n\n/// If `check_digest_in` is provided, require the diff to say that it\n\n/// applies to a document with the provided digest.\n\npub fn apply_diff<'a>(\n\n input: &'a str,\n\n diff: &'a str,\n\n check_digest_in: Option<[u8; 32]>,\n\n) -> Result<DiffResult<'a>> {\n\n let mut input = DiffResult::from_str(input, [0; 32]);\n\n\n\n let mut diff_lines = diff.lines();\n\n let (d1, d2) = parse_diff_header(&mut diff_lines)?;\n\n if let Some(d_want) = check_digest_in {\n\n if d1 != d_want {\n\n return Err(Error::CantApply(\"listed digest does not match document\"));\n\n }\n\n }\n\n\n\n let mut output = DiffResult::new(d2);\n\n\n\n for command in DiffCommandIter::new(diff_lines) {\n\n command?.apply_transformation(&mut input, &mut output)?;\n\n }\n\n\n\n output.push_reversed(&input.lines[..]);\n\n\n\n output.lines.reverse();\n\n Ok(output)\n\n}\n\n\n", "file_path": "crates/tor-consdiff/src/lib.rs", "rank": 32, "score": 185586.75319953385 }, { "content": "#[cfg(feature = \"hsv3-client\")]\n\npub fn blind_pubkey(pk: &PublicKey, mut param: [u8; 32]) -> Result<PublicKey, BlindingError> {\n\n use curve25519_dalek::edwards::CompressedEdwardsY;\n\n use curve25519_dalek::scalar::Scalar;\n\n\n\n // Clamp the blinding parameter\n\n param[0] &= 248;\n\n param[31] &= 63;\n\n param[31] |= 64;\n\n\n\n // Transform it into a scalar so that we can do scalar mult\n\n let blinding_factor = Scalar::from_bytes_mod_order(param);\n\n\n\n // Convert the public key to a point on the curve\n\n let pubkey_point = CompressedEdwardsY(pk.to_bytes())\n\n .decompress()\n\n .ok_or(BlindingError::BadPubkey)?;\n\n\n\n // Do the scalar multiplication and get a point back\n\n let blinded_pubkey_point = (blinding_factor * pubkey_point).compress();\n\n // Turn the point back into bytes and return it\n", "file_path": "crates/tor-llcrypto/src/pk/keymanip.rs", "rank": 33, "score": 184507.9978552194 }, { "content": "/// Broadcast the given `TorEvent` to any interested subscribers.\n\n///\n\n/// As an optimization, does nothing if the event has no subscribers (`event_has_subscribers`\n\n/// returns false). 
(also does nothing if the event subsystem hasn't been initialized yet)\n\n///\n\n/// This function isn't intended for use outside Arti crates (as in, library consumers of Arti\n\n/// shouldn't broadcast events!).\n\npub fn broadcast(event: TorEvent) {\n\n if !event_has_subscribers(event.kind()) {\n\n return;\n\n }\n\n if let Some(sender) = EVENT_SENDER.get() {\n\n // If this fails, there isn't much we can really do about it!\n\n let _ = sender.unbounded_send(event);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #![allow(clippy::unwrap_used)]\n\n use crate::{\n\n broadcast, event_has_subscribers, EventReactor, StreamExt, TorEvent, TorEventKind,\n\n };\n\n use once_cell::sync::OnceCell;\n\n use std::sync::{Mutex, MutexGuard};\n\n use std::time::Duration;\n\n use tokio::runtime::Runtime;\n", "file_path": "crates/tor-events/src/lib.rs", "rank": 34, "score": 184184.24344331602 }, { "content": "/// Convert a curve25519 public key (with sign bit) to an ed25519\n\n/// public key, for use in ntor key cross-certification.\n\n///\n\n/// Note that this formula is not standardized; don't use\n\n/// it for anything besides cross-certification.\n\npub fn convert_curve25519_to_ed25519_public(\n\n pubkey: &pk::curve25519::PublicKey,\n\n signbit: u8,\n\n) -> Option<pk::ed25519::PublicKey> {\n\n use curve25519_dalek::montgomery::MontgomeryPoint;\n\n\n\n let point = MontgomeryPoint(*pubkey.as_bytes());\n\n let edpoint = point.to_edwards(signbit)?;\n\n\n\n // TODO: This is inefficient; we shouldn't have to re-compress\n\n // this point to get the public key we wanted. But there's no way\n\n // with the current API that I can to construct an ed25519 public\n\n // key from a compressed point.\n\n let compressed_y = edpoint.compress();\n\n pk::ed25519::PublicKey::from_bytes(compressed_y.as_bytes()).ok()\n\n}\n\n\n\n/// Convert a curve25519 private key to an ed25519 public key (and\n\n/// give a sign bit) to use with it, for use in ntor key cross-certification.\n\n///\n", "file_path": "crates/tor-llcrypto/src/pk/keymanip.rs", "rank": 35, "score": 182175.97958626738 }, { "content": "#[cfg(any(test, feature = \"relay\"))]\n\npub fn convert_curve25519_to_ed25519_private(\n\n privkey: &pk::curve25519::StaticSecret,\n\n) -> Option<(pk::ed25519::ExpandedSecretKey, u8)> {\n\n use crate::d::Sha512;\n\n use digest::Digest;\n\n use zeroize::Zeroizing;\n\n\n\n let h = Sha512::new()\n\n .chain_update(privkey.to_bytes())\n\n .chain_update(&b\"Derive high part of ed25519 key from curve25519 key\\0\"[..])\n\n .finalize();\n\n\n\n let mut bytes = Zeroizing::new([0_u8; 64]);\n\n bytes[0..32].clone_from_slice(&privkey.to_bytes());\n\n bytes[32..64].clone_from_slice(&h[0..32]);\n\n\n\n let result = pk::ed25519::ExpandedSecretKey::from_bytes(&bytes[..]).ok()?;\n\n let pubkey: pk::ed25519::PublicKey = (&result).into();\n\n let signbit = pubkey.as_bytes()[31] >> 7;\n\n\n", "file_path": "crates/tor-llcrypto/src/pk/keymanip.rs", "rank": 36, "score": 182175.97183339042 }, { "content": "#[must_use = \"yield_now returns a future that must be .awaited on.\"]\n\npub fn yield_now() -> YieldFuture {\n\n // TODO: There are functions similar to this in tokio and\n\n // async_std and futures_lite. It would be lovely if futures had\n\n // one too. 
If it does, we should probably use it.\n\n YieldFuture { first_time: true }\n\n}\n\n\n\n/// A future returned by [`yield_now()`].\n\n///\n\n/// It returns `Poll::Pending` once, and `Poll::Ready` thereafter.\n\n#[derive(Debug)]\n\n#[must_use = \"Futures do nothing unless .awaited on.\"]\n\npub struct YieldFuture {\n\n /// True if this future has not yet been polled.\n\n first_time: bool,\n\n}\n\n\n\nimpl Future for YieldFuture {\n\n type Output = ();\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {\n", "file_path": "crates/tor-rtcompat/src/task.rs", "rank": 37, "score": 179311.51505441713 }, { "content": "/// Helper: Calculate the function we should use to find initial relay\n\n/// bandwidths.\n\nfn pick_bandwidth_fn<'a, I>(mut weights: I) -> BandwidthFn\n\nwhere\n\n I: Clone + Iterator<Item = &'a netstatus::RelayWeight>,\n\n{\n\n let has_measured = weights.clone().any(|w| w.is_measured());\n\n let has_nonzero = weights.clone().any(|w| w.is_nonzero());\n\n let has_nonzero_measured = weights.any(|w| w.is_measured() && w.is_nonzero());\n\n\n\n if !has_nonzero {\n\n // If every value is zero, we should just pretend everything has\n\n // bandwidth == 1.\n\n BandwidthFn::Uniform\n\n } else if !has_measured {\n\n // If there are no measured values, then we can look at unmeasured\n\n // weights.\n\n BandwidthFn::IncludeUnmeasured\n\n } else if has_nonzero_measured {\n\n // Otherwise, there are measured values; we should look at those only, if\n\n // any of them is nonzero.\n\n BandwidthFn::MeasuredOnly\n", "file_path": "crates/tor-netdir/src/weight.rs", "rank": 38, "score": 178642.48200602987 }, { "content": "/// Helper: return true if the provided string is a valid \"integer\"\n\n/// in the form accepted by the protover spec. This is stricter than\n\n/// rust's integer parsing format.\n\nfn is_good_number(n: &str) -> bool {\n\n n.chars().all(|ch| ch.is_ascii_digit()) && !n.starts_with('0')\n\n}\n\n\n\n/// A single SubprotocolEntry is parsed from a string of the format\n\n/// Name=Versions, where Versions is a comma-separated list of\n\n/// integers or ranges of integers.\n\nimpl std::str::FromStr for SubprotocolEntry {\n\n type Err = ParseError;\n\n\n\n fn from_str(s: &str) -> Result<Self, ParseError> {\n\n // split the string on the =.\n\n let (name, versions) = {\n\n let eq_idx = s.find('=').ok_or(ParseError::Malformed)?;\n\n (&s[..eq_idx], &s[eq_idx + 1..])\n\n };\n\n // Look up the protocol by name.\n\n let proto = match ProtoKind::from_name(name) {\n\n Some(p) => Protocol::Proto(p),\n\n None => Protocol::Unrecognized(name.to_string()),\n", "file_path": "crates/tor-protover/src/lib.rs", "rank": 39, "score": 177945.61772636278 }, { "content": "/// Run the timing routine\n\nfn run_timing(mut stream: TcpStream, send: &Arc<[u8]>, receive: &Arc<[u8]>) -> Result<()> {\n\n let peer_addr = stream.peer_addr()?;\n\n // Do this potentially costly allocation before we do all the timing stuff.\n\n let mut received = vec![0_u8; receive.len()];\n\n\n\n info!(\"Accepted connection from {}\", peer_addr);\n\n let accepted_ts = SystemTime::now();\n\n let mut data: &[u8] = send.deref();\n\n let copied = std::io::copy(&mut data, &mut stream)?;\n\n stream.flush()?;\n\n let copied_ts = SystemTime::now();\n\n assert_eq!(copied, send.len() as u64);\n\n info!(\"Copied {} bytes payload to {}.\", copied, peer_addr);\n\n let read = stream.read(&mut received)?;\n\n if read == 0 {\n\n panic!(\"unexpected EOF\");\n\n }\n\n let first_byte_ts = SystemTime::now();\n\n stream.read_exact(&mut 
received[read..])?;\n\n let read_done_ts = SystemTime::now();\n", "file_path": "crates/arti-bench/src/main.rs", "rank": 40, "score": 177047.38550601553 }, { "content": "#[derive(Clone, Debug, Copy, PartialEq)]\n\nenum State {\n\n /// Starting state: no messages have been handled yet.\n\n Initial,\n\n /// SOCKS5: we've negotiated Username/Password authentication, and\n\n /// are waiting for the client to send it.\n\n Socks5Username,\n\n /// SOCKS5: we've finished the authentication (if any), and\n\n /// we're waiting for the actual request.\n\n Socks5Wait,\n\n /// Ending (successful) state: the client has sent all its messages.\n\n ///\n\n /// (Note that we still need to send a reply.)\n\n Done,\n\n /// Ending (failed) state: the handshake has failed and cannot continue.\n\n Failed,\n\n}\n\n\n\n/// An action to take in response to a SOCKS handshake message.\n\n#[derive(Clone, Debug)]\n\n#[non_exhaustive]\n", "file_path": "crates/tor-socksproto/src/handshake.rs", "rank": 41, "score": 172125.27763723698 }, { "content": "/// Return true iff 's' is a valid keyword for a BEGIN/END tag.\n\nfn tag_keyword_ok(s: &str) -> bool {\n\n s.split(' ').all(|w| keyword_ok(w, false))\n\n}\n\n\n\n/// When used as an Iterator, returns a sequence of Result<Item>.\n\nimpl<'a, K: Keyword> Iterator for NetDocReaderBase<'a, K> {\n\n type Item = Result<Item<'a, K>>;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.item().transpose()\n\n }\n\n}\n\n\n", "file_path": "crates/tor-netdoc/src/parse/tokenize.rs", "rank": 42, "score": 171967.54976181572 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]\n\n#[repr(u8)]\n\nenum TorVerStatus {\n\n /// An unknown release status\n\n Other,\n\n /// An alpha release\n\n Alpha,\n\n /// A beta release\n\n Beta,\n\n /// A release candidate\n\n Rc,\n\n /// A stable release\n\n Stable,\n\n}\n\n\n\nimpl TorVerStatus {\n\n /// Helper for encoding: return the suffix that represents a version.\n\n fn suffix(self) -> &'static str {\n\n use TorVerStatus::*;\n\n match self {\n\n Stable => \"\",\n\n Rc => \"-rc\",\n", "file_path": "crates/tor-netdoc/src/types/version.rs", "rank": 43, "score": 171576.68906553963 }, { "content": "/// Non-public helper type to represent the different kinds of Tor path.\n\n///\n\n/// (This is a separate type to avoid exposing its details to the user.)\n\nenum TorPathInner<'a> {\n\n /// A single-hop path for use with a directory cache, when a relay is\n\n /// known.\n\n OneHop(Relay<'a>), // This could just be a routerstatus.\n\n /// A single-hop path for use with a directory cache, when we don't have\n\n /// a consensus.\n\n FallbackOneHop(&'a FallbackDir),\n\n /// A multi-hop path, containing one or more relays.\n\n Path(Vec<Relay<'a>>),\n\n}\n\n\n\nimpl<'a> TorPath<'a> {\n\n /// Create a new one-hop path for use with a directory cache with a known\n\n /// relay.\n\n pub fn new_one_hop(relay: Relay<'a>) -> Self {\n\n Self {\n\n inner: TorPathInner::OneHop(relay),\n\n }\n\n }\n\n\n", "file_path": "crates/tor-circmgr/src/path.rs", "rank": 44, "score": 171074.17177783418 }, { "content": "/// Given a bitmask, return a list of the bits set in the mask, as a\n\n/// String in the format expected by Tor consensus documents.\n\n///\n\n/// This implementation constructs ranges greedily. 
For example, the\n\n/// bitmask `0b0111011` will be represented as `0-1,3-5`, and not\n\n/// `0,1,3,4,5` or `0,1,3-5`.\n\n///\n\n/// ```ignore\n\n/// # use tor_protover::dumpmask;\n\n/// assert_eq!(dumpmask(0b111111), \"0-5\");\n\n/// assert_eq!(dumpmask(0b111100), \"2-5\");\n\n/// assert_eq!(dumpmask(0b11111100), \"2-7\");\n\n/// ```\n\nfn dumpmask(mut mask: u64) -> String {\n\n /// Helper: push a range (which may be a singleton) onto `v`.\n\n fn append(v: &mut Vec<String>, lo: u32, hi: u32) {\n\n if lo == hi {\n\n v.push(lo.to_string());\n\n } else {\n\n v.push(format!(\"{}-{}\", lo, hi));\n\n }\n\n }\n\n // We'll be building up our result here, then joining it with\n\n // commas.\n\n let mut result = Vec::new();\n\n // This implementation is a little tricky, but it should be more\n\n // efficient than a raw search. Basically, we're using the\n\n // function u64::trailing_zeros to count how large each range of\n\n // 1s or 0s is, and then shifting by that amount.\n\n\n\n // How many bits have we already shifted `mask`?\n\n let mut shift = 0;\n\n while mask != 0 {\n", "file_path": "crates/tor-protover/src/lib.rs", "rank": 45, "score": 170843.06595529063 }, { "content": "#[derive(Clone, Debug, Deserialize, Eq, PartialEq)]\n\n#[serde(untagged)]\n\nenum PathInner {\n\n /// A path that should be expanded from a string using ShellExpand.\n\n Shell(String),\n\n /// A path that should be used literally, with no expansion.\n\n Literal(PathBuf),\n\n}\n\n\n\n/// An error that has occurred while expanding a path.\n\n#[derive(thiserror::Error, Debug, Clone)]\n\n#[non_exhaustive]\n\npub enum CfgPathError {\n\n /// The path contained a variable we didn't recognize.\n\n #[error(\"unrecognized variable {0}\")]\n\n UnknownVar(String),\n\n /// We couldn't construct a ProjectDirs object.\n\n #[error(\"can't construct project directories\")]\n\n NoProjectDirs,\n\n /// We couldn't construct a BaseDirs object.\n\n #[error(\"can't construct base directories\")]\n\n NoBaseDirs,\n", "file_path": "crates/tor-config/src/path.rs", "rank": 46, "score": 168807.51955254286 }, { "content": "/// As [`construct_network()`], but return a [`PartialNetDir`].\n\npub fn construct_netdir() -> Result<PartialNetDir> {\n\n construct_custom_netdir(simple_net_func)\n\n}\n\n\n", "file_path": "crates/tor-netdir/src/testnet.rs", "rank": 47, "score": 168798.92436976597 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq)]\n\nenum BandwidthFn {\n\n /// There are no weights at all in the consensus: weight every\n\n /// relay as 1.\n\n Uniform,\n\n /// There are no measured weights in the consensus: count\n\n /// unmeasured weights as the weights for relays.\n\n IncludeUnmeasured,\n\n /// There are measured relays in the consensus; only use those.\n\n MeasuredOnly,\n\n}\n\n\n\nimpl BandwidthFn {\n\n /// Apply this function to the measured or unmeasured bandwidth\n\n /// of a single relay.\n\n fn apply(&self, w: &netstatus::RelayWeight) -> u32 {\n\n use netstatus::RelayWeight::*;\n\n use BandwidthFn::*;\n\n match (self, w) {\n\n (Uniform, _) => 1,\n\n (IncludeUnmeasured, Unmeasured(u)) => *u,\n", "file_path": "crates/tor-netdir/src/weight.rs", "rank": 48, "score": 168441.49661883665 }, { "content": "fn cell(body: &str, id: StreamId, msg: RelayMsg) {\n\n let body = decode(body);\n\n let mut bad_rng = BadRng;\n\n\n\n let expected = RelayCell::new(id, msg);\n\n\n\n let decoded = RelayCell::decode(body).unwrap();\n\n\n\n assert_eq!(format!(\"{:?}\", expected), format!(\"{:?}\", decoded));\n\n\n\n let encoded1 = decoded.encode(&mut 
bad_rng).unwrap();\n\n let encoded2 = expected.encode(&mut bad_rng).unwrap();\n\n\n\n assert_eq!(&encoded1[..], &encoded2[..]);\n\n}\n\n\n", "file_path": "crates/tor-cell/tests/test_relaycell.rs", "rank": 49, "score": 168366.52489351953 }, { "content": "#[derive(Clone, Debug, Copy)]\n\nstruct RelayWeight {\n\n /// How to weight this kind of relay when picking a guard relay.\n\n as_guard: u32,\n\n /// How to weight this kind of relay when picking a middle relay.\n\n as_middle: u32,\n\n /// How to weight this kind of relay when picking a exit relay.\n\n as_exit: u32,\n\n /// How to weight this kind of relay when picking a one-hop BEGIN_DIR.\n\n as_dir: u32,\n\n}\n\n\n\nimpl std::ops::Mul<u32> for RelayWeight {\n\n type Output = Self;\n\n fn mul(self, rhs: u32) -> Self {\n\n RelayWeight {\n\n as_guard: self.as_guard * rhs,\n\n as_middle: self.as_middle * rhs,\n\n as_exit: self.as_exit * rhs,\n\n as_dir: self.as_dir * rhs,\n\n }\n", "file_path": "crates/tor-netdir/src/weight.rs", "rank": 50, "score": 168359.3049149208 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Verifier {}\n\n\n\nimpl rustls::ServerCertVerifier for Verifier {\n\n fn verify_server_cert(\n\n &self,\n\n _roots: &rustls::RootCertStore,\n\n presented_certs: &[rustls::Certificate],\n\n _dns_name: async_rustls::webpki::DNSNameRef,\n\n _ocsp_response: &[u8],\n\n ) -> Result<rustls::ServerCertVerified, TLSError> {\n\n // We don't check anything about the certificate at this point other\n\n // than making sure it is well-formed.\n\n //\n\n // When we make a channel, we'll check that it's authenticated by the\n\n // other party's real identity key, inside the Tor handshake.\n\n //\n\n // In theory, we shouldn't have to do even this much: rustls should not\n\n // allow a handshake without a certificate, and the certificate's\n\n // well-formedness should get checked below in one of the\n\n // verify_*_signature functions. 
But this check is cheap, so let's\n", "file_path": "crates/tor-rtcompat/src/impls/rustls.rs", "rank": 51, "score": 168256.01648237818 }, { "content": "/// Return default duration\n\nfn default_preemptive_duration() -> Duration {\n\n Duration::from_secs(60 * 60)\n\n}\n\n\n", "file_path": "crates/tor-circmgr/src/config.rs", "rank": 52, "score": 168219.83192357942 }, { "content": "/// Return the default stream timeout\n\nfn default_connect_timeout() -> Duration {\n\n Duration::new(10, 0)\n\n}\n\n\n", "file_path": "crates/arti-client/src/config.rs", "rank": 53, "score": 167501.5046276685 }, { "content": "/// helper: take an address as encoded in a netinfo message\n\nfn take_one_netinfo_addr(r: &mut Reader<'_>) -> Result<Option<IpAddr>> {\n\n let atype = r.take_u8()?;\n\n let alen = r.take_u8()?;\n\n let abody = r.take(alen as usize)?;\n\n match (atype, alen) {\n\n (0x04, 4) => {\n\n let bytes = [abody[0], abody[1], abody[2], abody[3]];\n\n Ok(Some(IpAddr::V4(bytes.into())))\n\n }\n\n (0x06, 16) => {\n\n // TODO(nickm) is there a better way?\n\n let mut bytes = [0_u8; 16];\n\n (&mut bytes[..]).copy_from_slice(abody);\n\n Ok(Some(IpAddr::V6(bytes.into())))\n\n }\n\n (0x04, _) => Ok(None),\n\n (0x06, _) => Ok(None),\n\n (_, _) => Ok(None),\n\n }\n\n}\n", "file_path": "crates/tor-cell/src/chancell/msg.rs", "rank": 54, "score": 166966.00666914525 }, { "content": "/// Take an unrecognized cell's body from a reader `r`, and apply\n\n/// the given command to it.\n\nfn unrecognized_with_cmd(cmd: ChanCmd, r: &mut Reader<'_>) -> Result<Unrecognized> {\n\n let mut u = Unrecognized::take_from(r)?;\n\n u.cmd = cmd;\n\n Ok(u)\n\n}\n\nimpl Unrecognized {\n\n /// Construct a new cell of arbitrary or unrecognized type.\n\n pub fn new<B>(cmd: ChanCmd, content: B) -> Self\n\n where\n\n B: Into<Vec<u8>>,\n\n {\n\n let content = content.into();\n\n Unrecognized { cmd, content }\n\n }\n\n /// Return the command from this cell.\n\n fn cmd(&self) -> ChanCmd {\n\n self.cmd\n\n }\n\n}\n\nimpl Body for Unrecognized {\n", "file_path": "crates/tor-cell/src/chancell/msg.rs", "rank": 55, "score": 165697.60329140618 }, { "content": "/// Possible requirements on stream IDs for a relay command.\n\nenum StreamIdReq {\n\n /// Can only be used with a stream ID of 0\n\n WantZero,\n\n /// Can only be used with a stream ID that isn't 0\n\n WantNonZero,\n\n /// Can be used with any stream ID\n\n Any,\n\n}\n\n\n\nimpl RelayCmd {\n\n /// Check whether this command requires a certain kind of\n\n /// StreamId, and return a corresponding StreamIdReq.\n\n fn expects_streamid(self) -> StreamIdReq {\n\n match self {\n\n RelayCmd::BEGIN\n\n | RelayCmd::DATA\n\n | RelayCmd::END\n\n | RelayCmd::CONNECTED\n\n | RelayCmd::RESOLVE\n\n | RelayCmd::RESOLVED\n", "file_path": "crates/tor-cell/src/relaycell.rs", "rank": 56, "score": 165324.2561821863 }, { "content": "/// Return true if `result` holds an error indicating that we should retire the\n\n/// circuit used for the corresponding request.\n\nfn should_retire_circ(result: &Result<DirResponse>) -> bool {\n\n match result {\n\n Err(e) => e.should_retire_circ(),\n\n Ok(dr) => dr.error().map(Error::should_retire_circ) == Some(true),\n\n }\n\n}\n\n\n\n/// Fetch a Tor directory object from a provided stream.\n\n///\n\n/// To do this, we send a simple HTTP/1.0 request for the described\n\n/// object in `req` over `stream`, and then wait for a response. 
In\n\n/// log messages, we describe the origin of the data as coming from\n\n/// `source`.\n\n///\n\n/// # Notes\n\n///\n\n/// It's kind of bogus to have a 'source' field here at all; we may\n\n/// eventually want to remove it.\n\n///\n\n/// This function doesn't close the stream; you may want to do that\n", "file_path": "crates/tor-dirclient/src/lib.rs", "rank": 57, "score": 165094.8864499147 }, { "content": "/// Helper type that holds the data used by a [`GuardMgr`].\n\n///\n\n/// This would just be a [`GuardMgr`], except that it needs to sit inside\n\n/// a `Mutex` and get accessed by daemon tasks.\n\nstruct GuardMgrInner {\n\n /// Last time when marked all of our primary guards as retriable.\n\n ///\n\n /// We keep track of this time so that we can rate-limit\n\n /// these attempts.\n\n last_primary_retry_time: Instant,\n\n\n\n /// Persistent guard manager state.\n\n ///\n\n /// This object remembers one or more persistent set of guards that we can\n\n /// use, along with their relative priorities and statuses.\n\n guards: GuardSets,\n\n\n\n /// Configuration values derived from the consensus parameters.\n\n ///\n\n /// This is updated whenever the consensus parameters change.\n\n params: GuardParams,\n\n\n\n /// A mpsc channel, used to tell the task running in\n\n /// [`daemon::report_status_events`] about a new event to monitor.\n", "file_path": "crates/tor-guardmgr/src/lib.rs", "rank": 58, "score": 164950.4238670025 }, { "content": "#[cfg(feature = \"rustls\")]\n\ntype RustlsHandleInner = CompoundRuntime<Handle, Handle, Handle, RustlsProvider<TcpStream>>;\n\n\n\n#[cfg(feature = \"native-tls\")]\n\ncrate::opaque::implement_opaque_runtime! {\n\n TokioNativeTlsRuntime { inner : HandleInner }\n\n}\n\n\n\n#[cfg(feature = \"rustls\")]\n\ncrate::opaque::implement_opaque_runtime! 
{\n\n TokioRustlsRuntime { inner : RustlsHandleInner }\n\n}\n\n\n\n#[cfg(feature = \"native-tls\")]\n\nimpl From<tokio_crate::runtime::Handle> for TokioNativeTlsRuntime {\n\n fn from(h: tokio_crate::runtime::Handle) -> Self {\n\n let h = Handle::new(h);\n\n TokioNativeTlsRuntime {\n\n inner: CompoundRuntime::new(h.clone(), h.clone(), h, NativeTlsProvider::default()),\n\n }\n\n }\n", "file_path": "crates/tor-rtcompat/src/tokio.rs", "rank": 59, "score": 164336.4094714108 }, { "content": "#[cfg(feature = \"native-tls\")]\n\ntype HandleInner = CompoundRuntime<Handle, Handle, Handle, NativeTlsProvider<TcpStream>>;\n\n\n\n/// A [`Runtime`](crate::Runtime) built around a Handle to a tokio runtime, and `rustls`.\n\n#[derive(Clone)]\n\n#[cfg(feature = \"rustls\")]\n\npub struct TokioRustlsRuntime {\n\n /// The actual [`CompoundRuntime`] that implements this.\n\n inner: RustlsHandleInner,\n\n}\n\n\n\n/// Implementation for a TokioRuntimeRustlsHandle\n", "file_path": "crates/tor-rtcompat/src/tokio.rs", "rank": 60, "score": 164336.4094714108 }, { "content": "/// Helper: client handshake _without_ generating new keys.\n\nfn client_handshake_ntor_v1_no_keygen(\n\n my_public: PublicKey,\n\n my_sk: StaticSecret,\n\n relay_public: &NtorPublicKey,\n\n) -> (NtorHandshakeState, Vec<u8>) {\n\n let mut v: Vec<u8> = Vec::new();\n\n\n\n v.write(&relay_public.id);\n\n v.write(&relay_public.pk);\n\n v.write(&my_public);\n\n\n\n assert_eq!(v.len(), 20 + 32 + 32);\n\n\n\n let state = NtorHandshakeState {\n\n relay_public: relay_public.clone(),\n\n my_public,\n\n my_sk,\n\n };\n\n\n\n (state, v)\n\n}\n\n\n", "file_path": "crates/tor-proto/src/crypto/handshake/ntor.rs", "rank": 61, "score": 163032.47354599065 }, { "content": "/// A collection of objects implementing that traits that make up a [`Runtime`]\n\nstruct Inner<SpawnR, SleepR, TcpR, TlsR> {\n\n /// A `Spawn` and `BlockOn` implementation.\n\n spawn: SpawnR,\n\n /// A `SleepProvider` implementation.\n\n sleep: SleepR,\n\n /// A `TcpProvider` implementation\n\n tcp: TcpR,\n\n /// A `TcpProvider<TcpR::TcpStream>` implementation.\n\n tls: TlsR,\n\n}\n\n\n\nimpl<SpawnR, SleepR, TcpR, TlsR> CompoundRuntime<SpawnR, SleepR, TcpR, TlsR> {\n\n /// Construct a new CompoundRuntime from its components.\n\n pub fn new(spawn: SpawnR, sleep: SleepR, tcp: TcpR, tls: TlsR) -> Self {\n\n CompoundRuntime {\n\n inner: Arc::new(Inner {\n\n spawn,\n\n sleep,\n\n tcp,\n\n tls,\n", "file_path": "crates/tor-rtcompat/src/compound.rs", "rank": 62, "score": 162341.76947174847 }, { "content": "/// An enumeration for the state of a DataReader.\n\n///\n\n/// We have to use an enum here because, when we're waiting for\n\n/// ReadingCell to complete, the future returned by `read_cell()` owns the\n\n/// DataCellImpl. 
If we wanted to store the future and the cell at the\n\n/// same time, we'd need to make a self-referential structure, which isn't\n\n/// possible in safe Rust AIUI.\n\nenum DataReaderState {\n\n /// In this state we have received an end cell or an error.\n\n Closed,\n\n /// In this state the reader is not currently fetching a cell; it\n\n /// either has data or not.\n\n Ready(DataReaderImpl),\n\n /// The reader is currently fetching a cell: this future is the\n\n /// progress it is making.\n\n ReadingCell(Pin<Box<dyn Future<Output = (DataReaderImpl, Result<()>)> + Send>>),\n\n}\n\n\n", "file_path": "crates/tor-proto/src/stream/data.rs", "rank": 63, "score": 162103.2568661273 }, { "content": "/// An enumeration for the state of a DataWriter.\n\n///\n\n/// We have to use an enum here because, for as long as we're waiting\n\n/// for a flush operation to complete, the future returned by\n\n/// `flush_cell()` owns the DataWriterImpl.\n\nenum DataWriterState {\n\n /// The writer has closed or gotten an error: nothing more to do.\n\n Closed,\n\n /// The writer is not currently flushing; more data can get queued\n\n /// immediately.\n\n Ready(DataWriterImpl),\n\n /// The writer is flushing a cell.\n\n Flushing(Pin<Box<dyn Future<Output = (DataWriterImpl, Result<()>)> + Send>>),\n\n}\n\n\n", "file_path": "crates/tor-proto/src/stream/data.rs", "rank": 64, "score": 162100.26297093704 }, { "content": "#[derive(Clone, Debug)]\n\nenum IpPattern {\n\n /// Match all addresses.\n\n Star,\n\n /// Match all IPv4 addresses.\n\n V4Star,\n\n /// Match all IPv6 addresses.\n\n V6Star,\n\n /// Match all IPv4 addresses beginning with a given prefix.\n\n V4(Ipv4Addr, u8),\n\n /// Match all IPv6 addresses beginning with a given prefix.\n\n V6(Ipv6Addr, u8),\n\n}\n\n\n\nimpl IpPattern {\n\n /// Construct an IpPattern that matches the first `mask` bits of `addr`.\n\n fn from_addr_and_mask(addr: IpAddr, mask: u8) -> Result<Self, PolicyError> {\n\n match (addr, mask) {\n\n (IpAddr::V4(_), 0) => Ok(IpPattern::V4Star),\n\n (IpAddr::V6(_), 0) => Ok(IpPattern::V6Star),\n\n (IpAddr::V4(a), m) if m <= 32 => Ok(IpPattern::V4(a, m)),\n", "file_path": "crates/tor-netdoc/src/types/policy/addrpolicy.rs", "rank": 65, "score": 162024.70580451307 }, { "content": "#[test]\n\nfn test_connected() {\n\n let cmd = RelayCmd::CONNECTED;\n\n assert_eq!(Into::<u8>::into(cmd), 4_u8);\n\n\n\n msg(cmd, \"\", &msg::Connected::new_empty().into());\n\n let localhost = \"127.0.0.1\".parse::<IpAddr>().unwrap();\n\n msg(\n\n cmd,\n\n \"7F000001 00000E10\",\n\n &msg::Connected::new_with_addr(localhost, 0xe10).into(),\n\n );\n\n\n\n // hand-generated for IPv6\n\n let addr = \"2001:db8::1122\".parse::<IpAddr>().unwrap();\n\n msg(\n\n cmd,\n\n \"00000000 06 20010db8 00000000 00000000 00001122 00000E10\",\n\n &msg::Connected::new_with_addr(addr, 0xe10).into(),\n\n );\n\n\n\n // hand-generated: bogus address type.\n\n msg_error(\n\n cmd,\n\n \"00000000 07 20010db8 00000000 00000000 00001122 00000E10\",\n\n BytesError::BadMessage(\"Invalid address type in CONNECTED cell\"),\n\n );\n\n}\n\n\n", "file_path": "crates/tor-cell/tests/testvec_relaymsg.rs", "rank": 66, "score": 161775.27099241738 }, { "content": "#[test]\n\nfn test_relay() {\n\n // This is hand-generated.\n\n let cmd = ChanCmd::RELAY;\n\n assert_eq!(Into::<u8>::into(cmd), 3_u8);\n\n\n\n let mut body: Vec<u8> = b\"not validated at this stage\"[..].into();\n\n body.resize(CELL_SIZE, 0);\n\n fbody(\n\n cmd,\n\n \"6e6f742076616c6964617465642061742074686973207374616765\",\n\n 
&msg::Relay::new(&body).into(),\n\n );\n\n\n\n let cmd = ChanCmd::RELAY_EARLY;\n\n assert_eq!(Into::<u8>::into(cmd), 9_u8);\n\n fbody(\n\n cmd,\n\n \"6e6f742076616c6964617465642061742074686973207374616765\",\n\n &msg::Relay::new(&body).into_early(),\n\n );\n\n\n\n // Try converting to/from raw bodies.\n\n let body = [3_u8; 509];\n\n let cell = msg::Relay::from_raw(body);\n\n let body2 = cell.into_relay_body();\n\n assert_eq!(&body2[..], &body[..]);\n\n}\n\n\n", "file_path": "crates/tor-cell/tests/testvec_chanmsg.rs", "rank": 67, "score": 161764.4755041512 }, { "content": "/// Shared part of a MockNetworkProvider.\n\n///\n\n/// This is separate because providers need to implement Clone, but\n\n/// `next_port` can't be cloned.\n\nstruct MockNetProviderInner {\n\n /// List of public addresses\n\n addrs: Vec<IpAddr>,\n\n /// Shared reference to the network.\n\n net: Arc<MockNetwork>,\n\n /// Next port number to hand out when we're asked to listen on\n\n /// port 0.\n\n ///\n\n /// See discussion of limitations on `listen()` implementation.\n\n next_port: AtomicU16,\n\n}\n\n\n\n/// A [`TcpListener`] implementation returned by a [`MockNetProvider`].\n\n///\n\n/// Represents listening on a public address for incoming TCP connections.\n\npub struct MockNetListener {\n\n /// The address that we're listening on.\n\n addr: SocketAddr,\n\n /// The incoming channel that tells us about new connections.\n\n // TODO: I'm not thrilled to have to use an AsyncMutex and a\n", "file_path": "crates/tor-rtmock/src/net.rs", "rank": 68, "score": 161718.30257975037 }, { "content": "#[derive(Debug)]\n\nstruct TestingStateMgrInner {\n\n /// True if this manager, and all references to it, hold the lock on\n\n /// the storage.\n\n lock_held: bool,\n\n /// The underlying shared storage object.\n\n storage: Arc<Mutex<TestingStateMgrStorage>>,\n\n}\n\n\n\n/// Implementation type for [`TestingStateMgr`]: represents an underlying\n\n/// storage system that can be shared by multiple TestingStateMgr instances\n\n/// at a time, only one of which can hold the lock.\n", "file_path": "crates/tor-persist/src/testing.rs", "rank": 69, "score": 161717.95327893097 }, { "content": "#[derive(Debug)]\n\nstruct FsStateMgrInner {\n\n /// Directory in which we store state files.\n\n statepath: PathBuf,\n\n /// Lockfile to achieve exclusive access to state files.\n\n lockfile: Mutex<fslock::LockFile>,\n\n}\n\n\n\nimpl FsStateMgr {\n\n /// Construct a new `FsStateMgr` to store data in `path`.\n\n ///\n\n /// This function will try to create `path` if it does not already\n\n /// exist.\n\n pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> {\n\n let path = path.as_ref();\n\n let statepath = path.join(\"state\");\n\n let lockpath = path.join(\"state.lock\");\n\n\n\n {\n\n let mut builder = std::fs::DirBuilder::new();\n\n #[cfg(target_family = \"unix\")]\n", "file_path": "crates/tor-persist/src/fs.rs", "rank": 70, "score": 161717.95327893097 }, { "content": "/// Convert from the signature scheme type used in `rustls` to the one used in\n\n/// `x509_signature`.\n\n///\n\n/// (We can't just use the x509_signature crate's \"rustls\" feature to have it\n\n/// use the same enum from `rustls`, because it seems to be on a different\n\n/// version from the rustls we want.)\n\nfn convert_scheme(\n\n scheme: rustls::internal::msgs::enums::SignatureScheme,\n\n) -> Result<x509_signature::SignatureScheme, TLSError> {\n\n use rustls::internal::msgs::enums::SignatureScheme as R;\n\n use x509_signature::SignatureScheme as X;\n\n\n\n // Yes, we do allow 
PKCS1 here. That's fine in practice when PKCS1 is only\n\n // used (as in TLS 1.2) for signatures; the attacks against correctly\n\n // implemented PKCS1 make sense only when it's used for encryption.\n\n Ok(match scheme {\n\n R::RSA_PKCS1_SHA256 => X::RSA_PKCS1_SHA256,\n\n R::ECDSA_NISTP256_SHA256 => X::ECDSA_NISTP256_SHA256,\n\n R::RSA_PKCS1_SHA384 => X::RSA_PKCS1_SHA384,\n\n R::ECDSA_NISTP384_SHA384 => X::ECDSA_NISTP384_SHA384,\n\n R::RSA_PKCS1_SHA512 => X::RSA_PKCS1_SHA512,\n\n R::RSA_PSS_SHA256 => X::RSA_PSS_SHA256,\n\n R::RSA_PSS_SHA384 => X::RSA_PSS_SHA384,\n\n R::RSA_PSS_SHA512 => X::RSA_PSS_SHA512,\n\n R::ED25519 => X::ED25519,\n\n R::ED448 => X::ED448,\n", "file_path": "crates/tor-rtcompat/src/impls/rustls.rs", "rank": 71, "score": 161685.42404441128 }, { "content": "/// Result type used by this crate\n\ntype Result<T> = std::result::Result<T, Error>;\n\n\n", "file_path": "crates/tor-consdiff/src/lib.rs", "rank": 72, "score": 160846.19918454054 }, { "content": "/// Finalize the handshake on the client side.\n\n///\n\n/// Called after we've received a message from the relay: try to\n\n/// complete the handshake and verify its correctness.\n\n///\n\n/// On success, return the server's reply to our original encrypted message,\n\n/// and an `XofReader` to use in generating circuit keys.\n\nfn client_handshake_ntor_v3_part2(\n\n state: &NtorV3HandshakeState,\n\n relay_handshake: &[u8],\n\n verification: &[u8],\n\n) -> Result<(Vec<u8>, impl digest::XofReader)> {\n\n let mut reader = Reader::from_slice(relay_handshake);\n\n let y_pk: curve25519::PublicKey = reader.extract()?;\n\n let auth: DigestVal = reader.extract()?;\n\n let encrypted_msg = reader.into_rest();\n\n\n\n let yx = state.my_sk.diffie_hellman(&y_pk);\n\n let secret_input = {\n\n let mut si = Zeroizing::new(Vec::new());\n\n si.write(&yx);\n\n si.write(&state.shared_secret);\n\n si.write(&state.relay_public.id);\n\n si.write(&state.relay_public.pk);\n\n si.write(&state.my_public);\n\n si.write(&y_pk);\n\n si.write(PROTOID);\n", "file_path": "crates/tor-proto/src/crypto/handshake/ntor_v3.rs", "rank": 73, "score": 160520.41384393643 }, { "content": "/// As `client_handshake_ntor_v3`, but don't generate an ephemeral DH\n\n/// key: instead take that key an arguments `my_sk`.\n\nfn client_handshake_ntor_v3_no_keygen(\n\n relay_public: &NtorV3PublicKey,\n\n client_msg: &[u8],\n\n verification: &[u8],\n\n my_sk: curve25519::StaticSecret,\n\n) -> (NtorV3HandshakeState, Vec<u8>) {\n\n let my_public = curve25519::PublicKey::from(&my_sk);\n\n let bx = my_sk.diffie_hellman(&relay_public.pk);\n\n\n\n let (enc_key, mut mac) = kdf_msgkdf(&bx, relay_public, &my_public, verification);\n\n\n\n //encrypted_msg = ENC(ENC_K1, CM)\n\n // msg_mac = MAC_msgmac(MAC_K1, ID | B | X | encrypted_msg)\n\n let encrypted_msg = encrypt(&enc_key, client_msg);\n\n let msg_mac: DigestVal = {\n\n use digest::Digest;\n\n mac.write(&encrypted_msg);\n\n mac.take().finalize().into()\n\n };\n\n\n", "file_path": "crates/tor-proto/src/crypto/handshake/ntor_v3.rs", "rank": 74, "score": 160505.2516030831 }, { "content": "/// Perform a client handshake, generating an onionskin and a state object\n\nfn client_handshake_ntor_v1<R>(\n\n rng: &mut R,\n\n relay_public: &NtorPublicKey,\n\n) -> (NtorHandshakeState, Vec<u8>)\n\nwhere\n\n R: RngCore + CryptoRng,\n\n{\n\n let my_sk = StaticSecret::new(rng.rng_compat());\n\n let my_public = PublicKey::from(&my_sk);\n\n\n\n client_handshake_ntor_v1_no_keygen(my_public, my_sk, relay_public)\n\n}\n\n\n", "file_path": 
"crates/tor-proto/src/crypto/handshake/ntor.rs", "rank": 75, "score": 159647.31384967623 }, { "content": "/// As [`construct_custom_network`], but do not require a\n\n/// customization function.\n\npub fn construct_network() -> Result<(MdConsensus, Vec<Microdesc>)> {\n\n construct_custom_network(simple_net_func)\n\n}\n\n\n", "file_path": "crates/tor-netdir/src/testnet.rs", "rank": 76, "score": 158584.5929695639 }, { "content": "#[derive(Clone, Debug)]\n\nstruct AddrPolicyRule {\n\n /// What do we do with items that match the pattern?\n\n kind: RuleKind,\n\n /// What pattern are we trying to match?\n\n pattern: AddrPortPattern,\n\n}\n\n\n\n/*\n\nimpl Display for AddrPolicyRule {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let cmd = match self.kind {\n\n RuleKind::Accept => \"accept\",\n\n RuleKind::Reject => \"reject\",\n\n };\n\n write!(f, \"{} {}\", cmd, self.pattern)\n\n }\n\n}\n\n*/\n\n\n\n/// A pattern that may or may not match an address and port.\n", "file_path": "crates/tor-netdoc/src/types/policy/addrpolicy.rs", "rank": 77, "score": 158511.16258604784 }, { "content": "/// helper: compute a key generator and an authentication code from a set\n\n/// of ntor parameters.\n\n///\n\n/// These parameter names are as described in tor-spec.txt\n\nfn ntor_derive(\n\n xy: &SharedSecret,\n\n xb: &SharedSecret,\n\n server_pk: &NtorPublicKey,\n\n x: &PublicKey,\n\n y: &PublicKey,\n\n) -> (NtorHkdfKeyGenerator, Authcode) {\n\n let ntor1_protoid = &b\"ntor-curve25519-sha256-1\"[..];\n\n let ntor1_mac = &b\"ntor-curve25519-sha256-1:mac\"[..];\n\n let ntor1_verify = &b\"ntor-curve25519-sha256-1:verify\"[..];\n\n let server_string = &b\"Server\"[..];\n\n\n\n let mut secret_input = Zeroizing::new(Vec::new());\n\n secret_input.write(xy); // EXP(X,y)\n\n secret_input.write(xb); // EXP(X,b)\n\n secret_input.write(&server_pk.id); // ID\n\n secret_input.write(&server_pk.pk); // B\n\n secret_input.write(x); // X\n\n secret_input.write(y); // Y\n\n secret_input.write(ntor1_protoid); // PROTOID\n", "file_path": "crates/tor-proto/src/crypto/handshake/ntor.rs", "rank": 78, "score": 158293.49736726575 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]\n\n#[serde(transparent)]\n\nstruct MsecDuration(u32);\n\n\n\nimpl MsecDuration {\n\n /// Convert a Duration into a MsecDuration, saturating\n\n /// extra-high values to u32::MAX milliseconds.\n\n fn new_saturating(d: &Duration) -> Self {\n\n let msec = std::cmp::min(d.as_millis(), u128::from(u32::MAX)) as u32;\n\n MsecDuration(msec)\n\n }\n\n}\n\n\n\n/// Module to hold calls to const_assert.\n\n///\n\n/// This is a separate module so we can change the clippy warnings on it.\n\n#[allow(clippy::checked_conversions)]\n\nmod assertion {\n\n use static_assertions::const_assert;\n\n // If this assertion is untrue, then we can't safely use u16 fields in\n\n // time_histogram.\n\n const_assert!(super::TIME_HISTORY_LEN <= u16::MAX as usize);\n\n}\n\n\n\n/// A history of circuit timeout observations, used to estimate our\n\n/// likely circuit timeouts.\n", "file_path": "crates/tor-circmgr/src/timeouts/pareto.rs", "rank": 79, "score": 158054.88137312868 }, { "content": "/// Return true if `rs` is usable as a directory cache.\n\nfn rs_is_dir_cache(rs: &netstatus::MdConsensusRouterStatus) -> bool {\n\n use tor_protover::ProtoKind;\n\n rs.is_flagged_v2dir() && rs.protovers().supports_known_subver(ProtoKind::DirCache, 2)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n 
#![allow(clippy::unwrap_used)]\n\n #![allow(clippy::cognitive_complexity)]\n\n use super::*;\n\n use crate::testnet::*;\n\n use std::collections::HashSet;\n\n use std::time::Duration;\n\n\n\n // Basic functionality for a partial netdir: Add microdescriptors,\n\n // then you have a netdir.\n\n #[test]\n\n fn partial_netdir() {\n\n let (consensus, microdescs) = construct_network().unwrap();\n\n let dir = PartialNetDir::new(consensus, None);\n", "file_path": "crates/tor-netdir/src/lib.rs", "rank": 80, "score": 157481.23298130487 }, { "content": "/// Helper: perform a server handshake without generating any new keys.\n\nfn server_handshake_ntor_v1_no_keygen<T>(\n\n ephem_pub: PublicKey,\n\n ephem: EphemeralSecret,\n\n msg: T,\n\n keys: &[NtorSecretKey],\n\n) -> Result<(NtorHkdfKeyGenerator, Vec<u8>)>\n\nwhere\n\n T: AsRef<[u8]>,\n\n{\n\n let mut cur = Reader::from_slice(msg.as_ref());\n\n\n\n let my_id: RsaIdentity = cur.extract()?;\n\n let my_key: PublicKey = cur.extract()?;\n\n let their_pk: PublicKey = cur.extract()?;\n\n\n\n let keypair = ct::lookup(keys, |key| key.matches_pk(&my_key));\n\n let keypair = match keypair {\n\n Some(k) => k,\n\n None => return Err(Error::MissingKey),\n\n };\n", "file_path": "crates/tor-proto/src/crypto/handshake/ntor.rs", "rank": 81, "score": 157120.40143912815 }, { "content": "/// encode a single TorCert `c` onto a Writer `w`.\n\nfn enc_one_tor_cert<W: Writer + ?Sized>(w: &mut W, c: &TorCert) {\n\n w.write_u8(c.certtype);\n\n let cert_len: u16 = c\n\n .cert\n\n .len()\n\n .try_into()\n\n .expect(\"Impossibly long certificate\");\n\n w.write_u16(cert_len);\n\n w.write_all(&c.cert[..]);\n\n}\n", "file_path": "crates/tor-cell/src/chancell/msg.rs", "rank": 82, "score": 156947.6349151492 }, { "content": "/// The introduction has been completed and the service has replied with a\n\n/// RENDEZVOUS1.\n\n///\n\n/// Handle it by computing and verifying the MAC, and if it's legit return a\n\n/// key generator based on the result of the key exchange.\n\npub fn client_receive_rend<T>(state: &HsNtorClientState, msg: T) -> Result<HsNtorHkdfKeyGenerator>\n\nwhere\n\n T: AsRef<[u8]>,\n\n{\n\n // Extract the public key of the service from the message\n\n let mut cur = Reader::from_slice(msg.as_ref());\n\n let Y: curve25519::PublicKey = cur.extract()?;\n\n let mac_tag: MacTag = cur.extract()?;\n\n\n\n // Get EXP(Y,x) and EXP(B,x)\n\n let xy = state.x.diffie_hellman(&Y);\n\n let xb = state.x.diffie_hellman(&state.proto_input.B);\n\n\n\n let (keygen, my_mac_tag) = get_rendezvous1_key_material(\n\n &xy,\n\n &xb,\n\n &state.proto_input.auth_key,\n\n &state.proto_input.B,\n\n &state.X,\n\n &Y,\n", "file_path": "crates/tor-proto/src/crypto/handshake/hs_ntor.rs", "rank": 83, "score": 156523.19270415686 }, { "content": "fn unhex(s: &str, pad_to_len: bool) -> Vec<u8> {\n\n let mut s = s.to_string();\n\n s.retain(|c| !c.is_whitespace());\n\n let mut body = hex::decode(s).unwrap();\n\n if pad_to_len {\n\n assert!(body.len() <= CELL_SIZE);\n\n body.resize(CELL_SIZE, 0);\n\n }\n\n body\n\n}\n\n\n", "file_path": "crates/tor-cell/tests/testvec_chanmsg.rs", "rank": 84, "score": 155875.5715272649 }, { "content": "/// Compute the sha3-256 digests of signed_part on its own, and of\n\n/// signed_part concatenated with remainder.\n\nfn sha3_dual(signed_part: impl AsRef<[u8]>, remainder: impl AsRef<[u8]>) -> ([u8; 32], [u8; 32]) {\n\n let mut d = ll::d::Sha3_256::new();\n\n d.update(signed_part.as_ref());\n\n let sha3_of_signed = d.clone().finalize().into();\n\n d.update(remainder.as_ref());\n\n let 
sha3_of_whole = d.finalize().into();\n\n (sha3_of_signed, sha3_of_whole)\n\n}\n\n\n\n/// Information about an authority certificate that we have in storage.\n\n///\n\n/// This information is ordinarily derived from the authority cert, but it\n\n/// doesn't have to be.\n\n#[derive(Clone, Debug)]\n\npub(crate) struct AuthCertMeta {\n\n /// Key IDs (identity and signing) for the certificate.\n\n ids: AuthCertKeyIds,\n\n /// Time of publication.\n\n published: SystemTime,\n\n /// Expiration time.\n", "file_path": "crates/tor-dirmgr/src/docmeta.rs", "rank": 85, "score": 155603.53364372015 }, { "content": "/// A safe variant of [`Duration::mul_f64`] that never panics.\n\n///\n\n/// For infinite or NaN or negative multipliers, the results might be\n\n/// nonsensical, but at least they won't be a panic.\n\nfn mul_duration_f64_saturating(d: Duration, mul: f64) -> Duration {\n\n let secs = d.as_secs_f64() * mul;\n\n // At this point I'd like to use Duration::try_from_secs_f64, but\n\n // that isn't stable yet. :p\n\n if secs.is_finite() && secs >= 0.0 {\n\n // We rely on the property that `f64 as uNN` is saturating.\n\n let seconds = secs.trunc() as u64;\n\n let nanos = if seconds == u64::MAX {\n\n 0 // prevent any possible overflow.\n\n } else {\n\n (secs.fract() * 1e9) as u32\n\n };\n\n Duration::new(seconds, nanos)\n\n } else {\n\n Duration::from_secs(1)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "crates/tor-circmgr/src/timeouts.rs", "rank": 86, "score": 155521.76498594676 }, { "content": "/// Wrapper type for Results returned from this crate.\n\ntype Result<T> = std::result::Result<T, crate::Error>;\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\npub use fs::FsStateMgr;\n\npub use handle::{DynStorageHandle, StorageHandle};\n\npub use serde_json::Value as JsonValue;\n\n#[cfg(feature = \"testing\")]\n\npub use testing::TestingStateMgr;\n\n\n\nuse tor_error::ErrorKind;\n\n\n", "file_path": "crates/tor-persist/src/lib.rs", "rank": 87, "score": 155388.38282936122 }, { "content": "/// Internal representation of PathBuilder.\n\nenum ExitPathBuilderInner<'a> {\n\n /// Request a path that allows exit to the given `TargetPort]`s.\n\n WantsPorts(Vec<TargetPort>),\n\n\n\n /// Request a path that allows exit to _any_ port.\n\n AnyExit {\n\n /// If false, then we fall back to non-exit nodes if we can't find an\n\n /// exit.\n\n strict: bool,\n\n },\n\n\n\n /// Request a path that uses a given relay as exit node.\n\n ChosenExit(Relay<'a>),\n\n}\n\n\n\n/// A PathBuilder that builds a path to an exit relay supporting a given\n\n/// set of ports.\n\npub struct ExitPathBuilder<'a> {\n\n /// The inner ExitPathBuilder state.\n\n inner: ExitPathBuilderInner<'a>,\n", "file_path": "crates/tor-circmgr/src/path/exitpath.rs", "rank": 88, "score": 155372.53614358802 }, { "content": "/// Encrypt the 'plaintext' using 'enc_key'. 
Then compute the intro cell MAC\n\n/// using 'mac_key' and return (ciphertext, mac_tag).\n\nfn encrypt_and_mac(\n\n mut plaintext: Vec<u8>,\n\n other_data: &[u8],\n\n enc_key: EncKey,\n\n mac_key: MacKey,\n\n) -> Result<(Vec<u8>, MacTag)> {\n\n // Encrypt the introduction data using 'enc_key'\n\n let zero_iv = GenericArray::default();\n\n let mut cipher = Aes256Ctr::new(&enc_key.into(), &zero_iv);\n\n cipher.apply_keystream(&mut plaintext);\n\n let ciphertext = plaintext; // it's now encrypted\n\n\n\n // Now staple the other INTRODUCE1 data right before the ciphertext to\n\n // create the body of the MAC tag\n\n let mut mac_body: Vec<u8> = Vec::new();\n\n mac_body.extend(other_data);\n\n mac_body.extend(&ciphertext);\n\n let mac_tag = hs_ntor_mac(&mac_body, &mac_key)?;\n\n\n\n Ok((ciphertext, mac_tag))\n\n}\n\n\n", "file_path": "crates/tor-proto/src/crypto/handshake/hs_ntor.rs", "rank": 89, "score": 155237.44634639897 }, { "content": "/// Helper: compute the encryption key and mac_key for the client's\n\n/// encrypted message.\n\n///\n\n/// Takes as inputs `xb` (the shared secret derived from\n\n/// diffie-hellman as Bx or Xb), the relay's public key information,\n\n/// the client's public key (B), and the shared verification string.\n\nfn kdf_msgkdf(\n\n xb: &curve25519::SharedSecret,\n\n relay_public: &NtorV3PublicKey,\n\n client_public: &curve25519::PublicKey,\n\n verification: &[u8],\n\n) -> (Zeroizing<EncKey>, DigestWriter<Sha3_256>) {\n\n // secret_input_phase1 = Bx | ID | X | B | PROTOID | ENCAP(VER)\n\n // phase1_keys = KDF_msgkdf(secret_input_phase1)\n\n // (ENC_K1, MAC_K1) = PARTITION(phase1_keys, ENC_KEY_LEN, MAC_KEY_LEN\n\n use digest::{ExtendableOutput, XofReader};\n\n let mut msg_kdf = DigestWriter(Shake256::default());\n\n msg_kdf.write(&T_MSGKDF);\n\n msg_kdf.write(xb);\n\n msg_kdf.write(&relay_public.id);\n\n msg_kdf.write(client_public);\n\n msg_kdf.write(&relay_public.pk);\n\n msg_kdf.write(PROTOID);\n\n msg_kdf.write(&Encap(verification));\n\n let mut r = msg_kdf.take().finalize_xof();\n\n let mut enc_key = Zeroizing::new([0; ENC_KEY_LEN]);\n", "file_path": "crates/tor-proto/src/crypto/handshake/ntor_v3.rs", "rank": 90, "score": 155236.0139772711 }, { "content": "/// Return default threshold\n\nfn default_preemptive_threshold() -> usize {\n\n 12\n\n}\n\n\n", "file_path": "crates/tor-circmgr/src/config.rs", "rank": 91, "score": 155160.88436850332 }, { "content": "/// Return the default request loyalty timeout.\n\nfn default_request_loyalty() -> Duration {\n\n Duration::from_millis(50)\n\n}\n\n\n\n// NOTE: it seems that `unwrap` may be safe because of builder defaults\n\n// check `derive_builder` documentation for details\n\n// https://docs.rs/derive_builder/0.10.2/derive_builder/#default-values\n\n#[allow(clippy::unwrap_used)]\n\nimpl Default for CircuitTiming {\n\n fn default() -> Self {\n\n CircuitTimingBuilder::default().build().unwrap()\n\n }\n\n}\n\n\n\nimpl CircuitTiming {\n\n /// Return a new [`CircuitTimingBuilder`]\n\n pub fn builder() -> CircuitTimingBuilder {\n\n CircuitTimingBuilder::default()\n\n }\n\n}\n", "file_path": "crates/tor-circmgr/src/config.rs", "rank": 92, "score": 155151.99563840503 }, { "content": "/// Return the default value for `request_timeout`.\n\nfn default_request_timeout() -> Duration {\n\n Duration::from_secs(60)\n\n}\n\n\n", "file_path": "crates/tor-circmgr/src/config.rs", "rank": 93, "score": 155151.99563840503 }, { "content": "/// Return the default value for `max_dirtiness`.\n\nfn default_max_dirtiness() -> Duration {\n\n 
Duration::from_secs(60 * 10)\n\n}\n\n\n", "file_path": "crates/tor-circmgr/src/config.rs", "rank": 94, "score": 155151.99563840503 }, { "content": "/// Perform a server-side ntor handshake.\n\n///\n\n/// On success returns a key generator and a server onionskin.\n\nfn server_handshake_ntor_v1<R, T>(\n\n rng: &mut R,\n\n msg: T,\n\n keys: &[NtorSecretKey],\n\n) -> Result<(NtorHkdfKeyGenerator, Vec<u8>)>\n\nwhere\n\n R: RngCore + CryptoRng,\n\n T: AsRef<[u8]>,\n\n{\n\n // TODO(nickm): we generate this key whether or not we are\n\n // actually going to find our nodeid or keyid. Perhaps we should\n\n // delay that till later? It shouldn't matter for most cases,\n\n // though.\n\n let ephem = EphemeralSecret::new(rng.rng_compat());\n\n let ephem_pub = PublicKey::from(&ephem);\n\n\n\n server_handshake_ntor_v1_no_keygen(ephem_pub, ephem, msg, keys)\n\n}\n\n\n", "file_path": "crates/tor-proto/src/crypto/handshake/ntor.rs", "rank": 95, "score": 154357.03964824462 }, { "content": "#[cfg(any(feature = \"native-tls\", feature = \"rustls\"))]\n\nfn current_handle() -> std::io::Result<tokio_crate::runtime::Handle> {\n\n tokio_crate::runtime::Handle::try_current().map_err(|e| IoError::new(ErrorKind::Other, e))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #![allow(clippy::unwrap_used)]\n\n use super::*;\n\n\n\n #[test]\n\n fn no_current() {\n\n // There should be no running tokio runtime in this context.\n\n\n\n #[cfg(feature = \"native-tls\")]\n\n assert!(TokioNativeTlsRuntime::current().is_err());\n\n\n\n #[cfg(feature = \"rustls\")]\n\n assert!(TokioRustlsRuntime::current().is_err());\n\n }\n\n\n", "file_path": "crates/tor-rtcompat/src/tokio.rs", "rank": 96, "score": 153446.46443664367 }, { "content": "fn decode(body: &str, pad_body: bool) -> Vec<u8> {\n\n let mut body = body.to_string();\n\n body.retain(|c| !c.is_whitespace());\n\n let mut body = hex::decode(body).unwrap();\n\n if pad_body {\n\n body.resize(FIXED_BODY_LEN, 0);\n\n }\n\n body\n\n}\n\n\n", "file_path": "crates/tor-cell/tests/test_chancell.rs", "rank": 97, "score": 153341.98472403683 }, { "content": "/// Compute a tweaked hash.\n\nfn hash(t: &Encap<'_>, data: &[u8]) -> DigestVal {\n\n use digest::Digest;\n\n let mut d = Sha3_256::new();\n\n d.update((t.len() as u64).to_be_bytes());\n\n d.update(t.data());\n\n d.update(data);\n\n d.finalize().into()\n\n}\n\n\n", "file_path": "crates/tor-proto/src/crypto/handshake/ntor_v3.rs", "rank": 98, "score": 153227.13807151088 }, { "content": "fn bad_cell(body: &str, err: Error, pad_body: bool) {\n\n let body = decode(body, pad_body);\n\n\n\n let mut codec = codec::ChannelCodec::new(4);\n\n\n\n let decoded = {\n\n let mut bm = BytesMut::new();\n\n bm.extend_from_slice(&body[..]);\n\n bm.extend_from_slice(&b\"next thing\"[..]);\n\n codec.decode_cell(&mut bm).err().unwrap()\n\n };\n\n\n\n assert_eq!(format!(\"{:?}\", decoded), format!(\"{:?}\", err));\n\n}\n\n\n", "file_path": "crates/tor-cell/tests/test_chancell.rs", "rank": 99, "score": 150919.2227469131 } ]
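Aside for readability: the `dumpmask` snippet retrieved above (crates/tor-protover/src/lib.rs, rank 45) is cut off before the body of its loop. Below is a small self-contained sketch of the same greedy idea, turning a bitmask into a comma-separated list of version ranges. It is written as ordinary std Rust purely for illustration; the function name `ranges_from_mask` and the `main` driver are invented here, and only the expected outputs are taken from the snippet's own doc-comment examples, so treat it as an approximation rather than the crate's actual implementation.

/// Sketch only: greedily encode set bits of `mask` as ranges like "0-1,3-5".
fn ranges_from_mask(mut mask: u64) -> String {
    let mut parts = Vec::new();
    // How many bits of the original mask have already been consumed.
    let mut shift = 0u32;
    while mask != 0 {
        // Skip over a run of zero bits.
        let zeros = mask.trailing_zeros();
        mask >>= zeros;
        shift += zeros;
        // Measure the run of one bits that follows and emit it as one range.
        let ones = mask.trailing_ones();
        let lo = shift;
        let hi = shift + ones - 1;
        if lo == hi {
            parts.push(lo.to_string());
        } else {
            parts.push(format!("{}-{}", lo, hi));
        }
        // Shifting a u64 by 64 would overflow, so handle an all-ones tail explicitly.
        if ones == 64 {
            mask = 0;
        } else {
            mask >>= ones;
        }
        shift += ones;
    }
    parts.join(",")
}

fn main() {
    // Expected values taken from the doc-comment examples in the retrieved snippet.
    assert_eq!(ranges_from_mask(0b111111), "0-5");
    assert_eq!(ranges_from_mask(0b0111011), "0-1,3-5");
    assert_eq!(ranges_from_mask(0b111100), "2-5");
    println!("ok");
}

The greedy scan mirrors the snippet's stated strategy: count a run of zeros, then a run of ones, emit that run as a single range, and repeat until the mask is exhausted.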
Rust
tornado-kernel/src/user/space.rs
HUST-OS/tornado-os
e086b451a0836c00bc13ab940f6a4fa55c1855c6
use crate::memory::{AddressSpaceId, PhysicalAddress, PhysicalPageNumber, PAGE_SIZE};
use alloc::boxed::Box;
use async_mutex::AsyncMutex;
use lazy_static::lazy_static;

#[cfg(feature = "qemu")]
const BASE: usize = 0x8400_0000;
#[cfg(feature = "k210")]
const BASE: usize = 0x8050_0000;

lazy_static! {
    pub static ref USER_SPACE: AsyncMutex<UserSpaceManager<2000, BASE>> =
        AsyncMutex::new(UserSpaceManager::new());
}

pub struct UserSpaceManager<const N: usize, const B: usize> {
    used: ListNode<AddressSpaceId>,
    free: ListNode<AddressSpaceId>,
    len: usize,
}

impl<const N: usize, const B: usize> UserSpaceManager<N, B> {
    pub fn new() -> Self {
        let used = ListNode {
            id: 0,
            val: unsafe { AddressSpaceId::from_raw(0) },
            next: None,
        };
        let mut free = used.clone();
        for i in 0..N {
            let prev = free.next.take();
            let node = ListNode {
                id: N - 1 - i,
                val: unsafe { AddressSpaceId::from_raw(0) },
                next: prev,
            };
            free.next = Some(Box::new(node));
        }
        Self { used, free, len: 0 }
    }
    pub fn alloc(&mut self, pages: usize, _asid: AddressSpaceId) -> Option<PhysicalPageNumber> {
        assert!(PAGE_SIZE % 2 == 0);
        if pages > N - self.len {
            None
        } else {
            let base = self.free.next.as_ref().unwrap().id * PAGE_SIZE + B;
            let base = PhysicalPageNumber::floor(PhysicalAddress(base));
            for _ in 0..pages {
                let mut node = self.free.next.take().unwrap();
                self.free.next = node.next.take();
                let prev = self.used.next.take();
                node.next = prev;
                self.used.next = Some(node);
            }
            self.len += pages;
            Some(base)
        }
    }
    #[allow(unused)]
    pub fn dealloc(&mut self, asid: AddressSpaceId) -> Option<(PhysicalPageNumber, usize)> {
        let mut prev = &mut self.used;
        loop {
            if prev.next.is_none() {
                break;
            }
            if prev.next.as_ref().unwrap().val == asid {
                let mut num = 0;
                let base = prev.next.as_ref().unwrap().id * PAGE_SIZE + B;
                let base = PhysicalPageNumber::floor(PhysicalAddress(base));
                while prev.next.as_ref().is_some() && prev.next.as_ref().unwrap().val == asid {
                    let mut node = prev.next.take().unwrap();
                    prev.next = node.next.take();
                    let temp = self.free.next.take();
                    node.next = temp;
                    self.free.next = Some(node);
                    num += 1;
                }
                self.len -= num;
                return Some((base, num));
            } else {
                prev = prev.next.as_mut().unwrap();
            }
        }
        None
    }
}

#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode<T> {
    pub id: usize,
    pub val: T,
    pub next: Option<Box<ListNode<T>>>,
}
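The `file_code` above is a fixed-capacity allocator for user-space page frames: `new()` threads all N slots onto a free list, `alloc` pops `pages` nodes from the head of that list and returns the first physical page number of the run, and `dealloc` moves a used run back onto the free list. A sketch of how a caller inside the same kernel crate might use it follows; it assumes this `AsyncMutex` exposes an awaitable `lock()` (as the common async mutex crates do), and the helper name `alloc_user_frames` is invented for the example rather than taken from the repository. It relies on the types defined directly above, so it is not standalone code.

/// Hypothetical caller inside the kernel crate (sketch only; assumes
/// `async_mutex::AsyncMutex::lock()` is awaitable).
async fn alloc_user_frames(pages: usize, raw_asid: usize) -> Option<PhysicalPageNumber> {
    // `AddressSpaceId::from_raw` is unsafe in crate::memory; the caller must
    // guarantee the raw id is valid for this platform.
    let asid = unsafe { AddressSpaceId::from_raw(raw_asid) };
    // USER_SPACE is the lazy_static AsyncMutex declared in the file above.
    let mut manager = USER_SPACE.lock().await;
    // Returns the first physical page number of a run of `pages` frames,
    // or None when fewer than `pages` of the 2000 slots remain free.
    manager.alloc(pages, asid)
}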
use crate::memory::{AddressSpaceId, PhysicalAddress, PhysicalPageNumber, PAGE_SIZE};
use alloc::boxed::Box;
use async_mutex::AsyncMutex;
use lazy_static::lazy_static;

#[cfg(feature = "qemu")]
const BASE: usize = 0x8400_0000;
#[cfg(feature = "k210")]
const BASE: usize = 0x8050_0000;

lazy_static! {
    pub static ref USER_SPACE: AsyncMutex<UserSpaceManager<2000, BASE>> =
        AsyncMutex::new(UserSpaceManager::new());
}

pub struct UserSpaceManager<const N: usize, const B: usize> {
    used: ListNode<AddressSpaceId>,
    free: ListNode<AddressSpaceId>,
    len: usize,
}

impl<const N: usize, const B: usize> UserSpaceManager<N, B> {
    pub fn new() -> Self {
        let used = ListNode {
            id: 0,
            val: unsafe { AddressSpaceId::from_raw(0) },
            next: None,
        };
        let mut free = used.clone();
        for i in 0..N {
            let prev = free.next.take();
            let node = ListNode {
                id: N - 1 - i,
                val: unsafe { AddressSpaceId::from_raw(0) },
                next: prev,
            };
            free.next = Some(Box::new(node));
        }
        Self { used, free, len: 0 }
    }
    pub fn alloc(&mut self, pages: usize, _asid: AddressSpaceId) -> Option<PhysicalPageNumber> {
        assert!(PAGE_SIZE % 2 == 0);
    }
    #[allow(unused)]
    pub fn dealloc(&mut self, asid: AddressSpaceId) -> Option<(PhysicalPageNumber, usize)> {
        let mut prev = &mut self.used;
        loop {
            if prev.next.is_none() {
                break;
            }
            if prev.next.as_ref().unwrap().val == asid {
                let mut num = 0;
                let base = prev.next.as_ref().unwrap().id * PAGE_SIZE + B;
                let base = PhysicalPageNumber::floor(PhysicalAddress(base));
                while prev.next.as_ref().is_some() && prev.next.as_ref().unwrap().val == asid {
                    let mut node = prev.next.take().unwrap();
                    prev.next = node.next.take();
                    let temp = self.free.next.take();
                    node.next = temp;
                    self.free.next = Some(node);
                    num += 1;
                }
                self.len -= num;
                return Some((base, num));
            } else {
                prev = prev.next.as_mut().unwrap();
            }
        }
        None
    }
}

#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode<T> {
    pub id: usize,
    pub val: T,
    pub next: Option<Box<ListNode<T>>>,
}
        if pages > N - self.len {
            None
        } else {
            let base = self.free.next.as_ref().unwrap().id * PAGE_SIZE + B;
            let base = PhysicalPageNumber::floor(PhysicalAddress(base));
            for _ in 0..pages {
                let mut node = self.free.next.take().unwrap();
                self.free.next = node.next.take();
                let prev = self.used.next.take();
                node.next = prev;
                self.used.next = Some(node);
            }
            self.len += pages;
            Some(base)
        }
if_condition
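The `if_condition` strategy means the elided `middle` is the if/else body of `alloc`. A quick check of its arithmetic, using values visible in this record (`B = 0x8400_0000` under the `qemu` feature, and a free-list head starting at id 0 because `new()` pushes ids N-1 down to 0) and assuming `PAGE_SIZE` is 4096 (it comes from `crate::memory` and is not shown in this record):

// Worked example of `id * PAGE_SIZE + B` (assumption: PAGE_SIZE == 4096).
fn main() {
    let page_size = 4096usize;
    let base_qemu = 0x8400_0000usize;
    // First allocation: the free-list head has id 0.
    assert_eq!(0 * page_size + base_qemu, 0x8400_0000);
    // After alloc(3, ..) the free-list head has id 3, so the next run starts at:
    assert_eq!(3 * page_size + base_qemu, 0x8400_3000);
    println!("ok");
}

So the first allocations hand out physically contiguous runs starting at BASE, which is exactly what the `id * PAGE_SIZE + B` expression in the fragment computes.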
[ { "content": "pub fn bounded<T, const N: usize>() -> (Sender<T, N>, Receiver<T, N>) {\n\n let buf = Arc::new(AsyncMutex::new(ChannelBuf::new()));\n\n let tx_event = Arc::new(Event::new());\n\n let rx_event = Arc::new(Event::new());\n\n let sender = Sender {\n\n buf: Arc::clone(&buf),\n\n rx_event: Arc::clone(&rx_event),\n\n tx_event: Arc::clone(&tx_event),\n\n };\n\n let receiver = Receiver {\n\n buf: Arc::clone(&buf),\n\n rx_event: Arc::clone(&rx_event),\n\n tx_event: Arc::clone(&tx_event),\n\n };\n\n (sender, receiver)\n\n}\n", "file_path": "tornado-user/src/task/channel.rs", "rank": 0, "score": 284745.8608796664 }, { "content": "pub fn read_block(block_id: usize, buf: &mut [u8]) -> PollTwice {\n\n let _sys_ret = sys_enroll_read(block_id, buf);\n\n PollTwice::new()\n\n}\n\n\n", "file_path": "tornado-user/src/io/mod.rs", "rank": 1, "score": 247437.4814768923 }, { "content": "/// 各种缓存替换算法需要实现的 trait\n\n///\n\n/// N: 缓存项的数量\n\npub trait Cache<const N: usize> {\n\n type Key;\n\n type Value;\n\n /// 根据 `Key` 返回对应的 `Value`\n\n fn get(&mut self, key: &Self::Key) -> Option<Self::Value>;\n\n /// 写入一对 `(Key, Value)`\n\n ///\n\n /// 如果有需要写回的值,将它返回\n\n fn put(&mut self, key: &Self::Key, value: Self::Value) -> Option<(Self::Key, Self::Value)>;\n\n /// 返回所有的缓存项,用于数据同步\n\n fn all(&mut self) -> Vec<(Self::Key, Self::Value)>;\n\n}\n\n\n\n/// [`LFUCache`] 的缓存项\n\n///\n\n/// 除了记录键值对,还记录访问次数,最后访问时间,是否写脏\n\n#[derive(Clone, Copy)]\n\npub struct Node<K: Eq + PartialEq + Copy, V: Clone> {\n\n key: K,\n\n value: V,\n", "file_path": "async-fat32/src/cache.rs", "rank": 2, "score": 246638.8193566387 }, { "content": "/// 往内核注册一个块设备读任务\n\npub fn sys_enroll_read(block_id: usize, buf: &mut [u8]) -> SyscallResult {\n\n assert!(buf.len() == BLOCK_SIZE);\n\n // 第一个参数 0 表示读块设备\n\n syscall_3(\n\n MODULE_TASK,\n\n FUNC_IO_TASK,\n\n [0, block_id, buf.as_ptr() as usize],\n\n )\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 3, "score": 244344.48829060455 }, { "content": "/// 各种缓存替换算法需要实现的 trait\n\n///\n\n/// N: 缓存项的数量\n\npub trait Cache<const N: usize> {\n\n type Key;\n\n type Value;\n\n /// 根据 `Key` 返回对应的 `Value`\n\n fn get(&mut self, key: &Self::Key) -> Option<Self::Value>;\n\n /// 写入一对 `(Key, Value)`\n\n ///\n\n /// 如果有需要写回的值,将它返回\n\n fn put(&mut self, key: &Self::Key, value: Self::Value) -> Option<(Self::Key, Self::Value)>;\n\n /// 返回所有的缓存项,用于数据同步\n\n fn all(&mut self) -> Vec<(Self::Key, Self::Value)>;\n\n}\n\n\n\n/// 异步块缓存层\n\n/// B: 一个块中的字节数\n\n/// N: 块缓冲层的块数\n\n\n\npub struct AsyncBlockCache<\n\n C: Cache<N, Key = usize, Value = [u8; B]> + Send + Sync,\n\n const B: usize,\n", "file_path": "tornado-kernel/src/cache/mod.rs", "rank": 4, "score": 243432.39735755115 }, { "content": "/// 缓冲区\n\nstruct ChannelBuf<T, const N: usize> {\n\n data: [MaybeUninit<T>; N],\n\n head: usize,\n\n tail: usize,\n\n}\n\n\n\nimpl<T, const N: usize> ChannelBuf<T, N> {\n\n pub const fn new() -> Self {\n\n Self {\n\n data: MaybeUninit::uninit_array(),\n\n head: 0,\n\n tail: 0,\n\n }\n\n }\n\n pub const fn len(&self) -> usize {\n\n self.tail.wrapping_sub(self.head) % N\n\n }\n\n #[inline]\n\n pub const fn is_empty(&self) -> bool {\n\n self.tail == self.head\n", "file_path": "tornado-user/src/task/channel.rs", "rank": 5, "score": 242257.71153342188 }, { "content": "pub fn do_yield(next_asid: usize) -> SyscallResult {\n\n sys_yield(next_asid)\n\n}\n", "file_path": "tornado-user/src/lib.rs", "rank": 6, "score": 208803.1129221718 }, { "content": "pub fn max_asid() -> AddressSpaceId {\n\n #[cfg(target_pointer_width = 
\"64\")]\n\n let mut val: usize = ((1 << 16) - 1) << 44;\n\n #[cfg(target_pointer_width = \"32\")]\n\n let mut val: usize = ((1 << 9) - 1) << 22;\n\n unsafe {\n\n asm!(\"\n\n csrr {tmp}, satp\n\n or {val}, {tmp}, {val}\n\n csrw satp, {val}\n\n csrrw {val}, satp, {tmp}\n\n \", tmp = out(reg) _, val = inlateout(reg) val)\n\n };\n\n #[cfg(target_pointer_width = \"64\")]\n\n return AddressSpaceId(((val >> 44) & ((1 << 16) - 1)) as u16);\n\n #[cfg(target_pointer_width = \"32\")]\n\n return AddressSpaceId(((val >> 22) & ((1 << 9) - 1)) as u16);\n\n}\n", "file_path": "tornado-kernel/src/memory/mod.rs", "rank": 7, "score": 207013.44920299068 }, { "content": "pub fn sys_yield(next_asid: usize) -> SyscallResult {\n\n syscall_1(MODULE_TASK, FUNC_SWITCH_TASK, next_asid)\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 8, "score": 202999.29660066156 }, { "content": "pub fn write_block(block_id: usize, buf: &[u8]) -> PollTwice {\n\n let _sys_ret = sys_enroll_write(block_id, buf);\n\n PollTwice::new()\n\n}\n", "file_path": "tornado-user/src/io/mod.rs", "rank": 9, "score": 192306.02999056986 }, { "content": "/// 往内核注册一个块设备写任务\n\npub fn sys_enroll_write(block_id: usize, buf: &[u8]) -> SyscallResult {\n\n assert!(buf.len() == BLOCK_SIZE);\n\n // 第一个参数 1 表示写块设备\n\n syscall_3(\n\n MODULE_TASK,\n\n FUNC_IO_TASK,\n\n [1, block_id, buf.as_ptr() as usize],\n\n )\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 10, "score": 189749.1275993989 }, { "content": "// 创建一个新的用户任务,打包它的环境\n\npub fn new_user(\n\n future: impl Future<Output = ()> + 'static + Send + Sync,\n\n shared_scheduler: NonNull<()>,\n\n set_task_state: unsafe extern \"C\" fn(NonNull<()>, usize, TaskState),\n\n) -> Arc<UserTaskRepr> {\n\n Arc::new(UserTaskRepr(\n\n UserTask::new(future),\n\n shared_scheduler.as_ptr() as usize,\n\n set_task_state,\n\n ))\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct UserTaskRepr(\n\n UserTask,\n\n usize,\n\n unsafe extern \"C\" fn(NonNull<()>, usize, TaskState),\n\n);\n\n\n\nimpl UserTaskRepr {\n", "file_path": "tornado-user/src/task/mod.rs", "rank": 11, "score": 186971.17498702637 }, { "content": "/// 创建一个新的内核任务,打包它的环境\n\npub fn new_kernel(\n\n future: impl Future<Output = ()> + 'static + Send + Sync,\n\n process: Arc<Process>,\n\n shared_scheduler: NonNull<()>,\n\n set_task_state: unsafe extern \"C\" fn(NonNull<()>, usize, TaskState),\n\n) -> Arc<KernelTaskRepr> {\n\n Arc::new(KernelTaskRepr(\n\n KernelTask::new(future, process),\n\n shared_scheduler.as_ptr() as usize,\n\n set_task_state,\n\n ))\n\n}\n\n\n\n/// 内核任务的表示\n\n#[derive(Debug)]\n\npub struct KernelTaskRepr(\n\n KernelTask,\n\n usize,\n\n unsafe extern \"C\" fn(NonNull<()>, usize, TaskState),\n\n);\n", "file_path": "tornado-kernel/src/task/mod.rs", "rank": 12, "score": 186971.17498702637 }, { "content": "pub fn console_getchar() -> usize {\n\n sbi_call(SBI_CONSOLE_GETCHAR, 0, 0, 0)\n\n}\n\n\n", "file_path": "event/src/sbi.rs", "rank": 13, "score": 185151.33547936176 }, { "content": "pub fn sys_test_read_line(buf: &mut [u8]) -> SyscallResult {\n\n syscall_3(\n\n MODULE_TEST_INTERFACE,\n\n FUNC_TEST_READ_LINE,\n\n [0, buf.as_ptr() as usize, buf.len()],\n\n )\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 14, "score": 183731.05188349017 }, { "content": "pub fn read_timer() -> usize {\n\n sys_read_timer().code\n\n}\n", "file_path": "tornado-user/src/lib.rs", "rank": 15, "score": 182567.91686360826 }, { "content": "#[inline]\n\npub fn read_tp() -> usize {\n\n let tp: usize;\n\n unsafe {\n\n asm!(\"mv {}, 
tp\", out(reg) tp, options(nomem, nostack));\n\n }; // rust-lang/rust#82753 Thank you @Amanieu :)\n\n tp\n\n}\n\n\n\n/// 用户层将定义自己的tp寄存器意义\n\n///\n\n/// 在内核层中,tp指向一个结构体,说明当前的硬件线程编号,\n\n/// 以及已经分配的地址空间和对应的用户上下文\n\n#[repr(C)]\n\npub struct KernelHartInfo {\n\n hart_id: usize,\n\n current_address_space_id: AddressSpaceId, // currently unused\n\n current_process: Option<Arc<Process>>, // currently unused\n\n hart_max_asid: AddressSpaceId, // note: different between qemu and k210 platform\n\n asid_alloc: (LinkedList<usize>, usize), // (空余的编号回收池,目前已分配最大的编号)\n\n user_mm_sets: (LinkedList<MemorySet>, usize), // (注册的用户地址空间映射,上一次进入的用户地址空间编号)\n", "file_path": "tornado-kernel/src/hart.rs", "rank": 16, "score": 182567.91686360826 }, { "content": "pub fn console_getchar() -> usize {\n\n sbi_call(SBI_CONSOLE_GETCHAR, 0, 0, 0)\n\n}\n\n\n", "file_path": "async-sd/src/sbi.rs", "rank": 17, "score": 182567.91686360826 }, { "content": "pub fn console_getchar() -> usize {\n\n sbi_call(SBI_CONSOLE_GETCHAR, 0, 0, 0)\n\n}\n\n\n", "file_path": "async-fat32/src/sbi.rs", "rank": 18, "score": 182567.91686360826 }, { "content": "pub fn console_getchar() -> usize {\n\n sbi_call(SBI_CONSOLE_GETCHAR, 0, 0, 0)\n\n}\n\n\n", "file_path": "tornado-kernel/src/sbi.rs", "rank": 19, "score": 182567.91686360826 }, { "content": "pub fn console_putchar(c: usize) {\n\n sbi_call(SBI_CONSOLE_PUTCHAR, c, 0, 0);\n\n}\n\n\n", "file_path": "event/src/sbi.rs", "rank": 20, "score": 181591.24032209194 }, { "content": "#[allow(unused)]\n\npub fn console_getchar() -> usize {\n\n sbi_call(SBI_CONSOLE_GETCHAR, 0, 0, 0)\n\n}\n\n\n", "file_path": "async-virtio-driver/src/sbi.rs", "rank": 21, "score": 180086.3292856862 }, { "content": "pub fn console_putchar(c: usize) {\n\n sbi_call(SBI_CONSOLE_PUTCHAR, c, 0, 0);\n\n}\n\n\n", "file_path": "tornado-kernel/src/sbi.rs", "rank": 22, "score": 179007.82170633844 }, { "content": "pub fn console_putchar(c: usize) {\n\n sbi_call(SBI_CONSOLE_PUTCHAR, c, 0, 0);\n\n}\n\n\n", "file_path": "async-sd/src/sbi.rs", "rank": 23, "score": 179007.82170633844 }, { "content": "pub fn console_putchar(c: usize) {\n\n sbi_call(SBI_CONSOLE_PUTCHAR, c, 0, 0);\n\n}\n\n\n", "file_path": "async-fat32/src/sbi.rs", "rank": 24, "score": 179007.82170633844 }, { "content": "pub fn set_timer(time: usize) {\n\n sbi_call(SBI_SET_TIMER, time, 0, 0);\n\n}\n", "file_path": "event/src/sbi.rs", "rank": 25, "score": 179007.82170633844 }, { "content": "pub fn get_time_ms() -> usize {\n\n time::read() / (CLOCK_FREQ / MSEC_PER_SEC)\n\n}", "file_path": "tornado-kernel/src/trap/timer.rs", "rank": 26, "score": 177700.66827805137 }, { "content": "/// 生成一个新的任务\n\npub fn spawn(future: impl Future<Output = ()> + Send + Sync + 'static) {\n\n let shared_payload = unsafe { task::shared::SharedPayload::new(SHARED_PAYLOAD_BASE) };\n\n let asid = unsafe { task::shared::AddressSpaceId::from_raw(ADDRESS_SPACE_ID) };\n\n let task = task::new_user(\n\n future,\n\n shared_payload.shared_scheduler,\n\n shared_payload.shared_set_task_state,\n\n );\n\n unsafe {\n\n shared_payload.add_task(0 /* todo */, asid, task.task_repr());\n\n }\n\n}\n\n\n", "file_path": "tornado-user/src/lib.rs", "rank": 27, "score": 177438.70860223786 }, { "content": "pub fn set_timer(time: usize) {\n\n sbi_call(SBI_SET_TIMER, time, 0, 0);\n\n}\n", "file_path": "async-sd/src/sbi.rs", "rank": 28, "score": 176526.23412841637 }, { "content": "pub fn set_timer(time: usize) {\n\n sbi_call(SBI_SET_TIMER, time, 0, 0);\n\n}\n", "file_path": "tornado-kernel/src/sbi.rs", "rank": 29, "score": 
176526.23412841637 }, { "content": "pub fn set_timer(time: usize) {\n\n sbi_call(SBI_SET_TIMER, time, 0, 0);\n\n}\n", "file_path": "async-fat32/src/sbi.rs", "rank": 30, "score": 176526.23412841637 }, { "content": "pub fn console_putchar(c: usize) {\n\n sbi_call(SBI_CONSOLE_PUTCHAR, c, 0, 0);\n\n}\n\n\n", "file_path": "async-virtio-driver/src/sbi.rs", "rank": 31, "score": 176526.23412841637 }, { "content": "#[allow(unused)]\n\npub fn set_timer(time: usize) {\n\n sbi_call(SBI_SET_TIMER, time, 0, 0);\n\n}\n", "file_path": "async-virtio-driver/src/sbi.rs", "rank": 32, "score": 174140.57312078154 }, { "content": "/// 进入地址空间为`asid`的用户态空间\n\n///\n\n/// 通常用于第一次从内核态进入用户态\n\npub fn enter_user(asid: usize) -> ! {\n\n let satp = KernelHartInfo::user_satp(asid).expect(\"get satp with asid\");\n\n let swap_context = unsafe { get_swap_cx(&satp, asid) };\n\n trap::switch_to_user(swap_context, satp.inner(), asid)\n\n}\n", "file_path": "tornado-kernel/src/user/trap.rs", "rank": 33, "score": 174140.57312078154 }, { "content": "/// 运行一个异步的main函数,在用户的entry函数里调用\n\n/// 应该作为标准库的一部分,这里使用一个库函数来模拟有标准库的情况\n\npub fn execute_async_main(main: impl Future<Output = i32> + Send + Sync + 'static) -> i32 {\n\n let hart_id = 0; // todo!\n\n let shared_payload = unsafe { task::shared::SharedPayload::new(SHARED_PAYLOAD_BASE) };\n\n let address_space_id = unsafe { task::shared::AddressSpaceId::from_raw(ADDRESS_SPACE_ID) };\n\n static mut EXIT_CODE: i32 = 0;\n\n let main_task = task::new_user(\n\n async move {\n\n unsafe { EXIT_CODE = main.await };\n\n },\n\n shared_payload.shared_scheduler,\n\n shared_payload.shared_set_task_state,\n\n );\n\n unsafe {\n\n shared_payload.add_task(hart_id, address_space_id, main_task.task_repr());\n\n }\n\n task::shared::run_until_ready(\n\n || unsafe { shared_payload.peek_task(task::shared::user_should_switch) },\n\n |task_repr| unsafe { shared_payload.delete_task(task_repr) },\n\n |task_repr, new_state| unsafe { shared_payload.set_task_state(task_repr, new_state) },\n\n );\n\n unsafe { EXIT_CODE }\n\n}\n\n\n", "file_path": "tornado-user/src/lib.rs", "rank": 34, "score": 164152.0785917525 }, { "content": "#[inline]\n\nfn use_tp_box<F: Fn(&mut Box<KernelHartInfo>) -> T, T>(f: F) -> T {\n\n let addr = read_tp();\n\n let mut bx: Box<KernelHartInfo> = unsafe { Box::from_raw(addr as *mut _) };\n\n let ans = f(&mut bx);\n\n drop(Box::into_raw(bx)); // 防止Box指向的空间被释放\n\n ans\n\n}\n\n\n", "file_path": "tornado-kernel/src/hart.rs", "rank": 35, "score": 163327.35839400208 }, { "content": "pub fn sys_test_write_one(data: usize) -> SyscallResult {\n\n syscall_2(MODULE_TEST_INTERFACE, FUNC_TEST_WRITE_ONE, [0, data])\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 36, "score": 162314.81524524197 }, { "content": "fn next_process_id() -> ProcessId {\n\n let mut pid = PROCESS_COUNTER.lock();\n\n let ans = *pid;\n\n *pid += 1;\n\n ProcessId(ans)\n\n}\n", "file_path": "tornado-kernel/src/task/process.rs", "rank": 37, "score": 161942.00993054453 }, { "content": "/// 系统调用的第一次分发\n\npub fn syscall(param: [usize; 6], user_satp: usize, func: usize, module: usize) -> SyscallResult {\n\n match module {\n\n MODULE_PROCESS => do_process(param, user_satp, func),\n\n MODULE_TEST_INTERFACE => do_test_interface(param, user_satp, func),\n\n MODULE_TASK => do_task(param, func),\n\n _ => panic!(\"Unknown module {:x}\", module),\n\n }\n\n}\n\n\n", "file_path": "tornado-kernel/src/syscall/mod.rs", "rank": 38, "score": 160473.12267065537 }, { "content": "#[no_mangle]\n\npub fn switch_to_user(context: &SwapContext, 
user_satp: usize, user_asid: usize) -> ! {\n\n use riscv::register::{\n\n sstatus::{self, SPP},\n\n stvec::{self, TrapMode},\n\n };\n\n // 关中断\n\n unsafe {\n\n sstatus::clear_sie();\n\n }\n\n extern \"C\" {\n\n fn _swap_frame();\n\n fn _supervisor_to_user();\n\n }\n\n // 用户态发生中断时 pc 将会被设置成此值\n\n let user_trap_va = SWAP_FRAME_VA as usize;\n\n // 该函数最后应该跳转的虚拟地址\n\n let jmp_va = _supervisor_to_user as usize - _swap_frame as usize + SWAP_FRAME_VA;\n\n\n\n // 设置用户态陷入内核时需要跳转的地址\n\n unsafe {\n", "file_path": "tornado-kernel/src/trap/switch.rs", "rank": 39, "score": 160322.94152099785 }, { "content": "#[inline]\n\nfn use_tp_box_move<F: FnOnce(&mut Box<KernelHartInfo>) -> T, T>(f: F) -> T {\n\n let addr = read_tp();\n\n let mut bx: Box<KernelHartInfo> = unsafe { Box::from_raw(addr as *mut _) };\n\n let ans = f(&mut bx);\n\n drop(Box::into_raw(bx)); // 防止Box指向的空间被释放\n\n ans\n\n}\n", "file_path": "tornado-kernel/src/hart.rs", "rank": 40, "score": 153324.55375853486 }, { "content": "/// `yield`系统调用\n\n///\n\n/// 用户态轮询任务的时候,发现下一个任务在不同地址空间,则产生该系统调用\n\n///\n\n/// 从共享调度器里面拿出下一个任务的引用,根据地址空间编号切换到相应的地址空间\n\n///\n\n/// 下一个任务的地址空间编号由用户通过 a0 参数传给内核\n\nfn switch_next_task(next_asid: usize) -> SyscallResult {\n\n if next_asid == 0 {\n\n // 内核任务\n\n SyscallResult::KernelTask\n\n } else {\n\n let satp = KernelHartInfo::user_satp(next_asid).expect(\"get satp register with asid\");\n\n SyscallResult::NextASID {\n\n asid: next_asid,\n\n satp,\n\n }\n\n }\n\n}\n\n\n", "file_path": "tornado-kernel/src/syscall/mod.rs", "rank": 41, "score": 151715.32164728869 }, { "content": "pub fn shutdown() -> ! {\n\n sbi_call(SBI_SHUTDOWN, 0, 0, 0);\n\n unreachable!()\n\n}\n\n\n", "file_path": "event/src/sbi.rs", "rank": 42, "score": 149810.09303022223 }, { "content": "pub fn shutdown() -> ! {\n\n sbi_call(SBI_SHUTDOWN, 0, 0, 0);\n\n unreachable!()\n\n}\n\n\n", "file_path": "async-sd/src/sbi.rs", "rank": 43, "score": 147944.03375281524 }, { "content": "pub fn shutdown() -> ! {\n\n sbi_call(SBI_SHUTDOWN, 0, 0, 0);\n\n unreachable!()\n\n}\n\n\n", "file_path": "tornado-kernel/src/sbi.rs", "rank": 44, "score": 147944.03375281524 }, { "content": "pub fn shutdown() -> ! {\n\n sbi_call(SBI_SHUTDOWN, 0, 0, 0);\n\n unreachable!()\n\n}\n\n\n", "file_path": "async-fat32/src/sbi.rs", "rank": 45, "score": 147944.03375281524 }, { "content": "pub fn init() {\n\n heap::init();\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\n#[repr(C)]\n\npub struct AddressSpaceId(u16); // in Sv39, [0, 2^16)\n\n\n\nimpl AddressSpaceId {\n\n pub(crate) unsafe fn from_raw(asid: usize) -> AddressSpaceId {\n\n AddressSpaceId(asid as u16)\n\n }\n\n pub(crate) fn into_inner(self) -> usize {\n\n self.0 as usize\n\n }\n\n}\n\n\n", "file_path": "tornado-kernel/src/memory/mod.rs", "rank": 46, "score": 146154.54753475302 }, { "content": "/// 初始化中断相关的子模块\n\n///\n\n/// - [`handler::init`]\n\n/// - [`timer::init`]\n\npub fn init() {\n\n handler::init();\n\n timer::init();\n\n unsafe {\n\n riscv::register::sstatus::set_sie();\n\n riscv::register::sie::set_sext();\n\n }\n\n\n\n // println!(\"[kernel] mod interrupt initialized\");\n\n}\n", "file_path": "tornado-kernel/src/trap/mod.rs", "rank": 47, "score": 146154.54753475302 }, { "content": "#[allow(unused)]\n\npub fn shutdown() -> ! 
{\n\n sbi_call(SBI_SHUTDOWN, 0, 0, 0);\n\n unreachable!()\n\n}\n\n\n", "file_path": "async-virtio-driver/src/sbi.rs", "rank": 48, "score": 146154.54753475302 }, { "content": "/// 初始化时钟中断\n\npub fn init() {\n\n unsafe {\n\n sie::set_stimer(); // 允许时钟中断\n\n // sie::clear_stimer(); // 关闭时钟中断\n\n }\n\n set_next_timeout(); // 设置下一次时钟中断\n\n}\n\n\n", "file_path": "tornado-kernel/src/trap/timer.rs", "rank": 49, "score": 146154.54753475302 }, { "content": "pub fn tick() {\n\n set_next_timeout();\n\n unsafe {\n\n TICKS = TICKS.wrapping_add(1);\n\n }\n\n}\n\n\n", "file_path": "tornado-kernel/src/trap/timer.rs", "rank": 50, "score": 146154.54753475302 }, { "content": "/// 运行异步任务\n\npub fn execute_async() {\n\n let shared_payload = unsafe { task::shared::SharedPayload::new(SHARED_PAYLOAD_BASE) };\n\n task::shared::run_until_ready(\n\n || unsafe { shared_payload.peek_task(task::shared::user_should_switch) },\n\n |task_repr| unsafe { shared_payload.delete_task(task_repr) },\n\n |task_repr, new_state| unsafe { shared_payload.set_task_state(task_repr, new_state) },\n\n );\n\n}\n\n\n", "file_path": "tornado-user/src/lib.rs", "rank": 51, "score": 146154.54753475302 }, { "content": "/// 初始化堆\n\npub fn init() {\n\n unsafe {\n\n HEAP.lock()\n\n .init(HEAP_SPACE.as_ptr() as usize, KERNEL_HEAP_SIZE)\n\n }\n\n}\n", "file_path": "tornado-kernel/src/memory/heap.rs", "rank": 52, "score": 146154.54753475302 }, { "content": "/// 初始化中断\n\npub fn init() {\n\n unsafe {\n\n // 将`trap_vector`的地址以[`Vectored`]形式写入到`stvec`寄存器\n\n stvec::write(trap_vector as usize, stvec::TrapMode::Vectored);\n\n }\n\n}\n\n\n\n/// S态时钟中断\n\n#[naked]\n\n#[link_section = \".text\"]\n\npub unsafe extern \"C\" fn supervisor_timer() {\n\n asm!(\n\n define_load_store!(),\n\n save_non_switch!(),\n\n \"mv a0, sp\",\n\n \"call {supervisor_timer}\",\n\n restore_non_switch!(),\n\n \"sret\",\n\n REGBYTES = const core::mem::size_of::<usize>(),\n\n supervisor_timer = sym rust_supervisor_timer,\n", "file_path": "tornado-kernel/src/trap/handler.rs", "rank": 53, "score": 146154.54753475302 }, { "content": "fn breakpoint(trap_frame: &mut TrapFrame) -> *mut TrapFrame {\n\n // println!(\"Breakpoint at {:#08x}\", trap_frame.sepc);\n\n trap_frame.sepc = trap_frame.sepc.wrapping_add(2);\n\n trap_frame\n\n}\n\n\n", "file_path": "tornado-kernel/src/trap/handler.rs", "rank": 54, "score": 144510.65273683885 }, { "content": "fn syscall(trap_frame: &mut TrapFrame) -> *mut TrapFrame {\n\n println!(\n\n \"Syscall at {:#08x}; note that user_satp /*todo*/\",\n\n trap_frame.sepc\n\n );\n\n let param = [trap_frame.x[10], trap_frame.x[11], 0, 0, 0, 0]; // a0, a1\n\n match do_syscall(param, 0 /* todo */, trap_frame.x[16], trap_frame.x[17]) {\n\n // a6, a7\n\n SyscallResult::Procceed { code, extra } => {\n\n trap_frame.x[10] = code; // a0\n\n trap_frame.x[11] = extra; // a1\n\n trap_frame.sepc = trap_frame.sepc.wrapping_add(4); // skip `ecall` instruction\n\n trap_frame\n\n }\n\n SyscallResult::Retry => trap_frame, // don't skip\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n\n/// 内核发生中断/异常时需要保存的上下文\n", "file_path": "tornado-kernel/src/trap/handler.rs", "rank": 55, "score": 144510.65273683885 }, { "content": "/// S态软件中断\n\n///\n\n/// unimplemented\n\npub fn supervisor_software() {\n\n panic!(\"Supervisor software: {:08x}\", sepc::read());\n\n}\n\n\n\n/// S态外部中断\n\n#[naked]\n\n#[link_section = \".text\"]\n\npub unsafe extern \"C\" fn supervisor_external() {\n\n asm!(\n\n save_non_switch!(),\n\n \"mv a0, sp\",\n\n \"call {supervisor_external}\",\n\n restore_non_switch!(),\n\n 
\"sret\",\n\n REGBYTES = const core::mem::size_of::<usize>(),\n\n supervisor_external = sym rust_supervisor_external,\n\n options(noreturn)\n\n )\n\n}\n\n\n", "file_path": "tornado-kernel/src/trap/handler.rs", "rank": 56, "score": 144437.01591331017 }, { "content": "pub fn run_until_ready(\n\n peek_task: impl Fn() -> TaskResult,\n\n delete_task: impl Fn(usize) -> bool,\n\n set_task_state: impl Fn(usize, TaskState),\n\n) {\n\n let mut threshold = 0;\n\n loop {\n\n if threshold > 50 {\n\n sys_kernel_check();\n\n threshold = 0;\n\n }\n\n let task = peek_task();\n\n // println!(\">>> user executor: next task = {:x?}\", task);\n\n match task {\n\n TaskResult::Task(task_repr) => {\n\n // 在相同的地址空间里面\n\n let task: Arc<UserTaskRepr> = unsafe { Arc::from_raw(task_repr as *mut _) };\n\n let waker = waker_ref(&task);\n\n let mut context = Context::from_waker(&*waker);\n\n let ret = task.task().future.lock().as_mut().poll(&mut context);\n", "file_path": "tornado-user/src/task/shared.rs", "rank": 57, "score": 144437.01591331017 }, { "content": "// 性能测试使用\n\npub fn execute_async_analysis() {\n\n let shared_payload = unsafe { task::shared::SharedPayload::new(SHARED_PAYLOAD_BASE) };\n\n task::shared::run_until_ready_analysis(\n\n || unsafe { shared_payload.peek_task(task::shared::user_should_switch) },\n\n |task_repr| unsafe { shared_payload.delete_task(task_repr) },\n\n );\n\n}\n\n\n\nuse syscall::*;\n\n\n", "file_path": "tornado-user/src/lib.rs", "rank": 58, "score": 144437.01591331017 }, { "content": "/// 异步IO系统调用\n\n///\n\n/// 这时候内核会创建一个块设备读写任务并添加到共享调度器中。\n\n///\n\n/// 任务的结尾会将对应的用户态任务唤醒。\n\nfn do_io_task(io_type: usize, block_id: usize, buf_ptr: usize) -> SyscallResult {\n\n match io_type {\n\n 0 => SyscallResult::IOTask {\n\n block_id,\n\n buf_ptr,\n\n write: false,\n\n },\n\n 1 => SyscallResult::IOTask {\n\n block_id,\n\n buf_ptr,\n\n write: true,\n\n },\n\n _ => panic!(\"unknown io type\"),\n\n }\n\n}\n\n\n", "file_path": "tornado-kernel/src/syscall/mod.rs", "rank": 59, "score": 142899.66690339305 }, { "content": "/// 关闭外部中断\n\npub fn ext_intr_off() {\n\n #[cfg(feature = \"qemu\")]\n\n unsafe {\n\n sie::clear_sext();\n\n }\n\n}\n", "file_path": "tornado-kernel/src/async_rt/executor.rs", "rank": 60, "score": 142787.18451917593 }, { "content": "pub fn run_until_ready_analysis(\n\n peek_task: impl Fn() -> TaskResult,\n\n delete_task: impl Fn(usize) -> bool,\n\n) {\n\n loop {\n\n let task = peek_task();\n\n match task {\n\n TaskResult::Task(task_repr) => {\n\n // 性能测试使用,直接删除任务\n\n let task: Arc<UserTaskRepr> = unsafe { Arc::from_raw(task_repr as *mut _) };\n\n let waker = waker_ref(&task);\n\n let _context = Context::from_waker(&*waker);\n\n delete_task(task_repr);\n\n }\n\n TaskResult::ShouldYield(next_asid) => {\n\n // // 不释放这个任务的内存,执行切换地址空间的系统调用\n\n // mem::forget(task);\n\n do_yield(next_asid);\n\n }\n\n TaskResult::NoWakeTask => unreachable!(),\n", "file_path": "tornado-user/src/task/shared.rs", "rank": 61, "score": 142787.18451917593 }, { "content": "/// 打开外部中断\n\npub fn ext_intr_on() {\n\n #[cfg(feature = \"qemu\")]\n\n unsafe {\n\n sie::set_sext();\n\n }\n\n}\n\n\n", "file_path": "tornado-kernel/src/async_rt/executor.rs", "rank": 62, "score": 142787.18451917593 }, { "content": "/// 内核执行器实现\n\n///\n\n/// 如果是当前上下文,就解释运行,如果不是,就切换上下文。\n\n///\n\n/// 切换上下文时,要把上下文保存好,最终还是要回到切换的地方继续运行。\n\npub fn run_until_idle(\n\n peek_task: impl Fn() -> TaskResult,\n\n delete_task: impl Fn(usize) -> bool,\n\n set_task_state: impl Fn(usize, TaskState),\n\n) {\n\n loop {\n\n // unsafe {\n\n // sstatus::set_sie();\n\n // 
}\n\n ext_intr_off();\n\n let task = peek_task();\n\n ext_intr_on();\n\n // println!(\">>> kernel executor: next task = {:x?}\", task);\n\n match task {\n\n TaskResult::Task(task_repr) => {\n\n // 在相同的(内核)地址空间里面\n\n ext_intr_off();\n\n set_task_state(task_repr, TaskState::Sleeping);\n\n ext_intr_on();\n\n let task: Arc<KernelTaskRepr> = unsafe { Arc::from_raw(task_repr as *mut _) };\n", "file_path": "tornado-kernel/src/async_rt/executor.rs", "rank": 63, "score": 142787.18451917593 }, { "content": "#[allow(unused)]\n\npub fn run_one(\n\n add_task: impl Fn(usize) -> bool,\n\n peek_task: impl Fn() -> TaskResult,\n\n delete_task: impl Fn(usize) -> bool,\n\n set_task_state: impl Fn(usize, TaskState),\n\n) {\n\n loop {\n\n ext_intr_off();\n\n let task = peek_task();\n\n ext_intr_on();\n\n println!(\">>> run one: next task = {:x?}\", task);\n\n match task {\n\n TaskResult::Task(task_repr) => {\n\n ext_intr_off();\n\n set_task_state(task_repr, TaskState::Sleeping);\n\n ext_intr_on();\n\n let task: Arc<KernelTaskRepr> = unsafe { Arc::from_raw(task_repr as *mut _) };\n\n // 注册 waker\n\n let waker = waker_ref(&task);\n\n let mut context = Context::from_waker(&*waker);\n", "file_path": "tornado-kernel/src/async_rt/executor.rs", "rank": 64, "score": 142787.18451917593 }, { "content": "pub fn stdin() -> Stdin {\n\n Stdin(Arc::clone(&*STDIN_LOCK))\n\n}\n\n\n\nimpl Stdin {\n\n // 锁上当前的Stdin\n\n // pub fn lock(&self) -> StdinLock { ... }\n\n\n\n // 从测试接口读一行\n\n // pub fn read_line(&self, buf: &mut String) -> Result<usize> {\n\n pub fn read_line(&self, buf: &mut String) -> usize {\n\n const CAPACITY: usize = 1024; // 目前的内核最长读1024字符,后面都切断,未来修改\n\n buf.reserve(CAPACITY);\n\n let buf_input = unsafe { core::slice::from_raw_parts_mut(buf.as_mut_ptr(), CAPACITY) };\n\n let syscall_ans = crate::syscall::sys_test_read_line(buf_input);\n\n let bytes_read = syscall_ans.extra;\n\n // buf.shrink_to(bytes_read); // 与API风格有关,不用缩缓冲区\n\n bytes_read\n\n }\n\n}\n", "file_path": "tornado-user/src/console.rs", "rank": 65, "score": 142082.70844183306 }, { "content": "pub fn reset_timer() -> SyscallResult {\n\n sys_test_reset_timer()\n\n}\n", "file_path": "tornado-user/src/lib.rs", "rank": 66, "score": 138715.34542625598 }, { "content": "/// Calls a function when dropped.\n\nstruct CallOnDrop<F: Fn()>(F);\n\n\n\nimpl<F: Fn()> Drop for CallOnDrop<F> {\n\n fn drop(&mut self) {\n\n (self.0)();\n\n }\n\n}\n", "file_path": "async-mutex/src/lib.rs", "rank": 67, "score": 137181.85400670028 }, { "content": "type InitFunction = unsafe extern \"C\" fn() -> PageList;\n", "file_path": "tornado-kernel/src/async_rt/shared.rs", "rank": 68, "score": 136077.21567479565 }, { "content": "/// 进行内核检查\n\n///\n\n/// 用于异步运行时中轮询一定次数都没发现可执行的任务时\n\npub fn sys_kernel_check() -> SyscallResult {\n\n syscall_0(MODULE_TASK, FUNC_CHECK)\n\n}\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 69, "score": 135603.37920045122 }, { "content": "/// 读取计时器\n\npub fn sys_read_timer() -> SyscallResult {\n\n syscall_0(MODULE_TEST_INTERFACE, FUNC_TEST_READ_TIMER)\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 70, "score": 135603.37920045122 }, { "content": "/// 重置计时器\n\npub fn sys_test_reset_timer() -> SyscallResult {\n\n syscall_0(MODULE_TEST_INTERFACE, FUNC_TEST_RESET_TIMER)\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 71, "score": 134134.25892216177 }, { "content": "pub fn sys_test_read_one() -> SyscallResult {\n\n syscall_1(MODULE_TEST_INTERFACE, FUNC_TEST_READ_ONE, 0)\n\n}\n\n\n", "file_path": 
"tornado-user/src/syscall/mod.rs", "rank": 72, "score": 134134.25892216177 }, { "content": "#[allow(unused)]\n\npub fn print(args: fmt::Arguments) {\n\n Stdout.write_fmt(args).unwrap();\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::log::print(format_args!($fmt $(, $($arg)+)?));\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::log::print(format_args!(concat!($fmt, \"\\n\") $(, $($arg)+)?));\n\n }\n\n}\n", "file_path": "event/src/log.rs", "rank": 73, "score": 133665.9612616737 }, { "content": "fn syscall_2(module: usize, func: usize, args: [usize; 2]) -> SyscallResult {\n\n match () {\n\n #[cfg(any(target_arch = \"riscv32\", target_arch = \"riscv64\"))]\n\n () => {\n\n let (code, extra);\n\n unsafe {\n\n asm!(\n\n \"ecall\",\n\n in(\"a0\") args[0], in(\"a1\") args[1],\n\n in(\"a6\") func, in(\"a7\") module,\n\n lateout(\"a0\") code, lateout(\"a1\") extra,\n\n )\n\n };\n\n SyscallResult { code, extra }\n\n }\n\n #[cfg(not(any(target_arch = \"riscv32\", target_arch = \"riscv64\")))]\n\n () => {\n\n drop((module, func, args));\n\n unimplemented!(\"not RISC-V instruction set architecture\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 74, "score": 133145.79323780205 }, { "content": "#[cfg(feature = \"k210\")]\n\nfn map_mmio(mapping: &mut Mapping) {\n\n // (0x3800_0000, 0x1000), /* UARTHS */\n\n let va = VirtualAddress(0x3800_0000);\n\n let pa = PhysicalAddress(0x3800_0000);\n\n mapping.map_one(\n\n VirtualPageNumber::floor(va),\n\n Some(PhysicalPageNumber::floor(pa)),\n\n Flags::WRITABLE | Flags::READABLE,\n\n );\n\n\n\n // (0x3800_1000, 0x1000), /* GPIOHS */\n\n let va = VirtualAddress(0x3800_1000);\n\n let pa = PhysicalAddress(0x3800_1000);\n\n mapping.map_one(\n\n VirtualPageNumber::floor(va),\n\n Some(PhysicalPageNumber::floor(pa)),\n\n Flags::WRITABLE | Flags::READABLE,\n\n );\n\n\n\n // (0x5020_0000, 0x1000), /* GPIO */\n", "file_path": "tornado-kernel/src/memory/mapping/memory_set.rs", "rank": 75, "score": 133020.34634690423 }, { "content": "/// 分配一个物理页 \n\n/// 如果已经分配完毕,返回 `None`\n\npub fn frame_alloc() -> Option<FrameTracker> {\n\n allocator::FRAME_ALLOCATOR.lock().alloc()\n\n}\n", "file_path": "tornado-kernel/src/memory/frame.rs", "rank": 76, "score": 132048.33710143587 }, { "content": "pub fn print(args: fmt::Arguments) {\n\n Stdout.write_fmt(args).unwrap();\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::console::print(format_args!($fmt $(, $($arg)+)?));\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::console::print(format_args!(concat!($fmt, \"\\n\") $(, $($arg)+)?));\n\n }\n\n}\n", "file_path": "shared-scheduler/src/console.rs", "rank": 77, "score": 132016.12986753948 }, { "content": "pub fn print(args: fmt::Arguments) {\n\n Stdout.write_fmt(args).unwrap();\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::console::print(format_args!($fmt $(, $($arg)+)?));\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n ($fmt: literal $(, $($arg: tt)+)?) 
=> {\n\n $crate::console::print(format_args!(concat!($fmt, \"\\n\") $(, $($arg)+)?));\n\n }\n\n}\n", "file_path": "async-sd/src/console.rs", "rank": 78, "score": 132016.12986753948 }, { "content": "pub fn print(args: fmt::Arguments) {\n\n Stdout.write_fmt(args).unwrap();\n\n}\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::console::print(format_args!($fmt $(, $($arg)+)?));\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::console::print(format_args!(concat!($fmt, \"\\n\") $(, $($arg)+)?));\n\n }\n\n}\n\n\n\nlazy_static::lazy_static! {\n\n static ref STDIN_LOCK: Arc<spin::Mutex<()>> = Arc::new(spin::Mutex::new(()));\n\n}\n\n\n\n// 用0号调试接口实现的Stdin\n\npub struct Stdin(Arc<spin::Mutex<()>>);\n\n\n", "file_path": "tornado-user/src/console.rs", "rank": 79, "score": 132016.12986753948 }, { "content": "pub fn print(args: fmt::Arguments) {\n\n Stdout.write_fmt(args).unwrap();\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::console::print(format_args!($fmt $(, $($arg)+)?));\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::console::print(format_args!(concat!($fmt, \"\\n\") $(, $($arg)+)?));\n\n }\n\n}\n", "file_path": "tornado-kernel/src/console.rs", "rank": 80, "score": 132016.12986753948 }, { "content": "pub fn print(args: fmt::Arguments) {\n\n Stdout.write_fmt(args).unwrap();\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::log::print(format_args!($fmt $(, $($arg)+)?));\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::log::print(format_args!(concat!($fmt, \"\\n\") $(, $($arg)+)?));\n\n }\n\n}\n", "file_path": "async-fat32/src/log.rs", "rank": 81, "score": 132016.12986753948 }, { "content": "pub fn print(args: fmt::Arguments) {\n\n Stdout.write_fmt(args).unwrap();\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($fmt: literal $(, $($arg: tt)+)?) => {\n\n $crate::log::print(format_args!($fmt $(, $($arg)+)?));\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n ($fmt: literal $(, $($arg: tt)+)?) 
=> {\n\n $crate::log::print(format_args!(concat!($fmt, \"\\n\") $(, $($arg)+)?));\n\n }\n\n}\n", "file_path": "async-virtio-driver/src/log.rs", "rank": 82, "score": 130430.07323944595 }, { "content": "pub fn test_write(buf: &[u8]) -> SyscallResult {\n\n sys_test_write(buf)\n\n}\n", "file_path": "tornado-user/src/lib.rs", "rank": 83, "score": 128904.16364173472 }, { "content": "pub fn exit(exit_code: i32) -> SyscallResult {\n\n sys_exit(exit_code)\n\n}\n", "file_path": "tornado-user/src/lib.rs", "rank": 84, "score": 128904.16364173472 }, { "content": "pub fn sys_test_write(buf: &[u8]) -> SyscallResult {\n\n syscall_3(\n\n MODULE_TEST_INTERFACE,\n\n FUNC_TEST_WRITE,\n\n [0, buf.as_ptr() as usize, buf.len()],\n\n )\n\n}\n\n\n\npub struct SyscallResult {\n\n pub code: usize,\n\n pub extra: usize,\n\n}\n\n\n", "file_path": "shared-scheduler/src/syscall.rs", "rank": 85, "score": 127435.04336344526 }, { "content": "pub fn detect_serial_ports() -> Option<(String, UsbPortInfo)> {\n\n let ports = serialport::available_ports().expect(\"list available ports\");\n\n let mut ans = Vec::new();\n\n for p in ports {\n\n if let SerialPortType::UsbPort(info) = p.port_type {\n\n if info.vid == 0x1a86 && info.pid == 0x7523 {\n\n ans.push((p.port_name, info));\n\n }\n\n }\n\n }\n\n if ans.len() == 0 {\n\n None\n\n } else {\n\n Some(ans[0].clone())\n\n }\n\n}\n", "file_path": "xtask/src/port.rs", "rank": 86, "score": 127435.04336344526 }, { "content": "pub fn sys_exit(exit_code: i32) -> SyscallResult {\n\n syscall_1(MODULE_PROCESS, FUNC_PROCESS_EXIT, exit_code as usize)\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 87, "score": 126019.60005371223 }, { "content": "pub fn sys_test_write(buf: &[u8]) -> SyscallResult {\n\n syscall_3(\n\n MODULE_TEST_INTERFACE,\n\n FUNC_TEST_WRITE,\n\n [0, buf.as_ptr() as usize, buf.len()],\n\n )\n\n}\n\n\n", "file_path": "tornado-user/src/syscall/mod.rs", "rank": 88, "score": 126019.60005371223 }, { "content": "#[cfg_attr(not(test), alloc_error_handler)]\n\npub fn handle_alloc_error(layout: core::alloc::Layout) -> ! {\n\n println!(\"[User] user alloc error, layout = {:?}\", layout);\n\n panic!(\"user alloc error: {:?}\", layout)\n\n}\n\n\n\n#[no_mangle]\n\n#[link_section = \".text.entry\"]\n\npub extern \"C\" fn _start() -> ! {\n\n let mut address_space_id: usize;\n\n let mut shared_payload_base: usize;\n\n unsafe {\n\n // 从 gp 寄存器里面取出 shared_raw_table 的地址\n\n asm!(\"mv {}, gp\", out(reg) shared_payload_base, options(nomem, nostack));\n\n SHARED_PAYLOAD_BASE = shared_payload_base;\n\n // 从 tp 寄存器里面取出该用户态的地址空间编号\n\n asm!(\"mv {}, tp\", out(reg) address_space_id, options(nomem, nostack));\n\n ADDRESS_SPACE_ID = address_space_id;\n\n }\n\n extern \"C\" {\n\n fn sbss();\n", "file_path": "tornado-user/src/lib.rs", "rank": 89, "score": 124646.3967766329 }, { "content": "#[cfg_attr(not(test), alloc_error_handler)]\n\npub fn handle_alloc_error(layout: core::alloc::Layout) -> ! 
{\n\n println!(\"[shared scheduler] alloc error, layout = {:?}\", layout);\n\n panic!(\"shared scheduler alloc error: {:?}\", layout)\n\n}\n\n\n\n/// 共享调度器虚函数表\n\n#[link_section = \".meta\"] // 虚函数表只读\n\n#[no_mangle]\n\npub static SHARED_RAW_TABLE: (\n\n &'static u8, // 共享调度器编译时的基地址\n\n unsafe extern \"C\" fn() -> PageList, // 初始化函数,执行完之后,内核将函数指针置空\n\n &'static SharedScheduler, // 共享调度器的地址\n\n unsafe extern \"C\" fn(NonNull<()>, usize, AddressSpaceId, TaskRepr) -> bool, // 添加任务\n\n unsafe extern \"C\" fn(NonNull<()>, extern \"C\" fn(AddressSpaceId) -> bool) -> TaskResult, // 弹出任务引用\n\n unsafe extern \"C\" fn(NonNull<()>, TaskRepr) -> bool, // 删除任务\n\n unsafe extern \"C\" fn(NonNull<()>, TaskRepr, TaskState), // 改变任务的状态\n\n) = (\n\n unsafe { &payload_compiled_start },\n\n init_payload_environment,\n\n &SHARED_SCHEDULER,\n", "file_path": "shared-scheduler/src/main.rs", "rank": 90, "score": 124646.3967766329 }, { "content": "#[cfg_attr(not(test), panic_handler)]\n\npub fn panic_handler(panic_info: &core::panic::PanicInfo) -> ! {\n\n let err = panic_info.message().unwrap().as_str();\n\n if let Some(location) = panic_info.location() {\n\n syscall::sys_panic(\n\n Some(location.file()),\n\n location.line(),\n\n location.column(),\n\n err,\n\n );\n\n } else {\n\n syscall::sys_panic(None, 0, 0, err);\n\n }\n\n // println!(\"[shared scheduler] panic: {:?}\", panic_info);\n\n unreachable!()\n\n}\n\n\n\n// todo: 未来尽量使用有Allocator的new_in函数,这样能处理内存不足的问题\n", "file_path": "shared-scheduler/src/main.rs", "rank": 91, "score": 123230.95346689987 }, { "content": "#[cfg_attr(not(test), panic_handler)]\n\npub fn panic_handler(panic_info: &core::panic::PanicInfo) -> ! {\n\n let err = panic_info.message().unwrap().as_str();\n\n if let Some(location) = panic_info.location() {\n\n syscall::sys_panic(\n\n Some(location.file()),\n\n location.line(),\n\n location.column(),\n\n err,\n\n );\n\n } else {\n\n syscall::sys_panic(None, 0, 0, err);\n\n }\n\n unreachable!()\n\n}\n\n\n", "file_path": "tornado-user/src/lib.rs", "rank": 92, "score": 123230.95346689987 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct PageList {\n\n rodata: [usize; 2], // 只读数据段\n\n data: [usize; 2], // 数据段\n\n text: [usize; 2], // 代码段\n\n}\n", "file_path": "tornado-kernel/src/async_rt/shared.rs", "rank": 93, "score": 120406.14830897802 }, { "content": "fn range_vpn_contains_pa(src: Range<PhysicalAddress>) -> Range<PhysicalPageNumber> {\n\n PhysicalPageNumber::floor(src.start)..PhysicalPageNumber::ceil(src.end)\n\n}\n\n\n", "file_path": "tornado-kernel/src/memory/mapping/mapping.rs", "rank": 94, "score": 119148.44953993443 }, { "content": "/// 设置下一次时钟中断\n\nfn set_next_timeout() {\n\n set_timer(time::read() + CLOCK_FREQ / TICKS_PER_SEC);\n\n}\n\n\n\npub static mut TICKS: usize = 0;\n\n\n", "file_path": "tornado-kernel/src/trap/timer.rs", "rank": 95, "score": 116128.0104621535 }, { "content": "#![allow(unused)]\n\n#[inline(always)]\n\nfn sbi_call(which: usize, arg0: usize, arg1: usize, arg2: usize) -> usize {\n\n let ret;\n\n unsafe {\n\n llvm_asm!(\"ecall\"\n\n : \"={x10}\" (ret)\n\n : \"{x10}\" (arg0), \"{x11}\" (arg1), \"{x12}\" (arg2), \"{x17}\" (which)\n\n : \"memory\"\n\n : \"volatile\");\n\n }\n\n ret\n\n}\n\n\n\nconst SBI_SET_TIMER: usize = 0;\n\nconst SBI_CONSOLE_PUTCHAR: usize = 1;\n\nconst SBI_CONSOLE_GETCHAR: usize = 2;\n\nconst SBI_CLEAR_IPI: usize = 3;\n\nconst SBI_SEND_IPI: usize = 4;\n\nconst SBI_REMOTE_FENCE_I: usize = 5;\n\nconst SBI_REMOTE_SFENCE_VMA: usize = 6;\n\nconst SBI_REMOTE_SFENCE_VMA_ASID: usize = 7;\n\nconst 
SBI_SHUTDOWN: usize = 8;\n\n\n", "file_path": "event/src/sbi.rs", "rank": 96, "score": 115997.48474660241 }, { "content": "#[inline(always)]\n\nfn sbi_call(which: usize, arg0: usize, arg1: usize, arg2: usize) -> usize {\n\n let ret;\n\n unsafe {\n\n llvm_asm!(\"ecall\"\n\n : \"={x10}\" (ret)\n\n : \"{x10}\" (arg0), \"{x11}\" (arg1), \"{x12}\" (arg2), \"{x17}\" (which)\n\n : \"memory\"\n\n : \"volatile\");\n\n }\n\n ret\n\n}\n\n\n\nconst SBI_SET_TIMER: usize = 0;\n\nconst SBI_CONSOLE_PUTCHAR: usize = 1;\n\nconst SBI_CONSOLE_GETCHAR: usize = 2;\n\nconst SBI_CLEAR_IPI: usize = 3;\n\nconst SBI_SEND_IPI: usize = 4;\n\nconst SBI_REMOTE_FENCE_I: usize = 5;\n\nconst SBI_REMOTE_SFENCE_VMA: usize = 6;\n\nconst SBI_REMOTE_SFENCE_VMA_ASID: usize = 7;\n\nconst SBI_SHUTDOWN: usize = 8;\n\n\n", "file_path": "async-fat32/src/sbi.rs", "rank": 97, "score": 114984.23069923703 }, { "content": "#[inline(always)]\n\nfn sbi_call(which: usize, arg0: usize, arg1: usize, arg2: usize) -> usize {\n\n let ret;\n\n unsafe {\n\n llvm_asm!(\"ecall\"\n\n : \"={x10}\" (ret)\n\n : \"{x10}\" (arg0), \"{x11}\" (arg1), \"{x12}\" (arg2), \"{x17}\" (which)\n\n : \"memory\"\n\n : \"volatile\");\n\n }\n\n ret\n\n}\n\n\n\nconst SBI_SET_TIMER: usize = 0;\n\nconst SBI_CONSOLE_PUTCHAR: usize = 1;\n\nconst SBI_CONSOLE_GETCHAR: usize = 2;\n\nconst SBI_CLEAR_IPI: usize = 3;\n\nconst SBI_SEND_IPI: usize = 4;\n\nconst SBI_REMOTE_FENCE_I: usize = 5;\n\nconst SBI_REMOTE_SFENCE_VMA: usize = 6;\n\nconst SBI_REMOTE_SFENCE_VMA_ASID: usize = 7;\n\nconst SBI_SHUTDOWN: usize = 8;\n\n\n", "file_path": "async-sd/src/sbi.rs", "rank": 98, "score": 114984.23069923703 } ]
Rust
src/spv/logical/parser/parser.rs
Trark/magmaflow
c5ebdbedfa576b63aa54f29b8bafcf46b5716b1c
use spv::op::*; use spv::types::*; use spv::raw::*; use spv::logical::*; use super::OpByBlock; #[derive(Clone, Debug, PartialEq)] pub enum ValidationError { MemoryModelMissing, ExpectedOpFunction(usize), ExpectedOpFunctionEnd(usize), ExpectedBranch(usize), UnexpectedInstruction(usize), } pub type ValidationResult<T> = Result<T, ValidationError>; pub fn validate(raw: RawModule) -> ValidationResult<LogicalModule> { let group = Into::<OpByBlock>::into; let insts_storage = raw.instructions.into_iter().map(group).collect::<Vec<_>>(); let insts = OpSlice::new(&insts_storage); let (capabilities, insts) = try!(read_many(insts, read_capability)); let (extensions, insts) = try!(read_many(insts, read_extension)); let (ext_inst_imports, insts) = try!(read_many(insts, read_ext_inst_import)); let (memory_model, insts) = try!(read_memory_model(insts)); let (entry_points, insts) = try!(read_many(insts, read_entry_point)); let (execution_modes, insts) = try!(read_many(insts, read_execution_mode)); let (debug, insts) = try!(read_many(insts, read_debug)); let (annotations, insts) = try!(read_many(insts, read_annotation)); let (globals, insts) = try!(read_many(insts, read_global)); let (function_declarations, insts) = try!(read_many(insts, read_function_declaration)); let (function_definitions, insts) = try!(read_many(insts, read_function_definition)); if insts.get_remaining() > 0 { return Err(ValidationError::UnexpectedInstruction(insts.get_slot())); } Ok(LogicalModule { capabilities: capabilities, extensions: extensions, ext_inst_imports: ext_inst_imports, memory_model: memory_model, entry_points: entry_points, execution_modes: execution_modes, debug: debug, annotations: annotations, globals: globals, function_declarations: function_declarations, function_definitions: function_definitions, }) } #[derive(Clone, Copy)] struct OpSlice<'a> { insts: &'a [OpByBlock], index: usize, } impl<'a> OpSlice<'a> { fn new(insts: &'a [OpByBlock]) -> OpSlice { OpSlice { insts: insts, index: 0, } } fn first(&self) -> Option<&OpByBlock> { if self.index < self.insts.len() { Some(&self.insts[self.index]) } else { None } } fn advance(self) -> OpSlice<'a> { assert!(self.index < self.insts.len()); OpSlice { insts: self.insts, index: self.index + 1, } } fn get_slot(&self) -> usize { self.index } fn get_remaining(&self) -> usize { self.insts.len() - self.index } } enum PhaseResult<'a, T> { Ok(T, OpSlice<'a>), Next(OpSlice<'a>), Err(ValidationError), } fn read_many<T>(insts: OpSlice, f: fn(OpSlice) -> PhaseResult<T>) -> ValidationResult<(Vec<T>, OpSlice)> { fn read_rest<T>(insts: OpSlice, f: fn(OpSlice) -> PhaseResult<T>, mut output: Vec<T>) -> ValidationResult<(Vec<T>, OpSlice)> { match f(insts) { PhaseResult::Ok(item, next) => { output.push(item); read_rest(next, f, output) } PhaseResult::Next(next) => Ok((output, next)), PhaseResult::Err(err) => Err(err), } } read_rest(insts, f, Vec::new()) } fn read_capability(insts: OpSlice) -> PhaseResult<Capability> { if let Some(&OpByBlock::OpCapability(ref op)) = insts.first() { PhaseResult::Ok(op.capability.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_extension(insts: OpSlice) -> PhaseResult<String> { if let Some(&OpByBlock::OpExtension(ref op)) = insts.first() { PhaseResult::Ok(op.name.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_ext_inst_import(insts: OpSlice) -> PhaseResult<OpExtInstImport> { if let Some(&OpByBlock::OpExtInstImport(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) 
} } fn read_memory_model(insts: OpSlice) -> ValidationResult<(OpMemoryModel, OpSlice)> { if let Some(&OpByBlock::OpMemoryModel(ref op)) = insts.first() { Ok((op.clone(), insts.advance())) } else { Err(ValidationError::MemoryModelMissing) } } fn read_entry_point(insts: OpSlice) -> PhaseResult<OpEntryPoint> { if let Some(&OpByBlock::OpEntryPoint(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_execution_mode(insts: OpSlice) -> PhaseResult<OpExecutionMode> { if let Some(&OpByBlock::OpExecutionMode(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_debug(insts: OpSlice) -> PhaseResult<GroupDebug> { if let Some(&OpByBlock::GroupDebug(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_annotation(insts: OpSlice) -> PhaseResult<GroupAnnotation> { if let Some(&OpByBlock::GroupAnnotation(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_global(insts: OpSlice) -> PhaseResult<GroupGlobal> { if let Some(&OpByBlock::GroupGlobal(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_function_parameter(insts: OpSlice) -> PhaseResult<OpFunctionParameter> { if let Some(&OpByBlock::OpFunctionParameter(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_function_declaration(insts: OpSlice) -> PhaseResult<FunctionDeclaration> { let start_insts = insts; if insts.get_remaining() < 2 { return PhaseResult::Next(insts); } if let Some(&OpByBlock::OpFunction(ref op)) = insts.first() { match read_many(insts.advance(), read_function_parameter) { Ok((params, insts)) => { if let Some(&OpByBlock::OpFunctionEnd(_)) = insts.first() { let decl = FunctionDeclaration { function: op.clone(), parameters: params, }; PhaseResult::Ok(decl, insts.advance()) } else { PhaseResult::Next(start_insts) } } Err(err) => PhaseResult::Err(err), } } else { PhaseResult::Err(ValidationError::ExpectedOpFunction(insts.get_slot())) } } fn read_code(insts: OpSlice) -> PhaseResult<GroupCode> { if let Some(&OpByBlock::GroupCode(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_branch(insts: OpSlice) -> ValidationResult<(GroupBranch, OpSlice)> { if let Some(&OpByBlock::GroupBranch(ref op)) = insts.first() { Ok((op.clone(), insts.advance())) } else { Err(ValidationError::ExpectedBranch(insts.get_slot())) } } fn read_merge(insts: OpSlice) -> (Option<GroupMerge>, OpSlice) { if let Some(&OpByBlock::GroupMerge(ref op)) = insts.first() { (Some(op.clone()), insts.advance()) } else { (None, insts) } } fn read_basic_block(insts: OpSlice) -> PhaseResult<BasicBlock> { if let Some(&OpByBlock::OpLabel(ref op)) = insts.first() { let label = op.clone(); let (code, insts) = match read_many(insts.advance(), read_code) { Ok((code, insts)) => (code, insts), Err(err) => return PhaseResult::Err(err), }; let (merge, insts) = read_merge(insts); let (branch, insts) = match read_branch(insts) { Ok((branch, insts)) => (branch, insts), Err(err) => return PhaseResult::Err(err), }; let block = BasicBlock { label: label, code: code, merge: merge, branch: branch, }; PhaseResult::Ok(block, insts) } else { PhaseResult::Next(insts) } } fn read_function_definition(insts: OpSlice) -> PhaseResult<FunctionDefinition> { if 
insts.get_remaining() < 2 { return PhaseResult::Next(insts); } if let Some(&OpByBlock::OpFunction(ref op)) = insts.first() { match read_many(insts.advance(), read_function_parameter) { Ok((params, insts)) => { let (blocks, insts) = match read_many(insts, read_basic_block) { Ok((blocks, insts)) => (blocks, insts), Err(err) => return PhaseResult::Err(err), }; if let Some(&OpByBlock::OpFunctionEnd(_)) = insts.first() { let insts = insts.advance(); let def = FunctionDefinition { function: op.clone(), parameters: params, blocks: blocks, }; PhaseResult::Ok(def, insts) } else { PhaseResult::Err(ValidationError::ExpectedOpFunctionEnd(insts.get_slot())) } } Err(err) => PhaseResult::Err(err), } } else { PhaseResult::Err(ValidationError::ExpectedOpFunction(insts.get_slot())) } }
use spv::op::*; use spv::types::*; use spv::raw::*; use spv::logical::*; use super::OpByBlock; #[derive(Clone, Debug, PartialEq)] pub enum ValidationError { MemoryModelMissing, ExpectedOpFunction(usize), ExpectedOpFunctionEnd(usize), ExpectedBranch(usize), UnexpectedInstruction(usize), } pub type ValidationResult<T> = Result<T, ValidationError>; pub fn validate(raw: RawModule) -> ValidationResult<LogicalModule> { let group = Into::<OpByBlock>::into; let insts_storage = raw.instructions.into_iter().map(group).collect::<Vec<_>>(); let insts = OpSlice::new(&insts_storage); let (capabilities, insts) = try!(read_many(insts, read_capability)); let (extensions, insts) = try!(read_many(insts, read_extension)); let (ext_inst_imports, insts) = try!(read_many(insts, read_ext_inst_import)); let (memory_model, insts) = try!(read_memory_model(insts)); let (entry_points, insts) = try!(read_many(insts, read_entry_point)); let (execution_modes, insts) = try!(read_many(insts, read_execution_mode)); let (debug, insts) = try!(read_many(insts, read_debug)); let (annotations, insts) = try!(read_many(insts, read_annotation)); let (globals, insts) = try!(read_many(insts, read_global)); let (function_declarations, insts) = try!(read_many(insts, read_function_declaration)); let (function_definitions, insts) = try!(read_many(insts, read_function_definition)); if insts.get_remaining() > 0 { return Err(ValidationError::UnexpectedInstruction(insts.get_slot())); } Ok(LogicalModule { capabilities: capabilities, extensions: extensions, ext_inst_imports: ext_inst_imports, memory_model: memory_model, entry_points: entry_points, execution_modes: execution_modes, debug: debug, annotations: annotations, globals: globals, function_declarations: function_declarations, function_definitions: function_definitions, }) } #[derive(Clone, Copy)] struct OpSlice<'a> { insts: &'a [OpByBlock], index: usize, } impl<'a> OpSlice<'a> { fn new(insts: &'a [OpByBlock]) -> OpSlice { OpSlice { insts: insts, index: 0, } } fn first(&self) -> Option<&OpByBlock> { if self.index < self.insts.len() { Some(&self.insts[self.index]) } else { None } } fn advance(self) -> OpSlice<'a> { assert!(self.index < self.insts.len()); OpSlice { insts: self.insts, index: self.index + 1, } } fn get_slot(&self) -> usize { self.index } fn get_remaining(&self) -> usize { self.insts.len() - self.index } } enum PhaseResult<'a, T> { Ok(T, OpSlice<'a>), Next(OpSlice<'a>), Err(ValidationError), } fn read_many<T>(insts: OpSlice, f: fn(OpSlice) -> PhaseResult<T>) -> ValidationResult<(Vec<T>, OpSlice)> { fn read_rest<T>(insts: OpSlice, f: fn(OpSlice) -> PhaseResult<T>, mut output: Vec<T>) -> ValidationResult<(Vec<T>, OpSlice)> { match f(insts) { PhaseResult::Ok(item, next) => { output.push(item); read_rest(next, f
locks, insts), Err(err) => return PhaseResult::Err(err), }; if let Some(&OpByBlock::OpFunctionEnd(_)) = insts.first() { let insts = insts.advance(); let def = FunctionDefinition { function: op.clone(), parameters: params, blocks: blocks, }; PhaseResult::Ok(def, insts) } else { PhaseResult::Err(ValidationError::ExpectedOpFunctionEnd(insts.get_slot())) } } Err(err) => PhaseResult::Err(err), } } else { PhaseResult::Err(ValidationError::ExpectedOpFunction(insts.get_slot())) } }
, output) } PhaseResult::Next(next) => Ok((output, next)), PhaseResult::Err(err) => Err(err), } } read_rest(insts, f, Vec::new()) } fn read_capability(insts: OpSlice) -> PhaseResult<Capability> { if let Some(&OpByBlock::OpCapability(ref op)) = insts.first() { PhaseResult::Ok(op.capability.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_extension(insts: OpSlice) -> PhaseResult<String> { if let Some(&OpByBlock::OpExtension(ref op)) = insts.first() { PhaseResult::Ok(op.name.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_ext_inst_import(insts: OpSlice) -> PhaseResult<OpExtInstImport> { if let Some(&OpByBlock::OpExtInstImport(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_memory_model(insts: OpSlice) -> ValidationResult<(OpMemoryModel, OpSlice)> { if let Some(&OpByBlock::OpMemoryModel(ref op)) = insts.first() { Ok((op.clone(), insts.advance())) } else { Err(ValidationError::MemoryModelMissing) } } fn read_entry_point(insts: OpSlice) -> PhaseResult<OpEntryPoint> { if let Some(&OpByBlock::OpEntryPoint(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_execution_mode(insts: OpSlice) -> PhaseResult<OpExecutionMode> { if let Some(&OpByBlock::OpExecutionMode(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_debug(insts: OpSlice) -> PhaseResult<GroupDebug> { if let Some(&OpByBlock::GroupDebug(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_annotation(insts: OpSlice) -> PhaseResult<GroupAnnotation> { if let Some(&OpByBlock::GroupAnnotation(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_global(insts: OpSlice) -> PhaseResult<GroupGlobal> { if let Some(&OpByBlock::GroupGlobal(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_function_parameter(insts: OpSlice) -> PhaseResult<OpFunctionParameter> { if let Some(&OpByBlock::OpFunctionParameter(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_function_declaration(insts: OpSlice) -> PhaseResult<FunctionDeclaration> { let start_insts = insts; if insts.get_remaining() < 2 { return PhaseResult::Next(insts); } if let Some(&OpByBlock::OpFunction(ref op)) = insts.first() { match read_many(insts.advance(), read_function_parameter) { Ok((params, insts)) => { if let Some(&OpByBlock::OpFunctionEnd(_)) = insts.first() { let decl = FunctionDeclaration { function: op.clone(), parameters: params, }; PhaseResult::Ok(decl, insts.advance()) } else { PhaseResult::Next(start_insts) } } Err(err) => PhaseResult::Err(err), } } else { PhaseResult::Err(ValidationError::ExpectedOpFunction(insts.get_slot())) } } fn read_code(insts: OpSlice) -> PhaseResult<GroupCode> { if let Some(&OpByBlock::GroupCode(ref op)) = insts.first() { PhaseResult::Ok(op.clone(), insts.advance()) } else { PhaseResult::Next(insts) } } fn read_branch(insts: OpSlice) -> ValidationResult<(GroupBranch, OpSlice)> { if let Some(&OpByBlock::GroupBranch(ref op)) = insts.first() { Ok((op.clone(), insts.advance())) } else { Err(ValidationError::ExpectedBranch(insts.get_slot())) } } fn read_merge(insts: OpSlice) -> (Option<GroupMerge>, OpSlice) { if let Some(&OpByBlock::GroupMerge(ref op)) = 
insts.first() { (Some(op.clone()), insts.advance()) } else { (None, insts) } } fn read_basic_block(insts: OpSlice) -> PhaseResult<BasicBlock> { if let Some(&OpByBlock::OpLabel(ref op)) = insts.first() { let label = op.clone(); let (code, insts) = match read_many(insts.advance(), read_code) { Ok((code, insts)) => (code, insts), Err(err) => return PhaseResult::Err(err), }; let (merge, insts) = read_merge(insts); let (branch, insts) = match read_branch(insts) { Ok((branch, insts)) => (branch, insts), Err(err) => return PhaseResult::Err(err), }; let block = BasicBlock { label: label, code: code, merge: merge, branch: branch, }; PhaseResult::Ok(block, insts) } else { PhaseResult::Next(insts) } } fn read_function_definition(insts: OpSlice) -> PhaseResult<FunctionDefinition> { if insts.get_remaining() < 2 { return PhaseResult::Next(insts); } if let Some(&OpByBlock::OpFunction(ref op)) = insts.first() { match read_many(insts.advance(), read_function_parameter) { Ok((params, insts)) => { let (blocks, insts) = match read_many(insts, read_basic_block) { Ok((blocks, insts)) => (b
random
[ { "content": "/// An instruction from an extended instruction set\n\n///\n\n/// This is expected to itself be an enum variant in the instruction set\n\npub trait ExtInst: Any + ExtInstClone + Debug + Display {\n\n /// Returns the op in the extended instruction\n\n fn get_op(&self) -> &Op;\n\n\n\n fn as_any(&self) -> &Any;\n\n\n\n fn eq(&self, other: &ExtInst) -> bool;\n\n}\n\n\n", "file_path": "src/spv/mod.rs", "rank": 3, "score": 89833.1400246066 }, { "content": "/// An instruction\n\npub trait Op: Debug + Display {\n\n /// Returns the name of the instruction to use in disassembly\n\n fn get_name(&self) -> &'static str;\n\n}\n\n\n", "file_path": "src/spv/mod.rs", "rank": 5, "score": 72861.01875673445 }, { "content": "fn read_instruction(stream: &mut Stream,\n\n known_inst_sets: &[Box<ExtInstSet>],\n\n bound_inst_sets: &mut HashMap<OpId, Box<ExtInstSet>>)\n\n -> ReadResult<Core> {\n\n let head = try!(stream.read_word());\n\n let id = (head & 0xFFFF) as u16;\n\n let wc = (head >> 16) as u16;\n\n let mut words = Vec::with_capacity(wc as usize);\n\n words.push(head);\n\n for _ in 1..wc {\n\n words.push(try!(stream.read_word()));\n\n }\n\n let block = MemoryBlock::new(&words[1..words.len()]);\n\n let read_fn = match id {\n\n 0 => OpNop::read_core,\n\n 1 => OpUndef::read_core,\n\n 2 => OpSourceContinued::read_core,\n\n 3 => OpSource::read_core,\n\n 4 => OpSourceExtension::read_core,\n\n 5 => OpName::read_core,\n", "file_path": "src/spv/raw/reader.rs", "rank": 6, "score": 67954.92069113383 }, { "content": "/// Helper trait to implement box cloning for all ExtInst\n\npub trait ExtInstClone {\n\n fn clone_box(&self) -> Box<ExtInst>;\n\n}\n\n\n\nimpl<T> ExtInstClone for T\n\n where T: 'static + ExtInst + Clone\n\n{\n\n fn clone_box(&self) -> Box<ExtInst> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Clone for Box<ExtInst> {\n\n fn clone(&self) -> Box<ExtInst> {\n\n self.clone_box()\n\n }\n\n}\n\n\n\n/// Helper to own an ExtInst while providing equality tests\n\n#[derive(Clone, Debug)]\n", "file_path": "src/spv/mod.rs", "rank": 8, "score": 65453.95104349454 }, { "content": "/// Represents an object that controls an extended instruction set\n\npub trait ExtInstSet {\n\n /// Returns the name of the instruction set as seen in OpExtInstImport instructions\n\n fn get_name(&self) -> &'static str;\n\n\n\n /// Attempts to read an instruction from the instruction set\n\n fn read_instruction<'a, 'b>(&'b self,\n\n instruction: u32,\n\n block: MemoryBlock<'a>)\n\n -> MemoryBlockResult<'a, Box<ExtInst>>;\n\n\n\n /// Clones the instruction set handler\n\n fn duplicate(&self) -> Box<ExtInstSet>;\n\n}\n", "file_path": "src/spv/mod.rs", "rank": 9, "score": 65450.01016081107 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum OpByBlock {\n\n OpCapability(OpCapability),\n\n OpExtension(OpExtension),\n\n OpExtInstImport(OpExtInstImport),\n\n OpMemoryModel(OpMemoryModel),\n\n OpEntryPoint(OpEntryPoint),\n\n OpExecutionMode(OpExecutionMode),\n\n GroupDebug(GroupDebug),\n\n GroupAnnotation(GroupAnnotation),\n\n GroupGlobal(GroupGlobal),\n\n OpFunction(OpFunction),\n\n OpFunctionParameter(OpFunctionParameter),\n\n OpFunctionEnd(OpFunctionEnd),\n\n OpLabel(OpLabel),\n\n GroupCode(GroupCode),\n\n GroupMerge(GroupMerge),\n\n GroupBranch(GroupBranch),\n\n}\n\n\n\nimpl From<GroupDebug> for OpByBlock {\n", "file_path": "src/spv/logical/parser/mod.rs", "rank": 10, "score": 65153.642123812286 }, { "content": "pub fn read_module<'a>(data: &'a [u8],\n\n known_inst_sets: Vec<Box<ExtInstSet>>)\n\n -> 
ReadResult<RawModule> {\n\n let mut stream = Stream::new(data);\n\n\n\n let magic = try!(stream.read_word());\n\n match magic {\n\n SPIRV_MAGIC_NUMBER => {}\n\n SPIRV_MAGIC_NUMBER_OTHER_ENDIAN => stream.invert_endianness(),\n\n _ => return Err(ReadError::BadMagic),\n\n }\n\n\n\n let ver_word = try!(stream.read_word());\n\n let ver_high = (ver_word >> 24) as u8;\n\n let ver_major = ((ver_word >> 16) & 0xF) as u8;\n\n let ver_minor = ((ver_word >> 8) & 0xF) as u8;\n\n let ver_low = (ver_word & 0xF) as u8;\n\n let version = match (ver_high, ver_major, ver_minor, ver_low) {\n\n (0, 1, 0, 0) => Version(ver_major, ver_minor),\n\n (v3, v2, v1, v0) => return Err(ReadError::UnknownVersionBytes(v3, v2, v1, v0)),\n", "file_path": "src/spv/raw/reader.rs", "rank": 12, "score": 62691.21120034835 }, { "content": "/// Marker trait for Display types to implement DisplayArg for them.\n\n/// This limits arguments that may be printed to explicitly marked types instead\n\n/// for all Display types.\n\npub trait DisplayArgType: Display {}\n\n\n\nimpl<T: DisplayArgType> DisplayArg for T {\n\n fn display_arg(&self, f: &mut Formatter) -> fmt::Result {\n\n <T as Display>::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for u32 {}\n\n\n\n/// Macro to implement displaying for an instruction\n\nmacro_rules! def_op_display {\n\n ($name: ident; result_id = $($operand_name: ident)|*) => {\n\n impl Display for $name {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n try!(write!(f,\n\n \"{}{}\",\n\n Result(&self.result_id),\n\n stringify!($name),\n\n ));\n", "file_path": "src/spv/dis.rs", "rank": 14, "score": 59931.644879381325 }, { "content": "fn read_cos(block: MemoryBlock) -> MemoryBlockResult<Inst> {\n\n let (block, x) = try!(block.read_op_id());\n\n Ok((block, Inst::Cos(Cos { x: x })))\n\n}\n", "file_path": "src/glsl450/mod.rs", "rank": 16, "score": 59488.7943441329 }, { "content": "fn read_sin<'a>(block: MemoryBlock<'a>) -> MemoryBlockResult<'a, Inst> {\n\n let (block, x) = try!(block.read_op_id());\n\n Ok((block, Inst::Sin(Sin { x: x })))\n\n}\n\n\n", "file_path": "src/glsl450/mod.rs", "rank": 18, "score": 54055.1784762013 }, { "content": "pub fn find_control_flow(decl: FunctionDefinition) -> ControlFlowResult<ControlFlowChain> {\n\n\n\n let mut start_opt = None;\n\n\n\n let block_map: BlockMap = {\n\n let mut map = HashMap::new();\n\n for block in &decl.blocks {\n\n let id = BlockId(block.label.result_id.0);\n\n if let None = start_opt {\n\n start_opt = Some(id);\n\n }\n\n if let Some(_) = map.insert(id, block) {\n\n return Err(ControlFlowError::DuplicateBlockId(id));\n\n }\n\n }\n\n map\n\n };\n\n\n\n let start = match start_opt {\n\n Some(id) => id,\n", "file_path": "src/spv/logical/control_flow.rs", "rank": 21, "score": 49612.87854678012 }, { "content": "#[test]\n\nfn load_noop() {\n\n let result = read(include_bytes!(\"noop.spv\"));\n\n let glsl450 = OpExtInstImport {\n\n result_id: ResultId(1),\n\n name: \"GLSL.std.450\".into(),\n\n };\n\n let mem_model = OpMemoryModel {\n\n addressing_model: AddressingModel::Logical,\n\n memory_model: MemoryModel::Glsl450,\n\n };\n\n let entry_point = OpEntryPoint {\n\n execution_model: ExecutionModel::GlCompute,\n\n entry_point: OpId(4),\n\n name: \"main\".into(),\n\n interface: vec![],\n\n };\n\n let local_size = ExecutionMode::LocalSize(32, 32, 32);\n\n let source = OpSource {\n\n language: SourceLanguage::Glsl,\n\n version: SourceVersion(430),\n", "file_path": "src/tests/mod.rs", "rank": 23, "score": 46854.66633325377 }, { "content": "#[derive(Debug, 
PartialEq)]\n\nenum Continue {\n\n Next(BlockId),\n\n Return,\n\n}\n\n\n\n/// Marks how we're meant to converge from the children of a block\n", "file_path": "src/spv/logical/control_flow.rs", "rank": 24, "score": 46280.17124993073 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Converge {\n\n If(BlockId, SelectionControl),\n\n Loop(BlockId, BlockId, LoopControl),\n\n}\n\n\n", "file_path": "src/spv/logical/control_flow.rs", "rank": 25, "score": 46280.110957248064 }, { "content": "/// Links back to the current flow control we're inside\n\nstruct FlowNode {\n\n converge: Converge,\n\n ptr: Option<Rc<FlowNode>>,\n\n}\n\n\n\nimpl FlowNode {\n\n fn merges(&self, id: &BlockId) -> bool {\n\n match self.converge {\n\n Converge::If(join, _) |\n\n Converge::Loop(join, _, _) => {\n\n if join == *id {\n\n return true;\n\n }\n\n }\n\n }\n\n match self.ptr {\n\n Some(ref node) => node.merges(id),\n\n None => false,\n\n }\n\n }\n", "file_path": "src/spv/logical/control_flow.rs", "rank": 29, "score": 44958.61999091822 }, { "content": "/// Manages the FlowNode list\n\nstruct FlowList {\n\n head: Option<Rc<FlowNode>>,\n\n}\n\n\n\nimpl FlowList {\n\n /// Start a new empty flow list\n\n fn new() -> FlowList {\n\n FlowList { head: None }\n\n }\n\n\n\n /// Add a node to the flow list\n\n fn extend(&self, converge: Converge) -> FlowList {\n\n FlowList {\n\n head: Some(Rc::new(FlowNode {\n\n converge: converge,\n\n ptr: self.head.clone(),\n\n })),\n\n }\n\n }\n\n\n", "file_path": "src/spv/logical/control_flow.rs", "rank": 30, "score": 44958.61999091822 }, { "content": "struct Stream<'a> {\n\n source: &'a [u8],\n\n is_le: bool,\n\n}\n\n\n\nimpl<'a> Stream<'a> {\n\n fn new(source: &'a [u8]) -> Stream<'a> {\n\n Stream {\n\n source: source,\n\n is_le: true,\n\n }\n\n }\n\n\n\n fn read_word(&mut self) -> ReadResult<u32> {\n\n if self.source.len() >= 4 {\n\n let result = if self.is_le {\n\n LittleEndian::read_u32(self.source)\n\n } else {\n\n BigEndian::read_u32(self.source)\n\n };\n", "file_path": "src/spv/raw/reader.rs", "rank": 31, "score": 43825.603122361004 }, { "content": "/// Formats an argument element for display inside a formatted argument.\n\npub trait DisplayArg {\n\n fn display_arg(&self, f: &mut Formatter) -> fmt::Result;\n\n}\n\n\n\nimpl DisplayArg for String {\n\n fn display_arg(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"\\\"{}\\\"\", self)\n\n }\n\n}\n\n\n", "file_path": "src/spv/dis.rs", "rank": 34, "score": 42128.03336610966 }, { "content": "/// Formats an argument for display as an argument to an intruction. 
This\n\n/// / includes padding at the start and between elements if there are multiple.\n\npub trait FormatArg {\n\n fn format_arg(&self, f: &mut Formatter) -> fmt::Result;\n\n}\n\n\n\nimpl<T> FormatArg for T\n\n where T: DisplayArg\n\n{\n\n fn format_arg(&self, f: &mut Formatter) -> fmt::Result {\n\n try!(write!(f, \" \"));\n\n <T as DisplayArg>::display_arg(self, f)\n\n }\n\n}\n\n\n\nimpl<T> FormatArg for Option<T>\n\n where T: DisplayArg\n\n{\n\n fn format_arg(&self, f: &mut Formatter) -> fmt::Result {\n\n match *self {\n\n Some(ref t) => {\n\n try!(write!(f, \" \"));\n", "file_path": "src/spv/dis.rs", "rank": 35, "score": 42128.03336610966 }, { "content": "pub trait MemoryBlockRead: Sized {\n\n fn read(block: MemoryBlock) -> MemoryBlockResult<Self>;\n\n}\n\n\n\nimpl MemoryBlockRead for OpId {\n\n fn read(block: MemoryBlock) -> MemoryBlockResult<OpId> {\n\n let (block, id) = try!(block.read_word());\n\n Ok((block, OpId(id)))\n\n }\n\n}\n\n\n\nimpl MemoryBlockRead for ResultId {\n\n fn read(block: MemoryBlock) -> MemoryBlockResult<ResultId> {\n\n let (block, id) = try!(block.read_word());\n\n Ok((block, ResultId(id)))\n\n }\n\n}\n\n\n\nimpl<T: MemoryBlockRead> MemoryBlockRead for Vec<T> {\n\n fn read(mut block: MemoryBlock) -> MemoryBlockResult<Vec<T>> {\n", "file_path": "src/spv/raw/mod.rs", "rank": 39, "score": 35932.31378359011 }, { "content": "fn search_block(id: BlockId,\n\n flow_list: &FlowList,\n\n block_map: &BlockMap)\n\n -> ControlFlowResult<(ControlFlowChain, Continue)> {\n\n\n\n if flow_list.merges(&id) {\n\n return Ok((ControlFlowChain::block(vec![]), Continue::Next(id)));\n\n }\n\n\n\n let block = match block_map.get(&id) {\n\n Some(block) => block,\n\n None => return Err(ControlFlowError::UnknownBlockId(id)),\n\n };\n\n\n\n match block.branch {\n\n GroupBranch::OpBranch(ref op) => {\n\n let current = ControlFlowChain::Atom(id);\n\n\n\n let next_id = BlockId(op.target_label.0);\n\n\n", "file_path": "src/spv/logical/control_flow.rs", "rank": 40, "score": 34512.072734343456 }, { "content": " };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for Capability {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Signedness {\n\n UnsignedOrNone,\n\n Signed,\n\n}\n\n\n\nimpl Display for Signedness {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n Signedness::UnsignedOrNone => \"0\",\n\n Signedness::Signed => \"1\",\n\n };\n\n write!(f, \"{}\", name)\n", "file_path": "src/spv/types.rs", "rank": 41, "score": 30162.547667906834 }, { "content": " fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for MemberIndex {}\n\n\n\n/// The addressing model used by the module\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum AddressingModel {\n\n Logical,\n\n Physical32,\n\n Physical64,\n\n}\n\n\n\nimpl Display for AddressingModel {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n AddressingModel::Logical => \"Logical\",\n\n AddressingModel::Physical32 => \"Physical32\",\n", "file_path": "src/spv/types.rs", "rank": 42, "score": 30160.531546862876 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub enum MS {\n\n Single,\n\n Multi,\n\n}\n\n\n\nimpl Display for MS {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n MS::Single => 0,\n\n MS::Multi => 1,\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for MS {}\n\n\n\n/// Indicates how an image is used with 
samplers\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/spv/types.rs", "rank": 43, "score": 30158.851447164372 }, { "content": " }\n\n}\n\n\n\nimpl DisplayArgType for SourceVersion {}\n\n\n\n/// Line number used with OpLine instruction\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Line(pub u32);\n\n\n\nimpl Display for Line {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for Line {}\n\n\n\n/// Column number used with OpLine instruction\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Column(pub u32);\n", "file_path": "src/spv/types.rs", "rank": 44, "score": 30158.58918564078 }, { "content": " _ => return None,\n\n })\n\n }\n\n}\n\n\n\n/// Source language the module was created from\n\n#[allow(non_camel_case_types)]\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum SourceLanguage {\n\n Unknown,\n\n Essl,\n\n Glsl,\n\n OpenCL_C,\n\n OpenCL_Cpp,\n\n Other(u32),\n\n}\n\n\n\nimpl Display for SourceLanguage {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n", "file_path": "src/spv/types.rs", "rank": 45, "score": 30158.122895288136 }, { "content": "//! Core types used with SPIR-V instructions\n\n\n\nuse std::fmt;\n\nuse std::fmt::{Display, Formatter};\n\nuse super::dis::*;\n\n\n\n/// A SPIR-V `<id>`\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct OpId(pub u32);\n\n\n\nimpl Display for OpId {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n f.pad(&format!(\"%{}\", self.0))\n\n }\n\n}\n\n\n\nimpl DisplayArgType for OpId {}\n\n\n\n/// A SPIR-V `Result <id>`\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/spv/types.rs", "rank": 46, "score": 30157.108774556073 }, { "content": "impl DisplayArgType for FpRoundingMode {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum LinkageType {\n\n Export,\n\n Import,\n\n}\n\n\n\nimpl Display for LinkageType {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n LinkageType::Export => \"Export\",\n\n LinkageType::Import => \"Import\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for LinkageType {}\n\n\n", "file_path": "src/spv/types.rs", "rank": 47, "score": 30156.9010106479 }, { "content": "\n\nimpl Display for AccessQualifier {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n AccessQualifier::ReadOnly => \"ReadOnly\",\n\n AccessQualifier::WriteOnly => \"WriteOnly\",\n\n AccessQualifier::ReadWrite => \"ReadWrite\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for AccessQualifier {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum SamplerAddressingMode {\n\n None,\n\n ClampToEdge,\n\n Clamp,\n\n Repeat,\n", "file_path": "src/spv/types.rs", "rank": 48, "score": 30156.813664633024 }, { "content": "impl DisplayArgType for LoopControl {}\n\n\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct SelectionControl {\n\n pub flatten: bool,\n\n pub dont_flatten: bool,\n\n}\n\n\n\nimpl Display for SelectionControl {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let mut parts = Vec::new();\n\n if self.flatten {\n\n parts.push(\"Flatten\");\n\n }\n\n if self.dont_flatten {\n\n parts.push(\"DontFlatten\");\n\n }\n\n if parts.len() == 0 {\n\n write!(f, \"None\")\n\n } else {\n", "file_path": "src/spv/types.rs", "rank": 49, "score": 30156.558152882433 }, { "content": " RepeatMirrored,\n\n}\n\n\n\nimpl Display for SamplerAddressingMode {\n\n fn fmt(&self, f: 
&mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n SamplerAddressingMode::None => \"None\",\n\n SamplerAddressingMode::ClampToEdge => \"ClampToEdge\",\n\n SamplerAddressingMode::Clamp => \"Clamp\",\n\n SamplerAddressingMode::Repeat => \"Repeat\",\n\n SamplerAddressingMode::RepeatMirrored => \"RepeatMirrored\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for SamplerAddressingMode {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum SamplerParam {\n", "file_path": "src/spv/types.rs", "rank": 50, "score": 30156.2634960461 }, { "content": "pub struct ResultId(pub u32);\n\n\n\n/// Set of words used to represent a literal constant\n\npub type LitBytes = Vec<u32>;\n\n\n\n/// A String literal\n\npub type LitString = String;\n\n\n\n/// Version for a module\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Version(pub u8, pub u8);\n\n\n\n/// Struct to hold type and version for the generator of a module\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Generator {\n\n pub tool: Tool,\n\n pub version: u16,\n\n}\n\n\n\n/// The tool used to generate a module\n", "file_path": "src/spv/types.rs", "rank": 51, "score": 30155.998921742324 }, { "content": "\n\nimpl Display for Column {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for Column {}\n\n\n\n/// Type of decoration to annotate an instruction with\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Decoration {\n\n RelaxedPrecision,\n\n SpecId(u32),\n\n Block,\n\n BufferBlock,\n\n RowMajor,\n\n ColMajor,\n\n ArrayStride(u32),\n\n MatrixStride(u32),\n", "file_path": "src/spv/types.rs", "rank": 52, "score": 30155.824547252418 }, { "content": "/// Indicates if an image is an array or not\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Arrayed {\n\n False,\n\n True,\n\n}\n\n\n\nimpl Display for Arrayed {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n Arrayed::False => 0,\n\n Arrayed::True => 1,\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for Arrayed {}\n\n\n\n/// The multisample state of an image\n", "file_path": "src/spv/types.rs", "rank": 53, "score": 30155.73020721898 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub enum DepthStatus {\n\n NotDepth,\n\n Depth,\n\n NoIndication,\n\n}\n\n\n\nimpl Display for DepthStatus {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n DepthStatus::NotDepth => 0,\n\n DepthStatus::Depth => 1,\n\n DepthStatus::NoIndication => 2,\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for DepthStatus {}\n\n\n", "file_path": "src/spv/types.rs", "rank": 54, "score": 30155.332326952215 }, { "content": " NonNormalized,\n\n Normalized,\n\n}\n\n\n\nimpl Display for SamplerParam {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n SamplerParam::NonNormalized => 0,\n\n SamplerParam::Normalized => 1,\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for SamplerParam {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum SamplerFilterMode {\n\n Nearest,\n\n Linear,\n", "file_path": "src/spv/types.rs", "rank": 55, "score": 30155.20518709481 }, { "content": "}\n\n\n\nimpl Display for SamplerFilterMode {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n SamplerFilterMode::Nearest => \"Nearest\",\n\n SamplerFilterMode::Linear => \"Linear\",\n\n };\n\n write!(f, \"{}\", name)\n\n 
}\n\n}\n\n\n\nimpl DisplayArgType for SamplerFilterMode {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ImageOperands {\n\n pub bias: Option<OpId>,\n\n pub lod: Option<OpId>,\n\n pub grad: Option<(OpId, OpId)>,\n\n pub const_offset: Option<OpId>,\n", "file_path": "src/spv/types.rs", "rank": 56, "score": 30154.971339844255 }, { "content": " AddressingModel::Physical64 => \"Physical64\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for AddressingModel {}\n\n\n\n/// The memory model required by the module\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum MemoryModel {\n\n Simple,\n\n Glsl450,\n\n OpenCL,\n\n}\n\n\n\nimpl Display for MemoryModel {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n MemoryModel::Simple => \"Simple\",\n", "file_path": "src/spv/types.rs", "rank": 57, "score": 30154.958637331947 }, { "content": " write!(f, \"{}\", parts.join(\" | \"))\n\n }\n\n }\n\n}\n\n\n\nimpl DisplayArgType for FunctionControl {}\n\n\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct LoopControl {\n\n pub unroll: bool,\n\n pub dont_unroll: bool,\n\n /// Added in 1.1\n\n pub dependency_infinite: bool,\n\n /// Added in 1.1\n\n pub dependency_length: Option<u32>,\n\n}\n\n\n\nimpl Display for LoopControl {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let mut parts = Vec::new();\n", "file_path": "src/spv/types.rs", "rank": 58, "score": 30154.690723085318 }, { "content": " write!(f, \"{}\", parts.join(\" | \"))\n\n }\n\n }\n\n}\n\n\n\nimpl DisplayArgType for SelectionControl {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct PhiArg {\n\n pub variable: OpId,\n\n pub parent: OpId,\n\n}\n\n\n\nimpl Display for PhiArg {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{} {}\", self.variable, self.parent)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for PhiArg {}\n", "file_path": "src/spv/types.rs", "rank": 59, "score": 30154.62758509034 }, { "content": " }\n\n if self.non_temporal {\n\n parts.push(\"Nontemporal\");\n\n }\n\n if parts.len() == 0 {\n\n write!(f, \"None\")\n\n } else {\n\n write!(f, \"{}\", parts.join(\" | \"))\n\n }\n\n }\n\n}\n\n\n\nimpl DisplayArgType for MemoryAccess {}\n\n\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct FunctionControl {\n\n pub inline: bool,\n\n pub dont_inline: bool,\n\n pub pure_function: bool,\n\n pub const_function: bool,\n", "file_path": "src/spv/types.rs", "rank": 60, "score": 30154.43435740299 }, { "content": "\n\nimpl DisplayArgType for StorageClass {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct FpFastMathMode {\n\n pub not_nan: bool,\n\n pub not_inf: bool,\n\n pub nsz: bool,\n\n pub allow_recip: bool,\n\n pub fast: bool,\n\n}\n\n\n\nimpl Display for FpFastMathMode {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let mut parts = Vec::new();\n\n if self.not_nan {\n\n parts.push(\"NotNaN\");\n\n }\n\n if self.not_inf {\n\n parts.push(\"NotInf\");\n", "file_path": "src/spv/types.rs", "rank": 61, "score": 30154.339162596938 }, { "content": "\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct BranchWeights {\n\n pub true_weight: u32,\n\n pub false_weight: u32,\n\n}\n\n\n\nimpl Display for BranchWeights {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{} {}\", self.true_weight, self.false_weight)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for BranchWeights {}\n\n\n\n/// An `<id>` that refers to a scope\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ScopeId(pub u32);\n\n\n\n/// An `<id>` 
that refers to memory semantics\n", "file_path": "src/spv/types.rs", "rank": 62, "score": 30154.306526728553 }, { "content": " Capability::Pipes => \"Pipes\",\n\n Capability::Groups => \"Groups\",\n\n Capability::DeviceEnqueue => \"DeviceEnqueue\",\n\n Capability::LiteralSampler => \"LiteralSampler\",\n\n Capability::AtomicStorage => \"AtomicStorage\",\n\n Capability::Int16 => \"Int16\",\n\n Capability::TessellationPointSize => \"TessellationPointSize\",\n\n Capability::GeometryPointSize => \"GeometryPointSize\",\n\n Capability::ImageGatherExtended => \"ImageGatherExtended\",\n\n Capability::StorageImageMultisample => \"StorageImageMultisample\",\n\n Capability::UniformBufferArrayDynamicIndexing => \"UniformBufferArrayDynamicIndexing\",\n\n Capability::SampledImageArrayDynamicIndexing => \"SampledImageArrayDynamicIndexing\",\n\n Capability::StorageBufferArrayDynamicIndexing => \"StorageBufferArrayDynamicIndexing\",\n\n Capability::StorageImageArrayDynamicIndexing => \"StorageImageArrayDynamicIndexing\",\n\n Capability::ClipDistance => \"ClipDistance\",\n\n Capability::CullDistance => \"CullDistance\",\n\n Capability::ImageCubeArray => \"ImageCubeArray\",\n\n Capability::SampleRateShading => \"SampleRateShading\",\n\n Capability::ImageRect => \"ImageRect\",\n\n Capability::SampledRect => \"SampledRect\",\n", "file_path": "src/spv/types.rs", "rank": 63, "score": 30154.27823333326 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub struct MemorySemanticsId(pub u32);\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum GroupOperation {\n\n Reduce,\n\n InclusiveScan,\n\n ExclusiveScan,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum KernelEnqueueFlags {\n\n NoWait,\n\n WaitKernel,\n\n WaitWorkGroup,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct KernelProfilingInfo {\n\n cmd_exec_time: bool,\n\n}\n", "file_path": "src/spv/types.rs", "rank": 64, "score": 30154.064830541945 }, { "content": " }\n\n}\n\n\n\nimpl DisplayArgType for Signedness {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum StorageClass {\n\n UniformConstant,\n\n Input,\n\n Uniform,\n\n Output,\n\n Workgroup,\n\n CrossWorkgroup,\n\n Private,\n\n Function,\n\n Generic,\n\n PushConstant,\n\n AtomicCounter,\n\n Image,\n\n}\n", "file_path": "src/spv/types.rs", "rank": 65, "score": 30153.975918418757 }, { "content": " BuiltIn::SubgroupSize => \"SubgroupSize\",\n\n BuiltIn::SubgroupMaxSize => \"SubgroupMaxSize\",\n\n BuiltIn::NumSubgroups => \"NumSubgroups\",\n\n BuiltIn::NumEnqueuedSubgroups => \"NumEnqueuedSubgroups\",\n\n BuiltIn::SubgroupId => \"SubgroupId\",\n\n BuiltIn::SubgroupLocalInvocationId => \"SubgroupLocalInvocationId\",\n\n BuiltIn::VertexIndex => \"VertexIndex\",\n\n BuiltIn::InstanceIndex => \"InstanceIndex\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for BuiltIn {}\n\n\n\n/// Offset of a member in a type\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct MemberIndex(pub u32);\n\n\n\nimpl Display for MemberIndex {\n", "file_path": "src/spv/types.rs", "rank": 66, "score": 30153.611894381887 }, { "content": " InputLinesAdjacency,\n\n Triangles,\n\n InputTrianglesAdjacency,\n\n Quads,\n\n Isolines,\n\n OutputVerticies(u32),\n\n OutputPoints,\n\n OutputLineStrip,\n\n OutputTriangleStrip,\n\n VecTypeHint(OpId),\n\n ContractionOff,\n\n}\n\n\n\nimpl Display for ExecutionMode {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n ExecutionMode::Invocations(ref n) => return write!(f, \"Invocations {}\", n),\n\n 
ExecutionMode::SpacingEqual => \"SpacingEqual\",\n\n ExecutionMode::SpacingFractionalEven => \"SpacingFractionalEven\",\n\n ExecutionMode::SpacingFractionalOdd => \"SpacingFractionalOdd\",\n", "file_path": "src/spv/types.rs", "rank": 67, "score": 30153.03704966704 }, { "content": "\n\nimpl Display for ExecutionModel {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n ExecutionModel::Vertex => \"Vertex\",\n\n ExecutionModel::TesselationControl => \"TesselationControl\",\n\n ExecutionModel::TesselationEvaluation => \"TesselationEvaluation\",\n\n ExecutionModel::Geometry => \"Geometry\",\n\n ExecutionModel::Fragment => \"Fragment\",\n\n ExecutionModel::GlCompute => \"GLCompute\",\n\n ExecutionModel::Kernel => \"Kernel\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for ExecutionModel {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum ExecutionMode {\n", "file_path": "src/spv/types.rs", "rank": 68, "score": 30153.034362496237 }, { "content": "pub enum SampledStatus {\n\n RuntimeChoice,\n\n WithSampler,\n\n WithoutSampler,\n\n}\n\n\n\nimpl Display for SampledStatus {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n SampledStatus::RuntimeChoice => 0,\n\n SampledStatus::WithSampler => 1,\n\n SampledStatus::WithoutSampler => 2,\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for SampledStatus {}\n\n\n\n/// The format for an image type\n", "file_path": "src/spv/types.rs", "rank": 69, "score": 30152.87077499846 }, { "content": " ExecutionMode::InputLinesAdjacency => \"InputLinesAdjacency\",\n\n ExecutionMode::Triangles => \"Triangles\",\n\n ExecutionMode::InputTrianglesAdjacency => \"InputTrianglesAdjacency\",\n\n ExecutionMode::Quads => \"Quads\",\n\n ExecutionMode::Isolines => \"Isolines\",\n\n ExecutionMode::OutputVerticies(ref n) => return write!(f, \"OutputVerticies {}\", n),\n\n ExecutionMode::OutputPoints => \"OutputPoints\",\n\n ExecutionMode::OutputLineStrip => \"OutputLineStrip\",\n\n ExecutionMode::OutputTriangleStrip => \"OutputTriangleStrip\",\n\n ExecutionMode::VecTypeHint(ref id) => return write!(f, \"VecTypeHint {}\", id),\n\n ExecutionMode::ContractionOff => \"ContractionOff\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for ExecutionMode {}\n\n\n\n/// Capability that a module may require\n\n///\n", "file_path": "src/spv/types.rs", "rank": 70, "score": 30152.421387734532 }, { "content": " pub offset: Option<OpId>,\n\n pub const_offsets: Option<OpId>,\n\n pub min_lod: Option<OpId>,\n\n}\n\n\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct MemoryAccess {\n\n pub volatile: bool,\n\n pub aligned: bool,\n\n pub non_temporal: bool,\n\n}\n\n\n\nimpl Display for MemoryAccess {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let mut parts = Vec::new();\n\n if self.volatile {\n\n parts.push(\"Volatile\");\n\n }\n\n if self.aligned {\n\n parts.push(\"Aligned\");\n", "file_path": "src/spv/types.rs", "rank": 71, "score": 30152.40477841718 }, { "content": " Location(u32),\n\n Component(u32),\n\n Index(u32),\n\n Binding(u32),\n\n DescriptorSet(u32),\n\n Offset(u32),\n\n XfbBuffer(u32),\n\n XfbStride(u32),\n\n FuncParamAttr(FunctionParameterAttribute),\n\n FpRoundingMode(FpRoundingMode),\n\n FpFastMathMode(FpFastMathMode),\n\n LinkageAttributes(LitString, LinkageType),\n\n NoContraction,\n\n InputAttachmentIndex(u32),\n\n Alignment(u32),\n\n}\n\n\n\nimpl Display for Decoration {\n\n fn fmt(&self, f: &mut Formatter) 
-> fmt::Result {\n\n let name = match *self {\n", "file_path": "src/spv/types.rs", "rank": 72, "score": 30152.253191785785 }, { "content": "\n\nimpl Display for Capability {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n Capability::Matrix => \"Matrix\",\n\n Capability::Shader => \"Shader\",\n\n Capability::Geometry => \"Geometry\",\n\n Capability::Tessellation => \"Tessellation\",\n\n Capability::Addresses => \"Addresses\",\n\n Capability::Linkage => \"Linkage\",\n\n Capability::Kernel => \"Kernel\",\n\n Capability::Vector16 => \"Vector16\",\n\n Capability::Float16Buffer => \"Float16Buffer\",\n\n Capability::Float16 => \"Float16\",\n\n Capability::Float64 => \"Float64\",\n\n Capability::Int64 => \"Int64\",\n\n Capability::Int64Atomics => \"Int64Atomics\",\n\n Capability::ImageBasic => \"ImageBasic\",\n\n Capability::ImageReadWrite => \"ImageReadWrite\",\n\n Capability::ImageMipmap => \"ImageMipmap\",\n", "file_path": "src/spv/types.rs", "rank": 73, "score": 30152.14605265761 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub enum FpRoundingMode {\n\n Rte,\n\n Rtz,\n\n Rtp,\n\n Rtn,\n\n}\n\n\n\nimpl Display for FpRoundingMode {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n FpRoundingMode::Rte => \"RTE\",\n\n FpRoundingMode::Rtz => \"RTZ\",\n\n FpRoundingMode::Rtp => \"RTP\",\n\n FpRoundingMode::Rtn => \"RTN\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n", "file_path": "src/spv/types.rs", "rank": 74, "score": 30152.14530104547 }, { "content": " SourceLanguage::Unknown => \"Unknown\",\n\n SourceLanguage::Essl => \"ESSL\",\n\n SourceLanguage::Glsl => \"GLSL\",\n\n SourceLanguage::OpenCL_C => \"OpenCL_C\",\n\n SourceLanguage::OpenCL_Cpp => \"OpenCL_CPP\",\n\n SourceLanguage::Other(n) => return write!(f, \"Unknown({})\", n),\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for SourceLanguage {}\n\n\n\n/// Version of the source language\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct SourceVersion(pub u32);\n\n\n\nimpl Display for SourceVersion {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n", "file_path": "src/spv/types.rs", "rank": 75, "score": 30151.680210161907 }, { "content": "/// Many instructions and variants depend on a certain capability\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Capability {\n\n Matrix,\n\n Shader,\n\n Geometry,\n\n Tessellation,\n\n Addresses,\n\n Linkage,\n\n Kernel,\n\n Vector16,\n\n Float16Buffer,\n\n Float16,\n\n Float64,\n\n Int64,\n\n Int64Atomics,\n\n ImageBasic,\n\n ImageReadWrite,\n\n ImageMipmap,\n\n Pipes,\n", "file_path": "src/spv/types.rs", "rank": 76, "score": 30151.60608862923 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub enum FunctionParameterAttribute {\n\n Zext,\n\n Sext,\n\n ByVal,\n\n Sret,\n\n NoAlias,\n\n NoCapture,\n\n NoWrite,\n\n NoReadWrite,\n\n}\n\n\n\nimpl Display for FunctionParameterAttribute {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n FunctionParameterAttribute::Zext => \"Zext\",\n\n FunctionParameterAttribute::Sext => \"Sext\",\n\n FunctionParameterAttribute::ByVal => \"ByVal\",\n\n FunctionParameterAttribute::Sret => \"Sret\",\n\n FunctionParameterAttribute::NoAlias => \"NoAlias\",\n", "file_path": "src/spv/types.rs", "rank": 77, "score": 30151.49307069461 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub enum Tool {\n\n KhronosReserved,\n\n LunarG,\n\n Valve,\n\n Codeplay,\n\n Nvidia,\n\n Arm,\n\n 
KhronosLLvmTranslator,\n\n KhronosAssembler,\n\n KhronosGlslang,\n\n Qualcomm,\n\n Amd,\n\n Intel,\n\n Other(u16),\n\n}\n\n\n\nimpl Tool {\n\n pub fn get_vendor(&self) -> Option<&'static str> {\n\n Some(match *self {\n", "file_path": "src/spv/types.rs", "rank": 78, "score": 30151.43485574021 }, { "content": " VertexIndex,\n\n InstanceIndex,\n\n}\n\n\n\nimpl Display for BuiltIn {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n BuiltIn::Position => \"Position\",\n\n BuiltIn::PointSize => \"PointSize\",\n\n BuiltIn::ClipDistance => \"ClipDistance\",\n\n BuiltIn::CullDistance => \"CullDistance\",\n\n BuiltIn::VertexId => \"VertexId\",\n\n BuiltIn::InstanceId => \"InstanceId\",\n\n BuiltIn::PrimitiveId => \"PrimitiveId\",\n\n BuiltIn::InvocationId => \"InvocationId\",\n\n BuiltIn::Layer => \"Layer\",\n\n BuiltIn::ViewportIndex => \"ViewportIndex\",\n\n BuiltIn::TessLevelOuter => \"TessLevelOuter\",\n\n BuiltIn::TessLevelInner => \"TessLevelInner\",\n\n BuiltIn::TessCoord => \"TessCoord\",\n", "file_path": "src/spv/types.rs", "rank": 79, "score": 30150.451545993536 }, { "content": " MemoryModel::Glsl450 => \"GLSL450\",\n\n MemoryModel::OpenCL => \"OpenCL\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for MemoryModel {}\n\n\n\n/// The execution model for an entry point into the module\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum ExecutionModel {\n\n Vertex,\n\n TesselationControl,\n\n TesselationEvaluation,\n\n Geometry,\n\n Fragment,\n\n GlCompute,\n\n Kernel,\n\n}\n", "file_path": "src/spv/types.rs", "rank": 80, "score": 30150.098825049256 }, { "content": "\n\nimpl Display for StorageClass {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n StorageClass::UniformConstant => \"UniformConstant\",\n\n StorageClass::Input => \"Input\",\n\n StorageClass::Uniform => \"Uniform\",\n\n StorageClass::Output => \"Output\",\n\n StorageClass::Workgroup => \"Workgroup\",\n\n StorageClass::CrossWorkgroup => \"CrossWorkgroup\",\n\n StorageClass::Private => \"Private\",\n\n StorageClass::Function => \"Function\",\n\n StorageClass::Generic => \"Generic\",\n\n StorageClass::PushConstant => \"PushConstant\",\n\n StorageClass::AtomicCounter => \"AtomicCounter\",\n\n StorageClass::Image => \"Image\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n", "file_path": "src/spv/types.rs", "rank": 81, "score": 30149.975214757767 }, { "content": "}\n\n\n\nimpl Display for Dim {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n Dim::Tex1D => \"1D\",\n\n Dim::Tex2D => \"2D\",\n\n Dim::Tex3D => \"3D\",\n\n Dim::Cube => \"Cube\",\n\n Dim::Rect => \"Rect\",\n\n Dim::Buffer => \"Buffer\",\n\n Dim::SubpassData => \"SubpassData\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for Dim {}\n\n\n\n/// Indicates if it is known if an image is a depth image\n", "file_path": "src/spv/types.rs", "rank": 82, "score": 30149.972994508542 }, { "content": " Decoration::FpFastMathMode(ref fast_math_mode) => {\n\n return write!(f, \"FpFastMathMode {}\", fast_math_mode)\n\n }\n\n Decoration::LinkageAttributes(ref name, ref lt) => {\n\n return write!(f, \"LinkageAttributes {} {}\", name, lt)\n\n }\n\n Decoration::NoContraction => \"NoContraction\",\n\n Decoration::InputAttachmentIndex(ref n) => {\n\n return write!(f, \"InputAttachmentIndex {}\", n)\n\n }\n\n Decoration::Alignment(ref n) => return write!(f, \"Alignment {}\", n),\n\n };\n\n write!(f, \"{}\", name)\n\n 
}\n\n}\n\n\n\nimpl DisplayArgType for Decoration {}\n\n\n\n/// Marks a special built in variable or member\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/spv/types.rs", "rank": 83, "score": 30149.94385690564 }, { "content": " BuiltIn::PatchVerticies => \"PatchVerticies\",\n\n BuiltIn::FragCoord => \"FragCoord\",\n\n BuiltIn::PointCoord => \"PointCoord\",\n\n BuiltIn::FrontFacing => \"FrontFacing\",\n\n BuiltIn::SampleId => \"SampleId\",\n\n BuiltIn::SamplePosition => \"SamplePosition\",\n\n BuiltIn::SampleMask => \"SampleMask\",\n\n BuiltIn::FragDepth => \"FragDepth\",\n\n BuiltIn::HelperInvocation => \"HelperInvocation\",\n\n BuiltIn::NumWorkgroups => \"NumWorkgroups\",\n\n BuiltIn::WorkgroupSize => \"WorkgroupSize\",\n\n BuiltIn::WorkgroupId => \"WorkgroupId\",\n\n BuiltIn::LocalInvocationId => \"LocalInvocationId\",\n\n BuiltIn::GlobalInvocationId => \"GlobalInvocationId\",\n\n BuiltIn::LocalInvocationIndex => \"LocalInvocationIndex\",\n\n BuiltIn::WorkDim => \"WorkDim\",\n\n BuiltIn::GlobalSize => \"GlobalSize\",\n\n BuiltIn::EnqueuedWorkgroupSize => \"EnqueuedWorkgroupSize\",\n\n BuiltIn::GlobalOffset => \"GlobalOffset\",\n\n BuiltIn::GlobalLinearId => \"GlobalLinearId\",\n", "file_path": "src/spv/types.rs", "rank": 84, "score": 30149.73499101177 }, { "content": " SampleMask,\n\n FragDepth,\n\n HelperInvocation,\n\n NumWorkgroups,\n\n WorkgroupSize,\n\n WorkgroupId,\n\n LocalInvocationId,\n\n GlobalInvocationId,\n\n LocalInvocationIndex,\n\n WorkDim,\n\n GlobalSize,\n\n EnqueuedWorkgroupSize,\n\n GlobalOffset,\n\n GlobalLinearId,\n\n SubgroupSize,\n\n SubgroupMaxSize,\n\n NumSubgroups,\n\n NumEnqueuedSubgroups,\n\n SubgroupId,\n\n SubgroupLocalInvocationId,\n", "file_path": "src/spv/types.rs", "rank": 85, "score": 30149.71291758328 }, { "content": " FunctionParameterAttribute::NoCapture => \"NoCapture\",\n\n FunctionParameterAttribute::NoWrite => \"NoWrite\",\n\n FunctionParameterAttribute::NoReadWrite => \"NoReadWrite\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for FunctionParameterAttribute {}\n\n\n\n/// The dimension for an image type\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Dim {\n\n Tex1D,\n\n Tex2D,\n\n Tex3D,\n\n Cube,\n\n Rect,\n\n Buffer,\n\n SubpassData,\n", "file_path": "src/spv/types.rs", "rank": 86, "score": 30149.68635495383 }, { "content": " ImageFormat::R32ui => \"R32ui\",\n\n ImageFormat::Rgb10a2ui => \"Rgb10a2ui\",\n\n ImageFormat::Rg32ui => \"Rg32ui\",\n\n ImageFormat::Rg16ui => \"Rg16ui\",\n\n ImageFormat::Rg8ui => \"Rg8ui\",\n\n ImageFormat::R16ui => \"R16ui\",\n\n ImageFormat::R8ui => \"R8ui\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\nimpl DisplayArgType for ImageFormat {}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum AccessQualifier {\n\n ReadOnly,\n\n WriteOnly,\n\n ReadWrite,\n\n}\n", "file_path": "src/spv/types.rs", "rank": 87, "score": 30149.55937175915 }, { "content": " Groups,\n\n DeviceEnqueue,\n\n LiteralSampler,\n\n AtomicStorage,\n\n Int16,\n\n TessellationPointSize,\n\n GeometryPointSize,\n\n ImageGatherExtended,\n\n StorageImageMultisample,\n\n UniformBufferArrayDynamicIndexing,\n\n SampledImageArrayDynamicIndexing,\n\n StorageBufferArrayDynamicIndexing,\n\n StorageImageArrayDynamicIndexing,\n\n ClipDistance,\n\n CullDistance,\n\n ImageCubeArray,\n\n SampleRateShading,\n\n ImageRect,\n\n SampledRect,\n\n GenericPointer,\n", "file_path": "src/spv/types.rs", "rank": 88, "score": 30149.390369696517 }, { "content": "}\n\n\n\nimpl Display for FunctionControl {\n\n fn fmt(&self, f: 
&mut Formatter) -> fmt::Result {\n\n let mut parts = Vec::new();\n\n if self.inline {\n\n parts.push(\"Inline\");\n\n }\n\n if self.dont_inline {\n\n parts.push(\"DontInline\");\n\n }\n\n if self.pure_function {\n\n parts.push(\"Pure\");\n\n }\n\n if self.const_function {\n\n parts.push(\"Const\");\n\n }\n\n if parts.len() == 0 {\n\n write!(f, \"None\")\n\n } else {\n", "file_path": "src/spv/types.rs", "rank": 89, "score": 30149.381440200155 }, { "content": " }\n\n if self.nsz {\n\n parts.push(\"NSZ\");\n\n }\n\n if self.allow_recip {\n\n parts.push(\"AllowRecip\");\n\n }\n\n if self.fast {\n\n parts.push(\"Fast\");\n\n }\n\n if parts.len() == 0 {\n\n write!(f, \"None\")\n\n } else {\n\n write!(f, \"{}\", parts.join(\" | \"))\n\n }\n\n }\n\n}\n\n\n\nimpl DisplayArgType for FpFastMathMode {}\n\n\n", "file_path": "src/spv/types.rs", "rank": 90, "score": 30148.68870644108 }, { "content": " Tool::KhronosReserved => \"Khronos\",\n\n Tool::LunarG => \"LunarG\",\n\n Tool::Valve => \"Valve\",\n\n Tool::Codeplay => \"Codeplay\",\n\n Tool::Nvidia => \"NVIDIA\",\n\n Tool::Arm => \"ARM\",\n\n Tool::KhronosLLvmTranslator => \"Khronos\",\n\n Tool::KhronosAssembler => \"Khronos\",\n\n Tool::KhronosGlslang => \"Khronos\",\n\n Tool::Qualcomm => \"Qualcomm\",\n\n Tool::Amd => \"AMD\",\n\n Tool::Intel => \"Intel\",\n\n Tool::Other(_) => return None,\n\n })\n\n }\n\n pub fn get_tool(&self) -> Option<&'static str> {\n\n Some(match *self {\n\n Tool::KhronosLLvmTranslator => \"LLVM/SPIR-V Translator\",\n\n Tool::KhronosAssembler => \"SPIR-V Tools Assembler\",\n\n Tool::KhronosGlslang => \"Glslang Reference Front End\",\n", "file_path": "src/spv/types.rs", "rank": 91, "score": 30148.49181977216 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub enum ImageFormat {\n\n Unknown,\n\n Rgba32f,\n\n Rgba16f,\n\n R32f,\n\n Rgba8,\n\n Rgba8Snorm,\n\n Rg32f,\n\n Rg16f,\n\n R11fG11fB10f,\n\n R16f,\n\n Rgba16,\n\n Rgb10A2,\n\n Rg16,\n\n Rg8,\n\n R16,\n\n R8,\n\n Rgba16Snorm,\n\n Rg16Snorm,\n", "file_path": "src/spv/types.rs", "rank": 92, "score": 30148.118404724144 }, { "content": " Capability::GenericPointer => \"GenericPointer\",\n\n Capability::Int8 => \"Int8\",\n\n Capability::InputAttachment => \"InputAttachment\",\n\n Capability::SparseResidency => \"SparseResidency\",\n\n Capability::MinLod => \"MinLod\",\n\n Capability::Sampled1D => \"Sampled1D\",\n\n Capability::Image1D => \"Image1D\",\n\n Capability::SampledCubeArray => \"SampledCubeArray\",\n\n Capability::SampledBuffer => \"SampledBuffer\",\n\n Capability::ImageBuffer => \"ImageBuffer\",\n\n Capability::ImageMSArray => \"ImageMSArray\",\n\n Capability::StorageImageExtendedFormats => \"StorageImageExtendedFormats\",\n\n Capability::ImageQuery => \"ImageQuery\",\n\n Capability::DerivativeControl => \"DerivativeControl\",\n\n Capability::InterpolationFunction => \"InterpolationFunction\",\n\n Capability::TransformFeedback => \"TransformFeedback\",\n\n Capability::GeometryStreams => \"GeometryStreams\",\n\n Capability::StorageImageReadWithoutFormat => \"StorageImageReadWithoutFormat\",\n\n Capability::StorageImageWriteWithoutFormat => \"StorageImageWriteWithoutFormat\",\n\n Capability::MultiViewport => \"MultiViewport\",\n", "file_path": "src/spv/types.rs", "rank": 93, "score": 30148.075787345097 }, { "content": "pub enum BuiltIn {\n\n Position,\n\n PointSize,\n\n ClipDistance,\n\n CullDistance,\n\n VertexId,\n\n InstanceId,\n\n PrimitiveId,\n\n InvocationId,\n\n Layer,\n\n ViewportIndex,\n\n TessLevelOuter,\n\n TessLevelInner,\n\n TessCoord,\n\n 
PatchVerticies,\n\n FragCoord,\n\n PointCoord,\n\n FrontFacing,\n\n SampleId,\n\n SamplePosition,\n", "file_path": "src/spv/types.rs", "rank": 94, "score": 30147.963848887233 }, { "content": " R16ui,\n\n R8ui,\n\n}\n\n\n\nimpl Display for ImageFormat {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let name = match *self {\n\n ImageFormat::Unknown => \"Unknown\",\n\n ImageFormat::Rgba32f => \"Rgba32f\",\n\n ImageFormat::Rgba16f => \"Rgba16f\",\n\n ImageFormat::R32f => \"R32f\",\n\n ImageFormat::Rgba8 => \"Rgba8\",\n\n ImageFormat::Rgba8Snorm => \"Rgba8Snorm\",\n\n ImageFormat::Rg32f => \"Rg32f\",\n\n ImageFormat::Rg16f => \"Rg16f\",\n\n ImageFormat::R11fG11fB10f => \"R11fG11fB10f\",\n\n ImageFormat::R16f => \"R16f\",\n\n ImageFormat::Rgba16 => \"Rgba16\",\n\n ImageFormat::Rgb10A2 => \"Rgb10A2\",\n\n ImageFormat::Rg16 => \"Rg16\",\n", "file_path": "src/spv/types.rs", "rank": 95, "score": 30146.846659150633 }, { "content": " if self.unroll {\n\n parts.push(\"Unroll\".to_string());\n\n }\n\n if self.dont_unroll {\n\n parts.push(\"DontUnroll\".to_string());\n\n }\n\n if self.dependency_infinite {\n\n parts.push(\"DependencyInfinite\".to_string());\n\n }\n\n if let Some(len) = self.dependency_length {\n\n parts.push(format!(\"DependencyLength({})\", len));\n\n }\n\n if parts.len() == 0 {\n\n write!(f, \"None\")\n\n } else {\n\n write!(f, \"{}\", parts.join(\" | \"))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/spv/types.rs", "rank": 96, "score": 30145.787467853945 }, { "content": " Decoration::Volatile => \"Volatile\",\n\n Decoration::Constant => \"Constant\",\n\n Decoration::Coherent => \"Coherent\",\n\n Decoration::NonWritable => \"NonWritable\",\n\n Decoration::NonReadable => \"NonReadable\",\n\n Decoration::Uniform => \"Uniform\",\n\n Decoration::SaturatedConversion => \"SaturatedConversion\",\n\n Decoration::Stream(ref n) => return write!(f, \"Stream {}\", n),\n\n Decoration::Location(ref n) => return write!(f, \"Location {}\", n),\n\n Decoration::Component(ref n) => return write!(f, \"Component {}\", n),\n\n Decoration::Index(ref n) => return write!(f, \"Index {}\", n),\n\n Decoration::Binding(ref n) => return write!(f, \"Binding {}\", n),\n\n Decoration::DescriptorSet(ref n) => return write!(f, \"DescriptorSet {}\", n),\n\n Decoration::Offset(ref n) => return write!(f, \"Offset {}\", n),\n\n Decoration::XfbBuffer(ref n) => return write!(f, \"XfbBuffer {}\", n),\n\n Decoration::XfbStride(ref n) => return write!(f, \"XfbStride {}\", n),\n\n Decoration::FuncParamAttr(ref foa) => return write!(f, \"FuncParamAttr {}\", foa),\n\n Decoration::FpRoundingMode(ref rounding_mode) => {\n\n return write!(f, \"FpRoundingMode {}\", rounding_mode)\n\n }\n", "file_path": "src/spv/types.rs", "rank": 97, "score": 30145.52511348754 }, { "content": " GlslShared,\n\n GlslPacked,\n\n CPacked,\n\n BuiltIn(BuiltIn),\n\n NoPerspective,\n\n Flat,\n\n Patch,\n\n Centroid,\n\n Sample,\n\n Invariant,\n\n Restrict,\n\n Aliased,\n\n Volatile,\n\n Constant,\n\n Coherent,\n\n NonWritable,\n\n NonReadable,\n\n Uniform,\n\n SaturatedConversion,\n\n Stream(u32),\n", "file_path": "src/spv/types.rs", "rank": 98, "score": 30143.03966451284 }, { "content": " Decoration::RelaxedPrecision => \"RelaxedPrecision\",\n\n Decoration::SpecId(ref n) => return write!(f, \"SpecId {}\", n),\n\n Decoration::Block => \"Block\",\n\n Decoration::BufferBlock => \"BufferBlock\",\n\n Decoration::RowMajor => \"RowMajor\",\n\n Decoration::ColMajor => \"ColMajor\",\n\n Decoration::ArrayStride(ref n) => return write!(f, \"ArrayStride {}\", 
n),\n\n Decoration::MatrixStride(ref n) => return write!(f, \"MatrixStride {}\", n),\n\n Decoration::GlslShared => \"GLSLShared\",\n\n Decoration::GlslPacked => \"GLSLPacked\",\n\n Decoration::CPacked => \"CPacked\",\n\n Decoration::BuiltIn(ref b) => return write!(f, \"BuiltIn {}\", b),\n\n Decoration::NoPerspective => \"NoPerspective\",\n\n Decoration::Flat => \"Flat\",\n\n Decoration::Patch => \"Patch\",\n\n Decoration::Centroid => \"Centroid\",\n\n Decoration::Sample => \"Sample\",\n\n Decoration::Invariant => \"Invariant\",\n\n Decoration::Restrict => \"Restrict\",\n\n Decoration::Aliased => \"Aliased\",\n", "file_path": "src/spv/types.rs", "rank": 99, "score": 30143.03966451284 } ]
Rust
examples/src/bin/smoltcp.rs
akiles/ppproto
b13f98d99b5afb2ffee18cebe6bef6ffdc340c84
#[path = "../serial_port.rs"] mod serial_port; use as_slice::{AsMutSlice, AsSlice}; use clap::Clap; use std::fmt::Write as _; use std::io::{Read, Write}; use std::marker::PhantomData; use std::ops::Range; use std::os::unix::io::AsRawFd; use std::path::Path; use std::str; use log::*; use smoltcp::iface::InterfaceBuilder; use smoltcp::phy::wait as phy_wait; use smoltcp::phy::{Device, DeviceCapabilities, Medium, RxToken, TxToken}; use smoltcp::socket::SocketSet; use smoltcp::socket::{TcpSocket, TcpSocketBuffer}; use smoltcp::socket::{UdpPacketMetadata, UdpSocket, UdpSocketBuffer}; use smoltcp::time::{Duration, Instant}; use smoltcp::wire::{IpCidr, Ipv4Address}; use smoltcp::Result; use ppproto::{Config, PPPoS, PPPoSAction}; use serial_port::SerialPort; #[derive(Clap)] struct Opts { #[clap(short, long)] device: String, } const MTU: usize = 1520; struct Buf(Box<[u8; MTU]>); impl Buf { pub fn new() -> Self { Self(Box::new([0; MTU])) } } impl AsSlice for Buf { type Element = u8; fn as_slice(&self) -> &[Self::Element] { &*self.0 } } impl AsMutSlice for Buf { fn as_mut_slice(&mut self) -> &mut [Self::Element] { &mut *self.0 } } type PPP = PPPoS<'static, Buf>; struct PPPDevice { ppp: PPP, port: SerialPort, } impl PPPDevice { fn new(ppp: PPP, port: SerialPort) -> Self { Self { ppp, port } } } impl<'a> Device<'a> for PPPDevice { type RxToken = PPPRxToken<'a>; type TxToken = PPPTxToken<'a>; fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)> { self.port.set_nonblocking(true).unwrap(); let mut tx_buf = [0; 2048]; let mut read_buf = [0; 2048]; let mut data: &[u8] = &[]; loop { match self.ppp.poll(&mut tx_buf) { PPPoSAction::None => {} PPPoSAction::Transmit(n) => self.port.write_all(&tx_buf[..n]).unwrap(), PPPoSAction::Received(buf, range) => { self.ppp.put_rx_buf(Buf::new()); return Some(( PPPRxToken { buf, range, _phantom: PhantomData, }, PPPTxToken { port: &mut self.port, ppp: &mut self.ppp, }, )); } } if data.len() == 0 { let n = match self.port.read(&mut read_buf) { Ok(n) => n, Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => return None, Err(e) => panic!("error reading: {:?}", e), }; data = &read_buf[..n]; } let n = self.ppp.consume(data); data = &data[n..]; } } fn transmit(&'a mut self) -> Option<Self::TxToken> { Some(PPPTxToken { port: &mut self.port, ppp: &mut self.ppp, }) } fn capabilities(&self) -> DeviceCapabilities { let mut caps: DeviceCapabilities = Default::default(); caps.max_transmission_unit = 1500; caps.medium = Medium::Ip; caps } } struct PPPRxToken<'a> { buf: Buf, range: Range<usize>, _phantom: PhantomData<&'a mut PPP>, } impl<'a> RxToken for PPPRxToken<'a> { fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { f(&mut self.buf.0[self.range]) } } struct PPPTxToken<'a> { port: &'a mut SerialPort, ppp: &'a mut PPP, } impl<'a> TxToken for PPPTxToken<'a> { fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { let mut pkt_buf = [0; 2048]; let pkt = &mut pkt_buf[..len]; let r = f(pkt)?; let mut tx_buf = [0; 2048]; let n = self.ppp.send(pkt, &mut tx_buf).unwrap(); self.port.set_nonblocking(false).unwrap(); self.port.write_all(&tx_buf[..n]).unwrap(); Ok(r) } } fn main() { env_logger::init(); let opts: Opts = Opts::parse(); let port = SerialPort::new(Path::new(&opts.device)).unwrap(); let fd = port.as_raw_fd(); let config = Config { username: b"myuser", password: b"mypass", }; let mut ppp = PPPoS::new(config); ppp.put_rx_buf(Buf::new()); 
ppp.open().unwrap(); let device = PPPDevice::new(ppp, port); let udp_rx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 64]); let udp_tx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 128]); let udp_socket = UdpSocket::new(udp_rx_buffer, udp_tx_buffer); let tcp1_rx_buffer = TcpSocketBuffer::new(vec![0; 64]); let tcp1_tx_buffer = TcpSocketBuffer::new(vec![0; 128]); let tcp1_socket = TcpSocket::new(tcp1_rx_buffer, tcp1_tx_buffer); let tcp2_rx_buffer = TcpSocketBuffer::new(vec![0; 64]); let tcp2_tx_buffer = TcpSocketBuffer::new(vec![0; 128]); let tcp2_socket = TcpSocket::new(tcp2_rx_buffer, tcp2_tx_buffer); let tcp3_rx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp3_tx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp3_socket = TcpSocket::new(tcp3_rx_buffer, tcp3_tx_buffer); let tcp4_rx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp4_tx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp4_socket = TcpSocket::new(tcp4_rx_buffer, tcp4_tx_buffer); let ip_addrs = [IpCidr::new(Ipv4Address::UNSPECIFIED.into(), 0)]; let mut iface = InterfaceBuilder::new(device).ip_addrs(ip_addrs).finalize(); let mut sockets = SocketSet::new(vec![]); let udp_handle = sockets.add(udp_socket); let tcp1_handle = sockets.add(tcp1_socket); let tcp2_handle = sockets.add(tcp2_socket); let tcp3_handle = sockets.add(tcp3_socket); let tcp4_handle = sockets.add(tcp4_socket); let mut tcp_6970_active = false; loop { let timestamp = Instant::now(); match iface.poll(&mut sockets, timestamp) { Ok(_) => {} Err(e) => { debug!("poll error: {}", e); } } let status = iface.device().ppp.status(); if let Some(ipv4) = status.ipv4 { if let Some(want_addr) = ipv4.address { iface.update_ip_addrs(|addrs| { let addr = &mut addrs[0]; if addr.address() != want_addr.into() { *addr = IpCidr::new(want_addr.into(), 0); info!("Assigned a new IPv4 address: {}", want_addr); } }); } } { let mut socket = sockets.get::<UdpSocket>(udp_handle); if !socket.is_open() { socket.bind(6969).unwrap() } let client = match socket.recv() { Ok((data, endpoint)) => { debug!( "udp:6969 recv data: {:?} from {}", str::from_utf8(data.as_ref()).unwrap(), endpoint ); Some(endpoint) } Err(_) => None, }; if let Some(endpoint) = client { let data = b"hello\n"; debug!( "udp:6969 send data: {:?}", str::from_utf8(data.as_ref()).unwrap() ); socket.send_slice(data, endpoint).unwrap(); } } { let mut socket = sockets.get::<TcpSocket>(tcp1_handle); if !socket.is_open() { socket.listen(6969).unwrap(); } if socket.can_send() { debug!("tcp:6969 send greeting"); write!(socket, "hello\n").unwrap(); debug!("tcp:6969 close"); socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp2_handle); if !socket.is_open() { socket.listen(6970).unwrap() } if socket.is_active() && !tcp_6970_active { debug!("tcp:6970 connected"); } else if !socket.is_active() && tcp_6970_active { debug!("tcp:6970 disconnected"); } tcp_6970_active = socket.is_active(); if socket.may_recv() { let data = socket .recv(|buffer| { let recvd_len = buffer.len(); let mut data = buffer.to_owned(); if data.len() > 0 { debug!( "tcp:6970 recv data: {:?}", str::from_utf8(data.as_ref()).unwrap_or("(invalid utf8)") ); data = data.split(|&b| b == b'\n').collect::<Vec<_>>().concat(); data.reverse(); data.extend(b"\n"); } (recvd_len, data) }) .unwrap(); if socket.can_send() && data.len() > 0 { debug!( "tcp:6970 send data: {:?}", str::from_utf8(data.as_ref()).unwrap_or("(invalid utf8)") ); socket.send_slice(&data[..]).unwrap(); } } else if socket.may_send() { 
debug!("tcp:6970 close"); socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp3_handle); if !socket.is_open() { socket.listen(6971).unwrap(); socket.set_keep_alive(Some(Duration::from_millis(1000))); socket.set_timeout(Some(Duration::from_millis(2000))); } if socket.may_recv() { socket .recv(|buffer| { if buffer.len() > 0 { debug!("tcp:6971 recv {:?} octets", buffer.len()); } (buffer.len(), ()) }) .unwrap(); } else if socket.may_send() { socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp4_handle); if !socket.is_open() { socket.listen(6972).unwrap() } if socket.may_send() { socket .send(|data| { if data.len() > 0 { debug!("tcp:6972 send {:?} octets", data.len()); for (i, b) in data.iter_mut().enumerate() { *b = (i % 256) as u8; } } (data.len(), ()) }) .unwrap(); } } phy_wait(fd, iface.poll_delay(&sockets, timestamp)).expect("wait error"); } }
#[path = "../serial_port.rs"] mod serial_port; use as_slice::{AsMutSlice, AsSlice}; use clap::Clap; use std::fmt::Write as _; use std::io::{Read, Write}; use std::marker::PhantomData; use std::ops::Range; use std::os::unix::io::AsRawFd; use std::path::Path; use std::str; use log::*; use smoltcp::iface::InterfaceBuilder; use smoltcp::phy::wait as phy_wait; use smoltcp::phy::{Device, DeviceCapabilities, Medium, RxToken, TxToken}; use smoltcp::socket::SocketSet; use smoltcp::socket::{TcpSocket, TcpSocketBuffer}; use smoltcp::socket::{UdpPacketMetadata, UdpSocket, UdpSocketBuffer}; use smoltcp::time::{Duration, Instant}; use smoltcp::wire::{IpCidr, Ipv4Address}; use smoltcp::Result; use ppproto::{Config, PPPoS, PPPoSAction}; use serial_port::SerialPort; #[derive(Clap)] struct Opts { #[clap(short, long)] device: String, } const MTU: usize = 1520; struct Buf(Box<[u8; MTU]>); impl Buf { pub fn new() -> Self { Self(Box::new([0; MTU])) } } impl AsSlice for Buf { type Element = u8; fn as_slice(&self) -> &[Self::Element] { &*self.0 } } impl AsMutSlice for Buf { fn as_mut_slice(&mut self) -> &mut [Self::Element] { &mut *self.0 } } type PPP = PPPoS<'static, Buf>; struct PPPDevice { ppp: PPP, port: SerialPort, } impl PPPDevice { fn new(ppp: PPP, port: SerialPort) -> Self { Self { ppp, port } } } impl<'a> Device<'a> for PPPDevice { type RxToken = PPPRxToken<'a>; type TxToken = PPPTxToken<'a>; fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)> { self.port.set_nonblocking(true).unwrap(); let mut tx_buf = [0; 2048]; let mut read_buf = [0; 2048]; let mut data: &[u8] = &[]; loop { match self.ppp.poll(&mut tx_buf) { PPPoSAction::None => {} PPPoSAction::Transmit(n) => self.port.write_all(&tx_buf[..n]).unwrap(), PPPoSAction::Received(buf, range) => { self.ppp.put_rx_buf(Buf::new()); return Some(( PPPRxToken { buf, range, _phantom: PhantomData, }, PPPTxToken { port: &mut self.port, ppp: &mut self.ppp, }, )); } } if data.len() == 0 { let n = match self.port.read(&mut read_buf) { Ok(n) => n, Err(e) if e.kind() == std::io::ErrorKind::WouldBlock => return None, Err(e) => panic!("error reading: {:?}", e), }; data = &read_buf[..n]; } let n = self.ppp.consume(data); data = &data[n..]; } } fn transmit(&'a mut self) -> Option<Self::TxToken> { Some(PPPTxToken { port: &mut self.port, ppp: &mut self.ppp, }) } fn capabilities(&self) -> DeviceCapabilities { let mut caps: DeviceCapabilities = Default::default(); caps.max_transmission_unit = 1500; caps.medium = Medium::Ip; caps } } struct PPPRxToken<'a> { buf: Buf, range: Range<usize>, _phantom: PhantomData<&'a mut PPP>, } impl<'a> RxToken for PPPRxToken<'a> { fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { f(&mut self.buf.0[self.range]) } } struct PPPTxToken<'a> { port: &'a mut SerialPort, ppp: &'a mut PPP, } impl<'a> TxToken for PPPTxToken<'a> { fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>, { let mut pkt_buf = [0; 2048]; let pkt = &mut pkt_buf[..len]; let r = f(pkt)?; let mut tx_buf = [0; 2048]; let n = self.ppp.send(pkt, &mut tx_buf).unwrap(); self.port.set_nonblocking(false).unwrap(); self.port.write_all(&tx_buf[..n]).unwrap(); Ok(r) } } fn main() { env_logger::init(); let opts: Opts = Opts::parse(); let port = SerialPort::new(Path::new(&opts.device)).unwrap(); let fd = port.as_raw_fd(); let config = Config { username: b"myuser", password: b"mypass", }; let mut ppp = PPPoS::new(config); ppp.put_rx_buf(Buf::new()); 
ppp.open().unwrap(); let device = PPPDevice::new(ppp, port); let udp_rx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 64]); let udp_tx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 128]); let udp_socket = UdpSocket::new(udp_rx_buffer, udp_tx_buffer); let tcp1_rx_buffer = TcpSocketBuffer::new(vec![0; 64]); let tcp1_tx_buffer = TcpSocketBuffer::new(vec![0; 128]); let tcp1_socket = TcpSocket::new(tcp1_rx_buffer, tcp1_tx_buffer); let tcp2_rx_buffer = TcpSocketBuffer::new(vec![0; 64]); let tcp2_tx_buffer = TcpSocketBuffer::new(vec![0; 128]); let tcp2_socket = TcpSocket::new(tcp2_rx_buffer, tcp2_tx_buffer); let tcp3_rx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp3_tx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp3_socket = TcpSocket::new(tcp3_rx_buffer, tcp3_tx_buffer); let tcp4_rx_buffer = TcpSocketBuffer::ne
w(vec![0; 65535]); let tcp4_tx_buffer = TcpSocketBuffer::new(vec![0; 65535]); let tcp4_socket = TcpSocket::new(tcp4_rx_buffer, tcp4_tx_buffer); let ip_addrs = [IpCidr::new(Ipv4Address::UNSPECIFIED.into(), 0)]; let mut iface = InterfaceBuilder::new(device).ip_addrs(ip_addrs).finalize(); let mut sockets = SocketSet::new(vec![]); let udp_handle = sockets.add(udp_socket); let tcp1_handle = sockets.add(tcp1_socket); let tcp2_handle = sockets.add(tcp2_socket); let tcp3_handle = sockets.add(tcp3_socket); let tcp4_handle = sockets.add(tcp4_socket); let mut tcp_6970_active = false; loop { let timestamp = Instant::now(); match iface.poll(&mut sockets, timestamp) { Ok(_) => {} Err(e) => { debug!("poll error: {}", e); } } let status = iface.device().ppp.status(); if let Some(ipv4) = status.ipv4 { if let Some(want_addr) = ipv4.address { iface.update_ip_addrs(|addrs| { let addr = &mut addrs[0]; if addr.address() != want_addr.into() { *addr = IpCidr::new(want_addr.into(), 0); info!("Assigned a new IPv4 address: {}", want_addr); } }); } } { let mut socket = sockets.get::<UdpSocket>(udp_handle); if !socket.is_open() { socket.bind(6969).unwrap() } let client = match socket.recv() { Ok((data, endpoint)) => { debug!( "udp:6969 recv data: {:?} from {}", str::from_utf8(data.as_ref()).unwrap(), endpoint ); Some(endpoint) } Err(_) => None, }; if let Some(endpoint) = client { let data = b"hello\n"; debug!( "udp:6969 send data: {:?}", str::from_utf8(data.as_ref()).unwrap() ); socket.send_slice(data, endpoint).unwrap(); } } { let mut socket = sockets.get::<TcpSocket>(tcp1_handle); if !socket.is_open() { socket.listen(6969).unwrap(); } if socket.can_send() { debug!("tcp:6969 send greeting"); write!(socket, "hello\n").unwrap(); debug!("tcp:6969 close"); socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp2_handle); if !socket.is_open() { socket.listen(6970).unwrap() } if socket.is_active() && !tcp_6970_active { debug!("tcp:6970 connected"); } else if !socket.is_active() && tcp_6970_active { debug!("tcp:6970 disconnected"); } tcp_6970_active = socket.is_active(); if socket.may_recv() { let data = socket .recv(|buffer| { let recvd_len = buffer.len(); let mut data = buffer.to_owned(); if data.len() > 0 { debug!( "tcp:6970 recv data: {:?}", str::from_utf8(data.as_ref()).unwrap_or("(invalid utf8)") ); data = data.split(|&b| b == b'\n').collect::<Vec<_>>().concat(); data.reverse(); data.extend(b"\n"); } (recvd_len, data) }) .unwrap(); if socket.can_send() && data.len() > 0 { debug!( "tcp:6970 send data: {:?}", str::from_utf8(data.as_ref()).unwrap_or("(invalid utf8)") ); socket.send_slice(&data[..]).unwrap(); } } else if socket.may_send() { debug!("tcp:6970 close"); socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp3_handle); if !socket.is_open() { socket.listen(6971).unwrap(); socket.set_keep_alive(Some(Duration::from_millis(1000))); socket.set_timeout(Some(Duration::from_millis(2000))); } if socket.may_recv() { socket .recv(|buffer| { if buffer.len() > 0 { debug!("tcp:6971 recv {:?} octets", buffer.len()); } (buffer.len(), ()) }) .unwrap(); } else if socket.may_send() { socket.close(); } } { let mut socket = sockets.get::<TcpSocket>(tcp4_handle); if !socket.is_open() { socket.listen(6972).unwrap() } if socket.may_send() { socket .send(|data| { if data.len() > 0 { debug!("tcp:6972 send {:?} octets", data.len()); for (i, b) in data.iter_mut().enumerate() { *b = (i % 256) as u8; } } (data.len(), ()) }) .unwrap(); } } phy_wait(fd, iface.poll_delay(&sockets, timestamp)).expect("wait error"); } }
function_block-function_prefixed
[ { "content": "fn parse_options(mut pkt: &[u8], mut f: impl FnMut(u8, &[u8])) -> Result<(), MalformedError> {\n\n while pkt.len() != 0 {\n\n if pkt.len() < 2 {\n\n return Err(MalformedError);\n\n }\n\n\n\n let code = pkt[0];\n\n let len = pkt[1] as usize;\n\n\n\n if pkt.len() < len {\n\n return Err(MalformedError);\n\n }\n\n if len < 2 {\n\n return Err(MalformedError);\n\n }\n\n\n\n let data = &pkt[2..len];\n\n f(code, data);\n\n pkt = &pkt[len..];\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub struct MalformedError;\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 0, "score": 150214.6276984783 }, { "content": "pub fn crc16(mut seed: u16, data: &[u8]) -> u16 {\n\n for &b in data {\n\n let e = seed as u8 ^ b;\n\n let f = e ^ (e << 4);\n\n let f = f as u16;\n\n seed = (seed >> 8) ^ (f << 8) ^ (f << 3) ^ (f >> 4);\n\n }\n\n seed\n\n}\n", "file_path": "ppproto/src/pppos/crc.rs", "rank": 1, "score": 117900.55247980959 }, { "content": "struct OptionData(Vec<u8, MAX_OPTION_LEN>);\n\n\n\n#[cfg(feature = \"defmt\")]\n\nimpl defmt::Format for OptionData {\n\n fn format(&self, fmt: defmt::Formatter) {\n\n defmt::write!(fmt, \"{=[?]}\", &self.0[..])\n\n }\n\n}\n", "file_path": "ppproto/src/wire.rs", "rank": 2, "score": 91095.55983730985 }, { "content": "struct IpOption {\n\n address: Ipv4Address,\n\n is_rejected: bool,\n\n}\n\n\n\nimpl IpOption {\n\n fn new() -> Self {\n\n Self {\n\n address: Ipv4Address::UNSPECIFIED,\n\n is_rejected: false,\n\n }\n\n }\n\n\n\n fn get(&self) -> Option<Ipv4Address> {\n\n if self.is_rejected || self.address.is_unspecified() {\n\n None\n\n } else {\n\n Some(self.address)\n\n }\n\n }\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 3, "score": 52730.46805506429 }, { "content": " pub(crate) pap: PAP<'a>,\n\n pub(crate) ipv4cp: OptionFsm<IPv4CP>,\n\n}\n\n\n\nimpl<'a> PPP<'a> {\n\n pub fn new(config: Config<'a>) -> Self {\n\n Self {\n\n phase: Phase::Dead,\n\n lcp: OptionFsm::new(LCP::new()),\n\n pap: PAP::new(config.username, config.password),\n\n ipv4cp: OptionFsm::new(IPv4CP::new()),\n\n }\n\n }\n\n\n\n pub fn status(&self) -> Status {\n\n Status {\n\n ipv4: if self.ipv4cp.state() == State::Opened {\n\n Some(self.ipv4cp.proto().status())\n\n } else {\n\n None\n", "file_path": "ppproto/src/ppp/mod.rs", "rank": 4, "score": 43877.116585060634 }, { "content": "mod ipv4cp;\n\nmod lcp;\n\nmod option_fsm;\n\nmod pap;\n\n\n\nuse core::convert::TryInto;\n\n\n\nuse self::ipv4cp::IPv4CP;\n\nuse self::lcp::{AuthType, LCP};\n\nuse self::option_fsm::{OptionFsm, State};\n\nuse self::pap::{State as PAPState, PAP};\n\nuse crate::wire::{Packet, ProtocolType};\n\n\n\npub use self::ipv4cp::Ipv4Status;\n\n\n\npub struct Config<'a> {\n\n pub username: &'a [u8],\n\n pub password: &'a [u8],\n\n}\n\n\n", "file_path": "ppproto/src/ppp/mod.rs", "rank": 5, "score": 43876.57505999513 }, { "content": " },\n\n }\n\n }\n\n\n\n pub fn open(&mut self) -> Result<(), crate::InvalidStateError> {\n\n match self.phase {\n\n Phase::Dead => {\n\n self.phase = Phase::Establish;\n\n Ok(())\n\n }\n\n _ => Err(crate::InvalidStateError),\n\n }\n\n }\n\n\n\n pub fn received(&mut self, pkt: &mut [u8], mut tx: impl FnMut(Packet<'_>)) {\n\n let proto = u16::from_be_bytes(pkt[0..2].try_into().unwrap());\n\n\n\n match proto.into() {\n\n ProtocolType::LCP => self.lcp.handle(pkt, &mut tx),\n\n ProtocolType::PAP => self.pap.handle(pkt, &mut tx),\n", "file_path": "ppproto/src/ppp/mod.rs", "rank": 6, "score": 
43876.43104351959 }, { "content": " ProtocolType::IPv4 => todo!(),\n\n ProtocolType::IPv4CP => self.ipv4cp.handle(pkt, &mut tx),\n\n ProtocolType::Unknown => tx(self.lcp.send_protocol_reject(pkt)),\n\n }\n\n }\n\n\n\n pub fn poll(&mut self, mut tx: impl FnMut(Packet<'_>)) {\n\n // TODO this state machine can probably be written in nicer way.\n\n // TODO this is probably not rfc compliant, check what other impls do\n\n let old_phase = self.phase;\n\n match self.phase {\n\n Phase::Dead => {}\n\n Phase::Establish => {\n\n if self.lcp.state() == State::Closed {\n\n tx(self.lcp.open());\n\n }\n\n\n\n if self.lcp.state() == State::Opened {\n\n match self.lcp.proto().auth {\n\n AuthType::None => {\n", "file_path": "ppproto/src/ppp/mod.rs", "rank": 7, "score": 43873.17485651985 }, { "content": "#[derive(Copy, Clone, Eq, PartialEq, Debug, Ord, PartialOrd)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum Phase {\n\n Dead,\n\n Establish,\n\n Auth,\n\n Network,\n\n Open,\n\n}\n\n\n\n#[derive(Debug)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub struct Status {\n\n /// IPv4 configuration obtained from IPv4CP. None if IPv4CP is not up.\n\n pub ipv4: Option<Ipv4Status>,\n\n}\n\n\n\npub struct PPP<'a> {\n\n phase: Phase,\n\n pub(crate) lcp: OptionFsm<LCP>,\n", "file_path": "ppproto/src/ppp/mod.rs", "rank": 8, "score": 43864.50162888902 }, { "content": " tx(self.ipv4cp.open());\n\n } else {\n\n if self.ipv4cp.state() != State::Closed {\n\n self.ipv4cp.close();\n\n }\n\n }\n\n }\n\n Phase::Network => {\n\n if self.ipv4cp.state() == State::Opened {\n\n self.phase = Phase::Open;\n\n }\n\n }\n\n Phase::Open => {}\n\n }\n\n\n\n if old_phase != self.phase {\n\n info!(\"PPP link phase {:?} -> {:?}\", old_phase, self.phase);\n\n }\n\n }\n\n}\n", "file_path": "ppproto/src/ppp/mod.rs", "rank": 9, "score": 43860.92396046482 }, { "content": " tx(self.ipv4cp.open());\n\n self.phase = Phase::Network;\n\n }\n\n AuthType::PAP => {\n\n tx(self.pap.open());\n\n self.phase = Phase::Auth;\n\n }\n\n }\n\n } else {\n\n if self.pap.state() != PAPState::Closed {\n\n self.pap.close();\n\n }\n\n if self.ipv4cp.state() != State::Closed {\n\n self.ipv4cp.close();\n\n }\n\n }\n\n }\n\n Phase::Auth => {\n\n if self.pap.state() == PAPState::Opened {\n\n self.phase = Phase::Network;\n", "file_path": "ppproto/src/ppp/mod.rs", "rank": 10, "score": 43860.76107709951 }, { "content": "pub trait Try {\n\n type Ok;\n\n type Error;\n\n fn into_result(self) -> Result<Self::Ok, Self::Error>;\n\n}\n\n\n\nimpl<T> Try for Option<T> {\n\n type Ok = T;\n\n type Error = NoneError;\n\n\n\n #[inline]\n\n fn into_result(self) -> Result<T, NoneError> {\n\n self.ok_or(NoneError)\n\n }\n\n}\n\n\n\nimpl<T, E> Try for Result<T, E> {\n\n type Ok = T;\n\n type Error = E;\n\n\n\n #[inline]\n\n fn into_result(self) -> Self {\n\n self\n\n }\n\n}\n", "file_path": "ppproto/src/fmt.rs", "rank": 11, "score": 33579.48462970654 }, { "content": " } else {\n\n OFlag::empty()\n\n };\n\n fcntl(self.fd, FcntlArg::F_SETFL(f))?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl AsRawFd for SerialPort {\n\n fn as_raw_fd(&self) -> RawFd {\n\n self.fd\n\n }\n\n}\n\n\n\nimpl io::Read for SerialPort {\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n Ok(nix::unistd::read(self.fd, buf)?)\n\n }\n\n}\n\n\n", "file_path": "examples/src/serial_port.rs", "rank": 12, "score": 23936.36066140562 }, { "content": "impl io::Write for SerialPort {\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n Ok(nix::unistd::write(self.fd, buf)?)\n\n 
}\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "examples/src/serial_port.rs", "rank": 13, "score": 23934.4345874222 }, { "content": "use std::io;\n\nuse std::os::unix::io::{AsRawFd, RawFd};\n\nuse std::path::Path;\n\n\n\nuse nix::fcntl::{fcntl, FcntlArg, OFlag};\n\nuse nix::sys::termios;\n\n\n\npub struct SerialPort {\n\n fd: RawFd,\n\n}\n\n\n\nimpl SerialPort {\n\n pub fn new(path: &Path) -> io::Result<Self> {\n\n let fd = nix::fcntl::open(\n\n path,\n\n OFlag::O_RDWR | OFlag::O_NOCTTY,\n\n nix::sys::stat::Mode::empty(),\n\n )?;\n\n\n\n let mut cfg = termios::tcgetattr(fd)?;\n", "file_path": "examples/src/serial_port.rs", "rank": 14, "score": 23933.245068904085 }, { "content": " cfg.input_flags = termios::InputFlags::empty();\n\n cfg.output_flags = termios::OutputFlags::empty();\n\n cfg.control_flags = termios::ControlFlags::empty();\n\n cfg.local_flags = termios::LocalFlags::empty();\n\n termios::cfmakeraw(&mut cfg);\n\n cfg.input_flags |= termios::InputFlags::IGNBRK;\n\n cfg.control_flags |= termios::ControlFlags::CREAD;\n\n cfg.control_flags |= termios::ControlFlags::CRTSCTS;\n\n termios::cfsetospeed(&mut cfg, termios::BaudRate::B115200)?;\n\n termios::cfsetispeed(&mut cfg, termios::BaudRate::B115200)?;\n\n termios::cfsetspeed(&mut cfg, termios::BaudRate::B115200)?;\n\n termios::tcsetattr(fd, termios::SetArg::TCSANOW, &cfg)?;\n\n termios::tcflush(fd, termios::FlushArg::TCIOFLUSH)?;\n\n\n\n Ok(Self { fd })\n\n }\n\n\n\n pub fn set_nonblocking(&mut self, nonblocking: bool) -> io::Result<()> {\n\n let f = if nonblocking {\n\n OFlag::O_NONBLOCK\n", "file_path": "examples/src/serial_port.rs", "rank": 15, "score": 23921.337372016755 }, { "content": "}\n\n\n\npub struct PPPoS<'a, B: AsMutSlice<Element = u8>> {\n\n frame_reader: FrameReader,\n\n rx_buf: Option<B>,\n\n ppp: PPP<'a>,\n\n}\n\n\n\nimpl<'a, B: AsMutSlice<Element = u8>> PPPoS<'a, B> {\n\n pub fn new(config: Config<'a>) -> Self {\n\n Self {\n\n frame_reader: FrameReader::new(),\n\n rx_buf: None,\n\n ppp: PPP::new(config),\n\n }\n\n }\n\n\n\n pub fn status(&self) -> Status {\n\n self.ppp.status()\n\n }\n", "file_path": "ppproto/src/pppos/mod.rs", "rank": 16, "score": 23229.233113394555 }, { "content": " Ok(w.len())\n\n }\n\n\n\n /// Consume data received from the serial connection.\n\n ///\n\n /// After calling `consume`, `poll` must be called to process the consumed data.\n\n ///\n\n /// Returns how many bytes were actually consumed. 
If less than `data.len()`, `consume`\n\n /// must be called again with the remaining data.\n\n pub fn consume(&mut self, data: &[u8]) -> usize {\n\n let buf = unwrap!(self.rx_buf.as_mut(), \"called consume() without an rx_buf\");\n\n self.frame_reader.consume(buf.as_mut_slice(), data)\n\n }\n\n}\n", "file_path": "ppproto/src/pppos/mod.rs", "rank": 17, "score": 23224.784112889105 }, { "content": "mod crc;\n\nmod frame_reader;\n\nmod frame_writer;\n\n\n\nuse as_slice::AsMutSlice;\n\nuse core::convert::TryInto;\n\nuse core::ops::Range;\n\n\n\nuse self::frame_reader::FrameReader;\n\nuse self::frame_writer::FrameWriter;\n\nuse crate::ppp::PPP;\n\nuse crate::wire::{Packet, ProtocolType};\n\nuse crate::{Config, Status};\n\n\n\npub use self::frame_writer::BufferFullError;\n\n\n\npub enum PPPoSAction<B> {\n\n None,\n\n Received(B, Range<usize>),\n\n Transmit(usize),\n", "file_path": "ppproto/src/pppos/mod.rs", "rank": 18, "score": 23224.035600389525 }, { "content": " };\n\n\n\n // Handle input\n\n if let Some(range) = self.frame_reader.receive() {\n\n let pkt = &mut buf[range.clone()];\n\n let proto = u16::from_be_bytes(pkt[0..2].try_into().unwrap());\n\n match proto.into() {\n\n ProtocolType::IPv4 => {\n\n return PPPoSAction::Received(\n\n self.rx_buf.take().unwrap(),\n\n (range.start + 2)..range.end,\n\n )\n\n }\n\n _ => self.ppp.received(pkt, &mut tx),\n\n }\n\n }\n\n\n\n self.ppp.poll(tx);\n\n\n\n let r = w.len();\n", "file_path": "ppproto/src/pppos/mod.rs", "rank": 19, "score": 23222.68878327894 }, { "content": " if r == 0 {\n\n PPPoSAction::None\n\n } else {\n\n PPPoSAction::Transmit(r)\n\n }\n\n }\n\n\n\n /// Send an IP packet.\n\n ///\n\n /// You must provide buffer space for the data to be transmitted, and transmit the returned\n\n /// slice over the serial connection.\n\n pub fn send(&mut self, pkt: &[u8], tx_buf: &mut [u8]) -> Result<usize, BufferFullError> {\n\n // TODO check IPv4CP is up\n\n\n\n let mut w = FrameWriter::new_with_asyncmap(tx_buf, self.ppp.lcp.proto().asyncmap_remote);\n\n let proto: u16 = ProtocolType::IPv4.into();\n\n w.start()?;\n\n w.append(&proto.to_be_bytes())?;\n\n w.append(pkt)?;\n\n w.finish()?;\n", "file_path": "ppproto/src/pppos/mod.rs", "rank": 20, "score": 23221.801502435803 }, { "content": " /// to higher layers for processing.\n\n ///\n\n /// You must provide buffer space for data to be transmitted, and transmit the returned slice\n\n /// over the serial connection if Action::Transmit is returned.\n\n pub fn poll(&mut self, tx_buf: &mut [u8]) -> PPPoSAction<B> {\n\n let mut w = FrameWriter::new(tx_buf);\n\n\n\n let buf = unwrap!(self.rx_buf.as_mut(), \"called poll() without an rx_buf\").as_mut_slice();\n\n\n\n let mut tx = |pkt: Packet<'_>| {\n\n //info!(\"tx: {:?}\", pkt);\n\n\n\n let mut buf = [0; 128];\n\n let len = pkt.buffer_len();\n\n assert!(len <= buf.len());\n\n pkt.emit(&mut buf[..len]);\n\n\n\n w.start().unwrap();\n\n w.append(&mut buf[..len]).unwrap();\n\n w.finish().unwrap();\n", "file_path": "ppproto/src/pppos/mod.rs", "rank": 21, "score": 23220.913340960484 }, { "content": "\n\n pub fn open(&mut self) -> Result<(), crate::InvalidStateError> {\n\n self.ppp.open()\n\n }\n\n\n\n pub fn has_rx_buf(&self) -> bool {\n\n self.rx_buf.is_some()\n\n }\n\n\n\n pub fn put_rx_buf(&mut self, rx_buf: B) {\n\n if self.rx_buf.is_some() {\n\n panic!(\"called put_rx_buf when we already have a buffer.\")\n\n }\n\n\n\n self.rx_buf = Some(rx_buf)\n\n }\n\n\n\n /// Process received data and generate data to be send.\n\n ///\n\n /// Action::Received is returned 
when an IP packet is received. You must then pass the packet\n", "file_path": "ppproto/src/pppos/mod.rs", "rank": 22, "score": 23216.29600137553 }, { "content": " pub fn new(username: &'a [u8], password: &'a [u8]) -> Self {\n\n assert!(username.len() <= u8::MAX as usize);\n\n assert!(password.len() <= u8::MAX as usize);\n\n Self {\n\n state: State::Closed,\n\n id: 1,\n\n username,\n\n password,\n\n }\n\n }\n\n\n\n pub fn state(&self) -> State {\n\n self.state\n\n }\n\n\n\n pub fn open(&mut self) -> Packet<'_> {\n\n assert!(self.state == State::Closed);\n\n self.state = State::ReqSent;\n\n self.send_configure_request()\n\n }\n", "file_path": "ppproto/src/ppp/pap.rs", "rank": 23, "score": 20674.512438902162 }, { "content": "\n\n pub fn close(&mut self) {\n\n self.state = State::Closed;\n\n }\n\n\n\n pub fn handle(&mut self, pkt: &mut [u8], mut tx: impl FnMut(Packet<'_>)) {\n\n if pkt.len() < 6 {\n\n info!(\"warn: too short\");\n\n return;\n\n }\n\n let code = Code::from(pkt[2]);\n\n let _id = pkt[3];\n\n let len = u16::from_be_bytes(pkt[4..6].try_into().unwrap()) as usize;\n\n if len > pkt.len() {\n\n info!(\"warn: len too short\");\n\n return;\n\n }\n\n let _pkt = &mut pkt[..len + 2];\n\n\n\n info!(\"PAP: rx {:?}\", code);\n", "file_path": "ppproto/src/ppp/pap.rs", "rank": 24, "score": 20671.302310985513 }, { "content": " Verdict::Nack(&[0xc0, 0x23])\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn own_options(&mut self, mut f: impl FnMut(u8, &[u8])) {\n\n if !self.asyncmap_rej {\n\n f(Option::Asyncmap.into(), &self.asyncmap.to_be_bytes());\n\n }\n\n }\n\n\n\n fn own_option_nacked(&mut self, code: u8, data: &[u8], is_rej: bool) {\n\n let opt = Option::from(code);\n\n trace!(\"LCP nak {:?} {:?} {:?} {:?}\", code, opt, data, is_rej);\n\n match opt {\n\n Option::Asyncmap => {\n\n if !is_rej && data.len() == 4 {\n\n self.asyncmap = u32::from_be_bytes(data.try_into().unwrap())\n\n } else {\n\n self.asyncmap_rej = true\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n", "file_path": "ppproto/src/ppp/lcp.rs", "rank": 25, "score": 20670.335891624847 }, { "content": "}\n\n\n\nimpl LCP {\n\n pub fn new() -> Self {\n\n Self {\n\n auth: AuthType::None,\n\n asyncmap_remote: 0xFFFFFFFF,\n\n asyncmap: 0x00000000,\n\n asyncmap_rej: false,\n\n }\n\n }\n\n}\n\n\n\nimpl Protocol for LCP {\n\n fn protocol(&self) -> ProtocolType {\n\n ProtocolType::LCP\n\n }\n\n\n\n fn peer_options_start(&mut self) {\n\n self.auth = AuthType::None;\n", "file_path": "ppproto/src/ppp/lcp.rs", "rank": 26, "score": 20669.620678428604 }, { "content": "use core::convert::TryInto;\n\n\n\nuse crate::wire::{Code, PPPPayload, Packet, Payload, ProtocolType};\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum State {\n\n Closed,\n\n ReqSent,\n\n Opened,\n\n}\n\npub struct PAP<'a> {\n\n state: State,\n\n id: u8,\n\n\n\n username: &'a [u8],\n\n password: &'a [u8],\n\n}\n\n\n\nimpl<'a> PAP<'a> {\n", "file_path": "ppproto/src/ppp/pap.rs", "rank": 27, "score": 20668.695414381542 }, { "content": " fn protocol(&self) -> ProtocolType {\n\n ProtocolType::IPv4CP\n\n }\n\n\n\n fn peer_options_start(&mut self) {}\n\n\n\n fn peer_option_received(&mut self, code: u8, data: &[u8]) -> Verdict {\n\n let opt = OptionCode::from(code);\n\n trace!(\"IPv4CP: rx option {:?} {:?} {:?}\", code, opt, data);\n\n match opt {\n\n OptionCode::IpAddress => {\n\n if data.len() == 4 {\n\n self.peer_address = Ipv4Address::from_bytes(data);\n\n Verdict::Ack\n\n } else {\n\n Verdict::Rej\n\n }\n\n }\n\n _ => 
Verdict::Rej,\n\n }\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 28, "score": 20668.35673175573 }, { "content": " }\n\n\n\n fn peer_option_received(&mut self, code: u8, data: &[u8]) -> Verdict {\n\n let opt = Option::from(code);\n\n trace!(\"LCP: rx option {:?} {:?} {:?}\", code, opt, data);\n\n match opt {\n\n Option::Unknown => Verdict::Rej,\n\n Option::Asyncmap => {\n\n if data.len() == 4 {\n\n self.asyncmap_remote = u32::from_be_bytes(data.try_into().unwrap());\n\n Verdict::Ack\n\n } else {\n\n Verdict::Rej\n\n }\n\n }\n\n Option::Auth => {\n\n if data == &[0xc0, 0x23] {\n\n self.auth = AuthType::PAP;\n\n Verdict::Ack\n\n } else {\n", "file_path": "ppproto/src/ppp/lcp.rs", "rank": 29, "score": 20667.43928850923 }, { "content": "\n\n fn nacked(&mut self, data: &[u8], is_rej: bool) {\n\n if is_rej {\n\n self.is_rejected = true\n\n } else {\n\n if data.len() == 4 {\n\n self.address = Ipv4Address::from_bytes(data);\n\n } else {\n\n // Peer wants us to use an address that's not 4 bytes.\n\n // Should never happen, but mark option as rejected just in case to\n\n // avoid endless loop.\n\n self.is_rejected = true\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub struct Ipv4Status {\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 30, "score": 20665.558664565404 }, { "content": " }\n\n }\n\n\n\n fn own_option_nacked(&mut self, code: u8, data: &[u8], is_rej: bool) {\n\n let opt = OptionCode::from(code);\n\n trace!(\"IPv4CP nak {:?} {:?} {:?} {:?}\", code, opt, data, is_rej);\n\n match opt {\n\n OptionCode::Unknown => {}\n\n OptionCode::IpAddress => self.address.nacked(data, is_rej),\n\n OptionCode::Dns1 => self.dns_server_1.nacked(data, is_rej),\n\n OptionCode::Dns2 => self.dns_server_2.nacked(data, is_rej),\n\n }\n\n }\n\n}\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 31, "score": 20664.464907359048 }, { "content": " proto: ProtocolType::PAP,\n\n payload: Payload::PPP(\n\n Code::ConfigureReq,\n\n self.next_id(),\n\n PPPPayload::PAP(self.username, self.password),\n\n ),\n\n }\n\n }\n\n}\n", "file_path": "ppproto/src/ppp/pap.rs", "rank": 32, "score": 20664.06672024269 }, { "content": " pub address: Option<Ipv4Address>,\n\n pub peer_address: Option<Ipv4Address>,\n\n pub dns_servers: [Option<Ipv4Address>; 2],\n\n}\n\n\n\npub(crate) struct IPv4CP {\n\n peer_address: Ipv4Address,\n\n\n\n address: IpOption,\n\n dns_server_1: IpOption,\n\n dns_server_2: IpOption,\n\n}\n\n\n\nimpl IPv4CP {\n\n pub fn new() -> Self {\n\n Self {\n\n peer_address: Ipv4Address::UNSPECIFIED,\n\n\n\n address: IpOption::new(),\n\n dns_server_1: IpOption::new(),\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 33, "score": 20662.676548930776 }, { "content": " }\n\n\n\n fn own_options(&mut self, mut f: impl FnMut(u8, &[u8])) {\n\n if !self.address.is_rejected {\n\n f(\n\n OptionCode::IpAddress.into(),\n\n self.address.address.as_bytes(),\n\n );\n\n }\n\n if !self.dns_server_1.is_rejected {\n\n f(\n\n OptionCode::Dns1.into(),\n\n self.dns_server_1.address.as_bytes(),\n\n );\n\n }\n\n if !self.dns_server_2.is_rejected {\n\n f(\n\n OptionCode::Dns2.into(),\n\n self.dns_server_2.address.as_bytes(),\n\n );\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 34, "score": 20662.143745787995 }, { "content": " dns_server_2: IpOption::new(),\n\n }\n\n }\n\n\n\n pub fn status(&self) -> Ipv4Status {\n\n let peer_address = if self.peer_address.is_unspecified() {\n\n None\n\n } else {\n\n Some(self.peer_address)\n\n };\n\n\n\n Ipv4Status {\n\n 
address: self.address.get(),\n\n peer_address,\n\n dns_servers: [self.dns_server_1.get(), self.dns_server_2.get()],\n\n }\n\n }\n\n}\n\n\n\nimpl Protocol for IPv4CP {\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 35, "score": 20660.88948192518 }, { "content": "use core::convert::TryInto;\n\nuse num_enum::{FromPrimitive, IntoPrimitive};\n\n\n\nuse super::option_fsm::{Protocol, Verdict};\n\nuse crate::wire::ProtocolType;\n\n\n\n#[derive(FromPrimitive, IntoPrimitive, Copy, Clone, Eq, PartialEq, Debug)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\n#[repr(u8)]\n", "file_path": "ppproto/src/ppp/lcp.rs", "rank": 36, "score": 20660.358702123314 }, { "content": "use num_enum::{FromPrimitive, IntoPrimitive};\n\n\n\nuse super::option_fsm::{Protocol, Verdict};\n\nuse crate::wire::ProtocolType;\n\n\n\nuse smoltcp::wire::Ipv4Address;\n\n\n\n#[derive(FromPrimitive, IntoPrimitive, Copy, Clone, Eq, PartialEq, Debug)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\n#[repr(u8)]\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 37, "score": 20660.303133709265 }, { "content": " let old_state = self.state;\n\n match (code, self.state) {\n\n (Code::ConfigureAck, State::ReqSent) => self.state = State::Opened,\n\n (Code::ConfigureNack, State::ReqSent) => tx(self.send_configure_request()),\n\n _ => {}\n\n }\n\n\n\n if old_state != self.state {\n\n info!(\"PAP: state {:?} -> {:?}\", old_state, self.state);\n\n }\n\n }\n\n\n\n fn next_id(&mut self) -> u8 {\n\n self.id = self.id.wrapping_add(1);\n\n self.id\n\n }\n\n\n\n fn send_configure_request(&mut self) -> Packet<'a> {\n\n info!(\"PAP: tx {:?}\", Code::ConfigureReq);\n\n Packet {\n", "file_path": "ppproto/src/ppp/pap.rs", "rank": 38, "score": 20658.523548817306 }, { "content": " pub fn open(&mut self) -> Packet<'_> {\n\n assert!(self.state == State::Closed);\n\n self.state = State::ReqSent;\n\n self.send_configure_request()\n\n }\n\n\n\n pub fn close(&mut self) {\n\n self.state = State::Closed;\n\n }\n\n\n\n pub fn handle(&mut self, pkt: &mut [u8], mut tx: impl FnMut(Packet<'_>)) {\n\n if pkt.len() < 6 {\n\n info!(\"warn: too short\");\n\n return;\n\n }\n\n let code = Code::from(pkt[2]);\n\n let id = pkt[3];\n\n let len = u16::from_be_bytes(pkt[4..6].try_into().unwrap()) as usize;\n\n if len + 2 > pkt.len() {\n\n info!(\"warn: len too short\");\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 39, "score": 19703.617627031093 }, { "content": "use core::convert::TryInto;\n\nuse heapless::Vec;\n\n\n\nuse crate::wire::{Code, OptionVal, Options, PPPPayload, Packet, Payload, ProtocolType};\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub(crate) enum Verdict<'a> {\n\n Ack,\n\n Nack(&'a [u8]),\n\n Rej,\n\n}\n\n\n\npub(crate) trait Protocol {\n\n fn protocol(&self) -> ProtocolType;\n\n\n\n fn own_options(&mut self, f: impl FnMut(u8, &[u8]));\n\n fn own_option_nacked(&mut self, code: u8, data: &[u8], is_rej: bool);\n\n\n\n fn peer_options_start(&mut self);\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 40, "score": 19702.966600391006 }, { "content": " fn received_configure_req(&mut self, pkt: &[u8]) -> Packet<'static> {\n\n let id = pkt[3];\n\n let mut code = Code::ConfigureAck;\n\n\n\n if pkt.len() < 6 {\n\n panic!(\"too short\");\n\n }\n\n let pkt = &pkt[6..]; // skip header\n\n\n\n let mut opts = Vec::new();\n\n\n\n self.proto.peer_options_start();\n\n parse_options(pkt, |ocode, odata| {\n\n let (ret_code, data) = match 
self.proto.peer_option_received(ocode, odata) {\n\n Verdict::Ack => (Code::ConfigureAck, odata),\n\n Verdict::Nack(data) => (Code::ConfigureNack, data),\n\n Verdict::Rej => (Code::ConfigureRej, odata),\n\n };\n\n\n\n if code < ret_code {\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 41, "score": 19700.62639614021 }, { "content": "\n\n fn send_configure_request(&mut self) -> Packet<'static> {\n\n let mut opts = Vec::new();\n\n\n\n self.proto.own_options(|code, data| {\n\n if opts.push(OptionVal::new(code, data)).is_err() {\n\n panic!(\"tx ConfigureReq: too many options\")\n\n }\n\n });\n\n\n\n Packet {\n\n proto: self.proto.protocol(),\n\n payload: Payload::PPP(\n\n Code::ConfigureReq,\n\n self.next_id(),\n\n PPPPayload::Options(Options(opts)),\n\n ),\n\n }\n\n }\n\n\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 42, "score": 19700.302367538992 }, { "content": " PPPPayload::Raw(&mut pkt[2..]),\n\n ),\n\n }\n\n }\n\n\n\n fn send_echo_response<'a>(&mut self, pkt: &'a mut [u8]) -> Packet<'a> {\n\n Packet {\n\n proto: self.proto.protocol(),\n\n payload: Payload::Raw(&mut pkt[2..]),\n\n }\n\n }\n\n\n\n // TODO maybe this should be in PPP because it's only for LCP\n\n pub fn send_protocol_reject<'a>(&mut self, pkt: &'a mut [u8]) -> Packet<'a> {\n\n Packet {\n\n proto: self.proto.protocol(),\n\n payload: Payload::PPP(Code::ProtocolRej, self.next_id(), PPPPayload::Raw(pkt)),\n\n }\n\n }\n\n\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 43, "score": 19699.02257457325 }, { "content": " code = ret_code;\n\n opts.clear();\n\n }\n\n\n\n if code == ret_code {\n\n if opts.push(OptionVal::new(ocode, data)).is_err() {\n\n panic!(\"rx ConfigureReq: too many options\")\n\n }\n\n }\n\n })\n\n .unwrap();\n\n\n\n Packet {\n\n proto: self.proto.protocol(),\n\n payload: Payload::PPP(code, id, PPPPayload::Options(Options(opts))),\n\n }\n\n }\n\n}\n\n\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 44, "score": 19697.683047118662 }, { "content": " pub fn new(proto: P) -> Self {\n\n Self {\n\n id: 1,\n\n state: State::Closed,\n\n proto,\n\n }\n\n }\n\n\n\n pub fn state(&self) -> State {\n\n self.state\n\n }\n\n\n\n pub fn proto(&self) -> &P {\n\n &self.proto\n\n }\n\n\n\n pub fn proto_mut(&mut self) -> &mut P {\n\n &mut self.proto\n\n }\n\n\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 45, "score": 19697.483154292782 }, { "content": " fn send_terminate_request<'a>(&mut self, reason: &'a mut [u8]) -> Packet<'a> {\n\n Packet {\n\n proto: self.proto.protocol(),\n\n payload: Payload::PPP(Code::TerminateReq, self.next_id(), PPPPayload::Raw(reason)),\n\n }\n\n }\n\n\n\n fn send_terminate_ack(&mut self, id: u8) -> Packet<'static> {\n\n Packet {\n\n proto: self.proto.protocol(),\n\n payload: Payload::PPP(Code::TerminateAck, id, PPPPayload::Raw(&mut [])),\n\n }\n\n }\n\n\n\n fn send_code_reject<'a>(&mut self, pkt: &'a mut [u8]) -> Packet<'a> {\n\n Packet {\n\n proto: self.proto.protocol(),\n\n payload: Payload::PPP(\n\n Code::CodeRej,\n\n self.next_id(),\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 46, "score": 19695.868781111796 }, { "content": " fn peer_option_received(&mut self, code: u8, data: &[u8]) -> Verdict;\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub(crate) enum State {\n\n Closed,\n\n ReqSent,\n\n AckReceived,\n\n AckSent,\n\n Opened,\n\n}\n\n\n\npub(crate) struct OptionFsm<P> {\n\n id: u8,\n\n state: State,\n\n proto: P,\n\n}\n\n\n\nimpl<P: Protocol> OptionFsm<P> {\n", 
"file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 47, "score": 19695.493118042174 }, { "content": " return;\n\n }\n\n let pkt = &mut pkt[..len + 2];\n\n\n\n info!(\"{:?}: rx {:?}\", self.proto.protocol(), code);\n\n let old_state = self.state;\n\n match (code, self.state) {\n\n // reply EchoReq on state Opened, ignore in all other states (including Closed!)\n\n (Code::EchoReq, State::Opened) => tx(self.send_echo_response(pkt)),\n\n (Code::EchoReq, x) => {\n\n info!(\"ignoring unexpected EchoReq in state {:?}\", x)\n\n }\n\n\n\n // DiscardReqs are, well, discarded.\n\n (Code::DiscardReq, _) => {}\n\n\n\n // in state Closed, reply to any packet with TerminateAck (except to EchoReq!)\n\n (_, State::Closed) => tx(self.send_terminate_ack(id)),\n\n\n\n (Code::ConfigureReq, _) => {\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 48, "score": 19692.953460244695 }, { "content": " }\n\n\n\n (Code::ConfigureAck, State::ReqSent) => self.state = State::AckReceived,\n\n (Code::ConfigureAck, State::AckSent) => self.state = State::Opened,\n\n (Code::ConfigureAck, State::AckReceived) | (Code::ConfigureAck, State::Opened) => {\n\n self.state = State::ReqSent;\n\n tx(self.send_configure_request())\n\n }\n\n\n\n (Code::ConfigureNack, _) | (Code::ConfigureRej, _) => {\n\n let is_rej = code == Code::ConfigureRej;\n\n\n\n if pkt.len() < 6 {\n\n panic!(\"too short\")\n\n }\n\n let pkt = &pkt[6..]; // skip header\n\n\n\n parse_options(pkt, |code, data| {\n\n self.proto.own_option_nacked(code, data, is_rej)\n\n })\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 49, "score": 19692.817636566993 }, { "content": " let resp = self.received_configure_req(pkt);\n\n let acked = matches!(resp.payload, Payload::PPP(Code::ConfigureAck, _, _));\n\n tx(resp);\n\n\n\n match (acked, self.state) {\n\n (_, State::Closed) => unreachable!(),\n\n (true, State::ReqSent) => self.state = State::AckSent,\n\n (true, State::AckReceived) => self.state = State::Opened,\n\n (true, State::AckSent) => self.state = State::AckSent,\n\n (true, State::Opened) => {\n\n tx(self.send_configure_request());\n\n self.state = State::AckSent;\n\n }\n\n (false, State::AckSent) => self.state = State::ReqSent,\n\n (false, State::Opened) => {\n\n tx(self.send_configure_request());\n\n self.state = State::ReqSent;\n\n }\n\n (false, _) => {}\n\n }\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 50, "score": 19691.22588582741 }, { "content": " x => info!(\n\n \"ignoring unexpected packet {:?} in state {:?}\",\n\n x, self.state\n\n ),\n\n };\n\n\n\n if old_state != self.state {\n\n info!(\n\n \"{:?}: state {:?} -> {:?}\",\n\n self.proto.protocol(),\n\n old_state,\n\n self.state\n\n );\n\n }\n\n }\n\n\n\n fn next_id(&mut self) -> u8 {\n\n self.id = self.id.wrapping_add(1);\n\n self.id\n\n }\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 51, "score": 19690.596647354992 }, { "content": "#[derive(FromPrimitive, IntoPrimitive, Copy, Clone, Eq, PartialEq, Debug)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\n#[repr(u8)]\n\nenum Option {\n\n #[num_enum(default)]\n\n Unknown = 0,\n\n Asyncmap = 2,\n\n Auth = 3,\n\n}\n\n\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum AuthType {\n\n None = 0,\n\n PAP = 0xc023,\n\n}\n\n\n\npub(crate) struct LCP {\n\n pub auth: AuthType,\n\n\n\n pub asyncmap_remote: u32,\n\n pub asyncmap: u32,\n\n pub asyncmap_rej: bool,\n", "file_path": "ppproto/src/ppp/lcp.rs", "rank": 52, "score": 19689.02778229331 }, { 
"content": " .unwrap();\n\n\n\n match self.state {\n\n State::Closed => unreachable!(),\n\n State::AckSent => {}\n\n _ => self.state = State::ReqSent,\n\n }\n\n tx(self.send_configure_request())\n\n }\n\n (Code::TerminateReq, State::Opened) => {\n\n self.state = State::Closed;\n\n tx(self.send_terminate_ack(id))\n\n }\n\n (Code::TerminateReq, State::ReqSent)\n\n | (Code::TerminateReq, State::AckReceived)\n\n | (Code::TerminateReq, State::AckSent) => {\n\n self.state = State::ReqSent;\n\n tx(self.send_terminate_ack(id))\n\n }\n\n\n", "file_path": "ppproto/src/ppp/option_fsm.rs", "rank": 53, "score": 19688.40675543682 }, { "content": "#[derive(FromPrimitive, IntoPrimitive, Copy, Clone, Eq, PartialEq, Debug)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\n#[repr(u8)]\n\nenum OptionCode {\n\n #[num_enum(default)]\n\n Unknown = 0,\n\n IpAddress = 3,\n\n Dns1 = 129,\n\n Dns2 = 131,\n\n}\n\n\n", "file_path": "ppproto/src/ppp/ipv4cp.rs", "rank": 54, "score": 18808.550963124326 }, { "content": " }\n\n\n\n pub fn receive(&mut self) -> Option<Range<usize>> {\n\n match self.state {\n\n State::Complete => {\n\n let len = self.len;\n\n self.len = 0;\n\n self.state = State::Address;\n\n Some(1..len - 2)\n\n }\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn consume(&mut self, buf: &mut [u8], data: &[u8]) -> usize {\n\n for (i, &b) in data.iter().enumerate() {\n\n match (self.state, b) {\n\n (State::Start, 0x7e) => self.state = State::Address,\n\n (State::Start, _) => {}\n\n (State::Address, 0xff) => self.state = State::Data,\n", "file_path": "ppproto/src/pppos/frame_reader.rs", "rank": 55, "score": 22.968428658808993 }, { "content": "pub enum Payload<'a> {\n\n Raw(&'a mut [u8]),\n\n PPP(Code, u8, PPPPayload<'a>),\n\n}\n\n\n\nimpl<'a> Payload<'a> {\n\n pub fn buffer_len(&self) -> usize {\n\n match self {\n\n Self::Raw(data) => data.len(),\n\n Self::PPP(_code, _id, payload) => 1 + 1 + 2 + payload.buffer_len(),\n\n }\n\n }\n\n\n\n pub fn emit(&self, buffer: &mut [u8]) {\n\n match self {\n\n Self::Raw(data) => buffer.copy_from_slice(data),\n\n Self::PPP(code, id, payload) => {\n\n buffer[0] = *code as u8;\n\n buffer[1] = *id;\n\n let len = payload.buffer_len() as u16 + 4;\n", "file_path": "ppproto/src/wire.rs", "rank": 56, "score": 22.48685580534965 }, { "content": " buffer[2..4].copy_from_slice(&len.to_be_bytes());\n\n payload.emit(&mut buffer[4..])\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum PPPPayload<'a> {\n\n Raw(&'a mut [u8]),\n\n PAP(&'a [u8], &'a [u8]),\n\n Options(Options),\n\n}\n\n\n\nimpl<'a> PPPPayload<'a> {\n\n pub fn buffer_len(&self) -> usize {\n\n match self {\n\n Self::Raw(data) => data.len(),\n\n Self::PAP(user, pass) => 1 + user.len() + 1 + pass.len(),\n\n Self::Options(options) => options.buffer_len(),\n", "file_path": "ppproto/src/wire.rs", "rank": 57, "score": 20.24297667499306 }, { "content": "use super::crc::crc16;\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub struct BufferFullError;\n\n\n\npub struct FrameWriter<'a> {\n\n buf: &'a mut [u8],\n\n len: usize,\n\n crc: u16,\n\n asyncmap: u32,\n\n}\n\n\n\nimpl<'a> FrameWriter<'a> {\n\n pub fn new(buf: &'a mut [u8]) -> Self {\n\n Self {\n\n buf,\n\n len: 0,\n\n crc: 0,\n\n asyncmap: 0xFFFFFFFF,\n", "file_path": "ppproto/src/pppos/frame_writer.rs", "rank": 58, "score": 19.476705016572016 }, { "content": "#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub struct OptionVal {\n\n code: u8,\n\n data: 
OptionData,\n\n}\n\n\n\nimpl OptionVal {\n\n pub fn new(code: u8, data: &[u8]) -> Self {\n\n Self {\n\n code,\n\n data: OptionData(unwrap!(Vec::from_slice(data))),\n\n }\n\n }\n\n\n\n pub fn buffer_len(&self) -> usize {\n\n 2 + self.data.0.len()\n\n }\n\n\n\n pub fn emit(&self, buffer: &mut [u8]) {\n\n buffer[0] = self.code;\n\n buffer[1] = self.data.0.len() as u8 + 2;\n\n buffer[2..].copy_from_slice(&self.data.0);\n\n }\n\n}\n\n\n", "file_path": "ppproto/src/wire.rs", "rank": 59, "score": 18.677819421929538 }, { "content": " }\n\n }\n\n\n\n pub fn new_with_asyncmap(buf: &'a mut [u8], asyncmap: u32) -> Self {\n\n Self {\n\n buf,\n\n len: 0,\n\n crc: 0,\n\n asyncmap,\n\n }\n\n }\n\n\n\n pub fn len(self) -> usize {\n\n self.len\n\n }\n\n\n\n pub fn start(&mut self) -> Result<(), BufferFullError> {\n\n self.crc = crc16(0xFFFF, &[0xFF, 0x03]);\n\n self.append_raw(&[0x7e, 0xff])?;\n\n self.append_escaped(&[0x03])?;\n", "file_path": "ppproto/src/pppos/frame_writer.rs", "rank": 60, "score": 17.495042718712607 }, { "content": " pub fn buffer_len(&self) -> usize {\n\n self.0.iter().map(|opt| opt.buffer_len()).sum()\n\n }\n\n\n\n pub fn emit(&self, mut buffer: &mut [u8]) {\n\n for o in &self.0 {\n\n let len = o.buffer_len();\n\n o.emit(&mut buffer[..len]);\n\n buffer = &mut buffer[len..];\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"defmt\")]\n\nimpl defmt::Format for Options {\n\n fn format(&self, fmt: defmt::Formatter) {\n\n defmt::write!(fmt, \"{=[?]}\", &self.0[..])\n\n }\n\n}\n\n\n", "file_path": "ppproto/src/wire.rs", "rank": 61, "score": 16.81698486808064 }, { "content": " }\n\n }\n\n\n\n pub fn emit(&self, buffer: &mut [u8]) {\n\n match self {\n\n Self::Raw(data) => buffer.copy_from_slice(data),\n\n Self::PAP(user, pass) => {\n\n buffer[0] = user.len() as u8;\n\n buffer[1..][..user.len()].copy_from_slice(user);\n\n buffer[1 + user.len()] = pass.len() as u8;\n\n buffer[1 + user.len() + 1..].copy_from_slice(pass);\n\n }\n\n Self::Options(options) => options.emit(buffer),\n\n }\n\n }\n\n}\n\n\n\npub struct Options(pub Vec<OptionVal, MAX_OPTIONS>);\n\n\n\nimpl Options {\n", "file_path": "ppproto/src/wire.rs", "rank": 62, "score": 16.180392995661393 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n pub fn finish(&mut self) -> Result<(), BufferFullError> {\n\n let crc = self.crc ^ 0xFFFF;\n\n self.append_escaped(&crc.to_le_bytes())?;\n\n self.append_raw(&[0x7e])?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn append_raw(&mut self, data: &[u8]) -> Result<(), BufferFullError> {\n\n if self.len + data.len() > self.buf.len() {\n\n Err(BufferFullError)\n\n } else {\n\n self.buf[self.len..][..data.len()].copy_from_slice(data);\n\n self.len += data.len();\n\n Ok(())\n\n }\n", "file_path": "ppproto/src/pppos/frame_writer.rs", "rank": 63, "score": 14.746694156786328 }, { "content": "\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub struct Packet<'a> {\n\n pub proto: ProtocolType,\n\n pub payload: Payload<'a>,\n\n}\n\n\n\nimpl<'a> Packet<'a> {\n\n pub fn buffer_len(&self) -> usize {\n\n 2 + self.payload.buffer_len()\n\n }\n\n\n\n pub fn emit(&self, buffer: &mut [u8]) {\n\n let proto = self.proto as u16;\n\n buffer[0..2].copy_from_slice(&proto.to_be_bytes());\n\n self.payload.emit(&mut buffer[2..])\n\n }\n\n}\n\n\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n", "file_path": "ppproto/src/wire.rs", "rank": 64, "score": 12.751674240996907 }, { "content": " (State::Address, 0x7e) => self.state = State::Address,\n\n (State::Address, _) => self.state = State::Start,\n\n (State::Data, 0x7e) => {\n\n // End 
of packet\n\n let ok = self.len >= 3\n\n && buf[0] == 0x03\n\n && crc16(0x00FF, &buf[..self.len]) == 0xf0b8;\n\n self.state = if ok { State::Complete } else { State::Address }\n\n }\n\n (State::Data, 0x7d) => self.escape = true,\n\n (State::Data, mut b) => {\n\n if self.escape {\n\n self.escape = false;\n\n b ^= 0x20;\n\n }\n\n if self.len == usize::MAX || self.len >= buf.len() {\n\n self.state = State::Start;\n\n self.len = 0;\n\n } else {\n\n buf[self.len as usize] = b;\n", "file_path": "ppproto/src/pppos/frame_reader.rs", "rank": 65, "score": 11.739013905298442 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\n// This mod MUST go first, so that the others see its macros.\n\npub(crate) mod fmt;\n\n\n\nmod ppp;\n\npub mod pppos;\n\nmod wire;\n\n\n\npub use ppp::{Config, Phase, Status};\n\npub use pppos::{BufferFullError, PPPoS, PPPoSAction};\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub struct InvalidStateError;\n", "file_path": "ppproto/src/lib.rs", "rank": 66, "score": 11.28695635618361 }, { "content": " }\n\n\n\n fn append_escaped(&mut self, data: &[u8]) -> Result<(), BufferFullError> {\n\n for &b in data {\n\n let escape = match b {\n\n 0..=0x1f => self.asyncmap & (1 << (b as u32)) != 0,\n\n 0x7d => true,\n\n 0x7e => true,\n\n _ => false,\n\n };\n\n\n\n if escape {\n\n self.append_raw(&[0x7d, b ^ 0x20])?;\n\n } else {\n\n self.append_raw(&[b])?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn append(&mut self, data: &[u8]) -> Result<(), BufferFullError> {\n\n self.append_escaped(data)?;\n\n self.crc = crc16(self.crc, data);\n\n Ok(())\n\n }\n\n}\n", "file_path": "ppproto/src/pppos/frame_writer.rs", "rank": 67, "score": 10.122527430907006 }, { "content": "use heapless::Vec;\n\nuse num_enum::{FromPrimitive, IntoPrimitive};\n\n\n\npub const MAX_OPTIONS: usize = 6;\n\npub const MAX_OPTION_LEN: usize = 4;\n\n\n\n#[derive(FromPrimitive, IntoPrimitive, Copy, Clone, Eq, PartialEq, Debug)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\n#[repr(u16)]\n\npub enum ProtocolType {\n\n #[num_enum(default)]\n\n Unknown = 0,\n\n /// Link Control Protocol, rfc1661\n\n LCP = 0xc021,\n\n /// Password Authentication Protocol, rfc1334\n\n PAP = 0xc023,\n\n /// Internet Protocol v4\n\n IPv4 = 0x0021,\n\n /// Internet Protocol v4 Control Protocol, rfc1332\n\n IPv4CP = 0x8021,\n", "file_path": "ppproto/src/wire.rs", "rank": 68, "score": 9.85208093664039 }, { "content": " self.len += 1;\n\n }\n\n }\n\n // When we have received a frame, do not consume more data until it's processed with receive()\n\n (State::Complete, _) => return i,\n\n }\n\n }\n\n\n\n // All consumed\n\n data.len()\n\n }\n\n}\n", "file_path": "ppproto/src/pppos/frame_reader.rs", "rank": 69, "score": 9.376411453763296 }, { "content": " match $crate::fmt::Try::into_result($arg) {\n\n ::core::result::Result::Ok(t) => t,\n\n ::core::result::Result::Err(e) => {\n\n ::core::panic!(\"unwrap of `{}` failed: {:?}\", ::core::stringify!($arg), e);\n\n }\n\n }\n\n };\n\n ($arg:expr, $($msg:expr),+ $(,)? 
) => {\n\n match $crate::fmt::Try::into_result($arg) {\n\n ::core::result::Result::Ok(t) => t,\n\n ::core::result::Result::Err(e) => {\n\n ::core::panic!(\"unwrap of `{}` failed: {}: {:?}\", ::core::stringify!($arg), ::core::format_args!($($msg,)*), e);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct NoneError;\n\n\n", "file_path": "ppproto/src/fmt.rs", "rank": 70, "score": 6.829512179560101 }, { "content": "use core::ops::Range;\n\n\n\nuse super::crc::crc16;\n\n\n\n#[derive(Copy, Clone, Debug)]\n", "file_path": "ppproto/src/pppos/frame_reader.rs", "rank": 71, "score": 5.815652133214655 }, { "content": "# ppproto\n\n\n\nRust implementation of the Point-to-Point Protocol (PPP) for embedded systems. `no-std` compatible, no alloc (heap) required.\n\n\n\n## Relevant RFCs\n\n\n\n- [RFC 1661](https://tools.ietf.org/html/rfc1661) - The Point-to-Point Protocol (PPP)\n\n- [RFC 1332](https://tools.ietf.org/html/rfc1332) - The PPP Internet Protocol Control Protocol (IPCP)\n\n- [RFC 1334](https://tools.ietf.org/html/rfc1334) - PPP Authentication Protocols\n\n\n\n## Testing against pppd\n\n\n\nPut this in `/etc/ppp/pap-secrets`, where `myhostname` is the hostname of your machine.\n\n\n\n```\n\nmyuser myhostname mypass 192.168.7.10\n\n```\n\n\n\n```\n\nsocat -v -x PTY,link=pty1,rawer PTY,link=pty2,rawer\n\npppd $PWD/pty1 115200 192.168.7.1: ms-dns 8.8.4.4 ms-dns 8.8.8.8 nodetach debug local persist silent noproxyarp\n\nRUST_LOG=trace cargo run --bin simple -- --device pty2\n\nping 192.168.7.10\n\n```\n\n\n\n## Testing against a real modem\n\n\n\n- `minicom -D /dev/ttyUSB0 -b 115200`\n\n- put whatever AT commands you need to connect (such as `ATD*99#`)\n\n- Control+A Q\n\n- RUST_LOG=trace cargo run --bin simple -- --device /dev/ttyUSB0\n\n\n\nIf you want to \"MITM\" the serial communications to see the raw bytes coming and going, you can do this:\n\n\n\n```\n\nsocat -v -x /dev/ttyUSB0 PTY,link=pty,rawer\n\n```\n\n\n\nand then use `$PWD/pty` instead of `/dev/ttyUSB0`\n\n\n\n## License\n\n\n\nThis work is licensed under either of\n\n\n\n- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n", "file_path": "README.md", "rank": 72, "score": 4.975247983999159 }, { "content": "#![macro_use]\n\n#![allow(unused_macros)]\n\n\n\n#[cfg(all(feature = \"defmt\", feature = \"log\"))]\n\ncompile_error!(\"You may not enable both `defmt` and `log` features.\");\n\n\n\nmacro_rules! assert {\n\n ($($x:tt)*) => {\n\n {\n\n #[cfg(not(feature = \"defmt\"))]\n\n ::core::assert!($($x)*);\n\n #[cfg(feature = \"defmt\")]\n\n ::defmt::assert!($($x)*);\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! assert_eq {\n\n ($($x:tt)*) => {\n\n {\n", "file_path": "ppproto/src/fmt.rs", "rank": 73, "score": 4.381547180536057 }, { "content": " {\n\n #[cfg(feature = \"log\")]\n\n ::log::debug!($s $(, $x)*);\n\n #[cfg(feature = \"defmt\")]\n\n ::defmt::debug!($s $(, $x)*);\n\n #[cfg(not(any(feature = \"log\", feature=\"defmt\")))]\n\n let _ = ($( & $x ),*);\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! info {\n\n ($s:literal $(, $x:expr)* $(,)?) 
=> {\n\n {\n\n #[cfg(feature = \"log\")]\n\n ::log::info!($s $(, $x)*);\n\n #[cfg(feature = \"defmt\")]\n\n ::defmt::info!($s $(, $x)*);\n\n #[cfg(not(any(feature = \"log\", feature=\"defmt\")))]\n\n let _ = ($( & $x ),*);\n", "file_path": "ppproto/src/fmt.rs", "rank": 74, "score": 3.5436144142657353 }, { "content": " }\n\n };\n\n}\n\n\n\nmacro_rules! warn {\n\n ($s:literal $(, $x:expr)* $(,)?) => {\n\n {\n\n #[cfg(feature = \"log\")]\n\n ::log::warn!($s $(, $x)*);\n\n #[cfg(feature = \"defmt\")]\n\n ::defmt::warn!($s $(, $x)*);\n\n #[cfg(not(any(feature = \"log\", feature=\"defmt\")))]\n\n let _ = ($( & $x ),*);\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! error {\n\n ($s:literal $(, $x:expr)* $(,)?) => {\n\n {\n", "file_path": "ppproto/src/fmt.rs", "rank": 75, "score": 3.2204546091911874 }, { "content": " ::defmt::panic!($($x)*);\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! trace {\n\n ($s:literal $(, $x:expr)* $(,)?) => {\n\n {\n\n #[cfg(feature = \"log\")]\n\n ::log::trace!($s $(, $x)*);\n\n #[cfg(feature = \"defmt\")]\n\n ::defmt::trace!($s $(, $x)*);\n\n #[cfg(not(any(feature = \"log\", feature=\"defmt\")))]\n\n let _ = ($( & $x ),*);\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! debug {\n\n ($s:literal $(, $x:expr)* $(,)?) => {\n", "file_path": "ppproto/src/fmt.rs", "rank": 76, "score": 3.1748148627367505 }, { "content": " #[cfg(feature = \"log\")]\n\n ::log::error!($s $(, $x)*);\n\n #[cfg(feature = \"defmt\")]\n\n ::defmt::error!($s $(, $x)*);\n\n #[cfg(not(any(feature = \"log\", feature=\"defmt\")))]\n\n let _ = ($( & $x ),*);\n\n }\n\n };\n\n}\n\n\n\n#[cfg(feature = \"defmt\")]\n\nmacro_rules! unwrap {\n\n ($($x:tt)*) => {\n\n ::defmt::unwrap!($($x)*)\n\n };\n\n}\n\n\n\n#[cfg(not(feature = \"defmt\"))]\n\nmacro_rules! unwrap {\n\n ($arg:expr) => {\n", "file_path": "ppproto/src/fmt.rs", "rank": 77, "score": 3.066181334499394 }, { "content": "}\n\n\n\n#[derive(FromPrimitive, IntoPrimitive, Copy, Clone, Eq, PartialEq, Debug, Ord, PartialOrd)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\n#[repr(u8)]\n\npub enum Code {\n\n #[num_enum(default)]\n\n Unknown = 0,\n\n ConfigureReq = 1,\n\n ConfigureAck = 2,\n\n ConfigureNack = 3,\n\n ConfigureRej = 4,\n\n TerminateReq = 5,\n\n TerminateAck = 6,\n\n CodeRej = 7,\n\n ProtocolRej = 8,\n\n EchoReq = 9,\n\n EchoReply = 10,\n\n DiscardReq = 11,\n\n}\n", "file_path": "ppproto/src/wire.rs", "rank": 78, "score": 2.724849051768155 } ]
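The context items above quote the public `PPPoS` driver surface (`put_rx_buf`, `open`, `consume`, `poll`, `send`), the `PPPoSAction` result type, and a blocking `SerialPort` wrapper, but this excerpt never shows them wired together. The sketch below illustrates one way the consume/poll loop could be driven, using only the signatures quoted above; the buffer sizes, the `run_ppp` and `handle_ip_packet` names, and the assumption that a `ppproto::Config` has already been built (its fields are not shown here) are illustrative, not part of the crate's documented examples.

// Minimal sketch (not from the crate): drive a PPPoS instance over a blocking
// serial link, e.g. the SerialPort wrapper shown above. Buffer sizes are arbitrary.
use std::io::{Read, Write};

use ppproto::{Config, PPPoS, PPPoSAction};

fn run_ppp<S: Read + Write>(mut port: S, config: Config<'_>) -> std::io::Result<()> {
    let mut rx_buf = [0u8; 2048];
    let mut tx_buf = [0u8; 2048];
    let mut serial_buf = [0u8; 256];

    let mut ppp: PPPoS<'_, &mut [u8]> = PPPoS::new(config);
    ppp.put_rx_buf(&mut rx_buf[..]);
    ppp.open().expect("open() failed: PPP was not in the closed state");

    // The first poll() typically produces the initial LCP Configure-Request.
    if let PPPoSAction::Transmit(len) = ppp.poll(&mut tx_buf) {
        port.write_all(&tx_buf[..len])?;
    }

    loop {
        let n = port.read(&mut serial_buf)?;
        if n == 0 {
            return Ok(()); // serial link closed
        }

        // consume() may stop early while a complete frame is pending, so keep
        // feeding the remaining bytes, polling in between.
        let mut data = &serial_buf[..n];
        while !data.is_empty() {
            let consumed = ppp.consume(data);
            data = &data[consumed..];

            match ppp.poll(&mut tx_buf) {
                PPPoSAction::None => {}
                PPPoSAction::Transmit(len) => port.write_all(&tx_buf[..len])?,
                PPPoSAction::Received(buf, range) => {
                    // `buf` is the rx buffer we lent out; `range` bounds the IP packet.
                    handle_ip_packet(&buf[range]);
                    // Hand the buffer back before the next consume()/poll().
                    ppp.put_rx_buf(buf);
                }
            }
        }
    }
}

fn handle_ip_packet(_pkt: &[u8]) {
    // Placeholder: forward to an IP stack of your choice (assumption; not shown here).
}

This mirrors the ownership model the doc comments describe: the caller provides every buffer (rx, tx, serial scratch), transmits whatever `poll`/`send` return over the serial connection, and must return the rx buffer via `put_rx_buf` after each received packet before polling again.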
Rust
rav1e-worker/src/main.rs
rust-av/rav1e-by-gop
2fad32cd518dcf95e88fffbd2f3c8f66ab1ab41d
use std::{collections::BTreeMap, env, net::SocketAddrV4, path::PathBuf, time::Duration}; use clap::{App, Arg}; use lazy_static::lazy_static; use log::{debug, log_enabled}; use parking_lot::RwLock; use rand::Rng; use rav1e_by_gop::{EncodeOptions, EncodeState, VideoDetails}; use server::*; use tokio::time::sleep; use uuid::{v1::Context, Uuid}; use worker::*; mod server; mod worker; #[cfg(all(target_arch = "x86_64", target_os = "linux"))] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; lazy_static! { pub static ref ENCODER_QUEUE: RwLock<BTreeMap<Uuid, RwLock<EncodeItem>>> = RwLock::new(BTreeMap::new()); pub static ref UUID_CONTEXT: Context = Context::new(0); pub static ref UUID_NODE_ID: Box<[u8]> = { let mut id = Vec::with_capacity(6); let mut rng = rand::thread_rng(); for _ in 0..6 { id.push(rng.gen()); } id.into_boxed_slice() }; } pub struct EncodeItem { pub state: EncodeState, pub options: EncodeOptions, pub video_info: VideoDetails, } impl EncodeItem { fn new(options: EncodeOptions, video_info: VideoDetails) -> Self { EncodeItem { state: EncodeState::Enqueued, options, video_info, } } } impl std::fmt::Debug for EncodeItem { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.state { EncodeState::Enqueued => f.write_str("Enqueued"), EncodeState::AwaitingInfo { .. } => f.write_str("Awaiting Segment Info"), EncodeState::AwaitingData { .. } => f.write_str("Awaiting Data"), EncodeState::Ready { ref raw_frames, .. } => f.write_fmt(format_args!( "Ready to encode {} frames", raw_frames.frame_count() )), EncodeState::InProgress { ref progress } => f.write_fmt(format_args!( "Encoding {} of {} frames", progress.frame_info.len(), progress.total_frames )), EncodeState::EncodingDone { ref encoded_data, .. 
} => f.write_fmt(format_args!("Done encoding {} bytes", encoded_data.len())), } } } #[tokio::main] async fn main() { env::var("SERVER_PASSWORD").expect("SERVER_PASSWORD env var MUST be set!"); if env::var("RUST_LOG").is_err() { env::set_var("RUST_LOG", "rav1e_worker=info"); } env_logger::init(); let matches = App::new("rav1e-worker") .arg( Arg::with_name("LISTEN_IP") .help("Select which IP to listen on") .long("ip") .visible_alias("host") .default_value("0.0.0.0") .takes_value(true), ) .arg( Arg::with_name("LISTEN_PORT") .help("Select which port to listen on") .long("port") .short("p") .default_value("13415") .takes_value(true), ) .arg( Arg::with_name("MAX_THREADS") .help( "Limit the number of threads that can be used for workers [default: num cpus]", ) .long("threads") .takes_value(true), ) .arg( Arg::with_name("TEMP_DIR") .help( "Store input segments in temp files in the specified directory; by default \ stores in memory", ) .long("temp-dir") .takes_value(true), ) .get_matches(); let server_ip = SocketAddrV4::new( matches.value_of("LISTEN_IP").unwrap().parse().unwrap(), matches.value_of("LISTEN_PORT").unwrap().parse().unwrap(), ); let mut threads = num_cpus::get(); if let Some(thread_setting) = matches .value_of("MAX_THREADS") .and_then(|val| val.parse().ok()) { threads = threads.min(thread_setting); } let temp_dir = if let Some(temp_dir) = matches.value_of("TEMP_DIR") { let dir = PathBuf::from(temp_dir); if !dir.is_dir() { panic!("Specified temp dir does not exist or is not a directory"); } if dir.metadata().unwrap().permissions().readonly() { panic!("Specified temp dir is not writeable"); } Some(dir) } else { None }; start_listener(server_ip, temp_dir, threads).await; start_workers(threads).await; loop { if log_enabled!(log::Level::Debug) { let queue_handle = ENCODER_QUEUE.read(); let mut items = Vec::with_capacity(queue_handle.len()); for (key, item) in queue_handle.iter() { items.push((key, item.read())); } debug!("Items in queue: {:?}", items); } sleep(Duration::from_secs(5)).await; } }
use std::{collections::BTreeMap, env, net::SocketAddrV4, path::PathBuf, time::Duration}; use clap::{App, Arg}; use lazy_static::lazy_static; use log::{debug, log_enabled}; use parking_lot::RwLock; use rand::Rng; use rav1e_by_gop::{EncodeOptions, EncodeState, VideoDetails}; use server::*; use tokio::time::sleep; use uuid::{v1::Context, Uuid}; use worker::*; mod server; mod worker; #[cfg(all(target_arch = "x86_64", target_os = "linux"))] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; lazy_static! { pub static ref ENCODER_QUEUE: RwLock<BTreeMap<Uuid, RwLock<EncodeItem>>> = RwLock::new(BTreeMap::new()); pub static ref UUID_CONTEXT: Context = Context::new(0); pub static ref UUID_NODE_ID: Box<[u8]> = { let mut id = Vec::with_capacity(6); let mut rng = rand::thread_rng(); for _ in 0..6 { id.push(rng.gen()); } id.into_boxed_slice() }; } pub struct EncodeItem { pub state: EncodeState, pub options: EncodeOptions, pub video_info: VideoDetails, } impl EncodeItem { fn new(options: EncodeOptions, video_info: VideoDetails) -> Self { EncodeItem { state: EncodeState::Enqueued, options, video_info, } } } impl std::fmt::Debug for EncodeItem { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.state { EncodeState::Enqueued => f.write_str("Enqueued"), EncodeState::AwaitingInfo { .. } => f.write_str("Awaiting Segment Info"), EncodeState::AwaitingData { .. } => f.write_str("Awaiting Data"), EncodeState::Ready { ref raw_frames, .. } => f.write_fmt(format_args!( "Ready to encode {} frames", raw_frames.frame_count() )), EncodeState::InProgress { ref progress } => f.write_fmt(format_args!( "Encoding {} of {} frames", progress.frame_info.len(), progress.total_frames )), EncodeState::EncodingDone { ref encoded_data, .. } => f.write_fmt(format_args!("Done encoding {} bytes", encoded_data.len())), } } } #[tokio::main]
async fn main() { env::var("SERVER_PASSWORD").expect("SERVER_PASSWORD env var MUST be set!"); if env::var("RUST_LOG").is_err() { env::set_var("RUST_LOG", "rav1e_worker=info"); } env_logger::init(); let matches = App::new("rav1e-worker") .arg( Arg::with_name("LISTEN_IP") .help("Select which IP to listen on") .long("ip") .visible_alias("host") .default_value("0.0.0.0") .takes_value(true), ) .arg( Arg::with_name("LISTEN_PORT") .help("Select which port to listen on") .long("port") .short("p") .default_value("13415") .takes_value(true), ) .arg( Arg::with_name("MAX_THREADS") .help( "Limit the number of threads that can be used for workers [default: num cpus]", ) .long("threads") .takes_value(true), ) .arg( Arg::with_name("TEMP_DIR") .help( "Store input segments in temp files in the specified directory; by default \ stores in memory", ) .long("temp-dir") .takes_value(true), ) .get_matches(); let server_ip = SocketAddrV4::new( matches.value_of("LISTEN_IP").unwrap().parse().unwrap(), matches.value_of("LISTEN_PORT").unwrap().parse().unwrap(), ); let mut threads = num_cpus::get(); if let Some(thread_setting) = matches .value_of("MAX_THREADS") .and_then(|val| val.parse().ok()) { threads = threads.min(thread_setting); } let temp_dir = if let Some(temp_dir) = matches.value_of("TEMP_DIR") { let dir = PathBuf::from(temp_dir); if !dir.is_dir() { panic!("Specified temp dir does not exist or is not a directory"); } if dir.metadata().unwrap().permissions().readonly() { panic!("Specified temp dir is not writeable"); } Some(dir) } else { None }; start_listener(server_ip, temp_dir, threads).await; start_workers(threads).await; loop { if log_enabled!(log::Level::Debug) { let queue_handle = ENCODER_QUEUE.read(); let mut items = Vec::with_capacity(queue_handle.len()); for (key, item) in queue_handle.iter() { items.push((key, item.read())); } debug!("Items in queue: {:?}", items); } sleep(Duration::from_secs(5)).await; } }
function_block-full_function
[ { "content": "pub fn encode_segment(\n\n opts: EncodeOptions,\n\n video_info: VideoDetails,\n\n data: SegmentData,\n\n thread_pool: &mut ThreadPool,\n\n rayon_pool: Arc<rayon::ThreadPool>,\n\n progress_sender: ProgressSender,\n\n segment_output_file: Output,\n\n) -> Result<()> {\n\n let progress = ProgressInfo::new(\n\n Rational {\n\n num: video_info.time_base.den,\n\n den: video_info.time_base.num,\n\n },\n\n match data.frame_data {\n\n SegmentFrameData::Y4MFile { frame_count, .. } => frame_count,\n\n SegmentFrameData::CompressedFrames(ref frames) => frames.len(),\n\n },\n\n {\n\n let mut kf = BTreeSet::new();\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 0, "score": 135510.26978386153 }, { "content": "pub fn with_state<T: Clone + Send>(\n\n state: T,\n\n) -> impl Filter<Extract = (T,), Error = Infallible> + Clone {\n\n warp::any().map(move || state.clone())\n\n}\n\n\n", "file_path": "rav1e-worker/src/server/helpers.rs", "rank": 1, "score": 118756.78819548376 }, { "content": "pub fn require_auth() -> impl Filter<Extract = ((),), Error = Rejection> + Copy {\n\n warp::header(\"X-RAV1E-AUTH\").and_then(move |password: String| async move {\n\n if verify(password, &HASHED_SERVER_PASSWORD).unwrap() {\n\n return Ok(());\n\n }\n\n Err(warp::reject::custom(InvalidAuthorization))\n\n })\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct InvalidAuthorization;\n\n\n\nimpl Reject for InvalidAuthorization {}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct ClientVersionMismatch;\n\n\n\nimpl Reject for ClientVersionMismatch {}\n\n\n\npub async fn handle_rejection_types(err: Rejection) -> Result<impl Reply, Rejection> {\n", "file_path": "rav1e-worker/src/server/helpers.rs", "rank": 2, "score": 115089.41513743527 }, { "content": "pub fn process_frame<T: Pixel + DeserializeOwned>(\n\n ctx: &mut Context<T>,\n\n source: &mut Source,\n\n) -> Result<ProcessFrameResult<T>> {\n\n let pkt_wrapped = ctx.receive_packet();\n\n match pkt_wrapped {\n\n Ok(pkt) => Ok(ProcessFrameResult::Packet(Box::new(pkt))),\n\n Err(EncoderStatus::NeedMoreData) => {\n\n source.read_frame(ctx);\n\n Ok(ProcessFrameResult::NoPacket(false))\n\n }\n\n Err(EncoderStatus::EnoughData) => {\n\n unreachable!();\n\n }\n\n Err(EncoderStatus::LimitReached) => Ok(ProcessFrameResult::EndOfSegment),\n\n e @ Err(EncoderStatus::Failure) => {\n\n e?;\n\n unreachable!();\n\n }\n\n Err(EncoderStatus::NotReady) => {\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 3, "score": 110405.47908953446 }, { "content": "pub fn get_routes(\n\n temp_dir: Option<PathBuf>,\n\n worker_threads: usize,\n\n) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {\n\n warp::path!(\"info\")\n\n .and(warp::get())\n\n .and(require_auth())\n\n .and(with_state(worker_threads))\n\n .and_then(get_info)\n\n .or(warp::path!(\"enqueue\" / Uuid)\n\n .and(warp::get())\n\n .and(require_auth())\n\n .and_then(get_enqueue))\n\n .or(warp::path!(\"enqueue\")\n\n .and(warp::post())\n\n .and(require_auth())\n\n .and(json_body())\n\n .and_then(post_enqueue))\n\n .or(warp::path!(\"segment\" / Uuid)\n\n .and(warp::post())\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 4, "score": 106021.14759231004 }, { "content": "pub fn compress_frame<T: Pixel + Serialize>(frame: &Frame<T>) -> Vec<u8> {\n\n let mut compressed_frame = Vec::new();\n\n let mut encoder = zstd::Encoder::new(&mut compressed_frame, 0).unwrap();\n\n bincode::serialize_into(&mut encoder, frame).unwrap();\n\n encoder.finish().unwrap();\n\n compressed_frame\n\n}\n\n\n", 
"file_path": "rav1e-by-gop/src/compress.rs", "rank": 5, "score": 92657.15595803174 }, { "content": "pub fn json_body<T: DeserializeOwned + Send>(\n\n) -> impl Filter<Extract = (T,), Error = Rejection> + Copy {\n\n warp::body::content_length_limit(1024 * 1024).and(warp::body::json())\n\n}\n\n\n", "file_path": "rav1e-worker/src/server/helpers.rs", "rank": 6, "score": 91171.45466281405 }, { "content": "pub fn decompress_frame<T: Pixel + DeserializeOwned>(compressed_frame: &[u8]) -> Frame<T> {\n\n let decoder = zstd::Decoder::new(compressed_frame).unwrap();\n\n bincode::deserialize_from(decoder).unwrap()\n\n}\n", "file_path": "rav1e-by-gop/src/compress.rs", "rank": 7, "score": 87362.39231305438 }, { "content": "pub fn map_error_to_500<E: ToString>(_e: E) -> WithStatus<Json> {\n\n warp::reply::with_status(warp::reply::json(&()), StatusCode::INTERNAL_SERVER_ERROR)\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! try_or_500 {\n\n ( $expr:expr ) => {\n\n match $expr {\n\n Ok(val) => val,\n\n Err(e) => {\n\n return Ok(crate::server::helpers::map_error_to_500(e));\n\n }\n\n }\n\n };\n\n}\n", "file_path": "rav1e-worker/src/server/helpers.rs", "rank": 8, "score": 86740.16419142696 }, { "content": "#[allow(clippy::clippy::too_many_arguments)]\n\npub fn build_encoder_config(\n\n speed: usize,\n\n qp: usize,\n\n max_bitrate: Option<i32>,\n\n tiles: usize,\n\n video_info: VideoDetails,\n\n color_primaries: ColorPrimaries,\n\n transfer_characteristics: TransferCharacteristics,\n\n matrix_coefficients: MatrixCoefficients,\n\n) -> EncoderConfig {\n\n let mut enc_config = EncoderConfig::with_speed_preset(speed);\n\n enc_config.width = video_info.width;\n\n enc_config.height = video_info.height;\n\n enc_config.bit_depth = video_info.bit_depth;\n\n enc_config.chroma_sampling = video_info.chroma_sampling;\n\n enc_config.chroma_sample_position = video_info.chroma_sample_position;\n\n enc_config.time_base = video_info.time_base;\n\n enc_config.tiles = tiles;\n\n enc_config.min_key_frame_interval = 0;\n\n enc_config.max_key_frame_interval = u16::max_value() as u64;\n", "file_path": "rav1e-by-gop/src/lib.rs", "rank": 9, "score": 81657.52527332652 }, { "content": "use std::{env, net::SocketAddrV4, path::PathBuf};\n\n\n\nuse bcrypt::{hash, DEFAULT_COST};\n\nuse lazy_static::lazy_static;\n\nuse log::info;\n\n\n\nuse crate::server::routes::get_routes;\n\n\n\nmod helpers;\n\nmod routes;\n\n\n\nlazy_static! 
{\n\n static ref HASHED_SERVER_PASSWORD: String =\n\n hash(env::var(\"SERVER_PASSWORD\").unwrap(), DEFAULT_COST).unwrap();\n\n static ref CLIENT_VERSION_REQUIRED: semver::VersionReq = {\n\n let server_version = semver::Version::parse(env!(\"CARGO_PKG_VERSION\")).unwrap();\n\n if server_version.major > 0 {\n\n semver::VersionReq::parse(&format!(\"^{}.0.0\", server_version.major)).unwrap()\n\n } else {\n\n semver::VersionReq::parse(&format!(\"~0.{}.0\", server_version.minor)).unwrap()\n", "file_path": "rav1e-worker/src/server/mod.rs", "rank": 10, "score": 76429.52942035589 }, { "content": " }\n\n };\n\n}\n\n\n\npub async fn start_listener(\n\n server_ip: SocketAddrV4,\n\n temp_dir: Option<PathBuf>,\n\n worker_threads: usize,\n\n) {\n\n // This thread watches for new incoming connections,\n\n // both for the initial negotiation and for new slot requests\n\n tokio::spawn(async move {\n\n info!(\"Remote listener started on {}\", server_ip);\n\n\n\n match (env::var(\"TLS_CERT_PATH\"), env::var(\"TLS_KEY_PATH\")) {\n\n (Ok(cert_path), Ok(key_path)) => {\n\n warp::serve(get_routes(temp_dir, worker_threads))\n\n .tls()\n\n .cert_path(&cert_path)\n\n .key_path(&key_path)\n", "file_path": "rav1e-worker/src/server/mod.rs", "rank": 11, "score": 76424.6202150179 }, { "content": " .run(server_ip)\n\n .await;\n\n }\n\n _ => {\n\n warp::serve(get_routes(temp_dir, worker_threads))\n\n .run(server_ip)\n\n .await;\n\n }\n\n };\n\n });\n\n}\n", "file_path": "rav1e-worker/src/server/mod.rs", "rank": 12, "score": 76417.27070426202 }, { "content": "fn do_encode<T: Pixel + DeserializeOwned>(\n\n pool: Arc<rayon::ThreadPool>,\n\n opts: EncodeOptions,\n\n video_info: VideoDetails,\n\n mut source: Source,\n\n segment_output_file: &Output,\n\n mut progress: ProgressInfo,\n\n progress_sender: ProgressSender,\n\n) -> Result<ProgressInfo> {\n\n let cfg = build_config(\n\n opts.speed,\n\n opts.qp,\n\n opts.max_bitrate,\n\n opts.tiles,\n\n video_info,\n\n pool,\n\n opts.color_primaries,\n\n opts.transfer_characteristics,\n\n opts.matrix_coefficients,\n\n );\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 13, "score": 74797.04123012342 }, { "content": "#[cfg(feature = \"binary\")]\n\nfn secs_to_human_time(mut secs: u64, always_show_hours: bool) -> String {\n\n let mut mins = secs / 60;\n\n secs %= 60;\n\n let hours = mins / 60;\n\n mins %= 60;\n\n if hours > 0 || always_show_hours {\n\n format!(\"{:02}:{:02}:{:02}\", hours, mins, secs)\n\n } else {\n\n format!(\"{:02}:{:02}\", mins, secs)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct SerializableProgressInfo {\n\n pub frame_rate: (u64, u64),\n\n pub frame_info: Vec<SerializableFrameSummary>,\n\n pub encoded_size: usize,\n\n pub keyframes: BTreeSet<usize>,\n\n pub completed_segments: BTreeSet<usize>,\n\n pub next_analysis_frame: usize,\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 14, "score": 68321.33122351693 }, { "content": "pub fn create_muxer(path: &Output) -> Result<Box<dyn Muxer>> {\n\n match path {\n\n Output::File(path) => {\n\n let ext = path\n\n .extension()\n\n .and_then(OsStr::to_str)\n\n .map(str::to_lowercase)\n\n .unwrap_or_else(|| \"ivf\".into());\n\n\n\n match &ext[..] 
{\n\n \"ivf\" => Ok(Box::new(IvfMuxer::<BufWriter<File>>::open(\n\n path.to_str().unwrap(),\n\n )?)),\n\n _e => {\n\n panic!(\n\n \"{} is not a supported extension, please change to .ivf\",\n\n ext\n\n );\n\n }\n\n }\n\n }\n\n Output::Memory => Ok(Box::new(IvfMuxer::<Vec<u8>>::in_memory())),\n\n Output::Null => Ok(Box::new(IvfMuxer::<Sink>::null())),\n\n }\n\n}\n", "file_path": "rav1e-by-gop/src/muxer/mod.rs", "rank": 15, "score": 66739.87598686026 }, { "content": "#[allow(clippy::clippy::too_many_arguments)]\n\npub fn build_config(\n\n speed: usize,\n\n qp: usize,\n\n max_bitrate: Option<i32>,\n\n tiles: usize,\n\n video_info: VideoDetails,\n\n pool: Arc<rayon::ThreadPool>,\n\n color_primaries: ColorPrimaries,\n\n transfer_characteristics: TransferCharacteristics,\n\n matrix_coefficients: MatrixCoefficients,\n\n) -> Config {\n\n Config::new()\n\n .with_encoder_config(build_encoder_config(\n\n speed,\n\n qp,\n\n max_bitrate,\n\n tiles,\n\n video_info,\n\n color_primaries,\n\n transfer_characteristics,\n\n matrix_coefficients,\n\n ))\n\n .with_thread_pool(pool)\n\n}\n\n\n", "file_path": "rav1e-by-gop/src/lib.rs", "rank": 16, "score": 63226.429204620814 }, { "content": "pub trait Muxer {\n\n fn write_header(\n\n &mut self,\n\n width: usize,\n\n height: usize,\n\n framerate_num: usize,\n\n framerate_den: usize,\n\n );\n\n\n\n fn write_frame(&mut self, pts: u64, data: &[u8], frame_type: FrameType);\n\n\n\n fn flush(&mut self) -> io::Result<()>;\n\n}\n\n\n", "file_path": "rav1e-by-gop/src/muxer/mod.rs", "rank": 17, "score": 56373.91562532056 }, { "content": " StatusCode::NOT_FOUND,\n\n ))\n\n}\n\n\n\n// returns progress on currently encoded segment\n\nasync fn get_segment(request_id: Uuid, _auth: ()) -> Result<impl Reply, Rejection> {\n\n if let Some(item) = ENCODER_QUEUE.read().get(&request_id) {\n\n let item_reader = item.read();\n\n match item_reader.state {\n\n EncodeState::InProgress { ref progress, .. } => Ok(warp::reply::with_status(\n\n warp::reply::json(&GetProgressResponse {\n\n progress: SerializableProgressInfo::from(&*progress),\n\n done: false,\n\n }),\n\n StatusCode::OK,\n\n )),\n\n EncodeState::EncodingDone { ref progress, .. } => Ok(warp::reply::with_status(\n\n warp::reply::json(&GetProgressResponse {\n\n progress: SerializableProgressInfo::from(&*progress),\n\n done: true,\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 18, "score": 50473.02344573139 }, { "content": "async fn get_info(_auth: (), worker_threads: usize) -> Result<impl Reply, Rejection> {\n\n Ok(warp::reply::with_status(\n\n warp::reply::json(&GetInfoResponse {\n\n worker_count: worker_threads,\n\n }),\n\n StatusCode::NOT_FOUND,\n\n ))\n\n}\n\n\n\n// this endpoint tells a client if their slot is ready\n\nasync fn get_enqueue(request_id: Uuid, _auth: ()) -> Result<impl Reply, Rejection> {\n\n let reader = ENCODER_QUEUE.read();\n\n let item = reader.get(&request_id);\n\n if let Some(item) = item {\n\n match item.read().state {\n\n EncodeState::Enqueued => Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::ACCEPTED,\n\n )),\n\n EncodeState::AwaitingInfo { .. } | EncodeState::AwaitingData { .. 
} => Ok(\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 19, "score": 50471.62714173384 }, { "content": " SerializableProgressInfo,\n\n SlotRequestMessage,\n\n};\n\nuse uuid::{v1::Timestamp, Uuid};\n\nuse warp::{http::StatusCode, reply::Response, Filter, Rejection, Reply};\n\n\n\nuse crate::{\n\n server::{\n\n helpers::{\n\n handle_rejection_types,\n\n json_body,\n\n require_auth,\n\n with_state,\n\n ClientVersionMismatch,\n\n },\n\n CLIENT_VERSION_REQUIRED,\n\n },\n\n try_or_500,\n\n EncodeItem,\n\n ENCODER_QUEUE,\n\n UUID_CONTEXT,\n\n UUID_NODE_ID,\n\n};\n\n\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 20, "score": 50470.95050896441 }, { "content": " ));\n\n }\n\n\n\n Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::NOT_FOUND,\n\n ))\n\n}\n\n\n\n// client sends raw video data via this endpoint\n\nasync fn post_segment_data(\n\n request_id: Uuid,\n\n _auth: (),\n\n body: Bytes,\n\n temp_dir: Option<PathBuf>,\n\n) -> Result<impl Reply, Rejection> {\n\n if let Some(item) = ENCODER_QUEUE.read().get(&request_id) {\n\n let mut item_handle = item.write();\n\n let frame_data;\n\n let keyframe_number_outer;\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 21, "score": 50470.32639852605 }, { "content": " if let Some(item) = ENCODER_QUEUE.read().get(&request_id) {\n\n let mut item_handle = item.write();\n\n match item_handle.state {\n\n EncodeState::AwaitingInfo { .. } => (),\n\n _ => {\n\n return Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::GONE,\n\n ));\n\n }\n\n };\n\n item_handle.state = EncodeState::AwaitingData {\n\n keyframe_number: body.keyframe_number,\n\n segment_idx: body.segment_idx,\n\n next_analysis_frame: body.next_analysis_frame,\n\n time_ready: Utc::now(),\n\n };\n\n return Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::OK,\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 22, "score": 50469.946147568124 }, { "content": "use std::{\n\n fs::File,\n\n io::{BufWriter, Write},\n\n path::PathBuf,\n\n sync::Arc,\n\n};\n\n\n\nuse byteorder::{LittleEndian, WriteBytesExt};\n\nuse bytes::Bytes;\n\nuse chrono::Utc;\n\nuse http::header::{HeaderValue, CONTENT_TYPE};\n\nuse parking_lot::RwLock;\n\nuse rav1e_by_gop::{\n\n decompress_frame,\n\n EncodeState,\n\n GetInfoResponse,\n\n GetProgressResponse,\n\n PostEnqueueResponse,\n\n PostSegmentMessage,\n\n SegmentFrameData,\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 23, "score": 50469.89048315794 }, { "content": " }),\n\n StatusCode::OK,\n\n )),\n\n _ => Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::GONE,\n\n )),\n\n }\n\n } else {\n\n Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::NOT_FOUND,\n\n ))\n\n }\n\n}\n\n\n\n// if segment is ready, sends client the encoded video data\n\nasync fn get_segment_data(request_id: Uuid, _auth: ()) -> Result<impl Reply, Rejection> {\n\n // Check first without mutating the state\n\n let mut can_send_data = false;\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 24, "score": 50468.93718355861 }, { "content": " }\n\n\n\n let ts = Timestamp::from_unix(&*UUID_CONTEXT, 1497624119, 1234);\n\n let request_id = try_or_500!(Uuid::new_v1(ts, &UUID_NODE_ID));\n\n ENCODER_QUEUE.write().insert(\n\n request_id,\n\n RwLock::new(EncodeItem::new(body.options, body.video_info)),\n\n );\n\n\n\n Ok(warp::reply::with_status(\n\n warp::reply::json(&PostEnqueueResponse { request_id }),\n\n StatusCode::ACCEPTED,\n\n 
))\n\n}\n\n\n\nasync fn post_segment(\n\n request_id: Uuid,\n\n _auth: (),\n\n body: PostSegmentMessage,\n\n) -> Result<impl Reply, Rejection> {\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 25, "score": 50468.566125859405 }, { "content": " let next_analysis_frame_outer;\n\n let segment_idx_outer;\n\n if let EncodeState::AwaitingData {\n\n keyframe_number,\n\n next_analysis_frame,\n\n segment_idx,\n\n ..\n\n } = &mut item_handle.state\n\n {\n\n keyframe_number_outer = *keyframe_number;\n\n next_analysis_frame_outer = *next_analysis_frame;\n\n segment_idx_outer = *segment_idx;\n\n let compressed_frames: Vec<Vec<u8>> = try_or_500!(bincode::deserialize(&body));\n\n frame_data = match temp_dir {\n\n Some(temp_dir) => {\n\n let frame_count = compressed_frames.len();\n\n let mut temp_path = temp_dir;\n\n temp_path.push(request_id.to_hyphenated().to_string());\n\n temp_path.set_extension(\"py4m\");\n\n let file = File::create(&temp_path).unwrap();\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 26, "score": 50467.00264178354 }, { "content": " .and(require_auth())\n\n .and(json_body())\n\n .and_then(post_segment))\n\n .or(warp::path!(\"segment_data\" / Uuid)\n\n .and(warp::post())\n\n .and(require_auth())\n\n .and(warp::body::bytes())\n\n .and(with_state(temp_dir))\n\n .and_then(post_segment_data))\n\n .or(warp::path!(\"segment\" / Uuid)\n\n .and(warp::get())\n\n .and(require_auth())\n\n .and_then(get_segment))\n\n .or(warp::path!(\"segment_data\" / Uuid)\n\n .and(warp::get())\n\n .and(require_auth())\n\n .and_then(get_segment_data))\n\n .recover(handle_rejection_types)\n\n}\n\n\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 27, "score": 50464.337157339476 }, { "content": " if let Some(item) = ENCODER_QUEUE.read().get(&request_id) {\n\n if let EncodeState::EncodingDone { .. } = item.read().state {\n\n can_send_data = true;\n\n }\n\n }\n\n if !can_send_data {\n\n return Ok(warp::reply::with_status(\n\n Response::new(Vec::new().into()),\n\n StatusCode::NOT_FOUND,\n\n ));\n\n }\n\n\n\n // Now pop it from the queue and send it, freeing the resources simultaneously\n\n let item = {\n\n let mut queue_handle = ENCODER_QUEUE.write();\n\n queue_handle.remove(&request_id)\n\n };\n\n if let Some(item) = item {\n\n if let EncodeState::EncodingDone { encoded_data, .. 
} = item.into_inner().state {\n\n return Ok(warp::reply::with_status(\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 28, "score": 50463.663423450686 }, { "content": " } else {\n\n return Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::NOT_FOUND,\n\n ));\n\n }\n\n item_handle.state = EncodeState::Ready {\n\n keyframe_number: keyframe_number_outer,\n\n next_analysis_frame: next_analysis_frame_outer,\n\n segment_idx: segment_idx_outer,\n\n raw_frames: Arc::new(frame_data),\n\n };\n\n return Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::OK,\n\n ));\n\n }\n\n\n\n Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 29, "score": 50463.22704120625 }, { "content": " let mut writer = BufWriter::new(file);\n\n for frame in compressed_frames {\n\n let frame_data = if item_handle.video_info.bit_depth == 8 {\n\n bincode::serialize(&decompress_frame::<u8>(&frame)).unwrap()\n\n } else {\n\n bincode::serialize(&decompress_frame::<u16>(&frame)).unwrap()\n\n };\n\n writer\n\n .write_u32::<LittleEndian>(frame_data.len() as u32)\n\n .unwrap();\n\n writer.write_all(&frame_data).unwrap();\n\n }\n\n writer.flush().unwrap();\n\n SegmentFrameData::Y4MFile {\n\n path: temp_path,\n\n frame_count,\n\n }\n\n }\n\n None => SegmentFrameData::CompressedFrames(compressed_frames),\n\n }\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 30, "score": 50462.16193184403 }, { "content": " {\n\n let mut response = Response::new(encoded_data.into());\n\n response.headers_mut().insert(\n\n CONTENT_TYPE,\n\n HeaderValue::from_static(\"application/octet-stream\"),\n\n );\n\n response\n\n },\n\n StatusCode::OK,\n\n ));\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 31, "score": 50460.81302148016 }, { "content": "use std::convert::Infallible;\n\n\n\nuse bcrypt::verify;\n\nuse serde::de::DeserializeOwned;\n\nuse warp::{\n\n http::StatusCode,\n\n reject::{MissingHeader, Reject},\n\n reply::{Json, WithStatus},\n\n Filter,\n\n Rejection,\n\n Reply,\n\n};\n\n\n\nuse crate::server::HASHED_SERVER_PASSWORD;\n\n\n", "file_path": "rav1e-worker/src/server/helpers.rs", "rank": 32, "score": 50456.97924878546 }, { "content": " warp::reply::with_status(warp::reply::json(&()), StatusCode::OK),\n\n ),\n\n _ => Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::GONE,\n\n )),\n\n }\n\n } else {\n\n Ok(warp::reply::with_status(\n\n warp::reply::json(&()),\n\n StatusCode::NOT_FOUND,\n\n ))\n\n }\n\n}\n\n\n\n// client hits this endpoint to say that they want to request a slot\n\n// returns a JSON body with a request ID\n\nasync fn post_enqueue(_auth: (), body: SlotRequestMessage) -> Result<impl Reply, Rejection> {\n\n if !CLIENT_VERSION_REQUIRED.matches(&body.client_version) {\n\n return Err(warp::reject::custom(ClientVersionMismatch));\n", "file_path": "rav1e-worker/src/server/routes.rs", "rank": 33, "score": 50454.9801337141 }, { "content": " if err.find::<InvalidAuthorization>().is_some() {\n\n return Ok(warp::reply::with_status(\n\n \"Incorrect server password\".to_string(),\n\n StatusCode::UNAUTHORIZED,\n\n ));\n\n }\n\n if let Some(err) = err.find::<MissingHeader>() {\n\n if err.name() == \"X-RAV1E-AUTH\" {\n\n return Ok(warp::reply::with_status(\n\n \"Password header not provided\".to_string(),\n\n StatusCode::UNAUTHORIZED,\n\n ));\n\n }\n\n }\n\n if err.find::<ClientVersionMismatch>().is_some() {\n\n return 
Ok(warp::reply::with_status(\n\n \"Client/server version mismatch\".to_string(),\n\n StatusCode::BAD_REQUEST,\n\n ));\n\n }\n\n\n\n Err(err)\n\n}\n\n\n", "file_path": "rav1e-worker/src/server/helpers.rs", "rank": 34, "score": 50452.60910817574 }, { "content": "\n\n let mut ctx: Context<T> = cfg.new_context()?;\n\n let _ = progress_sender.send(ProgressStatus::Encoding(Box::new(progress.clone())));\n\n\n\n let mut output = create_muxer(&segment_output_file).expect(\"Failed to create segment output\");\n\n loop {\n\n match process_frame(&mut ctx, &mut source)? {\n\n ProcessFrameResult::Packet(packet) => {\n\n output.write_frame(\n\n packet.input_frameno as u64,\n\n packet.data.as_ref(),\n\n packet.frame_type,\n\n );\n\n progress.add_packet(*packet);\n\n let _ = progress_sender.send(ProgressStatus::Encoding(Box::new(progress.clone())));\n\n }\n\n ProcessFrameResult::NoPacket(_) => {\n\n // Next iteration\n\n }\n\n ProcessFrameResult::EndOfSegment => {\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 35, "score": 48901.03524313479 }, { "content": "}\n\n\n\npub struct Source {\n\n pub sent_count: usize,\n\n pub frame_data: SourceFrameData,\n\n}\n\n\n\nimpl Source {\n\n fn read_frame<T: Pixel + DeserializeOwned>(&mut self, ctx: &mut Context<T>) {\n\n if self.sent_count == self.frame_count() {\n\n ctx.flush();\n\n return;\n\n }\n\n\n\n match self.frame_data {\n\n SourceFrameData::CompressedFrames(ref mut frames) => {\n\n let _ = ctx.send_frame(Some(Arc::new(decompress_frame(&frames[self.sent_count]))));\n\n // Deallocate the compressed frame from memory, we no longer need it\n\n frames[self.sent_count] = Vec::new();\n\n }\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 36, "score": 48899.36521925124 }, { "content": " },\n\n SegmentFrameData::CompressedFrames(frames) => {\n\n SourceFrameData::CompressedFrames(frames)\n\n }\n\n },\n\n sent_count: 0,\n\n };\n\n if video_info.bit_depth > 8 {\n\n do_encode::<u16>(\n\n rayon_pool,\n\n opts,\n\n video_info,\n\n source,\n\n &segment_output_file,\n\n progress,\n\n progress_sender,\n\n )\n\n .expect(\"Failed encoding segment\");\n\n } else {\n\n do_encode::<u8>(\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 37, "score": 48897.95115430595 }, { "content": " SourceFrameData::Y4MFile { ref mut input, .. } => {\n\n let _ = ctx.send_frame(Some(Arc::new(bincode::deserialize_from(input).unwrap())));\n\n }\n\n };\n\n self.sent_count += 1;\n\n }\n\n\n\n pub fn frame_count(&self) -> usize {\n\n match self.frame_data {\n\n SourceFrameData::CompressedFrames(ref frames) => frames.len(),\n\n SourceFrameData::Y4MFile { frame_count, .. 
} => frame_count,\n\n }\n\n }\n\n}\n\n\n\npub enum ProcessFrameResult<T: Pixel> {\n\n Packet(Box<Packet<T>>),\n\n NoPacket(bool),\n\n EndOfSegment,\n\n}\n\n\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 38, "score": 48896.884398475646 }, { "content": "pub mod stats;\n\n\n\nuse std::{collections::BTreeSet, fs, fs::File, io::BufReader, path::PathBuf, sync::Arc};\n\n\n\nuse anyhow::Result;\n\nuse crossbeam_channel::{Receiver, Sender};\n\nuse rav1e::prelude::*;\n\nuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\nuse systemstat::data::ByteSize;\n\nuse threadpool::ThreadPool;\n\n\n\npub use self::stats::*;\n\nuse super::VideoDetails;\n\nuse crate::{\n\n build_config,\n\n decompress_frame,\n\n muxer::create_muxer,\n\n Output,\n\n SegmentData,\n\n SegmentFrameData,\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 39, "score": 48896.85713094995 }, { "content": " kf.insert(data.start_frameno);\n\n kf\n\n },\n\n data.segment_no + 1,\n\n data.next_analysis_frame,\n\n None,\n\n );\n\n let _ = progress_sender.send(ProgressStatus::Encoding(Box::new(progress.clone())));\n\n\n\n thread_pool.execute(move || {\n\n let source = Source {\n\n frame_data: match data.frame_data {\n\n SegmentFrameData::Y4MFile { path, frame_count } => SourceFrameData::Y4MFile {\n\n frame_count,\n\n video_info,\n\n input: {\n\n let file = File::open(&path).unwrap();\n\n BufReader::new(file)\n\n },\n\n path,\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 40, "score": 48896.7883631633 }, { "content": " unreachable!();\n\n }\n\n Err(EncoderStatus::Encoded) => Ok(ProcessFrameResult::NoPacket(true)),\n\n }\n\n}\n\n\n\npub type ProgressSender = Sender<ProgressStatus>;\n\npub type ProgressReceiver = Receiver<ProgressStatus>;\n\npub type ProgressChannel = (ProgressSender, ProgressReceiver);\n\n\n\npub enum ProgressStatus {\n\n Idle,\n\n Loading,\n\n Compressing(usize),\n\n Sending(ByteSize),\n\n Encoding(Box<ProgressInfo>),\n\n}\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 41, "score": 48895.973898049466 }, { "content": " rayon_pool,\n\n opts,\n\n video_info,\n\n source,\n\n &segment_output_file,\n\n progress,\n\n progress_sender,\n\n )\n\n .expect(\"Failed encoding segment\");\n\n }\n\n });\n\n Ok(())\n\n}\n\n\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 42, "score": 48891.77598286395 }, { "content": " output.flush().unwrap();\n\n break;\n\n }\n\n };\n\n }\n\n if let SourceFrameData::Y4MFile { path, .. 
} = source.frame_data {\n\n fs::remove_file(&path).unwrap();\n\n }\n\n\n\n Ok(progress)\n\n}\n\n\n\npub enum SourceFrameData {\n\n CompressedFrames(Vec<Vec<u8>>),\n\n Y4MFile {\n\n path: PathBuf,\n\n input: BufReader<File>,\n\n frame_count: usize,\n\n video_info: VideoDetails,\n\n },\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 43, "score": 48891.71761941467 }, { "content": "};\n\n\n\n#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\npub struct EncodeOptions {\n\n pub speed: usize,\n\n pub qp: usize,\n\n pub max_bitrate: Option<i32>,\n\n pub tiles: usize,\n\n pub color_primaries: ColorPrimaries,\n\n pub transfer_characteristics: TransferCharacteristics,\n\n pub matrix_coefficients: MatrixCoefficients,\n\n}\n\n\n", "file_path": "rav1e-by-gop/src/encode/mod.rs", "rank": 44, "score": 48888.875593789024 }, { "content": " }\n\n\n\n for (&request_id, item) in reader.iter() {\n\n let mut item_handle = item.write();\n\n match item_handle.state {\n\n EncodeState::Enqueued if in_progress_items < worker_threads => {\n\n info!(\"A slot is ready for request {}\", request_id);\n\n item_handle.state = EncodeState::AwaitingInfo {\n\n time_ready: Utc::now(),\n\n };\n\n in_progress_items += 1;\n\n }\n\n EncodeState::Ready { ref raw_frames, .. } => {\n\n info!(\"Beginning encode for request {}\", request_id);\n\n let video_info = item_handle.video_info;\n\n let options = item_handle.options;\n\n let raw_frames = raw_frames.clone();\n\n let pool_handle = rayon_pool.clone();\n\n tokio::spawn(async move {\n\n if video_info.bit_depth <= 8 {\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 45, "score": 33309.9893249025 }, { "content": " _ => (),\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n\npub async fn encode_segment<T: Pixel + Default + Serialize + DeserializeOwned>(\n\n request_id: Uuid,\n\n video_info: VideoDetails,\n\n options: EncodeOptions,\n\n input: Arc<SegmentFrameData>,\n\n pool: Arc<rayon::ThreadPool>,\n\n) {\n\n {\n\n let queue_handle = ENCODER_QUEUE.read();\n\n let mut item_handle = queue_handle.get(&request_id).unwrap().write();\n\n item_handle.state = EncodeState::InProgress {\n\n progress: ProgressInfo::new(\n\n Rational::from_reciprocal(item_handle.video_info.time_base),\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 46, "score": 33309.229236952495 }, { "content": " match input.as_ref() {\n\n SegmentFrameData::CompressedFrames(frames) => frames.len(),\n\n SegmentFrameData::Y4MFile { frame_count, .. } => *frame_count,\n\n },\n\n {\n\n let mut keyframes = BTreeSet::new();\n\n keyframes.insert(match item_handle.state {\n\n EncodeState::Ready {\n\n keyframe_number, ..\n\n } => keyframe_number,\n\n _ => unreachable!(),\n\n });\n\n keyframes\n\n },\n\n match item_handle.state {\n\n EncodeState::Ready { segment_idx, .. 
} => segment_idx,\n\n _ => unreachable!(),\n\n },\n\n match item_handle.state {\n\n EncodeState::Ready {\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 47, "score": 33307.44604679171 }, { "content": " options.matrix_coefficients,\n\n );\n\n\n\n let mut ctx: Context<T> = match cfg.new_context() {\n\n Ok(ctx) => ctx,\n\n Err(e) => {\n\n error!(\n\n \"Failed to create encode context for request {}: {}\",\n\n request_id, e\n\n );\n\n return;\n\n }\n\n };\n\n let mut output = IvfMuxer::<Vec<u8>>::in_memory();\n\n loop {\n\n match process_frame(&mut ctx, &mut source) {\n\n Ok(ProcessFrameResult::Packet(packet)) => {\n\n let queue_handle = ENCODER_QUEUE.read();\n\n let mut item_handle = queue_handle.get(&request_id).unwrap().write();\n\n if let EncodeState::InProgress { ref mut progress } = item_handle.state {\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 48, "score": 33306.67626899206 }, { "content": " };\n\n return;\n\n }\n\n };\n\n }\n\n\n\n {\n\n let queue_handle = ENCODER_QUEUE.read();\n\n let mut item_handle = queue_handle.get(&request_id).unwrap().write();\n\n item_handle.state = if let EncodeState::InProgress { ref progress } = item_handle.state {\n\n EncodeState::EncodingDone {\n\n progress: progress.clone(),\n\n encoded_data: output.output,\n\n time_finished: Utc::now(),\n\n }\n\n } else {\n\n unreachable!()\n\n };\n\n }\n\n if let SourceFrameData::Y4MFile { path, .. } = source.frame_data {\n\n let _ = fs::remove_file(path);\n\n };\n\n info!(\"Segment {} finished\", request_id);\n\n}\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 49, "score": 33306.51003990682 }, { "content": " encode_segment::<u8>(\n\n request_id,\n\n video_info,\n\n options,\n\n raw_frames,\n\n pool_handle,\n\n )\n\n .await;\n\n } else {\n\n encode_segment::<u16>(\n\n request_id,\n\n video_info,\n\n options,\n\n raw_frames,\n\n pool_handle,\n\n )\n\n .await;\n\n }\n\n });\n\n }\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 50, "score": 33300.46906034726 }, { "content": " );\n\n\n\n // This thread watches the slot request queue\n\n // and allocates slots when they are available.\n\n tokio::spawn(async move {\n\n loop {\n\n sleep(Duration::from_secs(3)).await;\n\n\n\n let reader = ENCODER_QUEUE.read();\n\n let mut in_progress_items = 0;\n\n for item in reader.values() {\n\n match item.read().state {\n\n EncodeState::Enqueued => (),\n\n _ => {\n\n in_progress_items += 1;\n\n }\n\n }\n\n }\n\n if in_progress_items >= worker_threads {\n\n continue;\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 51, "score": 33299.70804787815 }, { "content": "use std::{collections::BTreeSet, fs, fs::File, io::BufReader, sync::Arc, time::Duration};\n\n\n\nuse chrono::Utc;\n\nuse log::{error, info};\n\nuse rav1e::prelude::*;\n\nuse rav1e_by_gop::*;\n\nuse serde::{de::DeserializeOwned, Serialize};\n\nuse tokio::time::sleep;\n\nuse uuid::Uuid;\n\nuse v_frame::pixel::Pixel;\n\n\n\nuse crate::ENCODER_QUEUE;\n\n\n\npub async fn start_workers(worker_threads: usize) {\n\n info!(\"Starting {} workers\", worker_threads);\n\n let rayon_pool = Arc::new(\n\n rayon::ThreadPoolBuilder::new()\n\n .num_threads(worker_threads)\n\n .build()\n\n .unwrap(),\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 52, "score": 33298.1665904 }, { "content": " next_analysis_frame,\n\n ..\n\n } => next_analysis_frame,\n\n _ => unreachable!(),\n\n },\n\n None,\n\n ),\n\n };\n\n }\n\n\n\n let mut source = Source {\n\n frame_data: match Arc::try_unwrap(input) {\n\n Ok(SegmentFrameData::CompressedFrames(input)) => {\n\n 
SourceFrameData::CompressedFrames(input)\n\n }\n\n Ok(SegmentFrameData::Y4MFile {\n\n frame_count,\n\n ref path,\n\n }) => SourceFrameData::Y4MFile {\n\n frame_count,\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 53, "score": 33297.824264416195 }, { "content": " output.write_frame(\n\n packet.input_frameno as u64,\n\n packet.data.as_ref(),\n\n packet.frame_type,\n\n );\n\n progress.add_packet(*packet);\n\n } else {\n\n unreachable!();\n\n }\n\n }\n\n Ok(ProcessFrameResult::NoPacket(_)) => {\n\n // Next iteration\n\n }\n\n Ok(ProcessFrameResult::EndOfSegment) => {\n\n break;\n\n }\n\n Err(e) => {\n\n error!(\"Encoding error for request {}: {}\", request_id, e);\n\n if let SourceFrameData::Y4MFile { path, .. } = source.frame_data {\n\n let _ = fs::remove_file(path);\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 54, "score": 33297.63292732298 }, { "content": " input: {\n\n let file = File::open(&path).unwrap();\n\n BufReader::new(file)\n\n },\n\n path: path.to_path_buf(),\n\n video_info,\n\n },\n\n Err(_) => unreachable!(\"input should only have one reference at this point\"),\n\n },\n\n sent_count: 0,\n\n };\n\n let cfg = build_config(\n\n options.speed,\n\n options.qp,\n\n options.max_bitrate,\n\n options.tiles,\n\n video_info,\n\n pool,\n\n options.color_primaries,\n\n options.transfer_characteristics,\n", "file_path": "rav1e-worker/src/worker.rs", "rank": 55, "score": 33288.65401220192 }, { "content": "pub use client::*;\n\npub use server::*;\n\n\n\nmod client;\n\nmod server;\n", "file_path": "rav1e-by-gop/src/remote/mod.rs", "rank": 60, "score": 25975.910451979857 }, { "content": "// Copyright (c) 2017-2019, The rav1e contributors. All rights reserved\n\n//\n\n// This source code is subject to the terms of the BSD 2 Clause License and\n\n// the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License\n\n// was not distributed with this source code in the LICENSE file, you can\n\n// obtain it at www.aomedia.org/license/software. 
If the Alliance for Open\n\n// Media Patent License 1.0 was not distributed with this source code in the\n\n// PATENTS file, you can obtain it at www.aomedia.org/license/patent.\n\n\n\nmod ivf;\n\n\n\nuse std::{\n\n ffi::OsStr,\n\n fs::File,\n\n io,\n\n io::{BufWriter, Sink},\n\n};\n\n\n\nuse anyhow::Result;\n\nuse rav1e::prelude::*;\n\n\n\npub use self::ivf::IvfMuxer;\n\nuse crate::Output;\n\n\n", "file_path": "rav1e-by-gop/src/muxer/mod.rs", "rank": 65, "score": 25967.346347228086 }, { "content": "use std::sync::Arc;\n\n\n\nuse chrono::{DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n\nuse uuid::Uuid;\n\n\n\nuse crate::{ProgressInfo, SegmentFrameData, SerializableProgressInfo};\n\n\n\n#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\npub struct GetInfoResponse {\n\n pub worker_count: usize,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\npub struct PostEnqueueResponse {\n\n pub request_id: Uuid,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct GetProgressResponse {\n", "file_path": "rav1e-by-gop/src/remote/server.rs", "rank": 66, "score": 25635.816605630764 }, { "content": " pub progress: SerializableProgressInfo,\n\n pub done: bool,\n\n}\n\n\n\npub enum EncodeState {\n\n Enqueued,\n\n AwaitingInfo {\n\n time_ready: DateTime<Utc>,\n\n },\n\n AwaitingData {\n\n keyframe_number: usize,\n\n segment_idx: usize,\n\n next_analysis_frame: usize,\n\n time_ready: DateTime<Utc>,\n\n },\n\n Ready {\n\n keyframe_number: usize,\n\n segment_idx: usize,\n\n next_analysis_frame: usize,\n\n raw_frames: Arc<SegmentFrameData>,\n", "file_path": "rav1e-by-gop/src/remote/server.rs", "rank": 67, "score": 25635.367745354357 }, { "content": " },\n\n InProgress {\n\n progress: ProgressInfo,\n\n },\n\n EncodingDone {\n\n progress: ProgressInfo,\n\n encoded_data: Vec<u8>,\n\n time_finished: DateTime<Utc>,\n\n },\n\n}\n", "file_path": "rav1e-by-gop/src/remote/server.rs", "rank": 68, "score": 25624.957464619314 }, { "content": " // Wall encoding time elapsed so far, in seconds\n\n #[serde(default)]\n\n pub elapsed_time: u64,\n\n #[serde(default)]\n\n pub encoding_stats: (SerializableEncoderStats, SerializableEncoderStats),\n\n pub total_frames: usize,\n\n #[serde(default)]\n\n pub frame_limit: Option<u64>,\n\n #[serde(default)]\n\n pub segment_idx: usize,\n\n}\n\n\n\nimpl From<&ProgressInfo> for SerializableProgressInfo {\n\n fn from(other: &ProgressInfo) -> Self {\n\n SerializableProgressInfo {\n\n frame_rate: (other.frame_rate.num, other.frame_rate.den),\n\n frame_info: other\n\n .frame_info\n\n .iter()\n\n .map(SerializableFrameSummary::from)\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 69, "score": 22941.29579637613 }, { "content": " pub time_started: Instant,\n\n // List of frames encoded so far\n\n pub frame_info: Vec<FrameSummary>,\n\n // Summarized verbose encoding stats, split into I and P frames\n\n pub encoding_stats: (EncoderStats, EncoderStats),\n\n // Video size so far in bytes.\n\n //\n\n // This value will be updated in the CLI very frequently, so we cache the previous value\n\n // to reduce the overall complexity.\n\n pub encoded_size: usize,\n\n // The below are used for resume functionality\n\n pub keyframes: BTreeSet<usize>,\n\n pub completed_segments: BTreeSet<usize>,\n\n pub segment_idx: usize,\n\n pub next_analysis_frame: usize,\n\n pub frame_limit: Option<u64>,\n\n}\n\n\n\nimpl ProgressInfo {\n\n pub fn new(\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 70, "score": 22940.4434803701 }, { "content": " 
.collect(),\n\n encoded_size: other.encoded_size,\n\n keyframes: other.keyframes.clone(),\n\n next_analysis_frame: other.next_analysis_frame,\n\n completed_segments: other.completed_segments.clone(),\n\n elapsed_time: other.elapsed_time() as u64,\n\n encoding_stats: (\n\n (&other.encoding_stats.0).into(),\n\n (&other.encoding_stats.1).into(),\n\n ),\n\n total_frames: other.total_frames,\n\n frame_limit: other.frame_limit,\n\n segment_idx: other.segment_idx,\n\n }\n\n }\n\n}\n\n\n\nimpl From<&SerializableProgressInfo> for ProgressInfo {\n\n fn from(other: &SerializableProgressInfo) -> Self {\n\n ProgressInfo {\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 71, "score": 22938.80149917346 }, { "content": "use std::{\n\n collections::BTreeSet,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse arrayvec::ArrayVec;\n\n#[cfg(feature = \"binary\")]\n\nuse console::style;\n\n#[cfg(feature = \"binary\")]\n\nuse log::info;\n\nuse rav1e::{data::EncoderStats, prelude::*};\n\nuse serde::{Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ProgressInfo {\n\n // Frame rate of the video\n\n pub frame_rate: Rational,\n\n // The length of the whole video, in frames. `None` if not known.\n\n pub total_frames: usize,\n\n // The time the encode was started\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 72, "score": 22936.167579991612 }, { "content": " }\n\n\n\n pub fn add_packet<T: Pixel>(&mut self, packet: Packet<T>) {\n\n self.encoded_size += packet.data.len();\n\n match packet.frame_type {\n\n FrameType::KEY => self.encoding_stats.0 += &packet.enc_stats,\n\n _ => self.encoding_stats.1 += &packet.enc_stats,\n\n };\n\n self.frame_info.push(packet.into());\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn frames_encoded(&self) -> usize {\n\n self.frame_info.len()\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn encoding_fps(&self) -> f64 {\n\n self.frame_info.len() as f64 / self.elapsed_time()\n\n }\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 73, "score": 22935.20806942215 }, { "content": " /// Frame size in bytes\n\n pub size: usize,\n\n pub frame_type: FrameType,\n\n /// QP selected for the frame.\n\n pub qp: u8,\n\n}\n\n\n\nimpl<T: Pixel> From<Packet<T>> for FrameSummary {\n\n fn from(packet: Packet<T>) -> Self {\n\n Self {\n\n size: packet.data.len(),\n\n frame_type: packet.frame_type,\n\n qp: packet.qp,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\npub struct SerializableFrameSummary {\n\n pub size: usize,\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 74, "score": 22934.094227713096 }, { "content": " frame_rate: Rational,\n\n total_frames: usize,\n\n keyframes: BTreeSet<usize>,\n\n segment_idx: usize,\n\n next_analysis_frame: usize,\n\n frame_limit: Option<u64>,\n\n ) -> Self {\n\n Self {\n\n frame_rate,\n\n total_frames,\n\n time_started: Instant::now(),\n\n frame_info: Vec::with_capacity(total_frames),\n\n encoded_size: 0,\n\n keyframes,\n\n completed_segments: BTreeSet::new(),\n\n segment_idx,\n\n encoding_stats: (EncoderStats::default(), EncoderStats::default()),\n\n next_analysis_frame,\n\n frame_limit,\n\n }\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 75, "score": 22934.06171715865 }, { "content": " frame_rate: Rational::new(other.frame_rate.0, other.frame_rate.1),\n\n frame_info: other.frame_info.iter().map(FrameSummary::from).collect(),\n\n encoded_size: other.encoded_size,\n\n keyframes: other.keyframes.clone(),\n\n next_analysis_frame: 
other.next_analysis_frame,\n\n completed_segments: other.completed_segments.clone(),\n\n time_started: Instant::now() - Duration::from_secs(other.elapsed_time),\n\n segment_idx: other.segment_idx,\n\n encoding_stats: (\n\n (&other.encoding_stats.0).into(),\n\n (&other.encoding_stats.1).into(),\n\n ),\n\n total_frames: other.total_frames,\n\n frame_limit: other.frame_limit,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct FrameSummary {\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 76, "score": 22932.367168132158 }, { "content": " pub frame_type: u8,\n\n pub qp: u8,\n\n}\n\n\n\nimpl From<&FrameSummary> for SerializableFrameSummary {\n\n fn from(summary: &FrameSummary) -> Self {\n\n SerializableFrameSummary {\n\n size: summary.size,\n\n frame_type: summary.frame_type as u8,\n\n qp: summary.qp,\n\n }\n\n }\n\n}\n\n\n\nimpl From<&SerializableFrameSummary> for FrameSummary {\n\n fn from(summary: &SerializableFrameSummary) -> Self {\n\n FrameSummary {\n\n size: summary.size,\n\n frame_type: match summary.frame_type {\n\n 0 => FrameType::KEY,\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 77, "score": 22932.0989370673 }, { "content": " &self,\n\n pred_mode: PredictionMode,\n\n frame_type: FrameType,\n\n ) -> f32 {\n\n let count = self.get_chroma_pred_count_by_frame_type(frame_type);\n\n if count == 0 {\n\n return 0.;\n\n }\n\n (match frame_type {\n\n FrameType::KEY => self.encoding_stats.0.chroma_pred_mode_counts[pred_mode as usize],\n\n FrameType::INTER => self.encoding_stats.1.chroma_pred_mode_counts[pred_mode as usize],\n\n _ => unreachable!(),\n\n }) as f32\n\n / count as f32\n\n * 100.\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n pub fn print_summary(&self, verbose: bool) {\n\n info!(\"{}\", self.end_of_encode_progress());\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 78, "score": 22931.5300338428 }, { "content": " style(frame_type.to_string().replace(\" frame\", \"\")).blue(),\n\n style(count).cyan(),\n\n style(format!(\"{} B\", size)).cyan(),\n\n style(avg_qp).cyan(),\n\n );\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n pub fn progress(&self) -> String {\n\n format!(\n\n \"{:.2} fps, {:.1} Kb/s, ETA {}\",\n\n self.encoding_fps(),\n\n self.bitrate() as f64 / 1000f64,\n\n secs_to_human_time(self.estimated_time(self.total_frames), false)\n\n )\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n pub fn progress_overall(&self) -> String {\n\n if self.frames_encoded() == 0 {\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 79, "score": 22929.622386318184 }, { "content": " pub tx_type_counts: [usize; TX_TYPES],\n\n /// Stores count of pixels belonging to each luma prediction mode in this frame\n\n pub luma_pred_mode_counts: ArrayVec<[usize; PREDICTION_MODES]>,\n\n /// Stores count of pixels belonging to each chroma prediction mode in this frame\n\n pub chroma_pred_mode_counts: ArrayVec<[usize; PREDICTION_MODES]>,\n\n}\n\n\n\nimpl From<&EncoderStats> for SerializableEncoderStats {\n\n fn from(stats: &EncoderStats) -> Self {\n\n SerializableEncoderStats {\n\n block_size_counts: stats.block_size_counts,\n\n skip_block_count: stats.skip_block_count,\n\n tx_type_counts: stats.tx_type_counts,\n\n luma_pred_mode_counts: stats.luma_pred_mode_counts.clone(),\n\n chroma_pred_mode_counts: stats.chroma_pred_mode_counts.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<&SerializableEncoderStats> for EncoderStats {\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 80, "score": 22929.433560431753 }, { "content": "impl<'de> 
Deserialize<'de> for FrameSummary {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let de = SerializableFrameSummary::deserialize(deserializer)?;\n\n Ok(FrameSummary::from(&de))\n\n }\n\n}\n\n\n\npub const TX_TYPES: usize = 16;\n\npub const PREDICTION_MODES: usize = 34;\n\n\n\n#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct SerializableEncoderStats {\n\n /// Stores count of pixels belonging to each block size in this frame\n\n pub block_size_counts: [usize; BlockSize::BLOCK_SIZES_ALL],\n\n /// Stores count of pixels belonging to skip blocks in this frame\n\n pub skip_block_count: usize,\n\n /// Stores count of pixels belonging to each transform type in this frame\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 81, "score": 22929.392715612492 }, { "content": " self.frame_info\n\n .iter()\n\n .filter(|frame| frame.frame_type == frame_type)\n\n .map(|frame| frame.qp as f32)\n\n .sum::<f32>()\n\n / count as f32\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn get_block_count_by_frame_type(&self, frame_type: FrameType) -> usize {\n\n match frame_type {\n\n FrameType::KEY => self\n\n .encoding_stats\n\n .0\n\n .block_size_counts\n\n .iter()\n\n .sum::<usize>(),\n\n FrameType::INTER => self\n\n .encoding_stats\n\n .1\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 82, "score": 22928.45724988534 }, { "content": "\n\n #[cfg(feature = \"binary\")]\n\n fn video_fps(&self) -> f64 {\n\n self.frame_rate.num as f64 / self.frame_rate.den as f64\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n // Returns the bitrate of the frames so far, in bits/second\n\n fn bitrate(&self) -> usize {\n\n let bits = self.encoded_size * 8;\n\n let seconds = self.frame_info.len() as f64 / self.video_fps();\n\n (bits as f64 / seconds) as usize\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n // Estimates the final filesize in bytes, if the number of frames is known\n\n fn estimated_size(&self) -> usize {\n\n self.encoded_size * self.total_frames / self.frames_encoded()\n\n }\n\n\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 83, "score": 22928.29009358655 }, { "content": " #[cfg(feature = \"binary\")]\n\n // Estimates the remaining encoding time in seconds\n\n fn estimated_time(&self, total_frames: usize) -> u64 {\n\n ((total_frames - self.frames_encoded()) as f64 / self.encoding_fps()) as u64\n\n }\n\n\n\n pub fn elapsed_time(&self) -> f64 {\n\n let duration = Instant::now().duration_since(self.time_started);\n\n duration.as_secs() as f64 + duration.subsec_millis() as f64 / 1000f64\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n // Number of frames of given type which appear in the video\n\n fn get_frame_type_count(&self, frame_type: FrameType) -> usize {\n\n self.frame_info\n\n .iter()\n\n .filter(|frame| frame.frame_type == frame_type)\n\n .count()\n\n }\n\n\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 84, "score": 22927.965793002673 }, { "content": " self.frames_encoded(),\n\n max_frames,\n\n self.encoding_fps(),\n\n self.bitrate() as f64 / 1000f64,\n\n secs_to_human_time(self.estimated_time(max_frames as usize), false)\n\n )\n\n } else {\n\n format!(\n\n \"Input Frame {}, Output Frame {}, {:.2} fps, {:.1} Kb/s\",\n\n self.next_analysis_frame + 1,\n\n self.frames_encoded(),\n\n self.encoding_fps(),\n\n self.bitrate() as f64 / 1000f64,\n\n )\n\n }\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn end_of_encode_progress(&self) -> String {\n\n format!(\n", "file_path": 
"rav1e-by-gop/src/encode/stats.rs", "rank": 85, "score": 22927.702384921096 }, { "content": " return 0.;\n\n }\n\n (match frame_type {\n\n FrameType::KEY => self.encoding_stats.0.block_size_counts[bsize as usize],\n\n FrameType::INTER => self.encoding_stats.1.block_size_counts[bsize as usize],\n\n _ => unreachable!(),\n\n }) as f32\n\n / count as f32\n\n * 100.\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn get_skip_pct_by_frame_type(&self, frame_type: FrameType) -> f32 {\n\n let count = self.get_block_count_by_frame_type(frame_type);\n\n if count == 0 {\n\n return 0.;\n\n }\n\n (match frame_type {\n\n FrameType::KEY => self.encoding_stats.0.skip_block_count,\n\n FrameType::INTER => self.encoding_stats.1.skip_block_count,\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 86, "score": 22927.59019114207 }, { "content": "\n\n #[cfg(feature = \"binary\")]\n\n fn get_luma_pred_count_by_frame_type(&self, frame_type: FrameType) -> usize {\n\n match frame_type {\n\n FrameType::KEY => self\n\n .encoding_stats\n\n .0\n\n .luma_pred_mode_counts\n\n .iter()\n\n .sum::<usize>(),\n\n FrameType::INTER => self\n\n .encoding_stats\n\n .1\n\n .luma_pred_mode_counts\n\n .iter()\n\n .sum::<usize>(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 87, "score": 22926.82694330136 }, { "content": " #[cfg(feature = \"binary\")]\n\n fn get_chroma_pred_count_by_frame_type(&self, frame_type: FrameType) -> usize {\n\n match frame_type {\n\n FrameType::KEY => self\n\n .encoding_stats\n\n .0\n\n .chroma_pred_mode_counts\n\n .iter()\n\n .sum::<usize>(),\n\n FrameType::INTER => self\n\n .encoding_stats\n\n .1\n\n .chroma_pred_mode_counts\n\n .iter()\n\n .sum::<usize>(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 88, "score": 22926.684114169566 }, { "content": " _ => unreachable!(),\n\n }) as f32\n\n / count as f32\n\n * 100.\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn get_txtype_pct_by_frame_type(&self, tx_type: TxType, frame_type: FrameType) -> f32 {\n\n let count = self.get_tx_count_by_frame_type(frame_type);\n\n if count == 0 {\n\n return 0.;\n\n }\n\n (match frame_type {\n\n FrameType::KEY => self.encoding_stats.0.tx_type_counts[tx_type as usize],\n\n FrameType::INTER => self.encoding_stats.1.tx_type_counts[tx_type as usize],\n\n _ => unreachable!(),\n\n }) as f32\n\n / count as f32\n\n * 100.\n\n }\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 89, "score": 22926.259815515397 }, { "content": " fn get_luma_pred_mode_pct_by_frame_type(\n\n &self,\n\n pred_mode: PredictionMode,\n\n frame_type: FrameType,\n\n ) -> f32 {\n\n let count = self.get_luma_pred_count_by_frame_type(frame_type);\n\n if count == 0 {\n\n return 0.;\n\n }\n\n (match frame_type {\n\n FrameType::KEY => self.encoding_stats.0.luma_pred_mode_counts[pred_mode as usize],\n\n FrameType::INTER => self.encoding_stats.1.luma_pred_mode_counts[pred_mode as usize],\n\n _ => unreachable!(),\n\n }) as f32\n\n / count as f32\n\n * 100.\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn get_chroma_pred_mode_pct_by_frame_type(\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 90, "score": 22926.042481642216 }, { "content": " .block_size_counts\n\n .iter()\n\n .sum::<usize>(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn get_tx_count_by_frame_type(&self, frame_type: FrameType) -> usize {\n\n match frame_type {\n\n FrameType::KEY => 
self.encoding_stats.0.tx_type_counts.iter().sum::<usize>(),\n\n FrameType::INTER => self.encoding_stats.1.tx_type_counts.iter().sum::<usize>(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn get_bsize_pct_by_frame_type(&self, bsize: BlockSize, frame_type: FrameType) -> f32 {\n\n let count = self.get_block_count_by_frame_type(frame_type);\n\n if count == 0 {\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 91, "score": 22926.015818396645 }, { "content": " \"Encoded {} in {}, {:.3} fps, {:.2} Kb/s, size: {:.2} MB\",\n\n style(format!(\"{} frames\", self.total_frames)).yellow(),\n\n style(secs_to_human_time(self.elapsed_time() as u64, true)).cyan(),\n\n self.encoding_fps(),\n\n self.bitrate() as f64 / 1000f64,\n\n self.estimated_size() as f64 / (1024 * 1024) as f64,\n\n )\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn print_block_type_summary(&self) {\n\n self.print_block_type_summary_for_frame_type(FrameType::KEY, 'I');\n\n self.print_block_type_summary_for_frame_type(FrameType::INTER, 'P');\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn print_block_type_summary_for_frame_type(&self, frame_type: FrameType, type_label: char) {\n\n info!(\n\n \"{:8} {:>6} {:>6} {:>6} {:>6} {:>6} {:>6}\",\n\n style(format!(\"{} Frames\", type_label)).yellow(),\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 92, "score": 22925.64642954935 }, { "content": " self.print_block_type_summary();\n\n info!(\"\");\n\n\n\n info!(\"{}\", style(\"Transform Type Usage\").yellow());\n\n self.print_transform_type_summary();\n\n info!(\"\");\n\n\n\n info!(\"{}\", style(\"Prediction Mode Usage\").yellow());\n\n self.print_prediction_modes_summary();\n\n info!(\"\");\n\n }\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn print_frame_type_summary(&self, frame_type: FrameType) {\n\n let count = self.get_frame_type_count(frame_type);\n\n let size = self.get_frame_type_avg_size(frame_type);\n\n let avg_qp = self.get_frame_type_avg_qp(frame_type);\n\n info!(\n\n \"{:10} | {:>6} | {:>9} | {:>6.2}\",\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 93, "score": 22925.39524190619 }, { "content": "\n\n info!(\"\");\n\n\n\n info!(\"{}\", style(\"Summary by Frame Type\").yellow());\n\n info!(\n\n \"{:10} | {:>6} | {:>9} | {:>6}\",\n\n style(\"Frame Type\").blue(),\n\n style(\"Count\").blue(),\n\n style(\"Avg Size\").blue(),\n\n style(\"Avg QP\").blue(),\n\n );\n\n self.print_frame_type_summary(FrameType::KEY);\n\n self.print_frame_type_summary(FrameType::INTER);\n\n self.print_frame_type_summary(FrameType::INTRA_ONLY);\n\n self.print_frame_type_summary(FrameType::SWITCH);\n\n\n\n info!(\"\");\n\n\n\n if verbose {\n\n info!(\"{}\", style(\"Block Type Usage\").yellow());\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 94, "score": 22925.37702306299 }, { "content": " &self,\n\n frame_type: FrameType,\n\n type_label: char,\n\n ) {\n\n info!(\n\n \"{}\",\n\n style(format!(\"{} Frame Luma Modes\", type_label)).yellow()\n\n );\n\n if frame_type == FrameType::KEY {\n\n info!(\n\n \"{:8} {:>5.1}%\",\n\n style(\"DC\").blue(),\n\n style(\n\n self.get_luma_pred_mode_pct_by_frame_type(PredictionMode::DC_PRED, frame_type)\n\n )\n\n .cyan()\n\n );\n\n\n\n info!(\n\n \"{:8} {:>5.1}%\",\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 95, "score": 22925.21205810569 }, { "content": " if let Some(max_frames) = self.frame_limit {\n\n format!(\n\n \"Input Frame {}/{}, Output Frame {}/{}\",\n\n self.next_analysis_frame + 1,\n\n max_frames,\n\n 
self.frames_encoded(),\n\n max_frames,\n\n )\n\n } else {\n\n format!(\n\n \"Input Frame {}, Output Frame {}\",\n\n self.next_analysis_frame + 1,\n\n self.frames_encoded(),\n\n )\n\n }\n\n } else if let Some(max_frames) = self.frame_limit {\n\n format!(\n\n \"Input Frame {}/{}, Output Frame {}/{}, {:.2} fps, {:.1} Kb/s, ETA {}\",\n\n self.next_analysis_frame + 1,\n\n max_frames,\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 96, "score": 22925.101074909588 }, { "content": " frame_type\n\n ))\n\n .cyan()\n\n );\n\n }\n\n }\n\n\n\n #[cfg(feature = \"binary\")]\n\n fn print_chroma_prediction_mode_summary_by_frame_type(\n\n &self,\n\n frame_type: FrameType,\n\n type_label: char,\n\n ) {\n\n info!(\n\n \"{}\",\n\n style(format!(\"{} Frame Chroma Modes\", type_label)).yellow()\n\n );\n\n if frame_type == FrameType::KEY {\n\n info!(\n\n \"{:8} {:>5.1}%\",\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 97, "score": 22925.015597728758 }, { "content": " style(\"ADST_DCT\").blue(),\n\n style(self.get_txtype_pct_by_frame_type(TxType::ADST_DCT, frame_type)).cyan()\n\n );\n\n info!(\n\n \"{:9} {:>5.1}%\",\n\n style(\"DCT_ADST\").blue(),\n\n style(self.get_txtype_pct_by_frame_type(TxType::DCT_ADST, frame_type)).cyan()\n\n );\n\n info!(\n\n \"{:9} {:>5.1}%\",\n\n style(\"ADST_ADST\").blue(),\n\n style(self.get_txtype_pct_by_frame_type(TxType::ADST_ADST, frame_type)).cyan()\n\n );\n\n info!(\n\n \"{:9} {:>5.1}%\",\n\n style(\"IDTX\").blue(),\n\n style(self.get_txtype_pct_by_frame_type(TxType::IDTX, frame_type)).cyan()\n\n );\n\n info!(\n\n \"{:9} {:>5.1}%\",\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 98, "score": 22924.883746177835 }, { "content": " info!(\n\n \"{:15} {:>5.1}%\",\n\n style(\"Near-Near-1\").blue(),\n\n style(self.get_chroma_pred_mode_pct_by_frame_type(\n\n PredictionMode::NEAR_NEAR1MV,\n\n frame_type\n\n ))\n\n .cyan()\n\n );\n\n info!(\n\n \"{:15} {:>5.1}%\",\n\n style(\"Near-Near-2\").blue(),\n\n style(self.get_chroma_pred_mode_pct_by_frame_type(\n\n PredictionMode::NEAR_NEAR2MV,\n\n frame_type\n\n ))\n\n .cyan()\n\n );\n\n info!(\n\n \"{:15} {:>5.1}%\",\n", "file_path": "rav1e-by-gop/src/encode/stats.rs", "rank": 99, "score": 22924.838994300815 } ]
Rust
rust-runtime/aws-smithy-http-server/src/routing/tiny_map.rs
eduardomourar/smithy-rs
817bf68e69da1d1ef14f8e79a27ec39a6d92bbad
/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0
 */

use std::{borrow::Borrow, collections::HashMap, hash::Hash};

#[derive(Clone, Debug)]
pub struct TinyMap<K, V, const CUTOFF: usize> {
    inner: TinyMapInner<K, V, CUTOFF>,
}

#[derive(Clone, Debug)]
enum TinyMapInner<K, V, const CUTOFF: usize> {
    Vec(Vec<(K, V)>),
    HashMap(HashMap<K, V>),
}

enum OrIterator<Left, Right> {
    Left(Left),
    Right(Right),
}

impl<Left, Right> Iterator for OrIterator<Left, Right>
where
    Left: Iterator,
    Right: Iterator<Item = Left::Item>,
{
    type Item = Left::Item;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            Self::Left(left) => left.next(),
            Self::Right(right) => right.next(),
        }
    }
}

pub struct IntoIter<K, V> {
    inner: OrIterator<std::vec::IntoIter<(K, V)>, std::collections::hash_map::IntoIter<K, V>>,
}

impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (K, V);

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}

impl<K, V, const CUTOFF: usize> IntoIterator for TinyMap<K, V, CUTOFF> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    fn into_iter(self) -> Self::IntoIter {
        let inner = match self.inner {
            TinyMapInner::Vec(vec) => OrIterator::Left(vec.into_iter()),
            TinyMapInner::HashMap(hash_map) => OrIterator::Right(hash_map.into_iter()),
        };
        IntoIter { inner }
    }
}

impl<K, V, const CUTOFF: usize> FromIterator<(K, V)> for TinyMap<K, V, CUTOFF>
where
    K: Hash + Eq,
{
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut vec = Vec::with_capacity(CUTOFF);
        let mut iter = iter.into_iter().enumerate();

        while let Some((index, pair)) = iter.next() {
            if index == CUTOFF {
                let inner = TinyMapInner::HashMap(vec.into_iter().chain(iter.map(|(_, pair)| pair)).collect());
                return TinyMap { inner };
            }
            vec.push(pair);
        }

        TinyMap {
            inner: TinyMapInner::Vec(vec),
        }
    }
}

impl<K, V, const CUTOFF: usize> TinyMap<K, V, CUTOFF>
where
    K: Eq + Hash,
{
    pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        match &self.inner {
            TinyMapInner::Vec(vec) => vec
                .iter()
                .find(|(key_inner, _)| key_inner.borrow() == key)
                .map(|(_, value)| value),
            TinyMapInner::HashMap(hash_map) => hash_map.get(key),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    const CUTOFF: usize = 5;

    const SMALL_VALUES: [(&'static str, usize); 3] = [("a", 0), ("b", 1), ("c", 2)];
    const MEDIUM_VALUES: [(&'static str, usize); 5] = [("a", 0), ("b", 1), ("c", 2), ("d", 3), ("e", 4)];
    const LARGE_VALUES: [(&'static str, usize); 10] = [
        ("a", 0),
        ("b", 1),
        ("c", 2),
        ("d", 3),
        ("e", 4),
        ("f", 5),
        ("g", 6),
        ("h", 7),
        ("i", 8),
        ("j", 9),
    ];

    #[test]
    fn collect_small() {
        let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect();
        assert!(matches!(tiny_map.inner, TinyMapInner::Vec(_)))
    }

    #[test]
    fn collect_medium() {
        let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect();
        assert!(matches!(tiny_map.inner, TinyMapInner::Vec(_)))
    }

    #[test]
    fn collect_large() {
        let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect();
        assert!(matches!(tiny_map.inner, TinyMapInner::HashMap(_)))
    }

    #[test]
    fn get_small_success() {
        let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect();
        assert_eq!(tiny_map.get("a"), Some(&0))
    }

    #[test]
    fn get_medium_success() {
        let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect();
        assert_eq!(tiny_map.get("d"), Some(&3))
    }

    #[test]
    fn get_large_success() {
        let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect();
        assert_eq!(tiny_map.get("h"), Some(&7))
    }

    #[test]
    fn get_small_fail() {
        let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect();
        assert_eq!(tiny_map.get("x"), None)
    }

    #[test]
    fn get_medium_fail() {
        let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect();
        assert_eq!(tiny_map.get("y"), None)
    }

    #[test]
    fn get_large_fail() {
        let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect();
        assert_eq!(tiny_map.get("z"), None)
    }
}
/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0
 */

use std::{borrow::Borrow, collections::HashMap, hash::Hash};

#[derive(Clone, Debug)]
pub struct TinyMap<K, V, const CUTOFF: usize> {
    inner: TinyMapInner<K, V, CUTOFF>,
}

#[derive(Clone, Debug)]
enum TinyMapInner<K, V, const CUTOFF: usize> {
    Vec(Vec<(K, V)>),
    HashMap(HashMap<K, V>),
}

enum OrIterator<Left, Right> {
    Left(Left),
    Right(Right),
}

impl<Left, Right> Iterator for OrIterator<Left, Right>
where
    Left: Iterator,
    Right: Iterator<Item = Left::Item>,
{
    type Item = Left::Item;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            Self::Left(left) => left.next(),
            Self::Right(right) => right.next(),
        }
    }
}

pub struct IntoIter<K, V> {
    inner: OrIterator<std::vec::IntoIter<(K, V)>, std::collections::hash_map::IntoIter<K, V>>,
}

impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (K, V);

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}

impl<K, V, const CUTOFF: usize> IntoIterator for
("c", 2), ("d", 3), ("e", 4)]; const LARGE_VALUES: [(&'static str, usize); 10] = [ ("a", 0), ("b", 1), ("c", 2), ("d", 3), ("e", 4), ("f", 5), ("g", 6), ("h", 7), ("i", 8), ("j", 9), ]; #[test] fn collect_small() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::Vec(_))) } #[test] fn collect_medium() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::Vec(_))) } #[test] fn collect_large() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert!(matches!(tiny_map.inner, TinyMapInner::HashMap(_))) } #[test] fn get_small_success() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("a"), Some(&0)) } #[test] fn get_medium_success() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("d"), Some(&3)) } #[test] fn get_large_success() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("h"), Some(&7)) } #[test] fn get_small_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = SMALL_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("x"), None) } #[test] fn get_medium_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = MEDIUM_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("y"), None) } #[test] fn get_large_fail() { let tiny_map: TinyMap<_, _, CUTOFF> = LARGE_VALUES.into_iter().collect(); assert_eq!(tiny_map.get("z"), None) } }
TinyMap<K, V, CUTOFF> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;

    fn into_iter(self) -> Self::IntoIter {
        let inner = match self.inner {
            TinyMapInner::Vec(vec) => OrIterator::Left(vec.into_iter()),
            TinyMapInner::HashMap(hash_map) => OrIterator::Right(hash_map.into_iter()),
        };
        IntoIter { inner }
    }
}

impl<K, V, const CUTOFF: usize> FromIterator<(K, V)> for TinyMap<K, V, CUTOFF>
where
    K: Hash + Eq,
{
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut vec = Vec::with_capacity(CUTOFF);
        let mut iter = iter.into_iter().enumerate();

        while let Some((index, pair)) = iter.next() {
            if index == CUTOFF {
                let inner = TinyMapInner::HashMap(vec.into_iter().chain(iter.map(|(_, pair)| pair)).collect());
                return TinyMap { inner };
            }
            vec.push(pair);
        }

        TinyMap {
            inner: TinyMapInner::Vec(vec),
        }
    }
}

impl<K, V, const CUTOFF: usize> TinyMap<K, V, CUTOFF>
where
    K: Eq + Hash,
{
    pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        match &self.inner {
            TinyMapInner::Vec(vec) => vec
                .iter()
                .find(|(key_inner, _)| key_inner.borrow() == key)
                .map(|(_, value)| value),
            TinyMapInner::HashMap(hash_map) => hash_map.get(key),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    const CUTOFF: usize = 5;

    const SMALL_VALUES: [(&'static str, usize); 3] = [("a", 0), ("b", 1), ("c", 2)];
    const MEDIUM_VALUES: [(&'static str, usize); 5] = [("a", 0), ("b", 1),
random
[ { "content": "#[track_caller]\n\npub fn assert_uris_match(left: &Uri, right: &Uri) {\n\n if left == right {\n\n return;\n\n }\n\n assert_eq!(left.authority(), right.authority());\n\n assert_eq!(left.scheme(), right.scheme());\n\n assert_eq!(left.path(), right.path());\n\n assert_eq!(\n\n extract_params(left),\n\n extract_params(right),\n\n \"Query parameters did not match. left: {}, right: {}\",\n\n left,\n\n right\n\n );\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 1, "score": 335959.99611840135 }, { "content": "pub fn set_request_header_if_absent<V>(\n\n request: http::request::Builder,\n\n key: HeaderName,\n\n value: V,\n\n) -> http::request::Builder\n\nwhere\n\n HeaderValue: TryFrom<V>,\n\n <HeaderValue as TryFrom<V>>::Error: Into<http::Error>,\n\n{\n\n if !request\n\n .headers_ref()\n\n .map(|map| map.contains_key(&key))\n\n .unwrap_or(false)\n\n {\n\n request.header(key, value)\n\n } else {\n\n request\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-http/src/header.rs", "rank": 3, "score": 245814.56339238505 }, { "content": "pub fn set_response_header_if_absent<V>(\n\n response: http::response::Builder,\n\n key: HeaderName,\n\n value: V,\n\n) -> http::response::Builder\n\nwhere\n\n HeaderValue: TryFrom<V>,\n\n <HeaderValue as TryFrom<V>>::Error: Into<http::Error>,\n\n{\n\n if !response\n\n .headers_ref()\n\n .map(|map| map.contains_key(&key))\n\n .unwrap_or(false)\n\n {\n\n response.header(key, value)\n\n } else {\n\n response\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-http/src/header.rs", "rank": 4, "score": 245814.56339238505 }, { "content": "enum Inner {\n\n /// Boolean\n\n Bool(bool),\n\n /// 8-bit signed integer\n\n I8(i8, itoa::Buffer),\n\n /// 16-bit signed integer\n\n I16(i16, itoa::Buffer),\n\n /// 32-bit signed integer\n\n I32(i32, itoa::Buffer),\n\n /// 64-bit signed integer\n\n I64(i64, itoa::Buffer),\n\n /// 64-bit unsigned integer\n\n U64(u64, itoa::Buffer),\n\n /// 32-bit IEEE 754 single-precision floating-point number\n\n F32(f32, ryu::Buffer),\n\n /// 64-bit IEEE 754 double-precision floating-point number\n\n F64(f64, ryu::Buffer),\n\n}\n\n\n\nimpl Debug for Inner {\n", "file_path": "rust-runtime/aws-smithy-types/src/primitive.rs", "rank": 5, "score": 241516.80453291268 }, { "content": "#[derive(Clone, Eq, PartialEq)]\n\nstruct Inner {\n\n access_key_id: Zeroizing<String>,\n\n secret_access_key: Zeroizing<String>,\n\n session_token: Zeroizing<Option<String>>,\n\n\n\n /// Credential Expiry\n\n ///\n\n /// A SystemTime at which the credentials should no longer be used because they have expired.\n\n /// The primary purpose of this value is to allow credentials to communicate to the caching\n\n /// provider when they need to be refreshed.\n\n ///\n\n /// If these credentials never expire, this value will be set to `None`\n\n expires_after: Option<SystemTime>,\n\n\n\n provider_name: &'static str,\n\n}\n\n\n\nimpl Debug for Credentials {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n let mut creds = f.debug_struct(\"Credentials\");\n", "file_path": "aws/rust-runtime/aws-types/src/credentials/credentials_impl.rs", "rank": 6, "score": 235029.86444539632 }, { "content": "pub fn fn_with_local_struct(_local: LocalStruct) -> LocalStruct {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "tools/api-linter/test-workspace/test-crate/src/lib.rs", "rank": 7, "score": 223379.08216576237 }, { "content": "pub fn external_in_fn_output() -> SomeStruct {\n\n unimplemented!()\n\n}\n\n\n", "file_path": 
"tools/api-linter/test-workspace/test-crate/src/lib.rs", "rank": 8, "score": 220862.5475925696 }, { "content": "fn pretty_comparison(left: &str, right: &str) -> PrettyString {\n\n PrettyString(format!(\n\n \"{}\",\n\n Comparison::new(&PrettyStr(left), &PrettyStr(right))\n\n ))\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 9, "score": 218468.68349126703 }, { "content": "pub fn external_in_fn_output_generic() -> Option<SomeStruct> {\n\n unimplemented!()\n\n}\n\n\n\n// Try to trick api-linter here by putting something in a private module and re-exporting it\n\nmod private_module {\n\n use external_lib::SomeStruct;\n\n\n\n pub fn something(_one: &SomeStruct) {}\n\n}\n\npub use private_module::something;\n\n\n\npub struct StructWithExternalFields {\n\n pub field: SomeStruct,\n\n pub optional_field: Option<SomeStruct>,\n\n}\n\n\n\nimpl StructWithExternalFields {\n\n pub fn new(_field: impl Into<SomeStruct>, _optional_field: Option<SomeOtherStruct>) -> Self {\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "tools/api-linter/test-workspace/test-crate/src/lib.rs", "rank": 10, "score": 210982.51644184388 }, { "content": "/// JSON token parser as a Rust iterator\n\n///\n\n/// This parser will parse and yield exactly one [`Token`] per iterator `next()` call.\n\n/// Validation is done on the fly, so it is possible for it to parse an invalid JSON document\n\n/// until it gets to the first [`Error`].\n\n///\n\n/// JSON string values are left escaped in the [`Token::ValueString`] as an [`EscapedStr`],\n\n/// which is a new type around a slice of original `input` bytes so that the caller can decide\n\n/// when to unescape and allocate into a [`String`].\n\n///\n\n/// The parser *will* accept multiple valid JSON values. For example, `b\"null true\"` will\n\n/// yield `ValueNull` and `ValueTrue`. It is the responsibility of the caller to handle this for\n\n/// their use-case.\n\npub fn json_token_iter(input: &[u8]) -> JsonTokenIterator {\n\n JsonTokenIterator {\n\n input,\n\n index: 0,\n\n state_stack: vec![State::Initial],\n\n }\n\n}\n\n\n\n/// Internal parser state for the iterator. 
Used to context between successive `next` calls.\n", "file_path": "rust-runtime/aws-smithy-json/src/deserialize.rs", "rank": 11, "score": 206733.42842270702 }, { "content": "/// Resolve the AWS Endpoint for a given region\n\n///\n\n/// To provide a static endpoint, [`Endpoint`](aws_smithy_http::endpoint::Endpoint) implements this trait.\n\n/// Example usage:\n\n/// ```rust\n\n/// # mod dynamodb {\n\n/// # use aws_types::endpoint::ResolveAwsEndpoint;\n\n/// # pub struct ConfigBuilder;\n\n/// # impl ConfigBuilder {\n\n/// # pub fn endpoint(&mut self, resolver: impl ResolveAwsEndpoint + 'static) {\n\n/// # // ...\n\n/// # }\n\n/// # }\n\n/// # pub struct Config;\n\n/// # impl Config {\n\n/// # pub fn builder() -> ConfigBuilder {\n\n/// # ConfigBuilder\n\n/// # }\n\n/// # }\n\n/// # }\n\n/// use aws_smithy_http::endpoint::Endpoint;\n\n/// use http::Uri;\n\n/// let config = dynamodb::Config::builder()\n\n/// .endpoint(\n\n/// Endpoint::immutable(Uri::from_static(\"http://localhost:8080\"))\n\n/// );\n\n/// ```\n\n/// Each AWS service generates their own implementation of `ResolveAwsEndpoint`.\n\npub trait ResolveAwsEndpoint: Send + Sync + Debug {\n\n /// Resolves the AWS endpoint for a given region.\n\n // TODO(https://github.com/awslabs/smithy-rs/issues/866): Create `ResolveEndpointError`\n\n fn resolve_endpoint(&self, region: &Region) -> Result<AwsEndpoint, BoxError>;\n\n}\n\n\n\n/// The scope for AWS credentials.\n\n#[derive(Clone, Default, Debug)]\n\npub struct CredentialScope {\n\n region: Option<SigningRegion>,\n\n service: Option<SigningService>,\n\n}\n\n\n\nimpl CredentialScope {\n\n /// Creates a builder for [`CredentialScope`].\n\n pub fn builder() -> credential_scope::Builder {\n\n credential_scope::Builder::default()\n\n }\n\n}\n\n\n", "file_path": "aws/rust-runtime/aws-types/src/endpoint.rs", "rank": 12, "score": 199600.51764385353 }, { "content": "/// Asynchronous Credentials Provider\n\npub trait ProvideCredentials: Send + Sync + Debug {\n\n /// Returns a future that provides credentials.\n\n fn provide_credentials<'a>(&'a self) -> future::ProvideCredentials<'a>\n\n where\n\n Self: 'a;\n\n}\n\n\n\nimpl ProvideCredentials for Credentials {\n\n fn provide_credentials<'a>(&'a self) -> future::ProvideCredentials<'a>\n\n where\n\n Self: 'a,\n\n {\n\n future::ProvideCredentials::ready(Ok(self.clone()))\n\n }\n\n}\n\n\n\nimpl ProvideCredentials for Arc<dyn ProvideCredentials> {\n\n fn provide_credentials<'a>(&'a self) -> future::ProvideCredentials<'a>\n\n where\n\n Self: 'a,\n", "file_path": "aws/rust-runtime/aws-types/src/credentials/provider.rs", "rank": 13, "score": 199572.3870606237 }, { "content": "fn all_runtime_crates() -> Result<impl Iterator<Item = PathBuf>> {\n\n Ok(aws_runtime_crates()?.chain(smithy_rs_crates()?))\n\n}\n\n\n", "file_path": "tools/sdk-lints/src/main.rs", "rank": 14, "score": 197131.77364328486 }, { "content": "fn all_cargo_tomls() -> Result<impl Iterator<Item = PathBuf>> {\n\n Ok(all_runtime_crates()?.map(|pkg| pkg.join(\"Cargo.toml\")))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{date_based_release_metadata, version_based_release_metadata};\n\n use time::OffsetDateTime;\n\n\n\n #[test]\n\n fn test_date_based_release_metadata() {\n\n let now = OffsetDateTime::from_unix_timestamp(100_000_000).unwrap();\n\n let result = date_based_release_metadata(now, \"some-manifest.json\");\n\n assert_eq!(\"March 3rd, 1973\", result.title);\n\n assert_eq!(\"release-1973-03-03\", result.tag);\n\n assert_eq!(\"some-manifest.json\", result.manifest_name);\n\n 
}\n\n\n\n #[test]\n\n fn test_version_based_release_metadata() {\n\n let now = OffsetDateTime::from_unix_timestamp(100_000_000).unwrap();\n\n let result = version_based_release_metadata(now, \"0.11.0\", \"some-other-manifest.json\");\n\n assert_eq!(\"v0.11.0 (March 3rd, 1973)\", result.title);\n\n assert_eq!(\"v0.11.0\", result.tag);\n\n assert_eq!(\"some-other-manifest.json\", result.manifest_name);\n\n }\n\n}\n", "file_path": "tools/sdk-lints/src/main.rs", "rank": 15, "score": 197131.77364328486 }, { "content": "fn smithy_rs_crates() -> Result<impl Iterator<Item = PathBuf>> {\n\n let smithy_crate_root = repo_root().join(\"rust-runtime\");\n\n Ok(ls(smithy_crate_root)?.filter(|path| is_crate(path.as_path())))\n\n}\n\n\n", "file_path": "tools/sdk-lints/src/main.rs", "rank": 16, "score": 194521.30478792067 }, { "content": "fn aws_runtime_crates() -> Result<impl Iterator<Item = PathBuf>> {\n\n let aws_crate_root = repo_root().join(\"aws\").join(\"rust-runtime\");\n\n Ok(ls(aws_crate_root)?.filter(|path| is_crate(path.as_path())))\n\n}\n\n\n", "file_path": "tools/sdk-lints/src/main.rs", "rank": 17, "score": 194521.30478792067 }, { "content": "pub fn external_opaque_type_in_output() -> impl SimpleTrait {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "tools/api-linter/test-workspace/test-crate/src/lib.rs", "rank": 18, "score": 194444.02374529815 }, { "content": "pub fn external_in_fn_input(_one: &SomeStruct, _two: impl SimpleTrait) {}\n\n\n", "file_path": "tools/api-linter/test-workspace/test-crate/src/lib.rs", "rank": 19, "score": 192745.93229781464 }, { "content": "#[test]\n\nfn types_are_debug() {\n\n assert_debug::<kms::Client>();\n\n assert_debug::<kms::client::fluent_builders::GenerateRandom>();\n\n assert_debug::<kms::client::fluent_builders::CreateAlias>();\n\n}\n\n\n\nasync fn create_alias_op() -> Parts<CreateAlias, AwsErrorRetryPolicy> {\n\n let conf = kms::Config::builder().build();\n\n let (_, parts) = CreateAlias::builder()\n\n .build()\n\n .unwrap()\n\n .make_operation(&conf)\n\n .await\n\n .expect(\"valid request\")\n\n .into_request_response();\n\n parts\n\n}\n\n\n\n/// Parse a semi-real response body and assert that the correct retry status is returned\n\n#[tokio::test]\n", "file_path": "aws/sdk/integration-tests/kms/tests/sensitive-it.rs", "rank": 20, "score": 192533.8245905378 }, { "content": "enum Inner {\n\n Static(&'static str),\n\n Random(Mutex<fastrand::Rng>),\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/idempotency_token.rs", "rank": 21, "score": 192035.51231722566 }, { "content": "#[pin_project(project = InnerProj)]\n\nenum Inner {\n\n Once(#[pin] Option<Bytes>),\n\n Streaming(#[pin] hyper::Body),\n\n Dyn(#[pin] BoxBody),\n\n\n\n /// When a streaming body is transferred out to a stream parser, the body is replaced with\n\n /// `Taken`. This will return an Error when polled. 
Attempting to read data out of a `Taken`\n\n /// Body is a bug.\n\n Taken,\n\n}\n\n\n\nimpl Debug for Inner {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match &self {\n\n Inner::Once(once) => f.debug_tuple(\"Once\").field(once).finish(),\n\n Inner::Streaming(streaming) => f.debug_tuple(\"Streaming\").field(streaming).finish(),\n\n Inner::Taken => f.debug_tuple(\"Taken\").finish(),\n\n Inner::Dyn(_) => write!(f, \"BoxBody\"),\n\n }\n\n }\n", "file_path": "rust-runtime/aws-smithy-http/src/body.rs", "rank": 22, "score": 189830.22991606765 }, { "content": "fn not_pub_external_in_fn_output() -> SomeStruct {\n\n unimplemented!()\n\n}\n", "file_path": "tools/api-linter/test-workspace/test-crate/src/lib.rs", "rank": 23, "score": 188708.20284826774 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n response: Option<http::Response<SdkBody>>,\n\n sender: Option<oneshot::Sender<http::Request<SdkBody>>>,\n\n}\n\n\n\n/// Receiver for [`CaptureRequestHandler`](CaptureRequestHandler)\n\n#[derive(Debug)]\n\npub struct CaptureRequestReceiver {\n\n receiver: oneshot::Receiver<http::Request<SdkBody>>,\n\n}\n\n\n\nimpl CaptureRequestReceiver {\n\n pub fn expect_request(mut self) -> http::Request<SdkBody> {\n\n self.receiver.try_recv().expect(\"no request was received\")\n\n }\n\n}\n\n\n\n#[doc(inline)]\n\npub use crate::never;\n\n\n", "file_path": "rust-runtime/aws-smithy-client/src/test_connection.rs", "rank": 24, "score": 187442.73230050452 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n sts: aws_smithy_client::Client<DynConnector, DefaultMiddleware>,\n\n conf: aws_sdk_sts::Config,\n\n op: aws_sdk_sts::input::AssumeRoleInput,\n\n}\n\n\n\nimpl AssumeRoleProvider {\n\n /// Build a new role-assuming provider for the given role.\n\n ///\n\n /// The `role` argument should take the form an Amazon Resource Name (ARN) like\n\n ///\n\n /// ```text\n\n /// arn:aws:iam::123456789012:role/example\n\n /// ```\n\n pub fn builder(role: impl Into<String>) -> AssumeRoleProviderBuilder {\n\n AssumeRoleProviderBuilder::new(role.into())\n\n }\n\n}\n\n\n\n/// A builder for [`AssumeRoleProvider`].\n", "file_path": "aws/rust-runtime/aws-config/src/sts/assume_role.rs", "rank": 25, "score": 185406.26638867927 }, { "content": "/// Encode `input` into base64 using the standard base64 alphabet\n\npub fn encode<T: AsRef<[u8]>>(input: T) -> String {\n\n encode_inner(input.as_ref())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-types/src/base64.rs", "rank": 26, "score": 184455.94940248752 }, { "content": "fn not_pub_external_in_fn_output_generic() -> Option<SomeStruct> {\n\n unimplemented!()\n\n}\n", "file_path": "tools/api-linter/test-workspace/test-crate/src/lib.rs", "rank": 27, "score": 181028.4185524351 }, { "content": "pub fn init_tracing() {\n\n let filter_layer = EnvFilter::try_from_default_env()\n\n .or_else(|_| EnvFilter::try_new(\"debug\"))\n\n .unwrap();\n\n let fmt_layer = tracing_subscriber::fmt::layer()\n\n .with_ansi(true)\n\n .with_level(true)\n\n .with_target(false)\n\n .with_span_events(FmtSpan::CLOSE)\n\n .compact();\n\n tracing_subscriber::registry()\n\n .with(filter_layer)\n\n .with(fmt_layer)\n\n .init();\n\n}\n", "file_path": "tools/sdk-sync/src/lib.rs", "rank": 28, "score": 178525.61279881874 }, { "content": "pub fn replace_anchor(\n\n haystack: &mut String,\n\n anchors: &(impl AsRef<str>, impl AsRef<str>),\n\n new_content: &str,\n\n) -> anyhow::Result<bool> {\n\n let anchor_start = anchors.0.as_ref();\n\n let anchor_end = anchors.1.as_ref();\n\n let start = 
haystack.find(&anchor_start);\n\n if start.is_none() {\n\n if haystack.contains(anchor_end) {\n\n bail!(\"found end anchor but no start anchor\");\n\n }\n\n haystack.push('\\n');\n\n haystack.push_str(anchor_start);\n\n haystack.push_str(new_content);\n\n haystack.push_str(anchor_end);\n\n return Ok(true);\n\n }\n\n let start = start.unwrap_or_else(|| haystack.find(&anchor_start).expect(\"must be present\"));\n\n let end = match haystack[start..].find(&anchor_end) {\n", "file_path": "tools/sdk-lints/src/anchor.rs", "rank": 29, "score": 178525.61279881874 }, { "content": "fn ls(path: impl AsRef<Path>) -> Result<impl Iterator<Item = PathBuf>> {\n\n Ok(fs::read_dir(path.as_ref())\n\n .with_context(|| format!(\"failed to ls: {:?}\", path.as_ref()))?\n\n .map(|res| res.map(|e| e.path()))\n\n .collect::<Result<Vec<_>, io::Error>>()?\n\n .into_iter())\n\n}\n\n\n", "file_path": "tools/sdk-lints/src/main.rs", "rank": 30, "score": 177975.81111073302 }, { "content": "#[pin_project]\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\nstruct Inner<B> {\n\n #[pin]\n\n body: B,\n\n}\n\n\n\nimpl<B> Inner<B> {\n\n fn new(body: B) -> Self {\n\n Self { body }\n\n }\n\n\n\n async fn collect(self) -> Result<AggregatedBytes, B::Error>\n\n where\n\n B: http_body::Body<Data = Bytes>,\n\n {\n\n let mut output = SegmentedBuf::new();\n\n let body = self.body;\n\n crate::pin_mut!(body);\n\n while let Some(buf) = body.data().await {\n\n output.push(buf?);\n\n }\n", "file_path": "rust-runtime/aws-smithy-http/src/byte_stream.rs", "rank": 31, "score": 177820.95458830212 }, { "content": "fn render_handauthored<'a>(entries: impl Iterator<Item = &'a HandAuthoredEntry>, out: &mut String) {\n\n let (breaking, non_breaking) = entries.partition::<Vec<_>, _>(|entry| entry.meta.breaking);\n\n\n\n if !breaking.is_empty() {\n\n out.push_str(\"**Breaking Changes:**\\n\");\n\n for change in breaking {\n\n change.render(out);\n\n out.push('\\n');\n\n }\n\n out.push('\\n')\n\n }\n\n\n\n if !non_breaking.is_empty() {\n\n out.push_str(\"**New this release:**\\n\");\n\n for change in non_breaking {\n\n change.render(out);\n\n out.push('\\n');\n\n }\n\n out.push('\\n');\n\n }\n\n}\n\n\n", "file_path": "tools/sdk-lints/src/changelog.rs", "rank": 32, "score": 175790.97388425845 }, { "content": "/// encode_inner defined to reduce monomorphisation cost\n\nfn encode_inner(inp: &[u8]) -> String {\n\n // Base 64 encodes groups of 6 bits into characters—this means that each\n\n // 3 byte group (24 bits) is encoded into 4 base64 characters.\n\n let char_ct = ((inp.len() + 2) / 3) * 4;\n\n let mut output = String::with_capacity(char_ct);\n\n for chunk in inp.chunks(3) {\n\n let mut block: i32 = 0;\n\n // Write the chunks into the beginning of a 32 bit int\n\n for (idx, chunk) in chunk.iter().enumerate() {\n\n block |= (*chunk as i32) << ((3 - idx) * 8);\n\n }\n\n let num_sextets = ((chunk.len() * 8) + 5) / 6;\n\n for idx in 0..num_sextets {\n\n let slice = block >> (26 - (6 * idx));\n\n let idx = (slice as u8) & 0b0011_1111;\n\n output.push(BASE64_ENCODE_TABLE[idx as usize] as char);\n\n }\n\n for _ in 0..(4 - num_sextets) {\n\n output.push('=');\n\n }\n\n }\n\n // be sure we calculated the size right\n\n debug_assert_eq!(output.capacity(), char_ct);\n\n output\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-types/src/base64.rs", "rank": 33, "score": 174675.18232103778 }, { "content": "/// Read all the dates from the header map at `key` according the `format`\n\n///\n\n/// This is separate from `read_many` below because we need to invoke 
`DateTime::read` to take advantage\n\n/// of comma-aware parsing\n\npub fn many_dates(\n\n values: ValueIter<HeaderValue>,\n\n format: Format,\n\n) -> Result<Vec<DateTime>, ParseError> {\n\n let mut out = vec![];\n\n for header in values {\n\n let mut header = header\n\n .to_str()\n\n .map_err(|_| ParseError::new_with_message(\"header was not valid utf-8 string\"))?;\n\n while !header.is_empty() {\n\n let (v, next) = DateTime::read(header, format, ',').map_err(|err| {\n\n ParseError::new_with_message(format!(\"header could not be parsed as date: {}\", err))\n\n })?;\n\n out.push(v);\n\n header = next;\n\n }\n\n }\n\n Ok(out)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-http/src/header.rs", "rank": 34, "score": 174390.13342603145 }, { "content": "#[cfg(not(any(feature = \"rustls\", feature = \"native-tls\")))]\n\npub fn default_connector(\n\n _settings: &HttpSettings,\n\n _sleep: Option<Arc<dyn AsyncSleep>>,\n\n) -> Option<DynConnector> {\n\n None\n\n}\n", "file_path": "aws/rust-runtime/aws-config/src/connector.rs", "rank": 35, "score": 174390.13342603145 }, { "content": "pub fn parse_generic_error(\n\n payload: &Bytes,\n\n headers: &HeaderMap<HeaderValue>,\n\n) -> Result<SmithyError, DeserializeError> {\n\n let ErrorBody { code, message } = parse_error_body(payload.as_ref())?;\n\n\n\n let mut err_builder = SmithyError::builder();\n\n if let Some(code) = error_type_from_header(headers)\n\n .map_err(|_| DeserializeError::custom(\"X-Amzn-Errortype header was not valid UTF-8\"))?\n\n .or_else(|| code.as_deref())\n\n .map(sanitize_error_code)\n\n {\n\n err_builder.code(code);\n\n }\n\n if let Some(message) = message {\n\n err_builder.message(message);\n\n }\n\n if let Some(request_id) = request_id(headers) {\n\n err_builder.request_id(request_id);\n\n }\n", "file_path": "rust-runtime/inlineable/src/json_errors.rs", "rank": 36, "score": 174390.13342603145 }, { "content": "pub fn parse_generic_error(body: &[u8]) -> Result<aws_smithy_types::Error, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let mut root = doc.root_element()?;\n\n let mut err_builder = aws_smithy_types::Error::builder();\n\n while let Some(mut tag) = root.next_tag() {\n\n match tag.start_el().local() {\n\n \"Errors\" => {\n\n while let Some(mut error_tag) = tag.next_tag() {\n\n if let \"Error\" = error_tag.start_el().local() {\n\n while let Some(mut error_field) = error_tag.next_tag() {\n\n match error_field.start_el().local() {\n\n \"Code\" => {\n\n err_builder.code(try_data(&mut error_field)?);\n\n }\n\n \"Message\" => {\n\n err_builder.message(try_data(&mut error_field)?);\n\n }\n\n _ => {}\n\n }\n\n }\n", "file_path": "rust-runtime/inlineable/src/ec2_query_errors.rs", "rank": 37, "score": 173172.53483109607 }, { "content": "/// Test connection used to capture a single request\n\n///\n\n/// If response is `None`, it will reply with a 200 response with an empty body\n\n///\n\n/// Example:\n\n/// ```rust,compile_fail\n\n/// let (server, request) = capture_request(None);\n\n/// let client = aws_sdk_sts::Client::from_conf_conn(conf, server);\n\n/// let _ = client.assume_role_with_saml().send().await;\n\n/// // web identity should be unsigned\n\n/// assert_eq!(\n\n/// request.expect_request().headers().get(\"AUTHORIZATION\"),\n\n/// None\n\n/// );\n\n/// ```\n\npub fn capture_request(\n\n response: Option<http::Response<SdkBody>>,\n\n) -> (CaptureRequestHandler, CaptureRequestReceiver) {\n\n let (tx, rx) = oneshot::channel();\n\n (\n\n CaptureRequestHandler(Arc::new(Mutex::new(Inner {\n\n response: 
Some(response.unwrap_or_else(|| {\n\n http::Response::builder()\n\n .status(200)\n\n .body(SdkBody::empty())\n\n .expect(\"unreachable\")\n\n })),\n\n sender: Some(tx),\n\n }))),\n\n CaptureRequestReceiver { receiver: rx },\n\n )\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-client/src/test_connection.rs", "rank": 38, "score": 172447.83612047735 }, { "content": "pub fn forbid_headers(\n\n headers: &HeaderMap,\n\n forbidden_headers: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n for key in forbidden_headers {\n\n // Protocol tests store header lists as comma-delimited\n\n if let Some(value) = normalized_header(headers, *key) {\n\n return Err(ProtocolTestFailure::ForbiddenHeader {\n\n forbidden: key.to_string(),\n\n found: format!(\"{}: {}\", key, value),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 39, "score": 172443.74505924655 }, { "content": "pub fn require_headers(\n\n headers: &HeaderMap,\n\n required_headers: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n for key in required_headers {\n\n // Protocol tests store header lists as comma-delimited\n\n if normalized_header(headers, *key).is_none() {\n\n return Err(ProtocolTestFailure::MissingHeader {\n\n expected: key.to_string(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\npub enum MediaType {\n\n /// Json media types are deserialized and compared\n\n Json,\n\n /// XML media types are normalized and compared\n\n Xml,\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 40, "score": 172443.74505924655 }, { "content": "/// Generates a signing key for Sigv4\n\npub fn generate_signing_key(\n\n secret: &str,\n\n time: SystemTime,\n\n region: &str,\n\n service: &str,\n\n) -> hmac::Tag {\n\n // kSecret = your secret access key\n\n // kDate = HMAC(\"AWS4\" + kSecret, Date)\n\n // kRegion = HMAC(kDate, Region)\n\n // kService = HMAC(kRegion, Service)\n\n // kSigning = HMAC(kService, \"aws4_request\")\n\n\n\n let secret = format!(\"AWS4{}\", secret);\n\n let secret = hmac::Key::new(hmac::HMAC_SHA256, secret.as_bytes());\n\n let tag = hmac::sign(&secret, format_date(time).as_bytes());\n\n\n\n // sign region\n\n let key = hmac::Key::new(hmac::HMAC_SHA256, tag.as_ref());\n\n let tag = hmac::sign(&key, region.as_bytes());\n\n\n", "file_path": "aws/rust-runtime/aws-sigv4/src/sign.rs", "rank": 41, "score": 172443.74505924655 }, { "content": "pub fn main() -> Result<()> {\n\n let file_list = FileList::discover(&Args::parse().location)?;\n\n println!(\"{}\", file_list.sha256());\n\n Ok(())\n\n}\n", "file_path": "tools/crate-hasher/src/main.rs", "rank": 42, "score": 172414.5226868195 }, { "content": "pub fn parse_generic_error(body: &[u8]) -> Result<aws_smithy_types::Error, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let mut root = doc.root_element()?;\n\n let mut err = aws_smithy_types::Error::builder();\n\n while let Some(mut tag) = root.next_tag() {\n\n match tag.start_el().local() {\n\n \"Code\" => {\n\n err.code(try_data(&mut tag)?);\n\n }\n\n \"Message\" => {\n\n err.message(try_data(&mut tag)?);\n\n }\n\n \"RequestId\" => {\n\n err.request_id(try_data(&mut tag)?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n Ok(err.build())\n\n}\n", "file_path": "rust-runtime/inlineable/src/rest_xml_unwrapped_errors.rs", "rank": 43, "score": 171138.43869424728 }, { "content": "pub fn parse_generic_error(body: &[u8]) -> Result<aws_smithy_types::Error, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let mut root = 
doc.root_element()?;\n\n let mut err_builder = aws_smithy_types::Error::builder();\n\n while let Some(mut tag) = root.next_tag() {\n\n match tag.start_el().local() {\n\n \"Error\" => {\n\n while let Some(mut error_field) = tag.next_tag() {\n\n match error_field.start_el().local() {\n\n \"Code\" => {\n\n err_builder.code(try_data(&mut error_field)?);\n\n }\n\n \"Message\" => {\n\n err_builder.message(try_data(&mut error_field)?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n }\n\n \"RequestId\" => {\n\n err_builder.request_id(try_data(&mut tag)?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n Ok(err_builder.build())\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/rest_xml_wrapped_errors.rs", "rank": 44, "score": 171138.43869424728 }, { "content": "pub fn deserialize_string_list(\n\n decoder: &mut ScopedDecoder,\n\n) -> Result<std::vec::Vec<std::string::String>, XmlError> {\n\n let mut out = std::vec::Vec::new();\n\n while let Some(mut tag) = decoder.next_tag() {\n\n match dbg!(tag.start_el()) {\n\n s if s.matches(\"member\") => {\n\n out.push(dbg!({\n\n aws_smithy_xml::decode::try_data(&mut tag)?.to_string()\n\n }));\n\n }\n\n _ => {}\n\n };\n\n }\n\n println!(\"done\");\n\n Ok(out)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 45, "score": 170572.04850001045 }, { "content": "/// Expects a [Token::ValueString], [Token::ValueNumber] or [Token::ValueNull].\n\n///\n\n/// If the value is a string, it MUST be `Infinity`, `-Infinity` or `Nan`.\n\n/// If the value is a number, it is returned directly\n\npub fn expect_number_or_null(\n\n token: Option<Result<Token<'_>, Error>>,\n\n) -> Result<Option<Number>, Error> {\n\n match token.transpose()? {\n\n Some(Token::ValueNull { .. }) => Ok(None),\n\n Some(Token::ValueNumber { value, .. }) => Ok(Some(value)),\n\n Some(Token::ValueString { value, offset }) => match value.to_unescaped() {\n\n Err(err) => Err(Error::new(\n\n ErrorReason::Custom(format!(\"expected a valid string, escape was invalid: {}\", err).into()), Some(offset.0))\n\n ),\n\n Ok(v) => f64::parse_smithy_primitive(v.as_ref())\n\n // disregard the exact error\n\n .map_err(|_|())\n\n // only infinite / NaN can be used as strings\n\n .and_then(must_not_be_finite)\n\n .map(|float| Some(aws_smithy_types::Number::Float(float)))\n\n // convert to a helpful error\n\n .map_err(|_| {\n\n Error::new(\n\n ErrorReason::Custom(Cow::Owned(format!(\n", "file_path": "rust-runtime/aws-smithy-json/src/deserialize/token.rs", "rank": 46, "score": 170572.04850001045 }, { "content": "/// Expects a [Token::ValueNull], [Token::ValueString], or [Token::ValueNumber] depending\n\n/// on the passed in `timestamp_format`. 
If there is a non-null value, it interprets it as an\n\n/// [`DateTime` ] in the requested format.\n\npub fn expect_timestamp_or_null(\n\n token: Option<Result<Token<'_>, Error>>,\n\n timestamp_format: Format,\n\n) -> Result<Option<DateTime>, Error> {\n\n Ok(match timestamp_format {\n\n Format::EpochSeconds => {\n\n expect_number_or_null(token)?.map(|v| DateTime::from_secs_f64(v.to_f64()))\n\n }\n\n Format::DateTime | Format::HttpDate => expect_string_or_null(token)?\n\n .map(|v| DateTime::from_str(v.as_escaped_str(), timestamp_format))\n\n .transpose()\n\n .map_err(|err| {\n\n Error::new(\n\n ErrorReason::Custom(Cow::Owned(format!(\"failed to parse timestamp: {}\", err))),\n\n None,\n\n )\n\n })?,\n\n })\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-json/src/deserialize/token.rs", "rank": 47, "score": 170572.04850001045 }, { "content": "/// Parses the S3 Extended Request ID out of S3 error response headers.\n\npub fn parse_extended_error(\n\n error: aws_smithy_types::Error,\n\n headers: &HeaderMap<HeaderValue>,\n\n) -> aws_smithy_types::Error {\n\n let mut builder = error.into_builder();\n\n let host_id = headers\n\n .get(\"x-amz-id-2\")\n\n .and_then(|header_value| header_value.to_str().ok());\n\n if let Some(host_id) = host_id {\n\n builder.custom(EXTENDED_REQUEST_ID, host_id);\n\n }\n\n builder.build()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::s3_errors::{parse_extended_error, ErrorExt};\n\n\n\n #[test]\n\n fn add_error_fields() {\n", "file_path": "aws/rust-runtime/aws-inlineable/src/s3_errors.rs", "rank": 48, "score": 170572.04850001045 }, { "content": "/// Confirms that cargo exists on the path.\n\npub fn confirm_installed_on_path() -> Result<()> {\n\n handle_failure(\n\n \"discover cargo version\",\n\n &Command::new(\"cargo\")\n\n .arg(\"version\")\n\n .output()\n\n .context(\"cargo is not installed on the PATH\")?,\n\n )\n\n .context(\"cargo is not installed on the PATH\")\n\n}\n", "file_path": "tools/publisher/src/cargo.rs", "rank": 49, "score": 170304.68548372228 }, { "content": "/// Converts a token stream into a Serde [Value]\n\nfn convert_tokens<'a, I: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<I>) -> Value {\n\n match tokens.next().unwrap() {\n\n Token::StartObject { .. } => {\n\n let mut map = Map::new();\n\n loop {\n\n match tokens.next() {\n\n Some(Token::EndObject { .. }) => break,\n\n Some(Token::ObjectKey { key, .. }) => {\n\n let key = key.to_unescaped().unwrap().to_string();\n\n let value = convert_tokens(tokens);\n\n map.insert(key, value);\n\n }\n\n Some(_) => unreachable!(),\n\n None => panic!(\"should have encountered EndObject before end of stream\"),\n\n }\n\n }\n\n Value::Object(map)\n\n }\n\n Token::StartArray { .. 
} => {\n\n let mut list = Vec::new();\n", "file_path": "rust-runtime/aws-smithy-json/fuzz/fuzz_targets/common.rs", "rank": 50, "score": 169210.00731105043 }, { "content": "pub fn deserialize_nested_string_list(\n\n decoder: &mut ScopedDecoder,\n\n) -> Result<std::vec::Vec<std::vec::Vec<std::string::String>>, XmlError> {\n\n let mut out = std::vec::Vec::new();\n\n while let Some(mut tag) = decoder.next_tag() {\n\n match tag.start_el() {\n\n s if s.matches(\"member\") => {\n\n out.push(deserialize_string_list(&mut tag)?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n Ok(out)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 51, "score": 168770.82529059087 }, { "content": "pub fn headers_for_prefix<'a>(\n\n headers: &'a http::HeaderMap,\n\n key: &'a str,\n\n) -> impl Iterator<Item = (&'a str, &'a HeaderName)> {\n\n let lower_key = key.to_ascii_lowercase();\n\n headers\n\n .keys()\n\n .filter(move |k| k.as_str().starts_with(&lower_key))\n\n .map(move |h| (&h.as_str()[key.len()..], h))\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-http/src/header.rs", "rank": 52, "score": 168279.0433140322 }, { "content": "/// Decode `input` from base64 using the standard base64 alphabet\n\n///\n\n/// If input is not a valid base64 encoded string, this function will return `DecodeError`.\n\npub fn decode<T: AsRef<str>>(input: T) -> Result<Vec<u8>, DecodeError> {\n\n decode_inner(input.as_ref())\n\n}\n\n\n\n/// Failure to decode a base64 value.\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\n#[non_exhaustive]\n\npub enum DecodeError {\n\n /// Encountered an invalid byte.\n\n InvalidByte,\n\n /// Encountered an invalid base64 padding value.\n\n InvalidPadding,\n\n /// Input wasn't long enough to be a valid base64 value.\n\n InvalidLength,\n\n}\n\n\n\nimpl Error for DecodeError {}\n\n\n\nimpl fmt::Display for DecodeError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use DecodeError::*;\n\n match self {\n\n InvalidByte => write!(f, \"invalid byte\"),\n\n InvalidPadding => write!(f, \"invalid padding\"),\n\n InvalidLength => write!(f, \"invalid length\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-types/src/base64.rs", "rank": 53, "score": 168045.27240985987 }, { "content": "fn not_pub_external_in_fn_input(_one: &SomeStruct, _two: impl SimpleTrait) {}\n\n\n", "file_path": "tools/api-linter/test-workspace/test-crate/src/lib.rs", "rank": 54, "score": 167912.96111387768 }, { "content": "fn assert_debug<T: std::fmt::Debug>() {}\n\n\n\n#[tokio::test]\n\nasync fn types_are_send_sync() {\n\n assert_send_sync::<kms::Error>();\n\n assert_send_sync::<kms::types::SdkError<CreateAliasError>>();\n\n assert_send_sync::<kms::error::CreateAliasError>();\n\n assert_send_sync::<kms::output::CreateAliasOutput>();\n\n assert_send_sync::<kms::Client>();\n\n assert_send_sync::<GenerateRandom>();\n\n let conf = kms::Config::builder().build();\n\n assert_send_fut(kms::Client::from_conf(conf).list_keys().send());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn client_is_debug() {\n\n let conf = kms::Config::builder().build();\n\n let client = kms::Client::from_conf(conf);\n\n assert_ne!(format!(\"{:?}\", client), \"\");\n\n}\n\n\n\n#[tokio::test]\n\nasync fn client_is_clone() {\n\n let conf = kms::Config::builder().build();\n\n let client = kms::Client::from_conf(conf);\n\n let _ = client.clone();\n\n}\n\n\n", "file_path": "aws/sdk/integration-tests/kms/tests/sensitive-it.rs", "rank": 55, "score": 167442.35011559457 }, { "content": "/// Convert a 
[`timeout::Api`](aws_smithy_types::timeout::Api) into an [`ClientTimeoutParams`] in order to create\n\n/// the set of [`TimeoutService`]s needed by a [`crate::Client`]\n\npub fn generate_timeout_service_params_from_timeout_config(\n\n api_timeout_config: &aws_smithy_types::timeout::Api,\n\n async_sleep: Option<Arc<dyn AsyncSleep>>,\n\n) -> ClientTimeoutParams {\n\n if let Some(async_sleep) = async_sleep {\n\n ClientTimeoutParams {\n\n api_call: api_timeout_config\n\n .call_timeout()\n\n .map(|duration| TimeoutServiceParams {\n\n duration,\n\n kind: \"API call (all attempts including retries)\",\n\n async_sleep: async_sleep.clone(),\n\n })\n\n .into(),\n\n api_call_attempt: api_timeout_config\n\n .call_attempt_timeout()\n\n .map(|duration| TimeoutServiceParams {\n\n duration,\n\n kind: \"API call (single attempt)\",\n\n async_sleep: async_sleep.clone(),\n", "file_path": "rust-runtime/aws-smithy-client/src/timeout.rs", "rank": 56, "score": 167041.45361232784 }, { "content": "/// Setup `tracing::subscriber` to read the log level from RUST_LOG environment variable.\n\npub fn setup_tracing() {\n\n let format = tracing_subscriber::fmt::layer()\n\n .with_ansi(true)\n\n .with_line_number(true)\n\n .with_level(true);\n\n let filter = EnvFilter::try_from_default_env()\n\n .or_else(|_| EnvFilter::try_new(\"info\"))\n\n .unwrap();\n\n tracing_subscriber::registry().with(format).with(filter).init();\n\n}\n\n\n\n/// Structure holding the translations for a Pokémon description.\n", "file_path": "rust-runtime/aws-smithy-http-server/examples/pokemon_service/src/lib.rs", "rank": 57, "score": 167036.1687711121 }, { "content": "pub fn try_url_encoded_form_equivalent(\n\n actual: &str,\n\n expected: &str,\n\n) -> Result<(), ProtocolTestFailure> {\n\n let actual = rewrite_url_encoded_body(actual);\n\n let expected = rewrite_url_encoded_body(expected);\n\n if actual == expected {\n\n Ok(())\n\n } else {\n\n Err(ProtocolTestFailure::BodyDidNotMatch {\n\n comparison: pretty_comparison(&actual, &expected),\n\n hint: \"\".into(),\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::urlencoded::try_url_encoded_form_equivalent;\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/urlencoded.rs", "rank": 58, "score": 167036.1687711121 }, { "content": "/// Create an environment loader for AWS Configuration\n\n///\n\n/// # Examples\n\n/// ```no_run\n\n/// # async fn create_config() {\n\n/// use aws_types::region::Region;\n\n/// let config = aws_config::from_env().region(\"us-east-1\").load().await;\n\n/// # }\n\n/// ```\n\npub fn from_env() -> ConfigLoader {\n\n ConfigLoader::default()\n\n}\n\n\n\n/// Load a default configuration from the environment\n\n///\n\n/// Convenience wrapper equivalent to `aws_config::from_env().load().await`\n\npub async fn load_from_env() -> aws_types::SdkConfig {\n\n from_env().load().await\n\n}\n\n\n\n/// Load default sources for all configuration with override support\n\npub use loader::ConfigLoader;\n\n\n\nmod loader {\n\n use std::sync::Arc;\n\n\n\n use crate::connector::default_connector;\n\n use aws_smithy_async::rt::sleep::{default_async_sleep, AsyncSleep};\n\n use aws_smithy_client::http_connector::{HttpConnector, HttpSettings};\n", "file_path": "aws/rust-runtime/aws-config/src/lib.rs", "rank": 59, "score": 166347.69526425225 }, { "content": "/// Produces a signature for the given `request` and returns instructions\n\n/// that can be used to apply that signature to an HTTP request.\n\npub fn sign<'a>(\n\n request: SignableRequest<'a>,\n\n params: &'a 
SigningParams<'a>,\n\n) -> Result<SigningOutput<SigningInstructions>, Error> {\n\n tracing::trace!(request = ?request, params = ?params, \"signing request\");\n\n match params.settings.signature_location {\n\n SignatureLocation::Headers => {\n\n let (signing_headers, signature) =\n\n calculate_signing_headers(&request, params)?.into_parts();\n\n Ok(SigningOutput::new(\n\n SigningInstructions::new(Some(signing_headers), None),\n\n signature,\n\n ))\n\n }\n\n SignatureLocation::QueryParams => {\n\n let (params, signature) = calculate_signing_params(&request, params)?;\n\n Ok(SigningOutput::new(\n\n SigningInstructions::new(None, Some(params)),\n\n signature,\n\n ))\n\n }\n\n }\n\n}\n\n\n", "file_path": "aws/rust-runtime/aws-sigv4/src/http_request/sign.rs", "rank": 60, "score": 166338.1970984626 }, { "content": "/// Assumes a start object/array token has already been consumed and skips tokens until\n\n/// until its corresponding end object/array token is found.\n\npub fn skip_to_end<'a>(\n\n tokens: &mut impl Iterator<Item = Result<Token<'a>, Error>>,\n\n) -> Result<(), Error> {\n\n skip_inner(1, tokens)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-json/src/deserialize/token.rs", "rank": 61, "score": 166332.65494724733 }, { "content": "/// Skips an entire value in the token stream. Errors if it isn't a value.\n\npub fn skip_value<'a>(\n\n tokens: &mut impl Iterator<Item = Result<Token<'a>, Error>>,\n\n) -> Result<(), Error> {\n\n skip_inner(0, tokens)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-json/src/deserialize/token.rs", "rank": 62, "score": 166332.65494724733 }, { "content": "pub fn validate_headers<'a>(\n\n actual_headers: &HeaderMap,\n\n expected_headers: impl IntoIterator<Item = (impl AsRef<str> + 'a, impl AsRef<str> + 'a)>,\n\n) -> Result<(), ProtocolTestFailure> {\n\n for (key, expected_value) in expected_headers {\n\n let key = key.as_ref();\n\n let expected_value = expected_value.as_ref();\n\n match normalized_header(actual_headers, key) {\n\n None => {\n\n return Err(ProtocolTestFailure::MissingHeader {\n\n expected: key.to_string(),\n\n })\n\n }\n\n Some(actual_value) if actual_value != *expected_value => {\n\n return Err(ProtocolTestFailure::InvalidHeader {\n\n key: key.to_string(),\n\n expected: expected_value.to_string(),\n\n found: actual_value,\n\n })\n\n }\n\n _ => (),\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 63, "score": 166332.65494724733 }, { "content": "/// Returns `true` if this code is being run in CI\n\npub fn running_in_ci() -> bool {\n\n std::env::var(\"GITHUB_ACTIONS\").unwrap_or_default() == \"true\"\n\n || std::env::var(\"SMITHY_RS_DOCKER_BUILD_IMAGE\").unwrap_or_default() == \"1\"\n\n}\n", "file_path": "tools/smithy-rs-tool-common/src/ci.rs", "rank": 64, "score": 166332.65494724733 }, { "content": "/// Signs an Event Stream message with the given `credentials`.\n\n///\n\n/// Each message's signature incorporates the signature of the previous message (`last_signature`).\n\n/// The very first message incorporates the signature of the top-level request\n\n/// for both HTTP 2 and WebSocket.\n\npub fn sign_message<'a>(\n\n message: &'a Message,\n\n last_signature: &'a str,\n\n params: &'a SigningParams<'a>,\n\n) -> SigningOutput<Message> {\n\n let message_payload = {\n\n let mut payload = Vec::new();\n\n message.write_to(&mut payload).unwrap();\n\n payload\n\n };\n\n sign_payload(Some(message_payload), last_signature, params)\n\n}\n\n\n", "file_path": 
"aws/rust-runtime/aws-sigv4/src/event_stream.rs", "rank": 65, "score": 166332.65494724733 }, { "content": "pub fn main() -> anyhow::Result<()> {\n\n let opt = Args::parse().validate()?;\n\n\n\n let start_time = Instant::now();\n\n let mut manifest_paths = Vec::new();\n\n for crate_path in &opt.crate_paths {\n\n discover_manifests(&mut manifest_paths, crate_path)?;\n\n }\n\n\n\n for manifest_path in manifest_paths {\n\n update_manifest(&manifest_path, &opt)?;\n\n }\n\n\n\n println!(\"Finished in {:?}\", start_time.elapsed());\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/sdk-versioner/src/v1.rs", "rank": 66, "score": 165115.63596083433 }, { "content": "/// Returns a signed empty message\n\n///\n\n/// Empty signed event stream messages differ from normal signed event stream\n\n/// in that the payload is 0-bytes rather than a nested message. There is no way\n\n/// to create a signed empty message using [`sign_message`].\n\npub fn sign_empty_message<'a>(\n\n last_signature: &'a str,\n\n params: &'a SigningParams<'a>,\n\n) -> SigningOutput<Message> {\n\n sign_payload(None, last_signature, params)\n\n}\n\n\n", "file_path": "aws/rust-runtime/aws-sigv4/src/event_stream.rs", "rank": 67, "score": 164465.8499099011 }, { "content": "pub fn default_provider() -> IdempotencyTokenProvider {\n\n IdempotencyTokenProvider::random()\n\n}\n\n\n\nimpl From<&'static str> for IdempotencyTokenProvider {\n\n fn from(token: &'static str) -> Self {\n\n Self::fixed(token)\n\n }\n\n}\n\n\n\nimpl IdempotencyTokenProvider {\n\n pub fn make_idempotency_token(&self) -> String {\n\n match &self.inner {\n\n Inner::Static(token) => token.to_string(),\n\n Inner::Random(rng) => {\n\n let input: u128 = rng.lock().unwrap().u128(..);\n\n uuid_v4(input)\n\n }\n\n }\n\n }\n", "file_path": "rust-runtime/inlineable/src/idempotency_token.rs", "rank": 68, "score": 164460.9583880112 }, { "content": "class SmithyTypesPubUseGeneratorTest {\n\n private fun emptyModel() = modelWithMember()\n\n private fun modelWithMember(\n\n inputMember: String = \"\",\n\n outputMember: String = \"\",\n\n unionMember: String = \"\",\n\n additionalShape: String = \"\"\n\n ): Model {\n\n return \"\"\"\n\n namespace test\n\n\n\n $additionalShape\n\n structure SomeStruct {\n\n }\n\n union SomeUnion {\n\n someStruct: SomeStruct,\n\n $unionMember\n\n }\n\n structure SomeInput {\n\n $inputMember\n", "file_path": "codegen/src/test/kotlin/software/amazon/smithy/rust/codegen/customizations/SmithyTypesPubUseGeneratorTest.kt", "rank": 69, "score": 163931.55837470412 }, { "content": "pub fn require_query_params<B>(\n\n request: &Request<B>,\n\n require_keys: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n let actual_keys: HashSet<&str> = extract_params(request.uri())\n\n .iter()\n\n .map(|param| QueryParam::parse(param).key)\n\n .collect();\n\n for key in require_keys {\n\n if !actual_keys.contains(*key) {\n\n return Err(ProtocolTestFailure::RequiredQueryParam {\n\n expected: key.to_string(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 70, "score": 162659.73517859165 }, { "content": "pub fn validate_query_string<B>(\n\n request: &Request<B>,\n\n expected_params: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n let actual_params = extract_params(request.uri());\n\n for param in expected_params {\n\n if !actual_params.contains(param) {\n\n return Err(ProtocolTestFailure::MissingQueryParam {\n\n expected: param.to_string(),\n\n found: actual_params.iter().map(|s| 
s.to_string()).collect(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 71, "score": 162659.73517859165 }, { "content": "pub fn forbid_query_params<B>(\n\n request: &Request<B>,\n\n forbid_params: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n let actual_params: HashSet<QueryParam> = extract_params(request.uri())\n\n .iter()\n\n .map(|param| QueryParam::parse(param))\n\n .collect();\n\n let actual_keys: HashSet<&str> = actual_params.iter().map(|param| param.key).collect();\n\n for param in forbid_params {\n\n let parsed = QueryParam::parse(param);\n\n // If the forbidden param is k=v, then forbid this key-value pair\n\n if actual_params.contains(&parsed) {\n\n return Err(ProtocolTestFailure::ForbiddenQueryParam {\n\n expected: param.to_string(),\n\n });\n\n }\n\n // If the assertion is only about a key, then check keys\n\n if parsed.value.is_none() && actual_keys.contains(parsed.key) {\n\n return Err(ProtocolTestFailure::ForbiddenQueryParam {\n\n expected: param.to_string(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 72, "score": 162659.73517859165 }, { "content": "/// Converts an Event Stream [`Message`](Message) into a Smithy modeled type.\n\npub trait UnmarshallMessage: fmt::Debug {\n\n /// Smithy modeled type to convert into.\n\n type Output;\n\n /// Smithy modeled error to convert into.\n\n type Error;\n\n\n\n fn unmarshall(\n\n &self,\n\n message: &Message,\n\n ) -> Result<UnmarshalledMessage<Self::Output, Self::Error>, Error>;\n\n}\n\n\n\nmod value {\n\n use crate::error::Error;\n\n use crate::frame::checked;\n\n use crate::str_bytes::StrBytes;\n\n use aws_smithy_types::DateTime;\n\n use bytes::{Buf, BufMut, Bytes};\n\n use std::convert::TryInto;\n\n use std::mem::size_of;\n", "file_path": "rust-runtime/aws-smithy-eventstream/src/frame.rs", "rank": 73, "score": 161774.92939465394 }, { "content": "/// Converts a Smithy modeled Event Stream type into a [`Message`](Message).\n\npub trait MarshallMessage: fmt::Debug {\n\n /// Smithy modeled input type to convert from.\n\n type Input;\n\n\n\n fn marshall(&self, input: Self::Input) -> Result<Message, Error>;\n\n}\n\n\n\n/// A successfully unmarshalled message that is either an `Event` or an `Error`.\n\n#[derive(Debug)]\n\npub enum UnmarshalledMessage<T, E> {\n\n Event(T),\n\n Error(E),\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-eventstream/src/frame.rs", "rank": 74, "score": 161774.92939465394 }, { "content": "/// Signs an Event Stream message.\n\npub trait SignMessage: fmt::Debug {\n\n fn sign(&mut self, message: Message) -> Result<Message, SignMessageError>;\n\n\n\n fn sign_empty(&mut self) -> Result<Message, SignMessageError>;\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-eventstream/src/frame.rs", "rank": 75, "score": 161769.11010334373 }, { "content": "/// Returns the data element at the current position\n\n///\n\n/// If the current position is not a data element (and is instead a <startelement>) an error\n\n/// will be returned\n\npub fn try_data<'a, 'inp>(\n\n tokens: &'a mut impl Iterator<Item = Result<(Token<'inp>, Depth), XmlError>>,\n\n) -> Result<Cow<'inp, str>, XmlError> {\n\n loop {\n\n match tokens.next().map(|opt| opt.map(|opt| opt.0)) {\n\n None => return Ok(Cow::Borrowed(\"\")),\n\n Some(Ok(Token::Text { text })) => return unescape(text.as_str()),\n\n Some(Ok(e @ Token::ElementStart { .. 
})) => {\n\n return Err(XmlError::custom(format!(\n\n \"Looking for a data element, found: {:?}\",\n\n e\n\n )))\n\n }\n\n Some(Err(e)) => return Err(e),\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "rust-runtime/aws-smithy-xml/src/decode.rs", "rank": 76, "score": 161143.60542435938 }, { "content": "/// Default [`timeout::Config`] Provider chain\n\n///\n\n/// Unlike other credentials and region, [`timeout::Config`] has no related `TimeoutConfigProvider` trait. Instead,\n\n/// a builder struct is returned which has a similar API.\n\n///\n\n/// This provider will check the following sources in order:\n\n/// 1. [Environment variables](EnvironmentVariableTimeoutConfigProvider)\n\n/// 2. [Profile file](crate::profile::timeout_config::ProfileFileTimeoutConfigProvider) (`~/.aws/config`)\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// # use std::error::Error;\n\n/// # #[tokio::main]\n\n/// # async fn main() {\n\n/// use aws_config::default_provider::timeout_config;\n\n///\n\n/// // Load a timeout config from a specific profile\n\n/// let timeout_config = timeout_config::default_provider()\n\n/// .profile_name(\"other_profile\")\n\n/// .timeout_config()\n\n/// .await;\n\n/// let config = aws_config::from_env()\n\n/// // Override the timeout config set by the default profile\n\n/// .timeout_config(timeout_config)\n\n/// .load()\n\n/// .await;\n\n/// // instantiate a service client:\n\n/// // <my_aws_service>::Client::new(&config);\n\n/// # }\n\n/// ```\n\npub fn default_provider() -> Builder {\n\n Builder::default()\n\n}\n\n\n\n/// Builder for [`timeout::Config`](aws_smithy_types::timeout::Config) that checks the environment variables and AWS profile files for configuration\n\n#[derive(Debug, Default)]\n\npub struct Builder {\n\n env_provider: EnvironmentVariableTimeoutConfigProvider,\n\n profile_file: profile::timeout_config::Builder,\n\n}\n\n\n\nimpl Builder {\n\n /// Configure the default chain\n\n ///\n\n /// Exposed for overriding the environment when unit-testing providers\n\n pub fn configure(mut self, configuration: &ProviderConfig) -> Self {\n\n self.env_provider =\n\n EnvironmentVariableTimeoutConfigProvider::new_with_env(configuration.env());\n\n self.profile_file = self.profile_file.configure(configuration);\n\n self\n", "file_path": "aws/rust-runtime/aws-config/src/default_provider/timeout_config.rs", "rank": 77, "score": 160935.66195016773 }, { "content": "/// Default RetryConfig Provider chain\n\n///\n\n/// Unlike other \"providers\" `RetryConfig` has no related `RetryConfigProvider` trait. Instead,\n\n/// a builder struct is returned which has a similar API.\n\n///\n\n/// This provider will check the following sources in order:\n\n/// 1. [Environment variables](EnvironmentVariableRetryConfigProvider)\n\n/// 2. [Profile file](crate::profile::retry_config::ProfileFileRetryConfigProvider)\n\n///\n\n/// # Example\n\n///\n\n/// When running [`aws_config::from_env()`](crate::from_env()), a [`ConfigLoader`](crate::ConfigLoader)\n\n/// is created that will then create a [`RetryConfig`] from the default_provider. 
There is no\n\n/// need to call `default_provider` and the example below is only for illustration purposes.\n\n///\n\n/// ```no_run\n\n/// # use std::error::Error;\n\n/// # #[tokio::main]\n\n/// # async fn main() -> Result<(), Box<dyn Error>> {\n\n/// use aws_config::default_provider::retry_config;\n\n///\n\n/// // Load a retry config from a specific profile\n\n/// let retry_config = retry_config::default_provider()\n\n/// .profile_name(\"other_profile\")\n\n/// .retry_config()\n\n/// .await;\n\n/// let config = aws_config::from_env()\n\n/// // Override the retry config set by the default profile\n\n/// .retry_config(retry_config)\n\n/// .load()\n\n/// .await;\n\n/// // instantiate a service client:\n\n/// // <my_aws_service>::Client::new(&config);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn default_provider() -> Builder {\n\n Builder::default()\n\n}\n\n\n\n/// Builder for RetryConfig that checks the environment and aws profile for configuration\n\n#[derive(Debug, Default)]\n\npub struct Builder {\n\n env_provider: EnvironmentVariableRetryConfigProvider,\n\n profile_file: profile::retry_config::Builder,\n\n}\n\n\n\nimpl Builder {\n\n /// Configure the default chain\n\n ///\n\n /// Exposed for overriding the environment when unit-testing providers\n\n pub fn configure(mut self, configuration: &ProviderConfig) -> Self {\n\n self.env_provider =\n\n EnvironmentVariableRetryConfigProvider::new_with_env(configuration.env());\n\n self.profile_file = self.profile_file.configure(configuration);\n\n self\n", "file_path": "aws/rust-runtime/aws-config/src/default_provider/retry_config.rs", "rank": 78, "score": 160934.57369616436 }, { "content": "/// Default App Name Provider chain\n\n///\n\n/// This provider will check the following sources in order:\n\n/// 1. [Environment variables](EnvironmentVariableAppNameProvider)\n\n/// 2. 
[Profile file](crate::profile::app_name::ProfileFileAppNameProvider)\n\npub fn default_provider() -> Builder {\n\n Builder::default()\n\n}\n\n\n\n/// Default provider builder for [`AppName`]\n\n#[derive(Debug, Default)]\n\npub struct Builder {\n\n env_provider: EnvironmentVariableAppNameProvider,\n\n profile_file: app_name::Builder,\n\n}\n\n\n\nimpl Builder {\n\n #[doc(hidden)]\n\n /// Configure the default chain\n\n ///\n\n /// Exposed for overriding the environment when unit-testing providers\n\n pub fn configure(mut self, configuration: &ProviderConfig) -> Self {\n\n self.env_provider = EnvironmentVariableAppNameProvider::new_with_env(configuration.env());\n\n self.profile_file = self.profile_file.configure(configuration);\n\n self\n", "file_path": "aws/rust-runtime/aws-config/src/default_provider/app_name.rs", "rank": 79, "score": 160925.07865911286 }, { "content": "#[allow(dead_code)]\n\npub fn client() -> Client<\n\n aws_smithy_client::erase::DynConnector,\n\n aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,\n\n> {\n\n let raw_client = Builder::new()\n\n .rustls()\n\n .middleware_fn(|mut req| {\n\n let http_req = req.http_mut();\n\n let uri = format!(\"http://localhost:13734{}\", http_req.uri().path());\n\n *http_req.uri_mut() = uri.parse().unwrap();\n\n req\n\n })\n\n .build_dyn();\n\n let config = Config::builder().build();\n\n Client::with_config(raw_client, config)\n\n}\n", "file_path": "rust-runtime/aws-smithy-http-server/examples/pokemon_service/tests/helpers.rs", "rank": 80, "score": 160925.07865911286 }, { "content": "pub fn read_many_primitive<T: Parse>(values: ValueIter<HeaderValue>) -> Result<Vec<T>, ParseError> {\n\n read_many(values, |v: &str| {\n\n T::parse_smithy_primitive(v).map_err(|primitive| {\n\n ParseError::new_with_message(format!(\n\n \"failed reading a list of primitives: {}\",\n\n primitive\n\n ))\n\n })\n\n })\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-http/src/header.rs", "rank": 81, "score": 160367.353465964 }, { "content": "/// Read exactly one or none from a headers iterator\n\n///\n\n/// This function does not perform comma splitting like `read_many`\n\npub fn one_or_none<T: FromStr>(\n\n mut values: ValueIter<HeaderValue>,\n\n) -> Result<Option<T>, ParseError> {\n\n let first = match values.next() {\n\n Some(v) => v,\n\n None => return Ok(None),\n\n };\n\n let value = std::str::from_utf8(first.as_bytes())\n\n .map_err(|_| ParseError::new_with_message(\"invalid utf-8\"))?;\n\n match values.next() {\n\n None => T::from_str(value.trim())\n\n .map_err(|_| ParseError::new())\n\n .map(Some),\n\n Some(_) => Err(ParseError::new_with_message(\n\n \"expected a single value but found multiple\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-http/src/header.rs", "rank": 82, "score": 159277.6056777918 }, { "content": "fn decode_inner(inp: &str) -> Result<Vec<u8>, DecodeError> {\n\n // one base64 character is only 6 bits so it can't produce valid data.\n\n if inp.len() == 1 {\n\n return Err(DecodeError::InvalidLength);\n\n }\n\n\n\n // when there's padding, we might slightly over allocate but it significantly simplifies\n\n // the code to just ignore it.\n\n let mut ret = Vec::with_capacity((inp.len() + 3) / 4 * 3);\n\n\n\n // 4 base-64 characters = 3 bytes\n\n // 1. Break the input into 4 character segments\n\n // 2. Write those segments into an i32\n\n // 3. 
Read u8s back out of the i32\n\n let chunks = inp.as_bytes().chunks(4);\n\n let mut padding = 0;\n\n for chunk in chunks {\n\n // padding should only be set on the last input\n\n if padding != 0 {\n\n return Err(DecodeError::InvalidPadding);\n", "file_path": "rust-runtime/aws-smithy-types/src/base64.rs", "rank": 83, "score": 158695.8329363758 }, { "content": "pub fn read_many_from_str<T: FromStr>(\n\n values: ValueIter<HeaderValue>,\n\n) -> Result<Vec<T>, ParseError> {\n\n read_many(values, |v: &str| {\n\n v.parse()\n\n .map_err(|_err| ParseError::new_with_message(\"failed during FromString conversion\"))\n\n })\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-http/src/header.rs", "rank": 84, "score": 157470.6856557037 }, { "content": "fn add_item(table_name: impl Into<String>, item: Value) -> PutItemInput {\n\n let attribute_value = match value_to_item(item) {\n\n AttributeValue::M(map) => map,\n\n other => panic!(\"can only insert top level values, got {:?}\", other),\n\n };\n\n\n\n PutItemInput::builder()\n\n .table_name(table_name)\n\n .set_item(Some(attribute_value))\n\n .build()\n\n .expect(\"valid operation\")\n\n}\n\n\n", "file_path": "aws/sdk/integration-tests/dynamodb/tests/movies.rs", "rank": 85, "score": 156629.01696422795 }, { "content": "pub fn run_data(data: &[u8]) {\n\n // Parse through with aws-smithy-json first to make sure it doesn't panic on invalid inputs\n\n if let Ok(tokens) =\n\n aws_smithy_json::deserialize::json_token_iter(data).collect::<Result<Vec<Token>, Error>>()\n\n {\n\n // Exercise string unescaping since the later comparison against Serde\n\n // re-serializes, and thus, loses UTF-16 surrogate pairs.\n\n for token in tokens {\n\n if let Token::ValueString { value, .. } = token {\n\n if let Ok(unescaped) = value.to_unescaped() {\n\n let serde_equiv =\n\n serde_json::from_str::<String>(&format!(\"\\\"{}\\\"\", value.as_escaped_str()))\n\n .unwrap();\n\n assert_eq!(serde_equiv, unescaped);\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Now parse with Serde, and if it's valid, compare the two and panic if different\n", "file_path": "rust-runtime/aws-smithy-json/fuzz/fuzz_targets/common.rs", "rank": 86, "score": 155736.02913622494 }, { "content": "/// Given a `location`, this function looks for the `aws-sdk-rust` git repository. If found,\n\n/// it resolves the `sdk/` directory. Otherwise, it returns the original `location`.\n\npub fn resolve_publish_location(location: &Path) -> PathBuf {\n\n match Repository::new(SDK_REPO_NAME, location) {\n\n // If the given path was the `aws-sdk-rust` repo root, then resolve the `sdk/` directory to publish from\n\n Ok(sdk_repo) => sdk_repo.root.join(SDK_REPO_CRATE_PATH),\n\n // Otherwise, publish from the given path (likely the smithy-rs runtime bundle)\n\n Err(_) => location.into(),\n\n }\n\n}\n", "file_path": "tools/publisher/src/repo.rs", "rank": 87, "score": 154810.86206632823 }, { "content": "/// Default Region Provider chain\n\n///\n\n/// This provider will check the following sources in order:\n\n/// 1. [Environment variables](EnvironmentVariableRegionProvider)\n\n/// 2. [Profile file](crate::profile::region::ProfileFileRegionProvider)\n\n/// 3. 
[EC2 IMDSv2](crate::imds::region)\n\npub fn default_provider() -> impl ProvideRegion {\n\n Builder::default().build()\n\n}\n\n\n\n/// Default region provider chain\n\n#[derive(Debug)]\n\npub struct DefaultRegionChain(RegionProviderChain);\n\n\n\nimpl DefaultRegionChain {\n\n /// Load a region from this chain\n\n pub async fn region(&self) -> Option<Region> {\n\n self.0.region().await\n\n }\n\n\n\n /// Builder for [`DefaultRegionChain`]\n\n pub fn builder() -> Builder {\n\n Builder::default()\n\n }\n\n}\n\n\n", "file_path": "aws/rust-runtime/aws-config/src/default_provider/region.rs", "rank": 88, "score": 154064.3161600164 }, { "content": "/// Provide a [`Region`](Region) to use with AWS requests\n\n///\n\n/// For most cases [`default_provider`](crate::default_provider::region::default_provider) will be the best option, implementing\n\n/// a standard provider chain.\n\npub trait ProvideRegion: Send + Sync + Debug {\n\n /// Load a region from this provider\n\n fn region(&self) -> future::ProvideRegion<'_>;\n\n}\n\n\n\nimpl ProvideRegion for Region {\n\n fn region(&self) -> future::ProvideRegion<'_> {\n\n future::ProvideRegion::ready(Some(self.clone()))\n\n }\n\n}\n\n\n\nimpl<'a> ProvideRegion for &'a Region {\n\n fn region(&self) -> future::ProvideRegion<'_> {\n\n future::ProvideRegion::ready(Some((*self).clone()))\n\n }\n\n}\n\n\n\nimpl ProvideRegion for Box<dyn ProvideRegion> {\n\n fn region(&self) -> future::ProvideRegion<'_> {\n\n self.as_ref().region()\n", "file_path": "aws/rust-runtime/aws-config/src/meta/region.rs", "rank": 89, "score": 153615.9125111284 }, { "content": "pub fn anchors(name: &str) -> (String, String) {\n\n (\n\n format!(\"{}{} -->\", ANCHOR_START, name),\n\n format!(\"{}{} -->\", ANCHOR_END, name),\n\n )\n\n}\n\n\n\nconst ANCHOR_START: &str = \"<!-- anchor_start:\";\n\nconst ANCHOR_END: &str = \"<!-- anchor_end:\";\n\n\n", "file_path": "tools/sdk-lints/src/anchor.rs", "rank": 90, "score": 152806.35325702518 }, { "content": "#[test]\n\nfn size_type() {\n\n let size = i64::MAX;\n\n\n\n // Should only compile if the type is correctly customized\n\n let object = Object::builder().size(size).build();\n\n assert_eq!(size, object.size);\n\n}\n", "file_path": "aws/sdk/integration-tests/s3/tests/size-type.rs", "rank": 91, "score": 152438.9175795167 }, { "content": "/// Returns a new credentials provider built with the given closure. 
This allows you\n\n/// to create an [`ProvideCredentials`] implementation from an async block that returns\n\n/// a [`credentials::Result`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use aws_types::Credentials;\n\n/// use aws_config::meta::credentials::provide_credentials_fn;\n\n///\n\n/// async fn load_credentials() -> Credentials {\n\n/// todo!()\n\n/// }\n\n///\n\n/// provide_credentials_fn(|| async {\n\n/// // Async process to retrieve credentials goes here\n\n/// let credentials = load_credentials().await;\n\n/// Ok(credentials)\n\n/// });\n\n/// ```\n\npub fn provide_credentials_fn<'c, T, F>(f: T) -> ProvideCredentialsFn<'c, T>\n\nwhere\n\n T: Fn() -> F + Send + Sync + 'c,\n\n F: Future<Output = credentials::Result> + Send + 'static,\n\n{\n\n ProvideCredentialsFn {\n\n f,\n\n phantom: Default::default(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::meta::credentials::credential_fn::provide_credentials_fn;\n\n use async_trait::async_trait;\n\n use aws_types::credentials::ProvideCredentials;\n\n use aws_types::{credentials, Credentials};\n\n use std::fmt::{Debug, Formatter};\n\n\n\n fn assert_send_sync<T: Send + Sync>() {}\n\n\n\n #[test]\n\n fn creds_are_send_sync() {\n\n assert_send_sync::<Credentials>()\n\n }\n\n\n", "file_path": "aws/rust-runtime/aws-config/src/meta/credentials/credential_fn.rs", "rank": 92, "score": 151415.02810438888 }, { "content": "pub fn validate_body<T: AsRef<[u8]>>(\n\n actual_body: T,\n\n expected_body: &str,\n\n media_type: MediaType,\n\n) -> Result<(), ProtocolTestFailure> {\n\n let body_str = std::str::from_utf8(actual_body.as_ref());\n\n match (media_type, body_str) {\n\n (MediaType::Json, Ok(actual_body)) => try_json_eq(actual_body, expected_body),\n\n (MediaType::Xml, Ok(actual_body)) => try_xml_equivalent(actual_body, expected_body),\n\n (MediaType::Json, Err(_)) => Err(ProtocolTestFailure::InvalidBodyFormat {\n\n expected: \"json\".to_owned(),\n\n found: \"input was not valid UTF-8\".to_owned(),\n\n }),\n\n (MediaType::Xml, Err(_)) => Err(ProtocolTestFailure::InvalidBodyFormat {\n\n expected: \"XML\".to_owned(),\n\n found: \"input was not valid UTF-8\".to_owned(),\n\n }),\n\n (MediaType::UrlEncodedForm, Ok(actual_body)) => {\n\n try_url_encoded_form_equivalent(actual_body, expected_body)\n\n }\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 93, "score": 151274.9823374299 }, { "content": "pub fn escape(s: &str) -> Cow<str> {\n\n let mut remaining = s;\n\n if !s.contains(ESCAPES) {\n\n return Cow::Borrowed(s);\n\n }\n\n let mut out = String::new();\n\n while let Some(idx) = remaining.find(ESCAPES) {\n\n out.push_str(&remaining[..idx]);\n\n remaining = &remaining[idx..];\n\n let mut idxs = remaining.char_indices();\n\n let (_, chr) = idxs.next().expect(\"must not be none\");\n\n match chr {\n\n '>' => out.push_str(\"&gt;\"),\n\n '<' => out.push_str(\"&lt;\"),\n\n '\\'' => out.push_str(\"&apos;\"),\n\n '\"' => out.push_str(\"&quot;\"),\n\n '&' => out.push_str(\"&amp;\"),\n\n // push a hex escape sequence\n\n other => {\n\n write!(&mut out, \"&#x{:X};\", other as u32).expect(\"write to string cannot fail\")\n", "file_path": "rust-runtime/aws-smithy-xml/src/escape.rs", "rank": 94, "score": 150934.6566977891 }, { "content": "class SmithyTypesPubUseGenerator(private val runtimeConfig: RuntimeConfig) : LibRsCustomization() {\n\n override fun section(section: LibRsSection) = writable {\n\n when (section) {\n\n is LibRsSection.Body -> {\n\n val types = pubUseTypes(runtimeConfig, section.model)\n\n if 
(types.isNotEmpty()) {\n\n docs(\"Re-exported types from supporting crates.\")\n\n rustBlock(\"pub mod types\") {\n\n types.forEach { type -> rust(\"pub use #T;\", type) }\n\n }\n\n }\n\n }\n\n else -> {\n\n }\n\n }\n\n }\n\n}\n", "file_path": "codegen/src/main/kotlin/software/amazon/smithy/rust/codegen/smithy/customizations/SmithyTypesPubUseGenerator.kt", "rank": 95, "score": 150743.0833476689 }, { "content": "#[track_caller]\n\npub fn assert_ok(inp: Result<(), ProtocolTestFailure>) {\n\n match inp {\n\n Ok(_) => (),\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n panic!(\"Protocol test failed\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 96, "score": 147991.13503430065 }, { "content": "/// Escapes a string for embedding in a JSON string value.\n\npub fn escape_string(value: &str) -> Cow<str> {\n\n let bytes = value.as_bytes();\n\n for (index, byte) in bytes.iter().enumerate() {\n\n match byte {\n\n 0..=0x1F | b'\"' | b'\\\\' => {\n\n return Cow::Owned(escape_string_inner(&bytes[0..index], &bytes[index..]))\n\n }\n\n _ => {}\n\n }\n\n }\n\n Cow::Borrowed(value)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-json/src/escape.rs", "rank": 97, "score": 147398.77696889074 } ]
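The `decode_inner` entry earlier in this context list describes base64 decoding as breaking the input into 4-character segments, packing each segment into an integer, and reading 3 bytes back out. Below is a minimal, standalone Rust sketch of that single chunk step, added here purely for illustration: it is not the `aws-smithy-types` implementation, the alphabet lookup is a simplified assumption, and padding handling is omitted.

```rust
// Map one base64 character to its 6-bit value (standard alphabet, no padding).
fn sextet(c: u8) -> Option<u32> {
    match c {
        b'A'..=b'Z' => Some((c - b'A') as u32),
        b'a'..=b'z' => Some((c - b'a') as u32 + 26),
        b'0'..=b'9' => Some((c - b'0') as u32 + 52),
        b'+' => Some(62),
        b'/' => Some(63),
        _ => None,
    }
}

// Pack four 6-bit values into a 24-bit integer, then read 3 bytes back out --
// the "4 base-64 characters = 3 bytes" step described in the comment above.
fn decode_chunk(chunk: &[u8; 4]) -> Option<[u8; 3]> {
    let bits = (sextet(chunk[0])? << 18)
        | (sextet(chunk[1])? << 12)
        | (sextet(chunk[2])? << 6)
        | sextet(chunk[3])?;
    Some([(bits >> 16) as u8, (bits >> 8) as u8, bits as u8])
}

fn main() {
    // "TWFu" is the classic example and decodes to "Man".
    assert_eq!(decode_chunk(b"TWFu"), Some(*b"Man"));
    println!("ok");
}
```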
Rust
src/util/test_helpers.rs
aimerib/nightrunner-lib
55d8a326a35e1f397fa9e8741d02bfe05359c2c8
use crate::config::{ determiners::AllowedDeterminers, directions::{AllowedDirections, Directions}, movements::AllowedMovements, prepositions::AllowedPrepositions, rooms::{Exits, Item, Room, Storage}, Config, Event, Narrative, State, Subject, Verb, VerbFunction, }; pub fn mock_config() -> Config { Config { allowed_prepositions: AllowedPrepositions::init(), allowed_determiners: AllowedDeterminers::init(), allowed_directions: AllowedDirections::init(), allowed_movements: AllowedMovements::init(), intro: "The introduction text to be displayed at the begining of the game.".to_string(), allowed_verbs: vec![ Verb { id: 1, names: vec![String::from("quit"), String::from(":q"), String::from("q")], verb_function: VerbFunction::Quit, }, Verb { id: 2, names: vec![String::from("help")], verb_function: VerbFunction::Help, }, Verb { id: 3, names: vec![String::from("look"), String::from("stare")], verb_function: VerbFunction::Look, }, Verb { id: 4, names: vec![String::from("inventory"), String::from("i")], verb_function: VerbFunction::Inventory, }, Verb { id: 5, names: vec![ String::from("pick"), String::from("take"), String::from("grab"), String::from("pi"), String::from("tk"), String::from("gr"), String::from("get"), String::from("g"), ], verb_function: VerbFunction::Take, }, Verb { id: 6, names: vec![String::from("drop"), String::from("place")], verb_function: VerbFunction::Drop, }, Verb { id: 7, names: vec![String::from("give"), String::from("hand")], verb_function: VerbFunction::Normal, }, Verb { id: 8, names: vec![String::from("talk"), String::from("chat")], verb_function: VerbFunction::Talk, }, Verb { id: 9, names: vec![String::from("hug")], verb_function: VerbFunction::Normal, }, ], items: vec![ Item { id: 1, name: String::from("item1"), description: String::from("item 1 description"), can_pick: false, }, Item { id: 2, name: String::from("item2"), description: String::from("item 2 description"), can_pick: true, }, ], narratives: vec![ Narrative { id: 1, text: String::from("text"), description: String::from("text"), }, Narrative { id: 2, text: String::from( "this is a templated which exists in the game {item1}.\n\nthis is a templated subject that exists in the game {subject1}.", ), description: String::from("text"), }, Narrative { id: 3, text: String::from("this narrative should replace the old one."), description: String::from("a replaced narrative"), }, Narrative { id: 4, text: String::from("this narrative should be returned along with the text of room 1."), description: String::from("a narrative that is added to the room narrative"), }, Narrative { id: 5, text: "this narrative should be returned along with the text of room 1 when completing event 6.".to_string(), description: "a narrative that is added to the room narrative".to_string() }, ], rooms: vec![ Room { id: 1, name: String::from("room 1"), description: String::from("first room"), exits: vec![Exits { room_id: 2, direction: Directions::South, }], stash: Storage { items: Vec::new(), item_ids: vec![1, 2], }, room_events: vec![1, 4, 2, 6], narrative: 1, subjects: vec![1], }, Room { id: 2, name: String::from("room 2"), description: String::from("second room"), exits: vec![Exits { room_id: 1, direction: Directions::North, }], stash: Storage { items: Vec::new(), item_ids: Vec::new(), }, room_events: vec![5], narrative: 2, subjects: vec![2], }, ], events: vec![ Event { id: 1, name: String::from("text"), description: String::from("text"), location: 1, destination: None, narrative: Some(1), required_verb: Some(2), required_subject: Some(1), 
required_item: None, completed: false, add_item: None, remove_old_narrative: false, remove_item: None, required_events: vec![], }, Event { id: 2, name: "event 2".to_string(), description: "hug subject 2 - requires event 4".to_string(), location: 1, destination: None, narrative: Some(3), required_verb: Some(9), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![4], }, Event { id: 3, name: String::from("text"), description: String::from("text"), location: 1, destination: None, narrative: Some(2), required_verb: Some(2), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![2], }, Event { id: 4, name: String::from("event 4"), description: String::from("talk to subject 1"), location: 1, destination: None, narrative: Some(1), required_verb: Some(8), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![], }, Event { id: 5, name: "event 5".to_string(), description: "gives item 2 to player when talking to subject2".to_string(), location: 2, destination: Some(1), narrative: Some(4), required_verb: Some(8), required_subject: Some(2), required_item: None, completed: false, add_item: Some(2), remove_old_narrative: false, remove_item: None, required_events: vec![], }, Event { id: 6, name: "event 6".to_string(), description: "gives item 2 to subject1 when talking to subject1 after event 5".to_string(), location: 1, destination: None, narrative: Some(4), required_verb: Some(7), required_subject: Some(1), required_item: Some(2), completed: false, add_item: None, remove_old_narrative: false, remove_item: Some(2), required_events: vec![5], } ], subjects: vec![ Subject { id: 1, name: String::from("subject1"), description: String::from("a subject description"), default_text: String::from("default text"), }, Subject { id: 2, name: String::from("subject2"), description: String::from("subject2 description"), default_text: String::from("default text"), } ], } } pub fn mock_json_data() -> String { let data = mock_config(); serde_json::to_string(&data).unwrap() } pub fn mock_state() -> State { State::init(mock_config()).borrow().clone() }
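The `test_helpers.rs` file above exposes `mock_config`, `mock_json_data`, and `mock_state`. The sketch below shows how those helpers are exercised, modelled directly on the doc examples that appear later in this record's context items (`NightRunnerBuilder::new().with_json_data(..)`, `parse_input("look")`, and the expected `ParsingResult::Look` string); the crate paths and expected output are taken from those docs rather than independently verified.

```rust
use nightrunner_lib::util::test_helpers::mock_json_data;
use nightrunner_lib::{NightRunnerBuilder, ParsingResult};

fn main() {
    // Build a game from the mock JSON config produced by the helper above.
    let data = mock_json_data();
    let nr = NightRunnerBuilder::new().with_json_data(&data).build();

    // Parse a simple command and compare against the expected room description
    // quoted in the library docs for this same mock config.
    let result = nr.parse_input("look");
    assert!(result.is_ok());
    assert_eq!(
        result.unwrap(),
        ParsingResult::Look(
            "first room\n\nHere you see: \nan item1\nan item2\nsubject1".to_string()
        )
    );
}
```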
use crate::config::{ determiners::AllowedDeterminers, directions::{AllowedDirections,
String::from("pick"), String::from("take"), String::from("grab"), String::from("pi"), String::from("tk"), String::from("gr"), String::from("get"), String::from("g"), ], verb_function: VerbFunction::Take, }, Verb { id: 6, names: vec![String::from("drop"), String::from("place")], verb_function: VerbFunction::Drop, }, Verb { id: 7, names: vec![String::from("give"), String::from("hand")], verb_function: VerbFunction::Normal, }, Verb { id: 8, names: vec![String::from("talk"), String::from("chat")], verb_function: VerbFunction::Talk, }, Verb { id: 9, names: vec![String::from("hug")], verb_function: VerbFunction::Normal, }, ], items: vec![ Item { id: 1, name: String::from("item1"), description: String::from("item 1 description"), can_pick: false, }, Item { id: 2, name: String::from("item2"), description: String::from("item 2 description"), can_pick: true, }, ], narratives: vec![ Narrative { id: 1, text: String::from("text"), description: String::from("text"), }, Narrative { id: 2, text: String::from( "this is a templated which exists in the game {item1}.\n\nthis is a templated subject that exists in the game {subject1}.", ), description: String::from("text"), }, Narrative { id: 3, text: String::from("this narrative should replace the old one."), description: String::from("a replaced narrative"), }, Narrative { id: 4, text: String::from("this narrative should be returned along with the text of room 1."), description: String::from("a narrative that is added to the room narrative"), }, Narrative { id: 5, text: "this narrative should be returned along with the text of room 1 when completing event 6.".to_string(), description: "a narrative that is added to the room narrative".to_string() }, ], rooms: vec![ Room { id: 1, name: String::from("room 1"), description: String::from("first room"), exits: vec![Exits { room_id: 2, direction: Directions::South, }], stash: Storage { items: Vec::new(), item_ids: vec![1, 2], }, room_events: vec![1, 4, 2, 6], narrative: 1, subjects: vec![1], }, Room { id: 2, name: String::from("room 2"), description: String::from("second room"), exits: vec![Exits { room_id: 1, direction: Directions::North, }], stash: Storage { items: Vec::new(), item_ids: Vec::new(), }, room_events: vec![5], narrative: 2, subjects: vec![2], }, ], events: vec![ Event { id: 1, name: String::from("text"), description: String::from("text"), location: 1, destination: None, narrative: Some(1), required_verb: Some(2), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: false, remove_item: None, required_events: vec![], }, Event { id: 2, name: "event 2".to_string(), description: "hug subject 2 - requires event 4".to_string(), location: 1, destination: None, narrative: Some(3), required_verb: Some(9), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![4], }, Event { id: 3, name: String::from("text"), description: String::from("text"), location: 1, destination: None, narrative: Some(2), required_verb: Some(2), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, required_events: vec![2], }, Event { id: 4, name: String::from("event 4"), description: String::from("talk to subject 1"), location: 1, destination: None, narrative: Some(1), required_verb: Some(8), required_subject: Some(1), required_item: None, completed: false, add_item: None, remove_old_narrative: true, remove_item: None, 
required_events: vec![], }, Event { id: 5, name: "event 5".to_string(), description: "gives item 2 to player when talking to subject2".to_string(), location: 2, destination: Some(1), narrative: Some(4), required_verb: Some(8), required_subject: Some(2), required_item: None, completed: false, add_item: Some(2), remove_old_narrative: false, remove_item: None, required_events: vec![], }, Event { id: 6, name: "event 6".to_string(), description: "gives item 2 to subject1 when talking to subject1 after event 5".to_string(), location: 1, destination: None, narrative: Some(4), required_verb: Some(7), required_subject: Some(1), required_item: Some(2), completed: false, add_item: None, remove_old_narrative: false, remove_item: Some(2), required_events: vec![5], } ], subjects: vec![ Subject { id: 1, name: String::from("subject1"), description: String::from("a subject description"), default_text: String::from("default text"), }, Subject { id: 2, name: String::from("subject2"), description: String::from("subject2 description"), default_text: String::from("default text"), } ], } } pub fn mock_json_data() -> String { let data = mock_config(); serde_json::to_string(&data).unwrap() } pub fn mock_state() -> State { State::init(mock_config()).borrow().clone() }
Directions}, movements::AllowedMovements, prepositions::AllowedPrepositions, rooms::{Exits, Item, Room, Storage}, Config, Event, Narrative, State, Subject, Verb, VerbFunction, }; pub fn mock_config() -> Config { Config { allowed_prepositions: AllowedPrepositions::init(), allowed_determiners: AllowedDeterminers::init(), allowed_directions: AllowedDirections::init(), allowed_movements: AllowedMovements::init(), intro: "The introduction text to be displayed at the begining of the game.".to_string(), allowed_verbs: vec![ Verb { id: 1, names: vec![String::from("quit"), String::from(":q"), String::from("q")], verb_function: VerbFunction::Quit, }, Verb { id: 2, names: vec![String::from("help")], verb_function: VerbFunction::Help, }, Verb { id: 3, names: vec![String::from("look"), String::from("stare")], verb_function: VerbFunction::Look, }, Verb { id: 4, names: vec![String::from("inventory"), String::from("i")], verb_function: VerbFunction::Inventory, }, Verb { id: 5, names: vec![
random
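For this record, the `prefix`, `middle`, and `suffix` fields shown above appear to be a straight three-way split of `file_code` produced by the "random" strategy: the prefix ends at `AllowedDirections,` and the middle resumes at `Directions},`, with the suffix picking up at `String::from("pick")`. The sketch below only illustrates that assumed concatenation relationship with toy stand-in values; it is not part of any dataset tooling.

```rust
// Check that prefix + middle + suffix concatenate back into the original file.
// This mirrors the assumed field relationship for this record; real values
// would be read from the record rather than hard-coded.
fn reassembles(prefix: &str, middle: &str, suffix: &str, file_code: &str) -> bool {
    let mut joined = String::with_capacity(prefix.len() + middle.len() + suffix.len());
    joined.push_str(prefix);
    joined.push_str(middle);
    joined.push_str(suffix);
    joined == file_code
}

fn main() {
    // Toy stand-ins for the real prefix/middle/suffix fields.
    assert!(reassembles(
        "fn mo",
        "ck_config() ",
        "-> Config {}",
        "fn mock_config() -> Config {}"
    ));
}
```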
[ { "content": "use std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::fmt::{self, Display};\n\nuse std::iter::FromIterator;\n\n\n\nuse regex::Regex;\n\nuse serde::{Deserialize, Serialize};\n\n/// Module containing a few utility functions to\n\n/// make testing a little easier.\n\npub mod test_helpers;\n\n\n\nuse crate::config::directions::Directions;\n\nuse crate::config::rooms::Item;\n\nuse crate::config::{Player, State};\n\nuse crate::parser::errors::{InvalidMovement, InvalidRoom, NoItem, NoRoom};\n\nuse crate::parser::interpreter::{EventMessage, MessageParts};\n\nuse crate::NRResult;\n\nuse crate::ParsingResult;\n\n\n\n/// This struct is used when parsing the narrative text.\n", "file_path": "src/util/mod.rs", "rank": 0, "score": 4.525275652518343 }, { "content": "pub(crate) mod determiners;\n\npub(crate) mod directions;\n\npub(crate) mod movements;\n\npub(crate) mod prepositions;\n\npub(crate) mod rooms;\n\n\n\nuse self::determiners::AllowedDeterminers;\n\nuse self::directions::AllowedDirections;\n\nuse self::movements::AllowedMovements;\n\nuse self::prepositions::AllowedPrepositions;\n\nuse self::rooms::{Item, Room, Storage};\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\n/// This struct holds the texts used to display the story\n\n/// in the game. These narratives are used to display\n\n/// texts for events as well as the current text in the room.\n\n///\n", "file_path": "src/config/mod.rs", "rank": 1, "score": 4.436561664161391 }, { "content": "use regex::Regex;\n\n\n\nuse super::*;\n\nuse crate::config::{directions::Directions, Config, State};\n\n#[cfg(test)]\n\nuse pretty_assertions::assert_eq;\n\n\n\n#[test]\n", "file_path": "src/util/tests.rs", "rank": 2, "score": 4.388991665355528 }, { "content": "use std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::rc::Rc;\n\n\n\nuse crate::config::{directions::Directions, Subject};\n\nuse crate::config::{rooms::Item, State};\n\nuse crate::config::{Narrative, VerbFunction};\n\nuse crate::parser::action::{Action, ActionType};\n\nuse crate::parser::errors::*;\n\nuse crate::util::{\n\n display_help, move_to_direction, parse_room_text, player_get_item, player_receive_item,\n\n player_remove_item, MoveSuccess,\n\n};\n\nuse crate::NRResult;\n\nuse crate::ParsingResult;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]\n\n#[serde(rename_all = \"snake_case\")]\n\n/// An enum representing the different parts of a message returned\n", "file_path": "src/parser/interpreter.rs", "rank": 3, "score": 4.337363930251595 }, { "content": "use crate::config::Config;\n\n#[cfg(test)]\n\nuse pretty_assertions::assert_eq;\n\n\n\nuse super::*;\n\n#[test]\n", "file_path": "src/parser/action_tests.rs", "rank": 4, "score": 4.3254537058711495 }, { "content": "use super::super::interpreter::*;\n\nuse crate::config::{Config, Verb};\n\n#[cfg(test)]\n\nuse pretty_assertions::assert_eq;\n\n#[test]\n", "file_path": "src/parser/interpreter_tests.rs", "rank": 5, "score": 4.245730576305752 }, { "content": "use super::*;\n\nuse crate::{config::directions::Directions, config::rooms::Room};\n\n#[cfg(test)]\n\nuse pretty_assertions::assert_eq;\n\n\n\n#[test]\n", "file_path": "src/config/rooms_tests.rs", "rank": 6, "score": 4.219805274168725 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::*;\n\nuse crate::{\n\n config::{Config, State},\n\n parser::{\n\n errors::InvalidEvent,\n\n interpreter::{EventMessage, 
MessageParts},\n\n },\n\n};\n\n#[cfg(test)]\n\nuse pretty_assertions::assert_eq;\n\n\n\n#[test]\n", "file_path": "src/parser/tests.rs", "rank": 7, "score": 4.208572636632233 }, { "content": "/// Module for the action parser. [Action](Action)\n\n/// are structs containing the important information\n\n/// needed to parse the user input.\n\npub mod action;\n\npub mod errors;\n\n/// Module with the various functions used to parse\n\n/// the user input.\n\npub mod interpreter;\n\n\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\nuse self::action::Action;\n\nuse self::errors::{EmptyInput, InvalidAction};\n\nuse self::interpreter::process_action;\n\nuse crate::config::State;\n\nuse crate::NRResult;\n\nuse crate::ParsingResult;\n\n\n\n/// This is the main function that executes the game.\n\n/// The `NightRunner` struct is the main entry point\n\n/// for the library, and calls this function along with\n\n/// the `State` struct. The return for this function is\n\n/// a `ParsingResult` which is contains the output of\n\n/// the game. The `ParsingResult` returned by this\n\n/// function that is meant to be consumed by the frontend.\n", "file_path": "src/parser/mod.rs", "rank": 8, "score": 4.1480672923262265 }, { "content": "use crate::{\n\n config::rooms::Exits,\n\n util::test_helpers::{self, mock_config, mock_state},\n\n};\n\n#[cfg(test)]\n\nuse pretty_assertions::assert_eq;\n\n\n\nuse super::*;\n\n#[test]\n", "file_path": "src/config/tests.rs", "rank": 9, "score": 4.119194682161548 }, { "content": "use crate::config::directions::Directions;\n\nuse crate::config::rooms::Item;\n\nuse crate::config::{State, Subject, Verb};\n\nuse regex::Regex;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Describes the type of action that is\n\n/// being parsed.\n\n/// They are determined based on the\n\n/// combination of input tokens. If\n\n/// an action contains only a verb,\n\n/// such as \"look\", then it is a\n\n/// `ActionType::Verb` and so on.\n\n/// Invalid actions are returned\n\n/// when the action parser can't\n\n/// determine the type of action.\n\n#[derive(Debug, PartialEq)]\n\npub enum ActionType {\n\n /// Action is a single verb\n\n /// Example: \"look\", \"quit\"\n", "file_path": "src/parser/action.rs", "rank": 10, "score": 3.929490130381626 }, { "content": "/// This function is used to remove an item from the player's inventory\n\n/// but it won't add the inventory to the room. 
This is used when the\n\n/// event indicates that the player should lose an item.\n\npub fn player_remove_item(player: &mut Player, item: Item) -> NRResult<String> {\n\n let old_item = player.inventory.remove_item(item)?;\n\n Ok(format!(\"\\nYou no longer have a {}\\n\", old_item.name))\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\n\n\n/// Represents a successful movement.\n\npub struct MoveSuccess;\n\n\n\nimpl Display for MoveSuccess {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"You moved in that direction\")\n\n }\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 11, "score": 3.917762292823974 }, { "content": "use serde::{Deserialize, Serialize};\n\n// #[cfg(target_arch = \"wasm32\")]\n\n// use serde::{Deserialize, Serialize};\n\nuse std::{cell::RefCell, error::Error, rc::Rc};\n\nuse util::parse_room_text;\n\n/// Module containing the configuration code for this\n\n/// library.\n\npub mod config;\n\n/// The parser module contains a single function that\n\n/// parses the input string and returns a `ParsingResult`.\n\npub mod parser;\n\n/// Helper functions.\n\npub mod util;\n\nextern crate wasm_bindgen;\n\nuse wasm_bindgen::prelude::*;\n\n\n\n/// We use a type alias to make error handling easier.\n\npub type NRResult<T> = Result<T, Box<dyn Error>>;\n\n\n\n/// This is the result of the parsing of the input.\n", "file_path": "src/lib.rs", "rank": 12, "score": 3.9012107715669533 }, { "content": "use cursive::align::{HAlign, VAlign};\n\nuse cursive::event::EventResult;\n\nuse cursive::theme::{BaseColor, BorderStyle, Color, ColorStyle, Style};\n\nuse cursive::utils::span::SpannedString;\n\nuse cursive::view::{Nameable, Resizable, ScrollStrategy};\n\nuse cursive::views::{\n\n Dialog, DummyView, EditView, Layer, LinearLayout, NamedView, PaddedView, ResizedView,\n\n ScrollView, TextContent, TextView,\n\n};\n\nuse cursive::{Cursive, View};\n\nuse cursive_aligned_view::Alignable;\n\nuse nightrunner_lib::parser::interpreter::{EventMessage, MessageParts};\n\nuse nightrunner_lib::{NightRunner, NightRunnerBuilder, ParsingResult};\n\n\n\nstatic THEME_COLORS: [(&str, Color); 10] = [\n\n (\"background\", Color::Rgb(17, 17, 17)),\n\n (\"shadow\", Color::Dark(BaseColor::Green)),\n\n (\"view\", Color::Rgb(17, 17, 17)),\n\n (\"primary\", Color::Dark(BaseColor::Green)),\n\n (\"secondary\", Color::Dark(BaseColor::Green)),\n\n (\"tertiary\", Color::Dark(BaseColor::Green)),\n\n (\"title_primary\", Color::Rgb(255, 0, 127)),\n\n (\"title_secondary\", Color::Dark(BaseColor::Green)),\n\n (\"highlight\", Color::Dark(BaseColor::Green)),\n\n (\"highlight_text\", Color::Dark(BaseColor::Black)),\n\n];\n\n\n", "file_path": "examples/cursive_example/main.rs", "rank": 13, "score": 3.833057057290168 }, { "content": "/// required_events: Vec::new(),\n\n/// };\n\n/// ```\n\n///\n\n/// An event that happens in room 2 as a response to\n\n/// using an item with a subject and requires event\n\n/// 1 to be completed:\n\n/// ```rust\n\n/// # use nightrunner_lib::config::Event;\n\n/// let event = Event {\n\n/// id: 2,\n\n/// location: 2,\n\n/// name: \"Using item 3 with subject 1\".to_string(),\n\n/// description: \"This event happens when you use an item with subject 1.\".to_string(),\n\n/// destination: None,\n\n/// narrative: Some(4),\n\n/// // here verb id 3 would be marked with VerbFunction::Normal\n\n/// required_verb: Some(3),\n\n/// required_subject: Some(1),\n\n/// required_item: Some(3),\n", "file_path": "src/config/mod.rs", 
"rank": 14, "score": 3.824291759415418 }, { "content": " /// The help verb is used to display the help text.\n\n Help,\n\n #[serde(rename = \"look\")]\n\n /// The look verb is used to look at a room, item, or subject.\n\n Look,\n\n #[serde(rename = \"inventory\")]\n\n /// The inventory verb is used to display the inventory.\n\n Inventory,\n\n #[serde(rename = \"take\")]\n\n /// The take verb is used to take an item from a room.\n\n /// Some items can't be picked up, and some other items\n\n /// can only be given to the player through an event.\n\n Take,\n\n #[serde(rename = \"drop\")]\n\n /// The drop verb is used to drop an item in a room. When\n\n /// a player drops an item, the item is removed from the\n\n /// player's inventory and placed in the room where it can\n\n /// be retrieved again.\n\n Drop,\n\n #[serde(rename = \"talk\")]\n", "file_path": "src/config/mod.rs", "rank": 15, "score": 3.799851896189363 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\nuse nightrunner_lib::{parser::interpreter::MessageParts, NightRunnerBuilder, ParsingResult};\n\n#[cfg(test)]\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nuse pretty_assertions::assert_eq;\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nuse std::collections::HashMap;\n\n#[test]\n\n#[cfg(not(target_arch = \"wasm32\"))]\n", "file_path": "tests/integration.rs", "rank": 16, "score": 3.70014661969551 }, { "content": "/// This is the main struct for this library\n\n/// and represents the game. It holds the state\n\n/// internally and passes it to the parser for\n\n/// processing along with the provided input.\n\n#[wasm_bindgen]\n\n#[derive(Debug, PartialEq)]\n\npub struct NightRunner {\n\n state: Rc<RefCell<State>>,\n\n}\n\n\n\n/// You can use this to build a NightRunner\n\n/// strut. While you can build the NightRunner\n\n/// struct directly, using this builder is a\n\n/// little more convenient.\n\n///\n\n/// # Examples:\n\n/// ```rust\n\n/// use nightrunner_lib::NightRunnerBuilder;\n\n/// use nightrunner_lib::util::test_helpers::mock_json_data;\n\n/// let data = mock_json_data();\n", "file_path": "src/lib.rs", "rank": 17, "score": 3.642859331290755 }, { "content": "use wasm_bindgen_test::*;\n\nwasm_bindgen_test_configure!(run_in_browser);\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nuse wasm_bindgen::JsValue;\n\n\n\n#[wasm_bindgen_test]\n\n#[cfg(target_arch = \"wasm32\")]\n", "file_path": "tests/web.rs", "rank": 19, "score": 3.610717435039688 }, { "content": "impl Config {\n\n /// # Config::init_yaml\n\n /// Loads config from serialized JSON.\n\n ///\n\n /// This is useful for web frontends\n\n /// Arguments:\n\n /// * `data` - serialized JSON to be used\n\n /// for the game configuration.\n\n ///\n\n /// ## Example:\n\n /// ```rust\n\n /// # use nightrunner_lib::config::Config;\n\n /// # let data = nightrunner_lib::util::test_helpers::mock_json_data();\n\n /// let config = Config::from_json(&data);\n\n /// ```\n\n ///\n\n /// Example valid JSON:\n\n /// ```rust\n\n /// let data = r#\"{\n\n /// \"allowed_verbs\": [\n", "file_path": "src/config/mod.rs", "rank": 20, "score": 3.6036719778625663 }, { "content": " /// but keep the config struct clean.\n\n pub rooms: Vec<Room>,\n\n /// This Config struct holds all the game data\n\n /// such as verbs, items, etc.\n\n pub config: Config,\n\n}\n\n\n\nimpl State {\n\n /// Takes a config struct and populates the state struct.\n\n ///\n\n /// ## Example:\n\n /// ```rust\n\n /// # use nightrunner_lib::config::{Config, State};\n\n /// # use nightrunner_lib::util::test_helpers::mock_json_data;\n\n /// 
# let json_data = mock_json_data();\n\n /// // Using yaml config files from a path\n\n /// let config1 = Config::from_path(\"./fixtures/\");\n\n /// let state1 = State::init(config1);\n\n /// // or using JSON data from a front-end\n\n /// let config2 = Config::from_json(&json_data);\n", "file_path": "src/config/mod.rs", "rank": 21, "score": 3.5861478705635452 }, { "content": "// This should be re-worked to use events instead. Maybe v2.0\n\n// Using events allows for commands such \"sneak north\" to get\n\n// past a sleeping dragon, or a corporate goon standing guard.\n\n// As it stands, the parser is very simple when it comes to mo-\n\n// ving around.\n\nfn extract_movement(state: &State, command_tokens: &[String]) -> Option<Directions> {\n\n let movements = state.config.allowed_movements.movements.clone();\n\n let directions = state.config.allowed_directions.directions.clone();\n\n match command_tokens.len() {\n\n 1 => match &command_tokens[0][..] {\n\n \"north\" | \"n\" => Some(Directions::North),\n\n \"south\" | \"s\" => Some(Directions::South),\n\n \"east\" | \"e\" => Some(Directions::East),\n\n \"west\" | \"w\" => Some(Directions::West),\n\n _ => None,\n\n },\n\n 2 => {\n\n if movements.contains(&command_tokens[0]) && directions.contains(&command_tokens[1]) {\n\n match &command_tokens[1][..] {\n\n \"north\" | \"n\" => Some(Directions::North),\n\n \"south\" | \"s\" => Some(Directions::South),\n\n \"east\" | \"e\" => Some(Directions::East),\n\n \"west\" | \"w\" => Some(Directions::West),\n\n _ => None,\n\n }\n", "file_path": "src/parser/action.rs", "rank": 22, "score": 3.554994665262725 }, { "content": "//! This library is a text-adventure game engine that can be used to create\n\n//! text based adventure games. It is designed to be used with a front-end\n\n//! which can be written in any language. Implementing this library in a\n\n//! language is a matter of writing a front-end an passing string data to\n\n//! the library for parsing.\n\n//!\n\n//! The configuration of the game is done in the `Config` struct\n\n//! and can be initialized both with YAML files and serialized\n\n//! JSON data, so it is perfect for both web and desktop games.\n\n//!\n\n//! The `parse_input` and `parse_input_json` functions are the only\n\n//! functions that need to be called by the front-end, but the library\n\n//! exposes some of the internal structs and functions to help developers\n\n//! understand how the library works, and to allow a little bit of flexibility\n\n//! in how the library is used.\n\n//!\n\n//! # Example:\n\n//! ```rust\n\n//! use nightrunner_lib::{NightRunner, NightRunnerBuilder, ParsingResult};\n\n//! use nightrunner_lib::util::test_helpers::mock_json_data;\n", "file_path": "src/lib.rs", "rank": 23, "score": 3.5536502875455636 }, { "content": "/// - help\n\n/// - quit\n\n/// - take\n\n/// - drop\n\n/// - talk\n\n///\n\n/// for all other verbs you can use the VerbFunction::Normal.\n\n/// VerbFunction::Normal is the default, and will be parsed by\n\n/// the events parser.\n\n///\n\n/// Verbs can have multiple names, but only one function. 
If\n\n/// a verb has more than one name you can use any of the names\n\n/// available to trigger the verb.\n\n///\n\n/// For example, if you have a verb that is named 'look' and\n\n/// 'peek' you can use either of the two names to trigger\n\n/// the look action.\n\n///\n\n/// # Examples\n\n/// ```rust\n", "file_path": "src/config/mod.rs", "rank": 24, "score": 3.548164783925216 }, { "content": "use crate::{config::directions::Directions, parser::errors::NoItem, NRResult};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// This struct represents a room in the game.\n\n#[derive(Debug, Clone, Deserialize, Serialize, Eq, Ord, PartialEq, PartialOrd)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Room {\n\n pub id: u16,\n\n /// The name of the room. This is used only\n\n /// for making reading the game configs more\n\n /// human readable.\n\n pub name: String,\n\n /// This is a description of the room used\n\n /// when looking at the room or for the\n\n /// description shown in the exists list of\n\n /// the parsed result.\n\n pub description: String,\n\n /// This is the list of possible exits from\n\n /// this room. If the player tries to move\n\n /// in a direction that is not in this list\n", "file_path": "src/config/rooms.rs", "rank": 25, "score": 3.5176942328459155 }, { "content": "# nightrunner-lib\n\n\n\nThis library is a text-adventure game engine that can be used to create\n\ntext based adventure games. It is designed to be used with a front-end\n\nwhich can be written in any language. Implementing this library in a\n\nlanguage is a matter of writing a front-end an passing string data to\n\nthe library for parsing.\n\n\n\n## Using the Rust library\n\n\n\nThe `parse_input` and `parse_input_json` functions are the only\n\nfunctions that need to be called by the front-end, but the library\n\nexposes some of the internal structs and functions to help developers\n\nunderstand how the library works, and to allow a little bit of flexibility\n\nin how the library is used.\n\n\n\nTo initialize the parser you must pass either a JSON string or a path\n\nto YAML files containing the configuration for the game to\n\n`NightRunnerBuilder` using the builder pattern.\n\n\n\n### Example:\n\n\n\n```rust\n\nuse nightrunner_lib::NightRunner;\n\nuse nightrunner_lib::NightRunnerBuilder;\n\nuse nightrunner_lib::parser::interpreter::{ParsingResult};\n\nlet nr = NightRunnerBuilder::new().with_path(\"/game_config/\").build();\n\nlet result = nr.parse_input(\"look\");\n\nlet json_result = nr.json_parse_input(\"look\");\n\nassert!(result.is_ok());\n\nassert_eq!(result.unwrap(),\n\n ParsingResult::Look(String::from(\"first room\\nHere you see: \\n\\na item1\\na item2\"))\n\n);\n\nassert_eq!(json_result,\n\n \"{\\\"ok\\\":{\\\"look\\\":\\\"first room\\\\nHere you see: \\\\n\\\\na item1\\\\na item2\\\"}}\".to_string()\n\n);\n\n```\n\n\n", "file_path": "README.md", "rank": 26, "score": 3.5126762646992806 }, { "content": " fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\n/// # Nightrunner Rust Library\n\n///\n\n/// Use this implementation when using this library\n\n/// in a Rust application.\n\nimpl NightRunner {\n\n /// This is the main function that executes the game. 
Pass\n\n /// the input string to this function and it will return\n\n /// a result that can be used on the front-end to display\n\n /// the game to the user.\n\n pub fn parse_input(&self, input: &str) -> NRResult<ParsingResult> {\n\n parser::parse(self.state.clone(), input)\n\n }\n\n /// This is the main function that executes the game. Pass\n\n /// the input string to this function and it will return\n", "file_path": "src/lib.rs", "rank": 27, "score": 3.3748873352689075 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// The parser expects simple commands,\n\n/// such as \"go north\" or \"look door\".\n\n/// Instead of expecting the user to\n\n/// figure out how to use the commands,\n\n/// the parser will drop any determiners\n\n/// from the user input.\n\n///\n\n/// For example, the input \"look around\"\n\n/// will be translated as \"look\" for\n\n/// parsing and return the description of\n\n/// the current room. This is a very simplistic\n\n/// approach to parsing, since \"sneak past dragon\"\n\n/// would be translated as \"sneak dragon\" and\n\n/// that has a different meaning from the original\n\n/// intent. A possible solution for things like\n\n/// \"sneak past dragon\" would be to use an event\n\n/// requiring the verb \"sneak\" and the subject\n\n/// \"dragon\".\n", "file_path": "src/config/prepositions.rs", "rank": 29, "score": 3.3261566833110514 }, { "content": "/// This function is used to return the text\n\n/// that should be displayed to the player in the room.\n\n/// This text is composed of the current narrative associated\n\n/// with this room, and the exits that are available to the player.\n\n///\n\n/// This function only returns highlting information if either the\n\n/// item or the subject to be highlighted is in the room, otherwise\n\n/// it returns the narrative withouth the template.\n\n///\n\n/// The items and subjects vectors are used to determine if the\n\n/// corresponding items and subjects should be returned with the\n\n/// room text. 
These can be used for highlighting the items and\n\n/// subjects in the front-end.\n\n///\n\n/// This function will return a Result wrapping an EventMessage\n\n/// with the following format:\n\n/// ```rust\n\n/// # use nightrunner_lib::parser::interpreter::{EventMessage, MessageParts};\n\n/// # use std::collections::HashMap;\n\n/// let mut message_parts = HashMap::new();\n\n/// message_parts.insert(MessageParts::RoomText, \"some message with highlighted text.\".to_string());\n\n/// message_parts.insert(MessageParts::RoomText, \"You now have item1.\".to_string());\n\n/// message_parts.insert(MessageParts::RoomText, \"Exits: to the south you see an alley.\".to_string());\n\n/// let event_message = EventMessage {\n\n/// message: \"some message with highlighted text.\\nYou now have item1.\\nExits: to the south you see an alley.\".to_string(),\n\n/// templated_words: vec![\"highlighted\".to_string()],\n\n/// message_parts: message_parts,\n\n/// };\n\n/// ```\n\n/// and the parser will return a respone wrappping this result.\n\n/// `message_parts` contains the three parts of the message that\n\n/// is returned and can be used for layouting the message in the\n\n/// front-end, otherwise the message field can be used for simpler\n\n/// applications.\n\npub fn parse_room_text(\n\n state: State,\n\n narrative_text: String,\n\n event_message: String,\n\n event_id: Option<u16>,\n\n) -> NRResult<EventMessage> {\n\n let current_room = match state\n\n .config\n\n .rooms\n\n .iter()\n\n .find(|room| room.id == state.current_room)\n\n {\n\n Some(room) => room,\n\n None => return Err(InvalidRoom.into()),\n\n };\n\n let player_items = state\n\n .player\n\n .inventory\n\n .items\n\n .clone()\n", "file_path": "src/util/mod.rs", "rank": 30, "score": 3.321603753451364 }, { "content": "/// completed: false,\n\n/// add_item: None,\n\n/// remove_old_narrative: false,\n\n/// // here item id 3 would be removed after the event is completed\n\n/// remove_item: Some(3),\n\n/// required_events: Vec::new(),\n\n/// };\n\n/// ```\n\n\n\n#[derive(Debug, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Event {\n\n /// The id of the event used when referencing the event.\n\n pub id: u16,\n\n /// Room id where the event happens.\n\n pub location: u16,\n\n /// Name of the event. This isn't used in the game\n\n /// and is used to make the event configuration\n\n /// more readable.\n\n pub name: String,\n", "file_path": "src/config/mod.rs", "rank": 31, "score": 3.294443940511801 }, { "content": " SubjectNoEvent(String),\n\n /// Returned when an event is triggered by the player's command. The\n\n /// returned struct contains the text to be displayed to the player.\n\n EventSuccess(EventMessage),\n\n /// Returned when a parser result isn't applicable to the wasm library\n\n NoOp,\n\n}\n\n\n\n#[cfg(any(target_arch = \"wasm32\", doc))]\n\n#[wasm_bindgen]\n\n/// # Nightrunner Wasm Library\n\n///\n\n/// Use this implementation when using this library\n\n/// in a WebAssembly browser application. 
This is the\n\n/// implementation exposed when compiling with `--target=wasm32-unknown-unknown`.\n\nimpl NightRunner {\n\n /// When using the wasm library we won't have access to the\n\n /// builder patter, so the constructor needs to receive the\n\n /// configuration for games as a parameter.\n\n ///\n", "file_path": "src/lib.rs", "rank": 32, "score": 3.2788132634854588 }, { "content": "/// use nightrunner_lib::parser::errors::InvalidEvent;\n\n/// use nightrunner_lib::parser::{action::Action};\n\n/// let nr = NightRunnerBuilder::new().with_path_for_config(\"fixtures/\").build();\n\n/// let mut result = nr.parse_input(\"give item2 to subject2\");\n\n/// let json_result = nr.json_parse_input(\"give item2 to subject2\");\n\n/// // There is no event for player giving item2 to subject2\n\n/// // so we expect an error. InvalidEvent should be used to\n\n/// // indicate that the event is not valid, and how to handle\n\n/// // this error is up to the front-end. Perhaps you display\n\n/// // a message to the user saying that this action is invalid,\n\n/// // or you don't understand the command.\n\n/// // For convenience, this error wraps the action as it was\n\n/// // interpreted by the parser from the input. This is useful\n\n/// // when writing custom logic for the front-end.\n\n/// assert_eq!(\n\n/// result.unwrap_err().to_string(),\n\n/// InvalidEvent.to_string()\n\n/// );\n\n/// result = nr.parse_input(\"give item2 to subject2\");\n\n/// assert_eq!(\n", "file_path": "src/parser/errors.rs", "rank": 33, "score": 3.245139118925465 }, { "content": "/// This function is used when the player is given an item.\n\n/// This function is called by the events parser if the event\n\n/// indicates that the player should receive an item.\n\npub fn player_receive_item(state: &mut State, item: Item) -> String {\n\n state.player.inventory.add_item(item.clone());\n\n let item_message = format!(\"\\nYou now have a {}\\n\", item.name);\n\n item_message\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 34, "score": 3.2252060370750772 }, { "content": "/// # use nightrunner_lib::config::Subject;\n\n/// let subject = Subject {\n\n/// id: 1,\n\n/// name: \"person\".to_string(),\n\n/// description: \"A person dressed all in black\".to_string(),\n\n/// default_text: \"Person: I'm busy now. Maybe later.\".to_string(),\n\n/// };\n\n/// ```\n\n#[derive(Debug, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Subject {\n\n /// The id of the subject used when referencing the subject.\n\n pub id: u16,\n\n /// The name of the subject.\n\n pub name: String,\n\n /// This is what the parser will use when the player\n\n /// looks at the subject.\n\n pub description: String,\n\n /// The default text to display when the the player\n\n /// interacts with the subject and no active events\n", "file_path": "src/config/mod.rs", "rank": 35, "score": 3.188057733850187 }, { "content": "/// Some of the text displayed in the game comes from the\n\n/// item, room, or subject's description. 
For everything\n\n/// else, the narrative's text is used.\n\n#[derive(Debug, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Narrative {\n\n /// Narrative id used when referencing the narrative.\n\n pub id: u16,\n\n /// The actual text of the narrative to be displayed.\n\n pub text: String,\n\n /// This is a human readable name for the narrative.\n\n pub description: String,\n\n}\n\n\n\n/// Verbs can be named anything, but a few are reserved for specific functions.\n\n/// The verb_function field is used to determine what function the verb serves.\n\n///\n\n/// The following verbs functions need to be declared:\n\n/// - look\n\n/// - inventory\n", "file_path": "src/config/mod.rs", "rank": 36, "score": 3.158912153571554 }, { "content": " /// a result that can be used on the front-end to display\n\n /// the game to the user.\n\n /// Unlike the `parse_input` function, this function will\n\n /// return the result in JSON format. This is useful for\n\n /// front-ends that can't integrate with a rust library.\n\n pub fn json_parse_input(&self, input: &str) -> String {\n\n let result = parser::parse(self.state.clone(), input);\n\n let json = match result {\n\n Ok(ok) => serde_json::to_string(&ok).unwrap(),\n\n Err(err) => format!(\n\n \"{{\\\"error\\\":{}}}\",\n\n serde_json::to_string(&err.to_string()).unwrap()\n\n ),\n\n };\n\n json\n\n }\n\n /// Returns the string with the game intro text. This can\n\n /// be used to display the game intro to the user, but isn't\n\n /// required.\n\n pub fn game_intro(&self) -> String {\n", "file_path": "src/lib.rs", "rank": 37, "score": 3.158912153571554 }, { "content": " VerbFunction::Inventory => write!(f, \"inventory\"),\n\n VerbFunction::Quit => write!(f, \"quit\"),\n\n VerbFunction::Talk => write!(f, \"talk\"),\n\n VerbFunction::Normal => write!(f, \"{}\", self.names[0]),\n\n }\n\n }\n\n}\n\n\n\n/// The VerbFunction enum is used to determine what function\n\n/// the verb serves. Some verbs are reserved for specific\n\n/// functions but can named anything. For example, the verb\n\n/// 'look' is used to look at the room or item but it can\n\n/// be named anything.\n\n#[derive(Debug, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum VerbFunction {\n\n #[serde(rename = \"quit\")]\n\n /// The quit verb is used to quit the game.\n\n Quit,\n\n #[serde(rename = \"help\")]\n", "file_path": "src/config/mod.rs", "rank": 38, "score": 3.144538293228404 }, { "content": "/// player is trying to do.\n\n///\n\n/// The struct `Action` implements its own\n\n/// parser and return a valid action struct\n\n/// with the appropriate data.\n\n///\n\n/// # Examples:\n\n///\n\n/// * \"look subject1\"\n\n/// ```rust\n\n/// # use nightrunner_lib::parser::action::Action;\n\n/// # use nightrunner_lib::config::{Verb, Subject, VerbFunction};\n\n/// let action_look = Action {\n\n/// verb: Some(Verb {\n\n/// id: 1,\n\n/// names: vec![\"look\".to_string()],\n\n/// verb_function: VerbFunction::Look\n\n/// }),\n\n/// subject: Some(Subject {\n\n/// id: 1,\n", "file_path": "src/parser/action.rs", "rank": 39, "score": 3.1427391672835236 }, { "content": "/// front-end. 
Otherwise you should use JSON for\n\n/// most use cases.\n\n///\n\n/// While you can create this struct manually,\n\n/// you should use the Config::init_yaml or the\n\n/// Config::init_json functions to load the data\n\n/// from a YAML file or JSON data.\n\n#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Eq)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Config {\n\n /// This field is hardcoded in the library.\n\n pub allowed_prepositions: AllowedPrepositions,\n\n /// This field is hardcoded in the library.\n\n pub allowed_determiners: AllowedDeterminers,\n\n /// This field is hardcoded in the library.\n\n pub allowed_movements: AllowedMovements,\n\n /// This field is hardcoded in the library.\n\n pub allowed_directions: AllowedDirections,\n\n /// All the allowed verbs in the game.\n\n pub allowed_verbs: Vec<Verb>,\n", "file_path": "src/config/mod.rs", "rank": 40, "score": 3.1021909995861177 }, { "content": "## Using the Wasm library\n\n\n\nAdd the nightrunner_lib package from npm to your repository:\n\n\n\n```shell\n\nyarn add @nightrunner/nightrunner_lib\n\n```\n\n\n\n---\n\n\n\n**NOTE**\n\n\n\nYou will need a bundler to use this package. Currently I recommend using\n\nVite. For examples on how to use Vite with this library check out the\n\n`examples/wasm` folder in the repository.\n\n\n\n---\n\n\n\nThe `parse` function is the only function that is necessary to be called by\n\nthe front-end. It receives a simple string input and returns the result as\n\na JSON parsed string.\n\n\n\nTo initialize the parser in wasm you must pass a JSON string with the configuration\n\nof the game when creating a new instance of the `NightRunner` class in JavaScript.\n\n\n\n### Example:\n\n\n\n```js\n\n// This data can also be retrieved from an api endpoint with the browser\n\n// fetch API.\n\nimport data from \"./data.json\";\n\nimport init, { NightRunner } from \"@nightrunner/nightrunner_lib\";\n\n\n\nawait init();\n\n\n\n// Load the NightRunner library.\n\n// The NightRunner class expects stringified JSON data.\n\nconst nr: NightRunner = await new NightRunner(JSON.stringify(data));\n\nlet result = nr.parse(\"look\");\n\n// {\"messageType\":\"look\",\"data\":\"first room\\n\\nHere you see: \\nan item1\\nan item2\\nsubject1\"}\n\n```\n", "file_path": "README.md", "rank": 41, "score": 2.8799923945018535 }, { "content": "#[serde(rename_all = \"snake_case\")]\n\npub struct Verb {\n\n /// The id of the verb used when referencing the verb.\n\n pub id: u16,\n\n /// A verb can be named anything and can have multiple aliases,\n\n /// so commands like `look` and `peek` can be used interchangeably.\n\n pub names: Vec<String>,\n\n /// The function that the verb serves. Since some verbs are reserved\n\n /// for specific functions, this field is used to determine what\n\n /// function the verb serves, and this allows verbs to be named\n\n /// anything. For the possible functions see the [VerbFunction](VerbFunction) enum.\n\n pub verb_function: VerbFunction,\n\n}\n\nimpl std::fmt::Display for Verb {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self.verb_function {\n\n VerbFunction::Look => write!(f, \"look\"),\n\n VerbFunction::Help => write!(f, \"help\"),\n\n VerbFunction::Take => write!(f, \"take\"),\n\n VerbFunction::Drop => write!(f, \"drop\"),\n", "file_path": "src/config/mod.rs", "rank": 42, "score": 2.834944866545607 }, { "content": "//! All possible errors that can occur when parsing\n\n//! the input. These errors should be returned to a\n\n//! 
front-end for handling display to the user.\n\n//!\n\n//! All errors have Display implemented for them,\n\n//! so they can be easily serialized to a string.\n\n\n\nuse std::error;\n\nuse std::fmt;\n\n\n\n/// Event exists but required events haven't been\n\n/// completed yet. The front-end should handle this\n\n/// error state since this isn't really an error,\n\n/// but rather an indication that the action is valid.\n\n///\n\n/// How to handle this depends on what the front-end\n\n/// should do. An example of this state could be\n\n/// and event where you talk to a subject, but you\n\n/// haven't yet completed a previous objective. Talking\n\n/// to the subject would be a valid action, but not\n", "file_path": "src/parser/errors.rs", "rank": 43, "score": 2.8334824823332765 }, { "content": "/// It contains the name of the item, the description\n\n/// and whether or not the item can be picked up.\n\n#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Eq, PartialOrd, Ord)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Item {\n\n pub id: u16,\n\n /// The name of the item.\n\n pub name: String,\n\n /// The description of the item.\n\n /// This is used when the player looks at\n\n /// the item.\n\n pub description: String,\n\n /// Whether or not the item can be picked up.\n\n /// If this is true then the item can be\n\n /// picked up by the player. Most of the times\n\n /// if an item can't be picked up you will\n\n /// want to use a subject instead.\n\n pub can_pick: bool,\n\n}\n\n\n", "file_path": "src/config/rooms.rs", "rank": 44, "score": 2.782149964586534 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// Allowed movement verbs. This is used to\n\n/// determine if a movement is valid when\n\n/// parsing the input.\n\n#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Eq)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct AllowedMovements {\n\n pub movements: Vec<String>,\n\n}\n\n\n\nimpl AllowedMovements {\n\n pub fn init() -> AllowedMovements {\n\n let movements = vec![\n\n \"go\", \"move\", \"run\", \"walk\", \"jog\", \"amble\", \"dart\", \"limp\", \"saunter\", \"scamper\",\n\n \"scurry\", \"stagger\", \"strut\", \"swagger\", \"tiptoe\", \"waltz\", \"sneak\",\n\n ]\n\n .iter()\n\n .map(|s| s.to_string())\n\n .collect();\n\n AllowedMovements { movements }\n\n }\n\n}\n", "file_path": "src/config/movements.rs", "rank": 45, "score": 2.765449960257614 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// The parser expects simple commands,\n\n/// such as \"go north\" or \"look door\".\n\n/// Instead of expecting the user to\n\n/// figure out how to use the commands,\n\n/// the parser will drop any determiners\n\n/// from the user input.\n\n///\n\n/// For example, the input \"give my wallet\"\n\n/// will be translated as \"give wallet\" for\n\n/// parsing. The same will happen for \"take\n\n/// all the money\", and that will be translated\n\n/// as \"take money\". 
This is a very simplistic\n\n/// approach to parsing, since \"take all my money\"\n\n/// would be translated as \"take money\" and\n\n/// that has a different meaning from the original\n\n/// intent.\n\n#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Eq)]\n\n#[serde(rename_all = \"snake_case\")]\n", "file_path": "src/config/determiners.rs", "rank": 46, "score": 2.765449960257614 }, { "content": "/// This function is used when the player issues a command\n\n/// to take an item.\n\n///\n\n/// If the Item is not found in the room, then a ParsingError is returned\n\n/// with a message indicating that the item was not found.\n\n/// Otherwise, the item is removed from the room and added to the player's\n\n/// inventory and a ParsingResult is returned with a message indicating that\n\n/// the item was taken.\n\npub fn player_get_item(state: &mut State, item: Item) -> NRResult<ParsingResult> {\n\n let current_room_id = state.current_room;\n\n let current_room = state\n\n .rooms\n\n .iter_mut()\n\n .find(|room| room.id == current_room_id)\n\n .unwrap();\n\n\n\n match current_room.stash.remove_item(item) {\n\n Ok(item) => {\n\n state.player.inventory.add_item(item.clone());\n\n let message = format!(\"\\nYou now have a {}\\n\", item.name);\n\n Ok(ParsingResult::NewItem(message))\n\n }\n\n Err(_) => Err(NoItem.into()),\n\n }\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 47, "score": 2.605140291324803 }, { "content": "# React Typescript Wasm Example\n\n\n\nA React example using the wasm npm package for nightrunner-lib.\n\n\n\nLike the vanilla typescript example, this example uses the [`Vite`](https://vitejs.dev) bundler along with the [`vite-plugin-wasm-pack`](https://github.com/nshen/vite-plugin-wasm-pack#use-wasm-pack-package-installed-via-npm).\n\n\n\nThis example contains the configurations necessary to use the `vite-plugin-wasm-pack` plugin with `vite`, and add\n\n`nightrunner-lib` to your project.\n\n\n\nHere you will see more advanced usage of the `nightrunner-lib` library, along with some ideas on how to best\n\nparse the data returned from the library.\n\n\n\nSteps to reproduce this example:\n\n\n\n1. Install `vite` and `vite-plugin-wasm-pack`\n\n2. Create a new typescript project to use `nightrunner-lib` with `yarn create vite my-app --template react-ts`\n\n3. Install `nightrunner-lib` in your project:\n\n 1. From npm: `yarn add @nightrunner/nightrunner_lib`\n\n 2. From local folder: add `\"nightrunner_lib\": \"./path/to/lib` to your package.json, under dependencies, and run `yarn` inside the project folder\n\n4. Install the `\"vite-plugin-wasm-pack` package: `yarn add vite-plugin-wasm-pack`\n\n5. Add the `vite-plugin-wasm-pack` plugin to your `vite.config.ts` file and add the `nightrunner-lib` as an npm dependency:\n\n ```\n\n import { defineConfig } from 'vite'\n\n import wasmPack from 'vite-plugin-wasm-pack';\n\n export default defineConfig({\n\n plugins: [wasmPack([],['@nightrunner/nightrunner_lib'])],\n\n })\n\n ```\n\n Note: The first set of brackets in the `wasmPack` plugin call is for local packages, and the second set is for npm packages. Refer to the documentation for more information.\n\n6. Import the library in your entry file: `import init, {NightRunner} from '@nightrunner/nightrunner_lib';` - Here the init function will load the wasm module and bring the library into scope.\n\n7. Run the init() and resolve the promise to get the library into scope. 
Vite has global async/await support turned on by default, so you can simply use `await init()` to get the library into scope.\n\n8. Pass a data object to the NighRunner constructor and start calling the parse function with actions for parsing:\n\n ```\n\n const nightrunner = new NightRunner(data);\n\n let result = nr.parse(\"look\");\n\n ```\n\n9. Run the example: `yarn dev`\n", "file_path": "examples/wasm/react/README.md", "rank": 48, "score": 2.5966149985887315 }, { "content": "/// This function is used when the player attempts to move in a direction.\n\n/// If the direction given doesn't exist, then a\n\n/// `ParsingError::InvalidMovement(MoveError::NoExit)` is returned.\n\n/// If the player can move in the direction, then the player's current room\n\n/// is updated and a `ParsingResult::Movement(MoveSuccess)` is returned.\n\npub fn move_to_direction(state: &mut State, direction: Directions) -> NRResult<MoveSuccess> {\n\n let mut state_ref = state;\n\n let current_room_id = state_ref.current_room;\n\n if let Some(current_room) = state_ref\n\n .rooms\n\n .iter_mut()\n\n .find(|room| room.id == current_room_id)\n\n {\n\n if let Ok(room_id) = current_room.can_move(direction) {\n\n state_ref.current_room = room_id;\n\n Ok(MoveSuccess)\n\n } else {\n\n Err(InvalidMovement.into())\n\n }\n\n } else {\n\n Err(NoRoom.into())\n\n }\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 49, "score": 2.5760077401186154 }, { "content": " /// * `intro.yml`\n\n /// * `rooms.yml`\n\n ///\n\n /// ## Example:\n\n /// ```rust\n\n /// # use nightrunner_lib::config::Config;\n\n /// let config = Config::from_path(\"./fixtures/\");\n\n /// ```\n\n ///\n\n /// For examples of valid yaml files see the\n\n /// fixtures directory used for unit tests.\n\n pub fn from_path(path: &str) -> Config {\n\n let error_message = format!(\"Could not find config file at {}\", path);\n\n let narratives_config =\n\n std::fs::read_to_string(format!(\"{}narratives.yml\", path)).expect(&error_message);\n\n let items_config =\n\n std::fs::read_to_string(format!(\"{}items.yml\", path)).expect(&error_message);\n\n let rooms_config =\n\n std::fs::read_to_string(format!(\"{}rooms.yml\", path)).expect(&error_message);\n\n let allowed_verbs_config =\n", "file_path": "src/config/mod.rs", "rank": 50, "score": 2.5510689649137173 }, { "content": "# Vanilla Typescript Wasm Example\n\n\n\nA simple typescript example using the wasm npm package for nightrunner-lib.\n\n\n\nThe simplest way I could find to get a wasm module to work with typescript was to use the [`Vite`](https://vitejs.dev) bundler along with the [`vite-plugin-wasm-pack`](https://github.com/nshen/vite-plugin-wasm-pack#use-wasm-pack-package-installed-via-npm).\n\n\n\nYou can accomplish the same results with `webpack` or without any bundler, but the process for doing so is a bit more involved, but the documentation available on the [`wasm-pack`](https://rustwasm.github.io/docs/wasm-pack/) should help you get started.\n\n\n\nThis example contains the configurations necessary to use the `vite-plugin-wasm-pack` plugin with `vite`, and add\n\n`nightrunner-lib` to your project.\n\n\n\nSteps to reproduce this example:\n\n\n\n1. Install `vite` and `vite-plugin-wasm-pack`\n\n2. Create a new typescript project to use `nightrunner-lib` with `yarn create vite my-app --template vanilla-ts`\n\n3. Install `nightrunner-lib` in your project:\n\n 1. From npm: `yarn add @nightrunner/nightrunner_lib`\n\n 2. 
From local folder: add `\"nightrunner_lib\": \"./path/to/lib` to your package.json, under dependencies, and run `yarn` inside the project folder\n\n4. Install the `\"vite-plugin-wasm-pack` package: `yarn add vite-plugin-wasm-pack`\n\n5. Add the `vite-plugin-wasm-pack` plugin to your `vite.config.ts` file and add the `nightrunner-lib` as an npm dependency:\n\n ```\n\n import { defineConfig } from 'vite'\n\n import wasmPack from 'vite-plugin-wasm-pack';\n\n export default defineConfig({\n\n plugins: [wasmPack([],['@nightrunner/nightrunner_lib'])],\n\n })\n\n ```\n\n Note: The first set of brackets in the `wasmPack` plugin call is for local packages, and the second set is for npm packages. Refer to the documentation for more information.\n\n6. Import the library in your entry file: `import init, {NightRunner} from '@nightrunner/nightrunner_lib';` - Here the init function will load the wasm module and bring the library into scope.\n\n7. Run the init() and resolve the promise to get the library into scope. Vite has global async/await support turned on by default, so you can simply use `await init()` to get the library into scope.\n\n8. Pass a data object to the NighRunner constructor and start calling the parse function with actions for parsing:\n\n ```\n\n const nightrunner = new NightRunner(data);\n\n let result = nr.parse(\"look\");\n\n ```\n\n9. Run the example: `yarn dev`\n", "file_path": "examples/wasm/typescript/README.md", "rank": 51, "score": 2.53470965735163 }, { "content": " ParsingResult::EventSuccess(EventMessage {\n\n message: \"this is a templated which exists in the game item1.\\n\\nthis is a templated subject that exists in the game subject1.\\n\\n\\nExits:\\nto the north you see first room\".to_string(),\n\n templated_words: vec![],\n\n message_parts,\n\n })\n\n );\n\n\n\n result = parse(state, \"give item2 to subject2\");\n\n // There is no event for player giving item2 to subject2\n\n // so we expect an error. InvalidEvent should be used to\n\n // indicate that the event is not valid, and how to handle\n\n // this error is up to the front-end. Perhaps you display\n\n // a message to the user saying that this action is invalid,\n\n // or you don't understand the command.\n\n // For convenience, this error wraps the action as it was\n\n // interpreted by the parser from the input. This is useful\n\n // when writing custom logic for the front-end.\n\n assert_eq!(result.unwrap_err().to_string(), InvalidEvent.to_string());\n\n}\n", "file_path": "src/parser/tests.rs", "rank": 52, "score": 2.5231267068688745 }, { "content": "//! let data = mock_json_data();\n\n//! let nr = NightRunnerBuilder::new().with_json_data(&data).build();\n\n//! let result = nr.parse_input(\"look\");\n\n//! let json_result = nr.json_parse_input(\"look\");\n\n//! assert!(result.is_ok());\n\n//! assert_eq!(result.unwrap(),\n\n//! ParsingResult::Look(\n\n//! \"first room\\n\\nHere you see: \\nan item1\\nan item2\\nsubject1\".to_string()\n\n//! )\n\n//! );\n\n//! assert_eq!(json_result,\n\n//! r#\"{\"messageType\":\"look\",\"data\":\"first room\\n\\nHere you see: \\nan item1\\nan item2\\nsubject1\"}\"#.to_string()\n\n//! );\n\n//! ```\n\n//!\n\n//! for examples of valid YAML and JSON data, see the documentation for\n\n//! 
the `config` module.\n\n#![warn(missing_docs)]\n\nuse config::{Config, State};\n\nuse parser::interpreter::EventMessage;\n", "file_path": "src/lib.rs", "rank": 53, "score": 2.5231267068688745 }, { "content": " /// The talk verb is used to talk to a character in a room.\n\n Talk,\n\n #[serde(rename = \"normal\")]\n\n /// Any other verbs should be set to this variant and will\n\n /// be parsed by the event handling function.\n\n Normal,\n\n}\n\n\n\n/// Subjects are the people or things that can be interacted with.\n\n///\n\n/// For example, a person can be a subject, but an item cannot.\n\n///\n\n/// Some examples of things that can be subjects are:\n\n/// - A person\n\n/// - An object such as a desk, a chair, or a computer\n\n/// - An animal\n\n/// - A door\n\n///\n\n/// Example:\n\n/// ```rust\n", "file_path": "src/config/mod.rs", "rank": 54, "score": 2.364477449478408 }, { "content": " intro: config_data.intro,\n\n rooms,\n\n }\n\n }\n\n /// # Config::init_yaml\n\n /// Load config files from yaml files.\n\n ///\n\n /// This is useful for command line frontends\n\n /// and will read the path and try to load the\n\n /// files from the path.\n\n ///\n\n /// Arguments:\n\n /// * `path` - serialized yaml\n\n ///\n\n /// Required files:\n\n /// * `allowed_verbs.yml`\n\n /// * `items.yml`\n\n /// * `subjects.yml`\n\n /// * `narratives.yml`\n\n /// * `events.yml`\n", "file_path": "src/config/mod.rs", "rank": 55, "score": 2.364477449478408 }, { "content": " Verb,\n\n /// Action has a verb and a subject\n\n /// Example: \"talk to subject\"\n\n VerbSubject,\n\n /// Action has a verb and an item\n\n /// Example: \"take item\", \"drop item\"\n\n VerbItem,\n\n /// Action has a verb, an item, and a subject\n\n /// Example: \"give item to subject\"\n\n VerbItemSubject,\n\n /// An action that failed to be parsed from\n\n /// the input tokens.\n\n Invalid,\n\n /// Action has a direction\n\n /// Example: \"north\", \"south\", \"east\", \"west\"\n\n Movement,\n\n}\n\n\n\n/// Actions are the core of the parser.\n\n/// They are used to determine what the\n", "file_path": "src/parser/action.rs", "rank": 56, "score": 2.1645191825693826 }, { "content": " \"first room\\n\\nHere you see: \\nan item1\\nan item2\\nsubject1\"\n\n ))\n\n );\n\n assert_eq!(\n\n verb_result3.unwrap(),\n\n ParsingResult::Inventory(String::from(\"You are not carrying anything.\"))\n\n );\n\n assert_eq!(verb_result4.unwrap(), ParsingResult::Quit);\n\n assert_eq!(verb_result5.unwrap(), ParsingResult::Help(String::from(\"\\nTo play this game you type your commands and hit enter to execute them. Typically a command has at most three parts: a verb, a subject, and an item. A verb indicates an action you, the player, wants to execute. Many commands can be executed with just a verb such as look, help, quit. For more complex commands you will also need verb and either a subject or an item. A command can also have a verb, item, and subject. A complex command can be: look at dog, talk to person, pick the box, give the box to the dog.\\n\\nThe game will ignore words like 'to', 'the', 'at', 'from', so using them is optional. 
A valid command can be: talk person, pick box, go south, climb tree, use axe tree.\\n\\nValid verbs: quit, help, look, inventory, pick, drop, give, talk, hug\")));\n\n}\n\n\n", "file_path": "src/parser/interpreter_tests.rs", "rank": 57, "score": 2.069726036168441 }, { "content": "///\n\n/// An event that happens in room 1 as a response to\n\n/// talking to subject 2:\n\n/// ```rust\n\n/// # use nightrunner_lib::config::Event;\n\n/// let event = Event {\n\n/// id: 1,\n\n/// location: 1,\n\n/// name: \"Talking to subject 1\".to_string(),\n\n/// description: \"This event happens when you talk to subject 2.\".to_string(),\n\n/// destination: None,\n\n/// narrative: Some(2),\n\n/// // here verb id 3 has to be marked with VerbFunction::Talk\n\n/// required_verb: Some(3),\n\n/// required_subject: Some(2),\n\n/// required_item: None,\n\n/// completed: false,\n\n/// add_item: None,\n\n/// remove_old_narrative: false,\n\n/// remove_item: None,\n", "file_path": "src/config/mod.rs", "rank": 58, "score": 2.0673890686625334 }, { "content": " self.state.borrow().config.intro.clone()\n\n }\n\n /// Returns the text for the very first room of the game.\n\n ///\n\n /// Since there is no input to parse when the game starts,\n\n /// this function should be used to retrieve that text instead.\n\n pub fn first_room_text(&self) -> NRResult<EventMessage> {\n\n let narrative_id = self.state.borrow().rooms[0].narrative;\n\n let narrative_text = self\n\n .state\n\n .borrow()\n\n .config\n\n .narratives\n\n .iter()\n\n .find(|n| n.id == narrative_id)\n\n .unwrap()\n\n .text\n\n .clone();\n\n let event_message = parse_room_text(\n\n self.state.borrow().clone(),\n", "file_path": "src/lib.rs", "rank": 59, "score": 2.0309347965607416 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// Possible directions for a movement.\n\n/// The parser currently only supports\n\n/// cardinal directions but will be extended\n\n/// to support other directions such as\n\n/// \"up\" or \"left\" in the future.\n\n#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Eq)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct AllowedDirections {\n\n pub(crate) directions: Vec<String>,\n\n}\n\n\n\nimpl AllowedDirections {\n\n pub(crate) fn init() -> AllowedDirections {\n\n let directions = vec![\n\n \"north\", \"south\", \"east\", \"west\", \"up\", \"down\", \"left\", \"right\",\n\n ]\n\n .iter()\n\n .map(|s| s.to_string())\n", "file_path": "src/config/directions.rs", "rank": 60, "score": 1.995743844715475 }, { "content": "/// # use nightrunner_lib::config::{Verb, VerbFunction};\n\n/// let verb1 = Verb {\n\n/// id: 1,\n\n/// names: vec![\"look\".to_string(), \"peek\".to_string()],\n\n/// verb_function: VerbFunction::Look,\n\n/// };\n\n///\n\n/// let verb2 = Verb {\n\n/// id: 2,\n\n/// names: vec![\"take\".to_string(), \"pick\".to_string()],\n\n/// verb_function: VerbFunction::Take,\n\n/// };\n\n///\n\n/// let verb3 = Verb {\n\n/// id: 3,\n\n/// names: vec![\"parkour\".to_string(), \"flip\".to_string()],\n\n/// verb_function: VerbFunction::Normal,\n\n/// };\n\n/// ```\n\n#[derive(Debug, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]\n", "file_path": "src/config/mod.rs", "rank": 61, "score": 1.9617516612343624 }, { "content": " narrative_text,\n\n \"\".to_string(),\n\n None,\n\n )?;\n\n Ok(event_message)\n\n }\n\n}\n\n\n\n#[cfg(any(target_arch = \"wasm32\", doc))]\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(tag = \"messageType\", content = \"data\")]\n\n#[serde(rename_all = 
\"snake_case\")]\n\n/// When compiling for the web, this struct is used to\n\n/// serialize the game state to JSON. `messageType` is\n\n/// the type of the message returned by the library to\n\n/// indicate which action was processed by the parser.\n\npub enum JsMessage {\n\n /// Returned when the player sends a command corresponding\n\n /// to a verb that has VerbFunction::Help as its verb_function.\n\n /// The value is a string generated by the library displaying\n", "file_path": "src/lib.rs", "rank": 62, "score": 1.9288980184519038 }, { "content": "}\n\n\n\n/// This struct represents the storage for both the player\n\n/// and the room and implements functions to add and remove\n\n/// items from the storage.\n\n#[derive(Debug, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Serialize, Default)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Storage {\n\n /// This field contains the list of actual\n\n /// items available in the storage struct\n\n /// and gets populated during the state\n\n /// initialization based on the item_ids field\n\n pub items: Vec<Item>,\n\n /// The list of item ids that are currently\n\n /// available in storage. Only used for the\n\n /// configuration data.\n\n pub item_ids: Vec<u16>,\n\n}\n\n\n\nimpl Storage {\n", "file_path": "src/config/rooms.rs", "rank": 63, "score": 1.9128804227145615 }, { "content": " self.state.borrow().config.intro.clone()\n\n }\n\n /// Returns the text for the very first room of the game.\n\n ///\n\n /// Since there is no input to parse when the game starts,\n\n /// this function should be used to retrieve that text instead.\n\n pub fn first_room_text(&self) -> Result<JsValue, JsError> {\n\n let narrative_id = self.state.borrow().rooms[0].narrative.clone();\n\n let narrative_text = self\n\n .state\n\n .borrow()\n\n .config\n\n .narratives\n\n .iter()\n\n .find(|n| n.id == narrative_id)\n\n .unwrap()\n\n .text\n\n .clone();\n\n let event_message = parse_room_text(\n\n self.state.borrow().clone(),\n\n narrative_text,\n\n \"\".to_string(),\n\n None,\n\n )\n\n .unwrap();\n\n Ok(JsValue::from_serde(&event_message).unwrap())\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 64, "score": 1.8077966448826692 }, { "content": " /// This is the description of the event. Also\n\n /// not used in the game, but rather as a way\n\n /// to make the event configuration more readable.\n\n pub description: String,\n\n /// If the event takes you to a different room,\n\n /// this is the room id.\n\n pub destination: Option<u16>,\n\n /// Narrative id to be displayed when the event\n\n /// is triggered.\n\n pub narrative: Option<u16>,\n\n /// Verb id that triggers the event.\n\n pub required_verb: Option<u16>,\n\n /// Subject id that triggers the event.\n\n pub required_subject: Option<u16>,\n\n /// Item id that triggers the event.\n\n pub required_item: Option<u16>,\n\n /// If the event is completed, it won't be triggered again.\n\n pub completed: bool,\n\n /// If the event adds an item to the inventory,\n\n /// this is the item id.\n", "file_path": "src/config/mod.rs", "rank": 65, "score": 1.779860544242874 }, { "content": "/// by the parser when an event is successfully parsed.\n\npub enum MessageParts {\n\n /// The current text of the room. This will be the either be\n\n /// the current event's narrative, or the current room's narrative\n\n /// and the event narrative, depending on whether or not the\n\n /// event is marked to replace the narrative.\n\n RoomText,\n\n /// The text generated while processing the event. 
Primarily used\n\n /// to indicate when the user lost or received an item.\n\n EventText,\n\n /// A string containing all of the current room's exits and the\n\n /// description of the room they lead to.\n\n Exits,\n\n}\n\n\n\n/// Represents the result of parsing an event.\n\n#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct EventMessage {\n\n /// The message to display to the user as a single string.\n", "file_path": "src/parser/interpreter.rs", "rank": 66, "score": 1.76621378639876 }, { "content": "/// currently. Story-wise the subject can be somewhere\n\n/// else, or could return a different narrative instad.\n\n#[derive(Debug, Clone)]\n\npub struct RequiredEventNotCompleted;\n\nimpl std::fmt::Display for RequiredEventNotCompleted {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"The required event has not been completed yet.\")\n\n }\n\n}\n\n\n\nimpl error::Error for RequiredEventNotCompleted {}\n\nimpl From<&std::boxed::Box<(dyn std::error::Error + 'static)>> for RequiredEventNotCompleted {\n\n fn from(_: &std::boxed::Box<(dyn std::error::Error + 'static)>) -> Self {\n\n RequiredEventNotCompleted\n\n }\n\n}\n\n\n\n/// # Examples\n\n/// ```rust\n\n/// use nightrunner_lib::{NightRunner, NightRunnerBuilder, ParsingResult};\n", "file_path": "src/parser/errors.rs", "rank": 67, "score": 1.7265008892900529 }, { "content": " /// or by losing it as the result of an event.\n\n DropItem(String),\n\n /// Returned when the player issues a command with a verb that\n\n /// has VerbFunction::Inventory as its verb_function. The value is\n\n /// a string containing each item in the player's inventory.\n\n Inventory(String),\n\n /// Returned when the player issues a command that interacts with\n\n /// a subject without a current event associated with it. The value\n\n /// is the default text for the subject.\n\n SubjectNoEvent(String),\n\n /// Returned when an event is triggered by the player's command. The\n\n /// returned struct contains the text to be displayed to the player.\n\n EventSuccess(EventMessage),\n\n /// Returned when the player issues a command with a verb that has\n\n /// VerbFunction::Quit as its verb_function. This variant is used\n\n /// to indicate to the front-end that the game should be quit.\n\n /// Implementation of how to quit the game is left to the front-end.\n\n Quit,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 68, "score": 1.7136571646358698 }, { "content": " /// config should be a JSON string.\n\n #[wasm_bindgen(constructor)]\n\n pub fn new(config: &str) -> NightRunner {\n\n let config = Config::from_json(config);\n\n let state = State::init(config);\n\n NightRunner { state }\n\n }\n\n /// This is the main function that executes the game. Pass\n\n /// the input string to this function and it will return\n\n /// a result that can be used on the front-end to display\n\n /// the game to the user.\n\n /// Unlike the non-wasm version, this function will return\n\n /// the result in JSON format. 
The conversion of the result\n\n /// to JSON is done by the `JsValue::from_serde` function from\n\n /// wasm_bindgen.\n\n pub fn parse(&self, input: &str) -> Result<JsValue, JsError> {\n\n let result = parser::parse(self.state.clone(), input);\n\n match result {\n\n Ok(ok) => {\n\n let message = match ok {\n", "file_path": "src/lib.rs", "rank": 69, "score": 1.6762475199628577 }, { "content": " pub add_item: Option<u16>,\n\n /// If the event narrative is supposed to replace\n\n /// the text currently displayed on the screen,\n\n /// this needs to be set to true.\n\n /// This is useful to avoid a lot of screen scrolling\n\n /// when the event narrative is long.\n\n pub remove_old_narrative: bool,\n\n /// If the event removes an item from the inventory,\n\n /// this is the item id.\n\n pub remove_item: Option<u16>,\n\n /// If the event requires other events to be completed,\n\n /// this is a list of event ids that need to be completed\n\n /// before this event can be triggered.\n\n pub required_events: Vec<u16>,\n\n}\n\n\n\n/// This struct holds the data deserialized from JSON.\n\n/// This is a temporary holding place for deserialing this data\n\n/// since the Config struct has fields that are loaded from data\n\n/// compiled with the library. It would make it so these fields\n\n/// have to be sent with the JSON data to deserialize a Config\n\n/// struct. Instead we deserialize the data into this struct and\n\n/// then we can just copy the fields into the Config struct.\n\n#[derive(Deserialize, Debug)]\n", "file_path": "src/config/mod.rs", "rank": 70, "score": 1.5625447972026065 }, { "content": "/// Each variant contains the output for the game and\n\n/// should be used by a front-end to display to the user.\n\n#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"snake_case\")]\n\n#[serde(tag = \"messageType\", content = \"data\")]\n\npub enum ParsingResult {\n\n /// Returned when the player sends a command corresponding\n\n /// to a verb that has VerbFunction::Help as its verb_function.\n\n /// The value is a string generated by the library displaying\n\n /// general commands as well as verbs available in the game.\n\n Help(String),\n\n /// Returned when the player sends a command for looking at the\n\n /// room, an item, or a subject. The value will be the message\n\n /// returned from the parser with the description associated with\n\n /// the room, item, or subject.\n\n Look(String),\n\n /// Returned when the player receives a new item, either by picking\n\n /// it up or by receiving it as the result of an event.\n\n NewItem(String),\n\n /// Returned when the player loses an item, either by dropping it\n", "file_path": "src/lib.rs", "rank": 71, "score": 1.5520172003323993 }, { "content": " ParsingResult::Look(msg) => JsMessage::Look(msg),\n\n ParsingResult::Help(msg) => JsMessage::Help(msg),\n\n ParsingResult::NewItem(msg) => JsMessage::NewItem(msg),\n\n ParsingResult::DropItem(msg) => JsMessage::DropItem(msg),\n\n ParsingResult::Inventory(msg) => JsMessage::Inventory(msg),\n\n ParsingResult::SubjectNoEvent(msg) => JsMessage::SubjectNoEvent(msg),\n\n ParsingResult::EventSuccess(event_msg) => JsMessage::EventSuccess(event_msg),\n\n ParsingResult::Quit => JsMessage::NoOp,\n\n };\n\n Ok(JsValue::from_serde(&message).unwrap())\n\n }\n\n Err(err) => Err(JsError::new(&err.to_string())),\n\n }\n\n }\n\n\n\n /// Returns the string with the game intro text. 
This can\n\n /// be used to display the game intro to the user, but isn't\n\n /// required.\n\n #[wasm_bindgen]\n\n pub fn game_intro(&self) -> String {\n", "file_path": "src/lib.rs", "rank": 72, "score": 1.418242130244177 }, { "content": " pub message: String,\n\n /// The parts of the message to display to the user. This\n\n /// hashmap uses the `MessageParts` enum as the key, and\n\n /// the string value of the message part as the value.\n\n /// For more information about the variants of `MessageParts`,\n\n /// see the [MessageParts](MessageParts) enum.\n\n pub message_parts: HashMap<MessageParts, String>,\n\n /// Items or subjects that the front-end implementation\n\n /// can choose to highlight. This field can be safely ignored\n\n /// by the front-end if no highlighting is being implemented.\n\n pub templated_words: Vec<String>,\n\n}\n\n\n\n/// This is the function that decides what to do with the\n\n/// input based on the action type.\n\npub(super) fn process_action(\n\n state: &Rc<RefCell<State>>,\n\n action: Action,\n\n) -> NRResult<ParsingResult> {\n\n match action.action_type() {\n\n ActionType::VerbItemSubject => handle_event(&mut *state.borrow_mut(), action),\n\n ActionType::VerbSubject => handle_verb_subject(&mut *state.borrow_mut(), action),\n\n ActionType::VerbItem => handle_verb_item(state, action),\n\n ActionType::Verb => handle_verb(state, action),\n\n ActionType::Movement => handle_movement(&mut *state.borrow_mut(), action.movement),\n\n ActionType::Invalid => Err(InvalidAction.into()),\n\n }\n\n}\n\n\n", "file_path": "src/parser/interpreter.rs", "rank": 73, "score": 1.2033835549558427 } ]
Rust
src/lib.rs
w4/scoped-vec.rs
e8913a9c571a94d44e8c7f5a364ccca1fe034441
use std::sync::{Arc, RwLock, RwLockReadGuard}; use owning_ref::OwningHandle; #[derive(Clone)] pub struct ScopedVec<T: Clone> { inner: Arc<RwLock<Vec<T>>>, children: Arc<RwLock<Vec<ScopedVec<T>>>>, } impl<T: Clone> ScopedVec<T> { pub fn new() -> Self { Self { inner: Arc::new(RwLock::default()), children: Arc::new(RwLock::default()) } } pub fn scope(&mut self) -> ScopedVec<T> { let new = ScopedVec::new(); self.children.write().unwrap().push(new.clone()); new } pub fn push(&mut self, val: T) { self.inner.write().unwrap().push(val); } pub fn iter(&self) -> ScopedVecIterator<T> { ScopedVecIterator::new(self) } } impl<T: Clone + PartialEq> ScopedVec<T> { pub fn contains(&self, val: &T) -> bool { self.iter().any(|f| *f == *val) } } pub struct ScopedVecGuardHolder<'a, T: Clone> { inner: RwLockReadGuard<'a, Vec<T>>, children: RwLockReadGuard<'a, Vec<ScopedVec<T>>>, } pub struct ScopedVecIterator<'a, T: Clone> { iterator: OwningHandle<Box<ScopedVecGuardHolder<'a, T>>, Box<dyn Iterator<Item = &'a T> + 'a>>, } impl<'a, T: Clone> ScopedVecIterator<'a, T> { fn new(vec: &'a ScopedVec<T>) -> Self { Self { iterator: OwningHandle::new_with_fn( Box::new(ScopedVecGuardHolder { inner: vec.inner.read().unwrap(), children: vec.children.read().unwrap() }), |g| { let guards = unsafe { &*g }; Box::new(guards.inner.iter() .chain( guards.children.iter() .map(ScopedVec::iter) .flatten() )) as Box<dyn Iterator<Item = &'a T>> } ) } } } impl<'a, T: Clone> Iterator for ScopedVecIterator<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.iterator.next() } } #[cfg(test)] mod tests { use crate::ScopedVec; #[test] fn unscoped_standard() { let mut root = ScopedVec::new(); root.push(3); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), None); } #[test] fn scoped_cant_read_root() { let mut root = ScopedVec::new(); root.push(3); let scoped = root.scope(); let mut iter = scoped.iter(); assert_eq!(iter.next(), None); } #[test] fn root_can_read_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), None); } #[test] fn root_can_read_nested_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn scoped_can_read_nested_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = scoped.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn nested_scoped_cant_read_backwards() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = nested_scoped.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn can_drop_scopes() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); drop(root); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); { let mut iter = scoped.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); 
assert_eq!(iter.next(), None); } drop(scoped); { let mut iter = nested_scoped.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } } #[test] fn diverged_scopes_can_be_read() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped1 = scoped.scope(); nested_scoped1.push(5); let mut nested_scoped2 = scoped.scope(); nested_scoped2.push(6); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), Some(&6)); assert_eq!(iter.next(), None); } #[test] fn diverged_adjacent_scopes_cant_interact() { let mut root = ScopedVec::new(); root.push(3); let mut scoped1 = root.scope(); scoped1.push(4); let mut scoped2 = root.scope(); scoped2.push(5); let mut iter = scoped1.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), None); let mut iter = scoped2.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } }
use std::sync::{Arc, RwLock, RwLockReadGuard}; use owning_ref::OwningHandle; #[derive(Clone)] pub struct ScopedVec<T: Clone> { inner: Arc<RwLock<Vec<T>>>, children: Arc<RwLock<Vec<ScopedVec<T>>>>, } impl<T: Clone> ScopedVec<T> { pub fn new() -> Self { Self { inner: Arc::new(RwLock::default()), children: Arc::new(RwLock::default()) } } pub fn scope(&mut self) -> ScopedVec<T> { let new = ScopedVec::new(); self.children.write().unwrap().push(new.clone()); new } pub fn push(&mut self, val: T) { self.inner.write().unwrap().push(val); } pub fn iter(&self) -> ScopedVecIterator<T> { ScopedVecIterator::new(self) } } impl<T: Clone + PartialEq> ScopedVec<T> { pub fn contains(&self, val: &T) -> bool { self.iter().any(|f| *f == *val) } } pub struct ScopedVecGuardHolder<'a, T: Clone> { inner: RwLockReadGuard<'a, Vec<T>>, children: RwLockReadGuard<'a, Vec<ScopedVec<T>>>, } pub struct ScopedVecIterator<'a, T: Clone> { iterator: OwningHandle<Box<ScopedVecGuardHolder<'a, T>>, Box<dyn Iterator<Item = &'a T> + 'a>>, } impl<'a, T: Clone> ScopedVecIterator<'a, T> { fn new(vec: &'a ScopedVec<T>) -> Self { Self { iterator: OwningHandle::new_with_fn( Box::new(ScopedVecGuardHolder { inner: vec.inner.read().unwrap(), children: vec.children.read().unwrap() }), |g| { let guards = unsafe { &*g }; Box::new(guards.inner.iter() .chain( guards.children.iter() .map(ScopedVec::iter) .flatten() )) as Box<dyn Iterator<Item = &'a T>> } ) } } } impl<'a, T: Clone> Iterator for ScopedVecIterator<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.iterator.next() } } #[cfg(test)] mod tests { use crate::ScopedVec; #[test] fn unscoped_standard() { let mut root = ScopedVec::new(); root.push(3); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), None); } #[test] fn scoped_cant_read_root() { let mut root = ScopedVec::new(); root.push(3); let scoped = root.scope(); let mut iter = scoped.iter(); assert_eq!(iter.next(), None); } #[test] fn root_can_read_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), None); } #[test] fn root_can_read_nested_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn scoped_can_read_nested_scoped() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = scoped.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn nested_scoped_cant_read_backwards() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); let mut iter = nested_scoped.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } #[test] fn can_drop_scopes() { let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); drop(root); let mut nested_scoped = scoped.scope(); nested_scoped.push(5); { let mut iter = scoped.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); 
assert_eq!(iter.next(), None); } drop(scoped); { let mut iter = nested_scoped.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } } #[test] fn diverged_scopes_can_be_read() {
#[test] fn diverged_adjacent_scopes_cant_interact() { let mut root = ScopedVec::new(); root.push(3); let mut scoped1 = root.scope(); scoped1.push(4); let mut scoped2 = root.scope(); scoped2.push(5); let mut iter = scoped1.iter(); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), None); let mut iter = scoped2.iter(); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), None); } }
let mut root = ScopedVec::new(); root.push(3); let mut scoped = root.scope(); scoped.push(4); let mut nested_scoped1 = scoped.scope(); nested_scoped1.push(5); let mut nested_scoped2 = scoped.scope(); nested_scoped2.push(6); let mut iter = root.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&4)); assert_eq!(iter.next(), Some(&5)); assert_eq!(iter.next(), Some(&6)); assert_eq!(iter.next(), None); }
function_block-function_prefix_line
[ { "content": "# scoped-vec.rs\n\n\n\n[![License: WTFPL](https://img.shields.io/badge/License-WTFPL-brightgreen.svg?style=flat-square&logo=appveyor)](http://www.wtfpl.net/about/) ![https://docs.rs/scoped-vec/](https://docs.rs/scoped-vec/badge.svg) [![Downloads](https://img.shields.io/crates/d/scoped-vec.svg?style=flat-square&logo=appveyor)](https://crates.io/crates/scoped-vec)\n\n\n\nA library for scoped `Vec`s, allowing multi-level divergence from the root element.\n\n\n\nThis is useful for monitoring state within a de facto tree where\n\nlinks to parents aren't necessarily needed. Consumers can keep\n\nreferences to a specific parent if required and check the values\n\nfrom the scope of their choosing, parents are free to be dropped if\n\nthey're no longer required.\n\n\n\n\n\nThe full `std::vec::Vec` spec has not yet been implemented but as\n\nthe library stabilises, more and more of the `Vec` library will be\n\nsupported - however there will be some divergence from the API where\n\nnecessary given the structural differences of a `ScopedVec`.\n\n\n\nThe library isn't yet ready for consumption in any production-level\n\nsoftware but feel free to use it in side projects and make contributions\n", "file_path": "README.md", "rank": 0, "score": 6486.659775009785 } ]
Rust
libranoc/src/syntax/parse/statement/expression/operator.rs
rano-lang/rano
fe047a40dd17e4b35457a375d7749ca881dac4dc
use crate::{ core::ast::*, syntax::{parse::*, Span, TokenKind}, }; pub struct OperatorBindingPowerPrefix { pub constructor: Box<dyn FnOnce(Box<Expression>) -> PrefixOperator>, pub right_binding_power: u8, } pub struct OperatorBindingPowerInfix { pub operator: Token, pub constructor: Box<dyn FnOnce(Box<Expression>, Span, Box<Expression>) -> InfixOperator>, pub left_binding_power: u8, pub right_binding_power: u8, } pub struct OperatorBindingPowerPostfix { pub constructor: Box<dyn FnOnce(Box<Expression>, Vec<Expression>) -> PostfixOperator>, pub left_binding_power: u8, pub tails: Box<dyn FnOnce(ParseInput) -> ParseResult<Vec<Expression>>>, pub close: Box<dyn FnOnce(ParseInput) -> ParseResult<()>>, } pub fn parse_prefix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerPrefix> { alt(( map(tag(TokenKind::PunctuationExclamationMark), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::Not(Not(expr))), right_binding_power: 13, } }), map(tag(TokenKind::PunctuationPlusSign), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::UnaryPlus(UnaryPlus(expr))), right_binding_power: 13, } }), map(tag(TokenKind::PunctuationHyphenMinus), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::UnaryMinus(UnaryMinus(expr))), right_binding_power: 13, } }), ))(i) } pub fn parse_infix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerInfix> { alt(( map(tag(TokenKind::PunctuationsLogicalOr), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LogicalOr(lhs, span, rhs)), left_binding_power: 1, right_binding_power: 2, } }), map(tag(TokenKind::PunctuationsLogicalAnd), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LogicalAnd(lhs, span, rhs)), left_binding_power: 3, right_binding_power: 4, } }), map(tag(TokenKind::PunctuationsEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::EqualTo(lhs, span, rhs)), left_binding_power: 5, right_binding_power: 6, } }), map(tag(TokenKind::PunctuationsNotEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::NotEqualTo(lhs, span, rhs)), left_binding_power: 5, right_binding_power: 6, } }), map(tag(TokenKind::PunctuationGreaterThanSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::GreaterThan(lhs, span, rhs)), left_binding_power: 7, right_binding_power: 8, } }), map(tag(TokenKind::PunctuationLessThanSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LessThan(lhs, span, rhs)), left_binding_power: 7, right_binding_power: 8, } }), map( tag(TokenKind::PunctuationsGreaterThanOrEqualTo), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::GreaterThanOrEqualTo(lhs, span, rhs) }), left_binding_power: 7, right_binding_power: 8, }, ), map(tag(TokenKind::PunctuationsLessThanOrEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::LessThanOrEqualTo(lhs, span, rhs) }), left_binding_power: 7, right_binding_power: 8, } }), map(tag(TokenKind::PunctuationPlusSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Add(lhs, span, rhs)), left_binding_power: 9, right_binding_power: 10, } }), 
map(tag(TokenKind::PunctuationHyphenMinus), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Subtract(lhs, span, rhs)), left_binding_power: 9, right_binding_power: 10, } }), map(tag(TokenKind::PunctuationAsterisk), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Multiply(lhs, span, rhs)), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationSolidus), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Divide(lhs, span, rhs)), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationPercentSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, operator, rhs| { InfixOperator::Remainder(lhs, operator, rhs) }), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationFullStop), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::GetField(GetField(lhs, rhs))), left_binding_power: 17, right_binding_power: 16, } }), map(tag(TokenKind::PunctuationsGetFieldNullable), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::GetFieldNullable(GetFieldNullable(lhs, rhs)) }), left_binding_power: 17, right_binding_power: 16, } }), map( tag(TokenKind::PunctuationsRangeRightExclusive), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::RangeRightExclusive(lhs, span, rhs) }), left_binding_power: 19, right_binding_power: 18, }, ), map( tag(TokenKind::PunctuationsRangeRightInclusive), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::RangeRightInclusive(lhs, span, rhs) }), left_binding_power: 19, right_binding_power: 18, }, ), ))(i) } pub fn parse_postfix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerPostfix> { map(tag(TokenKind::PunctuationLeftSquareBracket), |_| { OperatorBindingPowerPostfix { constructor: Box::new(|expr, tails| PostfixOperator::Index(Index(expr, tails))), left_binding_power: 14, tails: Box::new(map(parse_expression, |expr| vec![expr])), close: Box::new(map(tag(TokenKind::PunctuationRightSquareBracket), |_| ())), } })(i) }
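The operator table above encodes precedence and associativity as (left, right) binding-power pairs in the Pratt style: `Add` at (9, 10) binds looser than `Multiply` at (11, 12), and pairs whose left power exceeds the right (for example `GetField` at (17, 16)) nest to the right. The toy below is not rano code; it is a self-contained sketch that reuses three of those pairs under the usual loop-until-`left_bp < min_bp` convention (the actual consumption of these pairs happens in `parse_expression`, only partially visible in the context items further down) to show which groupings the numbers produce.

```rust
// A self-contained toy (not rano code) reusing three (left, right) binding-power
// pairs from the table above to show how they decide precedence and
// associativity under the usual Pratt convention. Atoms are single characters.
fn infix_binding_power(op: char) -> Option<(u8, u8)> {
    Some(match op {
        '+' => (9, 10),  // Add
        '*' => (11, 12), // Multiply: binds tighter than Add
        '.' => (17, 16), // GetField: left > right, so it nests to the right
        _ => return None,
    })
}

fn parse_expr(tokens: &mut std::iter::Peekable<std::str::Chars<'_>>, min_bp: u8) -> String {
    let mut lhs = tokens.next().expect("expected an atom").to_string();
    loop {
        let op = match tokens.peek() {
            Some(&c) => c,
            None => break,
        };
        let (left_bp, right_bp) = match infix_binding_power(op) {
            Some(bp) => bp,
            None => break,
        };
        if left_bp < min_bp {
            break; // pending operator binds too loosely; return to the caller
        }
        tokens.next(); // consume the operator
        let rhs = parse_expr(tokens, right_bp);
        lhs = format!("({} {} {})", lhs, op, rhs);
    }
    lhs
}

fn main() {
    let parse = |s: &str| parse_expr(&mut s.chars().peekable(), 0);
    assert_eq!(parse("a+b*c"), "(a + (b * c))"); // Multiply outbinds Add
    assert_eq!(parse("a+b+c"), "((a + b) + c)"); // left < right => left-associative
    assert_eq!(parse("a.b.c"), "(a . (b . c))"); // left > right => right-nesting
    println!("binding powers resolved as expected");
}
```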
use crate::{ core::ast::*, syntax::{parse::*, Span, TokenKind}, }; pub struct OperatorBindingPowerPrefix { pub constructor: Box<dyn FnOnce(Box<Expression>) -> PrefixOperator>, pub right_binding_power: u8, } pub struct OperatorBindingPowerInfix { pub operator: Token, pub constructor: Box<dyn FnOnce(Box<Expression>, Span, Box<Expression>) -> InfixOperator>, pub left_binding_power: u8, pub right_binding_power: u8, } pub struct OperatorBindingPowerPostfix { pub constructor: Box<dyn FnOnce(Box<Expression>, Vec<Expression>) -> PostfixOperator>, pub left_binding_power: u8, pub tails: Box<dyn FnOnce(ParseInput) -> ParseResult<Vec<Expression>>>, pub close: Box<dyn FnOnce(ParseInput) -> ParseResult<()>>, } pub fn parse_prefix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerPrefix> { alt(( map(tag(TokenKind::PunctuationExclamationMark), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::Not(Not(expr))), right_binding_power: 13, } }), map(tag(TokenKind::PunctuationPlusSign),
mainder(lhs, operator, rhs) }), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationFullStop), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::GetField(GetField(lhs, rhs))), left_binding_power: 17, right_binding_power: 16, } }), map(tag(TokenKind::PunctuationsGetFieldNullable), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::GetFieldNullable(GetFieldNullable(lhs, rhs)) }), left_binding_power: 17, right_binding_power: 16, } }), map( tag(TokenKind::PunctuationsRangeRightExclusive), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::RangeRightExclusive(lhs, span, rhs) }), left_binding_power: 19, right_binding_power: 18, }, ), map( tag(TokenKind::PunctuationsRangeRightInclusive), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::RangeRightInclusive(lhs, span, rhs) }), left_binding_power: 19, right_binding_power: 18, }, ), ))(i) } pub fn parse_postfix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerPostfix> { map(tag(TokenKind::PunctuationLeftSquareBracket), |_| { OperatorBindingPowerPostfix { constructor: Box::new(|expr, tails| PostfixOperator::Index(Index(expr, tails))), left_binding_power: 14, tails: Box::new(map(parse_expression, |expr| vec![expr])), close: Box::new(map(tag(TokenKind::PunctuationRightSquareBracket), |_| ())), } })(i) }
|_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::UnaryPlus(UnaryPlus(expr))), right_binding_power: 13, } }), map(tag(TokenKind::PunctuationHyphenMinus), |_| { OperatorBindingPowerPrefix { constructor: Box::new(|expr| PrefixOperator::UnaryMinus(UnaryMinus(expr))), right_binding_power: 13, } }), ))(i) } pub fn parse_infix_operator(i: ParseInput) -> ParseResult<OperatorBindingPowerInfix> { alt(( map(tag(TokenKind::PunctuationsLogicalOr), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LogicalOr(lhs, span, rhs)), left_binding_power: 1, right_binding_power: 2, } }), map(tag(TokenKind::PunctuationsLogicalAnd), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LogicalAnd(lhs, span, rhs)), left_binding_power: 3, right_binding_power: 4, } }), map(tag(TokenKind::PunctuationsEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::EqualTo(lhs, span, rhs)), left_binding_power: 5, right_binding_power: 6, } }), map(tag(TokenKind::PunctuationsNotEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::NotEqualTo(lhs, span, rhs)), left_binding_power: 5, right_binding_power: 6, } }), map(tag(TokenKind::PunctuationGreaterThanSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::GreaterThan(lhs, span, rhs)), left_binding_power: 7, right_binding_power: 8, } }), map(tag(TokenKind::PunctuationLessThanSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::LessThan(lhs, span, rhs)), left_binding_power: 7, right_binding_power: 8, } }), map( tag(TokenKind::PunctuationsGreaterThanOrEqualTo), |operator| OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::GreaterThanOrEqualTo(lhs, span, rhs) }), left_binding_power: 7, right_binding_power: 8, }, ), map(tag(TokenKind::PunctuationsLessThanOrEqualTo), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| { InfixOperator::LessThanOrEqualTo(lhs, span, rhs) }), left_binding_power: 7, right_binding_power: 8, } }), map(tag(TokenKind::PunctuationPlusSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Add(lhs, span, rhs)), left_binding_power: 9, right_binding_power: 10, } }), map(tag(TokenKind::PunctuationHyphenMinus), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Subtract(lhs, span, rhs)), left_binding_power: 9, right_binding_power: 10, } }), map(tag(TokenKind::PunctuationAsterisk), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Multiply(lhs, span, rhs)), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationSolidus), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, span, rhs| InfixOperator::Divide(lhs, span, rhs)), left_binding_power: 11, right_binding_power: 12, } }), map(tag(TokenKind::PunctuationPercentSign), |operator| { OperatorBindingPowerInfix { operator, constructor: Box::new(|lhs, operator, rhs| { InfixOperator::Re
random
[ { "content": "pub fn parse(tokens: Vec<Token>) -> crate::core::Result<Module> {\n\n let i = ParseInput::new(tokens);\n\n let (_, nodes) = all_consuming(many0(parse_statement_node))(i)?;\n\n Ok(Module { nodes })\n\n}\n", "file_path": "libranoc/src/syntax/parse/mod.rs", "rank": 0, "score": 157775.78324144 }, { "content": "pub fn parse_identifier(i: ParseInput) -> ParseResult<Token> {\n\n let (i, token) = any(i)?;\n\n if let TokenKind::IdentifierIdentifier(_) = &token.kind {\n\n Ok((i, token))\n\n } else {\n\n err_tag(i)\n\n }\n\n}\n", "file_path": "libranoc/src/syntax/parse/fragment/identifier.rs", "rank": 4, "score": 138242.66715403966 }, { "content": "pub fn tokenize(src: &str) -> Vec<Token> {\n\n create_tokenizer(src).collect()\n\n}\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 5, "score": 129224.32948450088 }, { "content": "pub fn create_tokenizer<'a>(src: &'a str) -> impl Iterator<Item = Token> + 'a {\n\n RanoLexer {\n\n logos_lexer: TokenKind::lexer(src),\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 6, "score": 117418.33031129473 }, { "content": "pub fn any<Error: ParseError<ParseInput>>(i: ParseInput) -> IResult<ParseInput, Token, Error> {\n\n match i.slice_index(1) {\n\n Ok(index) => {\n\n let (i, part) = i.take_split(index);\n\n Ok((i, part.tokens[0].clone()))\n\n }\n\n Err(_needed) => err_kind(i, ErrorKind::Eof),\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/nom/util.rs", "rank": 7, "score": 112806.88776164627 }, { "content": "pub fn compile_wasm(module: Module) -> (Vec<u8>, Vec<Error>) {\n\n let mut context = Context::new();\n\n\n\n match context.walk(module) {\n\n Ok(()) => {}\n\n Err(error) => {\n\n context.add_compilation_error(error);\n\n }\n\n }\n\n\n\n context.finish()\n\n}\n", "file_path": "libranoc/src/codegen/mod.rs", "rank": 8, "score": 109426.45079278504 }, { "content": "pub trait Spanned {\n\n fn span(&self) -> Span;\n\n}\n\n\n\nimpl<T> Spanned for &'_ T where T: Spanned {\n\n fn span(&self) -> Span {\n\n <T as Spanned>::span(self)\n\n }\n\n}\n\n\n\nimpl<T> Spanned for Vec<T>\n\nwhere\n\n T: Spanned,\n\n{\n\n fn span(&self) -> Span {\n\n self.iter()\n\n .fold(Span::EMPTY, |acc, curr| acc.joined(&curr.span()))\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 9, "score": 101112.51746326522 }, { "content": "pub fn parse_if(i: ParseInput) -> ParseResult<If> {\n\n let (i, if_token) = tag(TokenKind::KeywordIf)(i)?;\n\n let original_binding_power = i.binding_power;\n\n let (i, condition) = parse_expression(i.with_binding_power(0))?;\n\n let (i, body) = parse_block(i)?;\n\n let (i, else_part) = opt(parse_else)(i)?;\n\n\n\n Ok((\n\n i.with_binding_power(original_binding_power),\n\n If {\n\n if_token,\n\n condition: Box::new(condition),\n\n body: Box::new(body),\n\n else_part,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/if.rs", "rank": 10, "score": 95337.54107075394 }, { "content": "pub fn parse_path(i: ParseInput) -> ParseResult<Path> {\n\n map(\n\n separated_list1(tag(TokenKind::PunctuationFullStop), parse_identifier),\n\n Path,\n\n )(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/fragment/path.rs", "rank": 11, "score": 93035.8334266253 }, { "content": "pub fn parse_if_expression(i: ParseInput) -> ParseResult<Expression> {\n\n map(parse_if, Expression::If)(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/expression/if.rs", "rank": 12, "score": 93035.83342662528 }, { "content": "pub fn parse_pattern(i: ParseInput) -> 
ParseResult<Pattern> {\n\n parse_pattern_slot(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/fragment/pattern.rs", "rank": 13, "score": 93035.8334266253 }, { "content": "pub fn parse_type(i: ParseInput) -> ParseResult<Type> {\n\n alt((parse_type_basic, parse_type_impl))(i)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/fragment/type.rs", "rank": 14, "score": 93035.83342662528 }, { "content": "pub fn parse_block(i: ParseInput) -> ParseResult<Block> {\n\n let (i, curly_bracket_open_token) = tag(TokenKind::PunctuationLeftCurlyBracket)(i)?;\n\n let original_binding_power = i.binding_power;\n\n let (i, (body, last_expression)) =\n\n cut(tuple((many0(parse_statement), opt(parse_expression))))(i.with_binding_power(0))?;\n\n let (i, curly_bracket_close_token) =\n\n tag(TokenKind::PunctuationRightCurlyBracket)(i.with_binding_power(original_binding_power))?;\n\n\n\n Ok((\n\n i,\n\n Block {\n\n curly_bracket_open_token,\n\n body,\n\n last_expression,\n\n curly_bracket_close_token,\n\n },\n\n ))\n\n}\n", "file_path": "libranoc/src/syntax/parse/fragment/block.rs", "rank": 15, "score": 93035.8334266253 }, { "content": "pub fn parse_name(i: ParseInput) -> ParseResult<Name> {\n\n alt((parse_name_ident, parse_name_placeholder))(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/fragment/name.rs", "rank": 16, "score": 93035.8334266253 }, { "content": "pub fn parse_statement(i: ParseInput) -> ParseResult<Statement> {\n\n alt((parse_declaration_statement, parse_expression_statement))(i)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/mod.rs", "rank": 17, "score": 93035.83342662528 }, { "content": "pub fn parse_declaration(i: ParseInput) -> ParseResult<Declaration> {\n\n parse_function_declaration_declaration(i)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/declaration/mod.rs", "rank": 18, "score": 91965.31907803265 }, { "content": "pub fn parse_literal(i: ParseInput) -> ParseResult<Literal> {\n\n alt((\n\n parse_literal_string,\n\n parse_literal_character,\n\n parse_literal_boolean,\n\n parse_literal_integer,\n\n parse_literal_decimal,\n\n ))(i)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/literal.rs", "rank": 19, "score": 91965.31907803264 }, { "content": "pub fn parse_name_placeholder(i: ParseInput) -> ParseResult<Name> {\n\n map(tag(TokenKind::KeywordPlaceholderName), |_| {\n\n Name::Placeholder\n\n })(i)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/fragment/name.rs", "rank": 20, "score": 91965.31907803264 }, { "content": "pub fn parse_expression(i: ParseInput) -> ParseResult<Expression> {\n\n let (i, lhs) = alt((\n\n |i| {\n\n let (i, operator) = parse_prefix_operator(i)?;\n\n let original_binding_power = i.binding_power;\n\n let (i, rhs) = parse_expression(i.with_binding_power(operator.right_binding_power))?;\n\n Ok((\n\n i.with_binding_power(original_binding_power),\n\n Expression::Operator(Operator::Prefix((operator.constructor)(Box::new(rhs)))),\n\n ))\n\n },\n\n parse_group_tuple_expression,\n\n parse_simple_expression,\n\n ))(i)?;\n\n\n\n type Transformer = Box<dyn FnOnce(Box<Expression>) -> Expression>;\n\n let (i, lhs) = fold_many0(\n\n alt((\n\n |i| {\n\n let (i, operator) = parse_postfix_operator(i)?;\n", "file_path": "libranoc/src/syntax/parse/statement/expression/mod.rs", "rank": 21, "score": 91965.31907803264 }, { "content": "pub fn parse_type_impl(i: ParseInput) -> ParseResult<Type> {\n\n map(\n\n preceded(tag(TokenKind::KeywordImpl), cut(parse_type_basic)),\n\n |ty| Type::Impl(Box::new(ty)),\n\n 
)(i)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/fragment/type.rs", "rank": 22, "score": 91965.31907803265 }, { "content": "pub fn parse_name_ident(i: ParseInput) -> ParseResult<Name> {\n\n map(parse_identifier, Name::Ident)(i)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/fragment/name.rs", "rank": 23, "score": 91965.31907803265 }, { "content": "pub fn parse_statement_node(i: ParseInput) -> ParseResult<Node> {\n\n map(parse_statement, Node::Statement)(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/mod.rs", "rank": 24, "score": 91965.31907803265 }, { "content": "pub fn parse_pattern_slot(i: ParseInput) -> ParseResult<Pattern> {\n\n map(parse_name, Pattern::Slot)(i)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/fragment/pattern.rs", "rank": 25, "score": 91965.31907803265 }, { "content": "pub fn parse_type_annotation(i: ParseInput) -> ParseResult<Type> {\n\n preceded(tag(TokenKind::PunctuationColon), cut(parse_type))(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/fragment/type.rs", "rank": 26, "score": 91965.31907803265 }, { "content": "pub fn parse_type_basic(i: ParseInput) -> ParseResult<Type> {\n\n let (i, base) = parse_path(i)?;\n\n\n\n // TODO: TypeParameters\n\n let type_parameters = Vec::new();\n\n\n\n Ok((\n\n i,\n\n Type::Basic {\n\n base,\n\n type_parameters,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/fragment/type.rs", "rank": 27, "score": 91965.31907803265 }, { "content": "pub fn parse_literal_integer(i: ParseInput) -> ParseResult<Literal> {\n\n let (i, token) = any(i)?;\n\n if let TokenKind::LiteralNumberIntegral(v) = &token.kind {\n\n Ok((i, Literal::Integer(Integer(v.clone()))))\n\n } else {\n\n err_tag(i)\n\n }\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/expression/literal.rs", "rank": 28, "score": 90943.4925135464 }, { "content": "pub fn parse_literal_boolean(i: ParseInput) -> ParseResult<Literal> {\n\n let (i, token) = any(i)?;\n\n if let TokenKind::LiteralBoolean(v) = &token.kind {\n\n Ok((i, Literal::Boolean(v.clone())))\n\n } else {\n\n err_tag(i)\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/literal.rs", "rank": 29, "score": 90943.4925135464 }, { "content": "pub fn parse_declaration_statement(i: ParseInput) -> ParseResult<Statement> {\n\n map(parse_declaration, Statement::Declaration)(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/declaration/mod.rs", "rank": 30, "score": 90943.4925135464 }, { "content": "pub fn parse_literal_string(i: ParseInput) -> ParseResult<Literal> {\n\n let (i, token) = any(i)?;\n\n if let TokenKind::LiteralString(v) = &token.kind {\n\n Ok((i, Literal::String(v.clone())))\n\n } else {\n\n err_tag(i)\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/literal.rs", "rank": 31, "score": 90943.4925135464 }, { "content": "pub fn parse_literal_character(i: ParseInput) -> ParseResult<Literal> {\n\n let (i, token) = any(i)?;\n\n if let TokenKind::LiteralCharacter(v) = &token.kind {\n\n Ok((i, Literal::Character(v.clone())))\n\n } else {\n\n err_tag(i)\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/literal.rs", "rank": 32, "score": 90943.4925135464 }, { "content": "pub fn parse_literal_expression(i: ParseInput) -> ParseResult<Expression> {\n\n map(parse_literal, Expression::Literal)(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/expression/literal.rs", "rank": 33, "score": 90943.4925135464 }, { "content": "pub fn parse_literal_decimal(i: ParseInput) -> 
ParseResult<Literal> {\n\n let (i, token) = any(i)?;\n\n if let TokenKind::LiteralNumberDecimal(v) = &token.kind {\n\n Ok((i, Literal::Decimal(v.clone())))\n\n } else {\n\n err_tag(i)\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/literal.rs", "rank": 34, "score": 90943.4925135464 }, { "content": "pub fn parse_name_expression(i: ParseInput) -> ParseResult<Expression> {\n\n map(parse_name, Expression::Name)(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/expression/name.rs", "rank": 35, "score": 90943.4925135464 }, { "content": "pub fn parse_expression_statement(i: ParseInput) -> ParseResult<Statement> {\n\n map(\n\n terminated(parse_expression, tag(TokenKind::PunctuationSemicolon)),\n\n Statement::Expression,\n\n )(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/expression/mod.rs", "rank": 36, "score": 90943.4925135464 }, { "content": "pub fn parse_simple_expression(i: ParseInput) -> ParseResult<Expression> {\n\n alt((\n\n parse_literal_expression,\n\n parse_name_expression,\n\n parse_if_expression,\n\n ))(i)\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/expression/mod.rs", "rank": 37, "score": 90943.4925135464 }, { "content": "pub fn parse_function_declaration(i: ParseInput) -> ParseResult<FunctionDeclaration> {\n\n let (i, pub_token) = opt(tag(TokenKind::KeywordPub))(i)?;\n\n let (i, extern_token) = opt(tag(TokenKind::KeywordExtern))(i)?;\n\n let (i, _) = tag(TokenKind::KeywordFn)(i)?;\n\n let (i, name) = cut(parse_identifier)(i)?;\n\n\n\n let (i, parameters) = opt(delimited(\n\n tag(TokenKind::PunctuationLeftParenthesis),\n\n |i| {\n\n let (i, res) = separated_list0(tag(TokenKind::PunctuationComma), |i| {\n\n let (i, pattern) = parse_pattern(i)?;\n\n let (i, ty) = parse_type_annotation(i)?;\n\n Ok((i, (pattern, ty)))\n\n })(i)?;\n\n if res.len() > 0 {\n\n let (i, _) = opt(tag(TokenKind::PunctuationComma))(i)?;\n\n Ok((i, res))\n\n } else {\n\n Ok((i, res))\n\n }\n", "file_path": "libranoc/src/syntax/parse/statement/declaration/function.rs", "rank": 38, "score": 89967.10605200584 }, { "content": "pub fn parse_function_declaration_declaration(i: ParseInput) -> ParseResult<Declaration> {\n\n let (s, declaration) = parse_function_declaration(i)?;\n\n Ok((s, Declaration::FunctionDeclaration(declaration)))\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/declaration/function.rs", "rank": 39, "score": 89967.10605200584 }, { "content": "pub fn parse_group_tuple_expression(i: ParseInput) -> ParseResult<Expression> {\n\n let i = i.with_binding_power(0);\n\n let (i, (mut elements, last_comma)) = delimited(\n\n tag(TokenKind::PunctuationLeftParenthesis),\n\n |i| {\n\n let (i, elements) =\n\n separated_list0(tag(TokenKind::PunctuationComma), parse_expression)(i)?;\n\n let (i, last_comma) = opt(tag(TokenKind::PunctuationComma))(i)?;\n\n Ok((i, (elements, last_comma.is_some())))\n\n },\n\n tag(TokenKind::PunctuationRightParenthesis),\n\n )(i)?;\n\n\n\n let expr = if elements.len() > 1 || last_comma {\n\n elements.swap_remove(0)\n\n } else {\n\n Expression::Tuple(elements)\n\n };\n\n\n\n Ok((i, expr))\n\n}\n", "file_path": "libranoc/src/syntax/parse/statement/expression/group_tuple.rs", "rank": 40, "score": 89033.19457496986 }, { "content": "struct RanoLexer<'a> {\n\n logos_lexer: Lexer<'a, TokenKind>,\n\n}\n\n\n\nimpl<'a> Iterator for RanoLexer<'a> {\n\n type Item = Token;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.logos_lexer.next().map(|kind| Token {\n\n kind,\n\n span: Span {\n\n range: 
self.logos_lexer.span(),\n\n line: self.logos_lexer.extras.line,\n\n column: self.logos_lexer.span().end - self.logos_lexer.extras.last_linefeed,\n\n len: self.logos_lexer.span().len(),\n\n },\n\n content: self.logos_lexer.slice().to_string(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 41, "score": 85736.41609162712 }, { "content": "pub fn tag<Error: ParseError<ParseInput>>(\n\n tag: TokenKind,\n\n) -> impl Fn(ParseInput) -> IResult<ParseInput, Token, Error> {\n\n move |i| match i.iter_elements().next().map(|t| {\n\n let b = t.kind == tag;\n\n (t, b)\n\n }) {\n\n Some((t, true)) => Ok((i.slice(1..), t)),\n\n _ => err_kind(i, ErrorKind::Tag),\n\n }\n\n}\n", "file_path": "libranoc/src/syntax/parse/nom/util.rs", "rank": 42, "score": 80156.84874412933 }, { "content": "pub fn satisfy<F, Error: ParseError<ParseInput>>(\n\n cond: F,\n\n) -> impl Fn(ParseInput) -> IResult<ParseInput, Token, Error>\n\nwhere\n\n F: Fn(&Token) -> bool,\n\n{\n\n move |i| match (i).iter_elements().next().map(|t| {\n\n let b = cond(&t);\n\n (t, b)\n\n }) {\n\n Some((t, true)) => Ok((i.slice(1..), t)),\n\n _ => err_kind(i, ErrorKind::Satisfy),\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/nom/util.rs", "rank": 43, "score": 77706.0901511852 }, { "content": "#[inline(always)]\n\npub fn err_kind<T, Error: ParseError<ParseInput>>(\n\n i: ParseInput,\n\n kind: ErrorKind,\n\n) -> IResult<ParseInput, T, Error> {\n\n Err(Err::Error(Error::from_error_kind(i, kind)))\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/nom/util.rs", "rank": 44, "score": 76635.57580259256 }, { "content": "#[inline(always)]\n\npub fn err_tag<T, Error: ParseError<ParseInput>>(i: ParseInput) -> IResult<ParseInput, T, Error> {\n\n err_kind(i, ErrorKind::Tag)\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/nom/util.rs", "rank": 45, "score": 70448.87476599682 }, { "content": "fn parse_else(i: ParseInput) -> ParseResult<Else> {\n\n let (i, else_token) = tag(TokenKind::KeywordElse)(i)?;\n\n\n\n let (i, r#else) = alt((\n\n map(parse_if, |r#if| {\n\n Else::If(else_token.clone(), Box::new(r#if))\n\n }),\n\n map(parse_block, |block| {\n\n Else::Block(else_token.clone(), Box::new(block))\n\n }),\n\n ))(i)?;\n\n\n\n Ok((i, r#else))\n\n}\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/if.rs", "rank": 46, "score": 61291.31120833299 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "rano-vm/src/main.rs", "rank": 47, "score": 38816.61868571295 }, { "content": "fn main() -> anyhow::Result<()> {\n\n println!(\"Reading main.rano\");\n\n let src = fs::read_to_string(PathBuf::from(\"main.rano\"))?;\n\n\n\n println!(\"Parsing main.rano\");\n\n let tokens = syntax::tokenize(&src);\n\n let ast = match syntax::parse(tokens) {\n\n Ok(ast) => ast,\n\n Err(error) => {\n\n report_error(&src, error)?;\n\n bail!(\"Failed to parse sources\");\n\n }\n\n };\n\n\n\n println!(\"Compiling main.rano\");\n\n let (wasm_bytes, errors) = codegen::compile_wasm(ast);\n\n\n\n if errors.len() > 0 {\n\n for error in errors {\n\n report_error(&src, error)?;\n", "file_path": "ranoc/src/main.rs", "rank": 48, "score": 37078.292415834614 }, { "content": "fn report_error(src: &String, error: Error) -> anyhow::Result<()> {\n\n use codespan_reporting::{\n\n diagnostic::{Diagnostic, Label},\n\n files::SimpleFiles,\n\n term::{\n\n self,\n\n termcolor::{ColorChoice, StandardStream},\n\n },\n\n };\n\n let mut files = SimpleFiles::new();\n\n let file_id = files.add(\"main.rano\", src);\n\n let 
diagnostic = Diagnostic::error()\n\n .with_message(error.message)\n\n .with_code(format!(\"E{:04}\", error.code as u16))\n\n .with_labels(\n\n error\n\n .labels\n\n .iter()\n\n .map(|label| {\n\n let mut diagnostic_label = Label::primary(\n", "file_path": "ranoc/src/main.rs", "rank": 49, "score": 32091.790532140178 }, { "content": "use std::{\n\n cmp::{max, min},\n\n ops::{Range, RangeBounds},\n\n};\n\n\n\nuse logos::Lexer;\n\npub use logos::Logos;\n\n\n\n#[derive(Debug, Default)]\n\npub struct TokenExtras {\n\n last_linefeed: usize,\n\n line: usize,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Span {\n\n pub range: Range<usize>,\n\n pub line: usize,\n\n pub column: usize,\n\n pub len: usize,\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 50, "score": 31403.647798156006 }, { "content": "#[derive(Debug, PartialEq, Clone)]\n\npub struct Token {\n\n pub kind: TokenKind,\n\n pub span: Span,\n\n pub content: String,\n\n}\n\n\n\n#[derive(Logos, Debug, PartialEq, Clone)]\n\n#[logos(extras = TokenExtras)]\n\npub enum TokenKind {\n\n // #========== Punctuation ==========#\n\n #[token(\"!\")]\n\n PunctuationExclamationMark,\n\n #[token(\"#\")]\n\n PunctuationNumberSign,\n\n #[token(\"$\")]\n\n PunctuationDollarSign,\n\n #[token(\"%\")]\n\n PunctuationPercentSign,\n\n #[token(\"&\")]\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 51, "score": 31403.540231911033 }, { "content": " KeywordFn,\n\n #[token(\"for\")]\n\n KeywordFor,\n\n #[token(\"if\")]\n\n KeywordIf,\n\n #[token(\"impl\")]\n\n KeywordImpl,\n\n #[token(\"in\")]\n\n KeywordIn,\n\n #[token(\"let\")]\n\n KeywordLet,\n\n #[token(\"match\")]\n\n KeywordMatch,\n\n #[token(\"pub\")]\n\n KeywordPub,\n\n #[token(\"return\")]\n\n KeywordReturn,\n\n #[token(\"self\")]\n\n KeywordSelf,\n\n #[token(\"Self\")]\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 52, "score": 31403.030791412948 }, { "content": " KeywordSelfType,\n\n #[token(\"struct\")]\n\n KeywordStruct,\n\n #[token(\"trait\")]\n\n KeywordTrait,\n\n #[token(\"type\")]\n\n KeywordType,\n\n #[token(\"union\")]\n\n KeywordUnion,\n\n #[token(\"use\")]\n\n KeywordUse,\n\n #[token(\"where\")]\n\n KeywordWhere,\n\n #[token(\"while\")]\n\n KeywordWhile,\n\n\n\n // #========== Identifier ==========#\n\n #[regex(\n\n \"[^0-9\\n\\u{000B}\\u{000C}\\r\\u{0085}\\u{2028}\\u{2029}\\t \\u{00AD}\\u{00A0}\\u{1680}\\u{2000}\\u{2001}\\u{2002}\\u{2003}\\u{2004}\\u{2005}\\u{2006}\\u{2007}\\u{2008}\\u{2009}\\u{200A}\\u{200B}\\u{200E}\\u{200F}\\u{202F}\\u{205F}\\u{3000}\\u{FEFF}!#$%&*+,-./:;<=>?@\\\\^|~(\\\\[{)\\\\]}][^\\n\\u{000B}\\u{000C}\\r\\u{0085}\\u{2028}\\u{2029}\\t \\u{00AD}\\u{00A0}\\u{1680}\\u{2000}\\u{2001}\\u{2002}\\u{2003}\\u{2004}\\u{2005}\\u{2006}\\u{2007}\\u{2008}\\u{2009}\\u{200A}\\u{200B}\\u{200E}\\u{200F}\\u{202F}\\u{205F}\\u{3000}\\u{FEFF}!#$%&*+,-./:;<=>?@\\\\^|~(\\\\[{)\\\\]}]*\",\n\n callback = |lex| lex.slice().to_owned()\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 53, "score": 31401.664648041165 }, { "content": "}\n\n\n\nimpl Span {\n\n pub const EMPTY: Span = Span {\n\n range: usize::MAX..usize::MIN,\n\n line: 0,\n\n column: 0,\n\n len: 0,\n\n };\n\n\n\n pub fn joined(&self, other: &Span) -> Span {\n\n let range = min(self.range.start, other.range.start)..max(self.range.end, other.range.end);\n\n let len = range.end - range.start;\n\n Span {\n\n range,\n\n line: min(self.line, other.line),\n\n column: min(self.column, other.column),\n\n len,\n\n }\n\n }\n\n}\n\n\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 
54, "score": 31400.27955558776 }, { "content": " PunctuationAmpersand,\n\n #[token(\"*\")]\n\n PunctuationAsterisk,\n\n #[token(\"+\")]\n\n PunctuationPlusSign,\n\n #[token(\",\")]\n\n PunctuationComma,\n\n #[token(\"-\")]\n\n PunctuationHyphenMinus,\n\n #[token(\".\")]\n\n PunctuationFullStop,\n\n #[token(\"/\")]\n\n PunctuationSolidus,\n\n #[token(\":\")]\n\n PunctuationColon,\n\n #[token(\";\")]\n\n PunctuationSemicolon,\n\n #[token(\"<\")]\n\n PunctuationLessThanSign,\n\n #[token(\"=\")]\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 55, "score": 31398.39287796797 }, { "content": " PunctuationEqualsSign,\n\n #[token(\">\")]\n\n PunctuationGreaterThanSign,\n\n #[token(\"?\")]\n\n PunctuationQuestionMark,\n\n #[token(\"@\")]\n\n PunctuationCommercialAt,\n\n #[token(\"\\\\\")]\n\n PunctuationReverseSolidus,\n\n #[token(\"^\")]\n\n PunctuationCircumflexAccent,\n\n #[token(\"|\")]\n\n PunctuationVerticalLine,\n\n #[token(\"~\")]\n\n PunctuationTilde,\n\n #[token(\"(\")]\n\n PunctuationLeftParenthesis,\n\n #[token(\"[\")]\n\n PunctuationLeftSquareBracket,\n\n #[token(\"{\")]\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 56, "score": 31398.336349025732 }, { "content": " #[token(\"->\")]\n\n PunctuationsSingleRightArrow,\n\n #[token(\"..\")]\n\n PunctuationsRangeRightExclusive,\n\n #[token(\"..=\")]\n\n PunctuationsRangeRightInclusive,\n\n #[token(\"?.\")]\n\n PunctuationsGetFieldNullable,\n\n // #========== Keyword ==========#\n\n #[token(\"as\")]\n\n KeywordAs,\n\n #[token(\"break\")]\n\n KeywordBreak,\n\n #[token(\"continue\")]\n\n KeywordContinue,\n\n #[token(\"else\")]\n\n KeywordElse,\n\n #[token(\"extern\")]\n\n KeywordExtern,\n\n #[token(\"fn\")]\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 57, "score": 31398.336349025732 }, { "content": " PunctuationLeftCurlyBracket,\n\n #[token(\")\")]\n\n PunctuationRightParenthesis,\n\n #[token(\"]\")]\n\n PunctuationRightSquareBracket,\n\n #[token(\"}\")]\n\n PunctuationRightCurlyBracket,\n\n // #========== Punctuations ==========#\n\n #[token(\"&&\")]\n\n PunctuationsLogicalAnd,\n\n #[token(\"||\")]\n\n PunctuationsLogicalOr,\n\n #[token(\"==\")]\n\n PunctuationsEqualTo,\n\n #[token(\"!=\")]\n\n PunctuationsNotEqualTo,\n\n #[token(\"<=\")]\n\n PunctuationsLessThanOrEqualTo,\n\n #[token(\">=\")]\n\n PunctuationsGreaterThanOrEqualTo,\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 58, "score": 31398.27800854321 }, { "content": " * '\\n' : LINE FEED\n\n * '\\u{000B}' : LINE TABULATION\n\n * '\\u{000C}' : FORM FEED\n\n * '\\r' : CARRIAGE RETURN\n\n * '\\u{0085}' : NEXT LINE\n\n * '\\u{2028}' : LINE SEPARATOR\n\n * '\\u{2029}' : PARAGRAPH SEPARATOR\n\n */\n\n #[regex(\n\n \"(\\r\\n|[\\n\\u{000B}\\u{000C}\\r\\u{0085}\\u{2028}\\u{2029}])\",\n\n priority = 2,\n\n callback = |lex| {\n\n lex.extras.line += 1;\n\n lex.extras.last_linefeed = lex.span().end;\n\n\n\n logos::Skip\n\n }\n\n )]\n\n VerticalSpace,\n\n /*\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 59, "score": 31396.305556877116 }, { "content": " )]\n\n IdentifierIdentifier(String),\n\n #[token(\"_\")]\n\n KeywordPlaceholderName,\n\n\n\n // #========== Literal ==========#\n\n #[regex(r#\"'(\\\\'|[^']*[^\\\\])'\"#, |lex| lex.slice().to_owned())]\n\n LiteralCharacter(String),\n\n #[regex(r#\"(\"\"|\"(\\\\\"|[^\"])*[^\\\\]\")\"#, |lex| lex.slice().to_owned())]\n\n LiteralString(String),\n\n #[regex(\"([0-9]+|0b[0-1]+|0o[0-7]+|0x[0-9a-fA-F]+)\", |lex| lex.slice().to_owned())]\n\n LiteralNumberIntegral(String),\n\n 
#[regex(\"[0-9]+\\\\.[0-9]+\", |lex| lex.slice().to_owned())]\n\n LiteralNumberDecimal(String),\n\n #[regex(\"[0-9]+(\\\\.[0-9]+)?[eE][+-][0-9]+\", |lex| lex.slice().to_owned())]\n\n LiteralNumberExponent(String),\n\n #[regex(\"(true|false)\", |lex| lex.slice().to_owned())]\n\n LiteralBoolean(String),\n\n\n\n /*\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 60, "score": 31395.807156967978 }, { "content": " * '\\t' : CHARACTER TABULATION\n\n * ' ' : SPACE\n\n * '\\u{00AD}' : SOFT HYPHEN\n\n * '\\u{00A0}' : NO-BREAK SPACE\n\n * '\\u{1680}' : OGHAM SPACE MARK\n\n * '\\u{2000}' : EN QUAD\n\n * '\\u{2001}' : EM QUAD\n\n * '\\u{2002}' : EN SPACE\n\n * '\\u{2003}' : EM SPACE\n\n * '\\u{2004}' : THREE-PER-EM SPACE\n\n * '\\u{2005}' : FOUR-PER-EM SPACE\n\n * '\\u{2006}' : SIX-PER-EM SPACE\n\n * '\\u{2007}' : FIGURE SPACE\n\n * '\\u{2008}' : PUNCTUATION SPACE\n\n * '\\u{2009}' : THIN SPACE\n\n * '\\u{200A}' : HAIR SPACE\n\n * '\\u{200B}' : ZERO WIDTH SPACE\n\n * '\\u{200E}' : LEFT-TO-RIGHT MARK\n\n * '\\u{200F}' : RIGHT-TO-LEFT MARK\n\n * '\\u{202F}' : NARROW NO-BREAK SPACE\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 61, "score": 31394.513164403805 }, { "content": " * '\\u{205F}' : MEDIUM MATHEMATICAL SPACE\n\n * '\\u{3000}' : IDEPGRAPHIC SPACE\n\n * '\\u{FEFF}' : ZERO WIDTH NO-BREAK SPACE\n\n */\n\n #[regex(\n\n \"[\\t \\u{00AD}\\u{00A0}\\u{1680}\\u{2000}\\u{2001}\\u{2002}\\u{2003}\\u{2004}\\u{2005}\\u{2006}\\u{2007}\\u{2008}\\u{2009}\\u{200A}\\u{200B}\\u{200E}\\u{200F}\\u{202F}\\u{205F}\\u{3000}\\u{FEFF}]+\",\n\n logos::skip,\n\n )]\n\n HorizontalSpaces,\n\n #[error]\n\n Error,\n\n}\n\n\n", "file_path": "libranoc/src/syntax/tokenize.rs", "rank": 62, "score": 31394.513164403805 }, { "content": "use crate::{codegen::*, core::ast::Operator};\n\n\n\n// mod prefix;\n\nmod infix;\n\nmod postfix;\n\n\n\nimpl<'a> Walker<Operator> for Context<'a> {\n\n fn walk(&mut self, operator: Operator) -> Result<(), Error> {\n\n match operator {\n\n Operator::Prefix(operator) => {\n\n todo!(\"Prefix operator is not implemented\");\n\n }\n\n Operator::Infix(operator) => self.walk(operator),\n\n Operator::Postfix(operator) => self.walk(operator),\n\n }\n\n }\n\n}\n", "file_path": "libranoc/src/codegen/walker/statement/expression/operator/mod.rs", "rank": 71, "score": 27947.578962166768 }, { "content": "use wasm_encoder::Instruction;\n\n\n\nuse crate::{codegen::*, core::ast::InfixOperator};\n\n\n\nimpl<'a> Walker<InfixOperator> for Context<'a> {\n\n fn walk(&mut self, operator: InfixOperator) -> Result<(), Error> {\n\n match &operator {\n\n InfixOperator::LogicalOr(lhs, operator_span, rhs)\n\n | InfixOperator::LogicalAnd(lhs, operator_span, rhs)\n\n | InfixOperator::Add(lhs, operator_span, rhs)\n\n | InfixOperator::Subtract(lhs, operator_span, rhs)\n\n | InfixOperator::Multiply(lhs, operator_span, rhs)\n\n | InfixOperator::Divide(lhs, operator_span, rhs)\n\n | InfixOperator::Remainder(lhs, operator_span, rhs)\n\n | InfixOperator::RangeRightExclusive(lhs, operator_span, rhs)\n\n | InfixOperator::RangeRightInclusive(lhs, operator_span, rhs) => {\n\n let trait_name = operator.trait_name();\n\n let lhs_type = \"i32\";\n\n let rhs_type = \"i32\";\n\n let function_id = self.import(\n", "file_path": "libranoc/src/codegen/walker/statement/expression/operator/infix/mod.rs", "rank": 72, "score": 27194.007163430582 }, { "content": "use crate::{codegen::*, core::ast::PostfixOperator};\n\n\n\nmod function_call;\n\n\n\nimpl<'a> Walker<PostfixOperator> for Context<'a> {\n\n fn walk(&mut self, operator: 
PostfixOperator) -> Result<(), Error> {\n\n match operator {\n\n PostfixOperator::Index(_) => {\n\n todo!(\"Postfix operator index is not implemented\");\n\n }\n\n PostfixOperator::FunctionCall(operator) => self.walk(operator),\n\n }\n\n }\n\n}\n", "file_path": "libranoc/src/codegen/walker/statement/expression/operator/postfix/mod.rs", "rank": 73, "score": 27191.858456419246 }, { "content": " self.walk(lhs)?;\n\n self.walk(rhs)?;\n\n self.instructions.push(Instruction::Call(partial_eq));\n\n if to_negate {\n\n let not = self.import(\n\n \"extern\",\n\n &format!(\"Not__{}\", result_type),\n\n operator_span.clone(),\n\n )?;\n\n self.instructions.push(Instruction::Call(not));\n\n }\n\n Ok(())\n\n }\n\n InfixOperator::GreaterThan(lhs, operator_span, rhs)\n\n | InfixOperator::LessThan(lhs, operator_span, rhs)\n\n | InfixOperator::GreaterThanOrEqualTo(lhs, operator_span, rhs)\n\n | InfixOperator::LessThanOrEqualTo(lhs, operator_span, rhs) => {\n\n let to_negate = matches!(operator, InfixOperator::NotEqualTo(..));\n\n let lhs_type = \"i32\";\n\n let rhs_type = \"i32\";\n", "file_path": "libranoc/src/codegen/walker/statement/expression/operator/infix/mod.rs", "rank": 74, "score": 27191.25128749568 }, { "content": " \"extern\",\n\n &format!(\"{}__{}_{}\", trait_name, lhs_type, rhs_type),\n\n operator_span.clone(),\n\n )?;\n\n self.walk(lhs)?;\n\n self.walk(rhs)?;\n\n self.instructions.push(Instruction::Call(function_id));\n\n Ok(())\n\n }\n\n InfixOperator::EqualTo(lhs, operator_span, rhs)\n\n | InfixOperator::NotEqualTo(lhs, operator_span, rhs) => {\n\n let to_negate = matches!(operator, InfixOperator::NotEqualTo(..));\n\n let lhs_type = \"i32\";\n\n let rhs_type = \"i32\";\n\n let partial_eq = self.import(\n\n \"extern\",\n\n &format!(\"PartialEq__{}_{}\", lhs_type, rhs_type),\n\n operator_span.clone(),\n\n )?;\n\n let result_type = \"i32\";\n", "file_path": "libranoc/src/codegen/walker/statement/expression/operator/infix/mod.rs", "rank": 75, "score": 27190.80337103896 }, { "content": " let partial_eq = self.import(\n\n \"extern\",\n\n &format!(\"PartialOrd__{}_{}\", lhs_type, rhs_type),\n\n operator_span.clone(),\n\n )?;\n\n self.walk(lhs)?;\n\n self.walk(rhs)?;\n\n self.instructions.push(Instruction::Call(partial_eq));\n\n match operator {\n\n InfixOperator::GreaterThan(..) => {\n\n self.instructions.push(Instruction::I32Const(0));\n\n self.instructions.push(Instruction::I32GtS);\n\n }\n\n InfixOperator::LessThan(..) => {\n\n self.instructions.push(Instruction::I32Const(0));\n\n self.instructions.push(Instruction::I32LtS);\n\n }\n\n InfixOperator::GreaterThanOrEqualTo(..) => {\n\n self.instructions.push(Instruction::I32Const(0));\n\n self.instructions.push(Instruction::I32GeS);\n", "file_path": "libranoc/src/codegen/walker/statement/expression/operator/infix/mod.rs", "rank": 76, "score": 27188.797651395715 }, { "content": " }\n\n InfixOperator::LessThanOrEqualTo(..) 
=> {\n\n self.instructions.push(Instruction::I32Const(0));\n\n self.instructions.push(Instruction::I32LeS);\n\n }\n\n _ => {}\n\n }\n\n Ok(())\n\n }\n\n InfixOperator::GetField(_) => {\n\n todo!()\n\n }\n\n InfixOperator::GetFieldNullable(_) => {\n\n todo!()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "libranoc/src/codegen/walker/statement/expression/operator/infix/mod.rs", "rank": 77, "score": 27187.834041077655 }, { "content": "use wasm_encoder::Instruction;\n\n\n\nuse crate::{\n\n codegen::*,\n\n core::ast::{Expression, FunctionCall, Name},\n\n};\n\n\n\nimpl<'a> Walker<FunctionCall> for Context<'a> {\n\n fn walk(&mut self, operator: FunctionCall) -> Result<(), Error> {\n\n for params in &operator.1 {\n\n self.walk(params.clone())?;\n\n }\n\n if let Expression::Name(Name::Ident(name)) = operator.0.as_ref() {\n\n let id = self.resolve(&name.content, name.span.clone())?;\n\n self.instructions.push(Instruction::Call(id));\n\n } else {\n\n return Err(Error::unimplemented(&operator));\n\n }\n\n // TODO\n\n Ok(())\n\n }\n\n}\n", "file_path": "libranoc/src/codegen/walker/statement/expression/operator/postfix/function_call.rs", "rank": 78, "score": 26476.583502653353 }, { "content": "mod parse;\n\nmod tokenize;\n\n\n\npub use parse::parse;\n\npub(crate) use parse::Error;\n\npub use tokenize::{create_tokenizer, tokenize, Span, Spanned, Token, TokenKind};\n", "file_path": "libranoc/src/syntax/mod.rs", "rank": 79, "score": 15.007049701602877 }, { "content": "use std::{iter::Enumerate, ops::RangeFrom, vec::IntoIter};\n\n\n\nuse nom::{InputIter, InputLength, InputTake, Needed, Slice};\n\n\n\nuse crate::syntax::Token;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct ParseInput {\n\n pub(crate) tokens: Vec<Token>,\n\n pub(crate) binding_power: u8,\n\n}\n\n\n\nimpl ParseInput {\n\n pub(crate) fn new(tokens: Vec<Token>) -> Self {\n\n ParseInput {\n\n tokens,\n\n binding_power: 0,\n\n }\n\n }\n\n\n", "file_path": "libranoc/src/syntax/parse/nom/input.rs", "rank": 80, "score": 12.953868676210824 }, { "content": "use std::fmt;\n\n\n\nuse thiserror::Error;\n\n\n\nuse crate::syntax::{Span, Spanned, Token};\n\n\n\nuse super::ast::Type;\n\n\n\n#[derive(Debug)]\n\n#[repr(u16)]\n\npub enum ErrorCode {\n\n SyntaxError = 0001,\n\n Redefined = 0002,\n\n UndefinedSymbol = 0003,\n\n MismatchedType = 0004,\n\n Unimplemented = 0005,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Location {\n", "file_path": "libranoc/src/core/error.rs", "rank": 81, "score": 11.760793621052068 }, { "content": "use crate::core::ast::Module;\n\n\n\nmod fragment;\n\nmod nom;\n\nmod statement;\n\n\n\npub(super) use self::nom::*;\n\npub(super) use fragment::*;\n\npub(super) use statement::*;\n\n\n\npub use crate::syntax::{\n\n parse::nom::{Error, ParseResult},\n\n Token, TokenKind,\n\n};\n\n\n", "file_path": "libranoc/src/syntax/parse/mod.rs", "rank": 82, "score": 11.569061066066565 }, { "content": "use crate::{core::ast::*, syntax::parse::*};\n\n\n\nmod group_tuple;\n\nmod r#if;\n\nmod literal;\n\nmod name;\n\nmod operator;\n\n\n\npub use group_tuple::*;\n\npub use literal::*;\n\npub use name::*;\n\npub use operator::*;\n\npub use r#if::*;\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/mod.rs", "rank": 83, "score": 11.547353427674722 }, { "content": "use std::fmt;\n\n\n\nuse crate::syntax::{Span, Spanned, Token};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Module {\n\n pub(crate) nodes: Vec<Node>,\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Node {\n\n Directive,\n\n Statement(Statement),\n\n}\n\n\n\n// TODO: 
directives like #![deny(unused_variable)]\n\n#[derive(Debug, PartialEq)]\n\npub struct Directive {}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n", "file_path": "libranoc/src/core/ast.rs", "rank": 84, "score": 11.337629525040002 }, { "content": "use crate::core::ast::Module;\n\n\n\nmod context;\n\nmod walker;\n\n\n\npub(super) use crate::core::Error;\n\npub(super) use context::*;\n\npub(super) use walker::*;\n\n\n", "file_path": "libranoc/src/codegen/mod.rs", "rank": 85, "score": 9.451803354440148 }, { "content": "use nom::{\n\n error::{ErrorKind, ParseError},\n\n Err, IResult, InputIter, InputTake, Slice,\n\n};\n\n\n\nuse crate::syntax::{parse::nom::ParseInput, Token, TokenKind};\n\n\n\npub use ::nom::{\n\n branch::alt,\n\n combinator::{all_consuming, cut, map, opt},\n\n multi::{fold_many0, many0, separated_list0, separated_list1},\n\n sequence::{delimited, preceded, terminated, tuple},\n\n};\n\n\n\n#[inline(always)]\n", "file_path": "libranoc/src/syntax/parse/nom/util.rs", "rank": 86, "score": 9.446259698418364 }, { "content": "\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Index(pub Box<Expression>, pub Vec<Expression>);\n\n\n\nimpl Spanned for Index {\n\n fn span(&self) -> crate::syntax::Span {\n\n self.0.span().joined(&self.1.span())\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct FunctionCall(pub Box<Expression>, pub Vec<Expression>);\n\n\n\nimpl Spanned for FunctionCall {\n\n fn span(&self) -> crate::syntax::Span {\n\n self.0.span().joined(&self.1.span())\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "libranoc/src/core/ast.rs", "rank": 87, "score": 9.009707646118505 }, { "content": "use crate::syntax::{parse::*, TokenKind};\n\n\n", "file_path": "libranoc/src/syntax/parse/fragment/identifier.rs", "rank": 88, "score": 8.786017542361808 }, { "content": "use crate::{\n\n core::ast::{Node, Statement},\n\n syntax::parse::*,\n\n};\n\n\n\nmod declaration;\n\nmod expression;\n\n\n\npub use declaration::*;\n\npub use expression::*;\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/mod.rs", "rank": 89, "score": 8.603720058769618 }, { "content": " pub body: Box<Block>,\n\n pub else_part: Option<Else>,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Else {\n\n If(Token, Box<If>),\n\n Block(Token, Box<Block>),\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Block {\n\n pub curly_bracket_open_token: Token,\n\n pub body: Vec<Statement>,\n\n pub last_expression: Option<Expression>,\n\n pub curly_bracket_close_token: Token,\n\n}\n", "file_path": "libranoc/src/core/ast.rs", "rank": 90, "score": 8.416602125414752 }, { "content": " pub(crate) fn with_binding_power(self, binding_power: u8) -> Self {\n\n ParseInput {\n\n tokens: self.tokens,\n\n binding_power,\n\n }\n\n }\n\n}\n\n\n\nimpl InputLength for ParseInput {\n\n #[inline]\n\n fn input_len(&self) -> usize {\n\n self.tokens.len()\n\n }\n\n}\n\n\n\nimpl InputIter for ParseInput {\n\n type Item = Token;\n\n type Iter = Enumerate<Self::IterElem>;\n\n type IterElem = IntoIter<Token>;\n\n\n", "file_path": "libranoc/src/syntax/parse/nom/input.rs", "rank": 91, "score": 8.329839593421521 }, { "content": "use crate::{\n\n core::ast::{Declaration, Statement},\n\n syntax::parse::*,\n\n};\n\n\n\nmod function;\n\n\n\npub use function::*;\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/declaration/mod.rs", "rank": 92, "score": 8.18005150394447 }, { "content": " if operator.left_binding_power < i.binding_power {\n\n return err_tag(i);\n\n }\n\n\n\n let original_binding_power 
= i.binding_power;\n\n let (i, tails) = (operator.tails)(i.with_binding_power(0))?;\n\n let (i, _) = (operator.close)(i.with_binding_power(original_binding_power))?;\n\n let constructor = operator.constructor;\n\n\n\n let transformer: Transformer = Box::new(move |lhs| {\n\n Expression::Operator(Operator::Postfix(constructor(lhs, tails)))\n\n });\n\n Ok((i, transformer))\n\n },\n\n |i| {\n\n let (i, operator) = parse_infix_operator(i)?;\n\n if operator.left_binding_power < i.binding_power {\n\n return err_tag(i);\n\n }\n\n\n", "file_path": "libranoc/src/syntax/parse/statement/expression/mod.rs", "rank": 93, "score": 7.998076469986806 }, { "content": " pub name: Token,\n\n // pub type_parameters: Vec<TypeParameter>,\n\n pub parameters: Vec<(Pattern, Type)>,\n\n pub return_type: Type,\n\n // pub where_clauses: Vec<WhereClause>,\n\n pub body: Option<Block>,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Expression {\n\n Match,\n\n Closure,\n\n Literal(Literal),\n\n Path,\n\n Array,\n\n Tuple(Vec<Expression>),\n\n Init,\n\n Operator(Operator),\n\n Name(Name),\n\n If(If),\n", "file_path": "libranoc/src/core/ast.rs", "rank": 94, "score": 7.369866080342886 }, { "content": "mod identifier;\n\nmod name;\n\nmod path;\n\nmod pattern;\n\nmod block;\n\nmod r#type;\n\n\n\npub use identifier::*;\n\npub use name::*;\n\npub use path::*;\n\npub use pattern::*;\n\npub use r#type::*;\n\npub use block::*;\n", "file_path": "libranoc/src/syntax/parse/fragment/mod.rs", "rank": 95, "score": 7.168844752548319 }, { "content": "mod module;\n\nmod statement;\n\n\n\npub use module::*;\n\npub use statement::*;\n\n\n\nuse super::Error;\n\n\n\npub(crate) trait Walker<T> {\n\n fn walk(&mut self, params: T) -> Result<(), Error>;\n\n}\n\n\n\nimpl<T, U> Walker<Box<T>> for U\n\nwhere\n\n T: Clone,\n\n U: Walker<T>,\n\n{\n\n fn walk(&mut self, params: Box<T>) -> Result<(), Error> {\n\n self.walk(*params)\n\n }\n", "file_path": "libranoc/src/codegen/walker/mod.rs", "rank": 96, "score": 7.057010467169393 }, { "content": "use std::collections::{HashMap, VecDeque};\n\n\n\nuse wasm_encoder::{\n\n CodeSection, DataSection, EntityType, Export, ExportSection, Function, FunctionSection,\n\n GlobalType, ImportSection, Instruction, Limits, MemoryType, Module, TableType, TypeSection,\n\n ValType,\n\n};\n\n\n\nuse crate::{\n\n core::{ast::Type, Error},\n\n syntax::{Span, Token},\n\n};\n\n\n\npub struct Context<'a> {\n\n import_section: ImportSection,\n\n import_index_function: u32,\n\n import_index_table: u32,\n\n import_index_memory: u32,\n\n import_index_global: u32,\n\n import_index_instance: u32,\n", "file_path": "libranoc/src/codegen/context.rs", "rank": 97, "score": 6.987973112228251 }, { "content": "}\n\n\n\nimpl Spanned for Expression {\n\n fn span(&self) -> crate::syntax::Span {\n\n match self {\n\n _ => todo!(\"SPAN PLEASE\"),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Literal {\n\n String(String),\n\n Character(String),\n\n Integer(Integer),\n\n Decimal(String),\n\n Boolean(String),\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n", "file_path": "libranoc/src/core/ast.rs", "rank": 98, "score": 6.881604659496782 }, { "content": "use crate::{codegen::*, core::ast::Statement};\n\n\n\nmod declaration;\n\nmod expression;\n\n\n\npub use declaration::*;\n\npub use expression::*;\n\nuse wasm_encoder::Instruction;\n\n\n\nimpl<'a> Walker<Statement> for Context<'a> {\n\n fn walk(&mut self, statement: Statement) -> Result<(), Error> {\n\n match statement {\n\n Statement::Declaration(declaration) => 
self.walk(declaration),\n\n Statement::Expression(expression) => {\n\n self.walk(expression)?;\n\n self.instructions.push(Instruction::Drop);\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n", "file_path": "libranoc/src/codegen/walker/statement/mod.rs", "rank": 99, "score": 6.691506755828764 } ]
Rust
coresimd/mod.rs
peterhj/stdsimd-nvptx
ee6e4c833e4cc76ab437e0f5d0c4899ad7138478
#[macro_use] mod macros; mod simd; #[stable(feature = "simd_arch", since = "1.27.0")] pub mod arch { #[cfg(any(target_arch = "x86", dox))] #[doc(cfg(target_arch = "x86"))] #[stable(feature = "simd_x86", since = "1.27.0")] pub mod x86 { #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86::*; } #[cfg(any(target_arch = "x86_64", dox))] #[doc(cfg(target_arch = "x86_64"))] #[stable(feature = "simd_x86", since = "1.27.0")] pub mod x86_64 { #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86::*; #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86_64::*; } #[cfg(any(target_arch = "arm", dox))] #[doc(cfg(target_arch = "arm"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod arm { pub use coresimd::arm::*; } #[cfg(any(target_arch = "aarch64", dox))] #[doc(cfg(target_arch = "aarch64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod aarch64 { pub use coresimd::aarch64::*; pub use coresimd::arm::*; } #[cfg(any(target_arch = "wasm32", dox))] #[doc(cfg(target_arch = "wasm32"))] #[stable(feature = "simd_wasm32", since = "1.33.0")] pub mod wasm32 { #[stable(feature = "simd_wasm32", since = "1.33.0")] pub use coresimd::wasm32::*; } #[cfg(any(target_arch = "mips", dox))] #[doc(cfg(target_arch = "mips"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod mips { pub use coresimd::mips::*; } #[cfg(any(target_arch = "mips64", dox))] #[doc(cfg(target_arch = "mips64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod mips64 { pub use coresimd::mips::*; } #[cfg(any(target_arch = "powerpc", dox))] #[doc(cfg(target_arch = "powerpc"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod powerpc { pub use coresimd::powerpc::*; } #[cfg(any(target_arch = "powerpc64", dox))] #[doc(cfg(target_arch = "powerpc64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod powerpc64 { pub use coresimd::powerpc64::*; } #[cfg(any(target_arch = "nvptx", target_arch = "nvptx64", dox))] #[doc(cfg(any(target_arch = "nvptx", target_arch = "nvptx64")))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod nvptx { pub use coresimd::nvptx::*; } } mod simd_llvm; #[cfg(any(target_arch = "x86", target_arch = "x86_64", dox))] #[doc(cfg(any(target_arch = "x86", target_arch = "x86_64")))] mod x86; #[cfg(any(target_arch = "x86_64", dox))] #[doc(cfg(target_arch = "x86_64"))] mod x86_64; #[cfg(any(target_arch = "aarch64", dox))] #[doc(cfg(target_arch = "aarch64"))] mod aarch64; #[cfg(any(target_arch = "arm", target_arch = "aarch64", dox))] #[doc(cfg(any(target_arch = "arm", target_arch = "aarch64")))] mod arm; #[cfg(any(target_arch = "wasm32", dox))] #[doc(cfg(target_arch = "wasm32"))] mod wasm32; #[cfg(any(target_arch = "mips", target_arch = "mips64", dox))] #[doc(cfg(any(target_arch = "mips", target_arch = "mips64")))] mod mips; #[cfg(any(target_arch = "powerpc", target_arch = "powerpc64", dox))] #[doc(cfg(any(target_arch = "powerpc", target_arch = "powerpc64")))] mod powerpc; #[cfg(any(target_arch = "powerpc64", dox))] #[doc(cfg(target_arch = "powerpc64"))] mod powerpc64; #[cfg(any(target_arch = "nvptx", target_arch = "nvptx64", dox))] #[doc(cfg(any(target_arch = "nvptx", target_arch = "nvptx64")))] mod nvptx;
#[macro_use] mod macros; mod simd; #[stable(feature = "simd_arch", since = "1.27.0")] pub mod arch { #[cfg(any(target_arch = "x86", dox))] #[doc(cfg(target_arch = "x86"))] #[stable(feature = "simd_x86", since = "1.27.0")] pub mod x86 { #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86::*; }
t_arch = "nvptx64")))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod nvptx { pub use coresimd::nvptx::*; } } mod simd_llvm; #[cfg(any(target_arch = "x86", target_arch = "x86_64", dox))] #[doc(cfg(any(target_arch = "x86", target_arch = "x86_64")))] mod x86; #[cfg(any(target_arch = "x86_64", dox))] #[doc(cfg(target_arch = "x86_64"))] mod x86_64; #[cfg(any(target_arch = "aarch64", dox))] #[doc(cfg(target_arch = "aarch64"))] mod aarch64; #[cfg(any(target_arch = "arm", target_arch = "aarch64", dox))] #[doc(cfg(any(target_arch = "arm", target_arch = "aarch64")))] mod arm; #[cfg(any(target_arch = "wasm32", dox))] #[doc(cfg(target_arch = "wasm32"))] mod wasm32; #[cfg(any(target_arch = "mips", target_arch = "mips64", dox))] #[doc(cfg(any(target_arch = "mips", target_arch = "mips64")))] mod mips; #[cfg(any(target_arch = "powerpc", target_arch = "powerpc64", dox))] #[doc(cfg(any(target_arch = "powerpc", target_arch = "powerpc64")))] mod powerpc; #[cfg(any(target_arch = "powerpc64", dox))] #[doc(cfg(target_arch = "powerpc64"))] mod powerpc64; #[cfg(any(target_arch = "nvptx", target_arch = "nvptx64", dox))] #[doc(cfg(any(target_arch = "nvptx", target_arch = "nvptx64")))] mod nvptx;
#[cfg(any(target_arch = "x86_64", dox))] #[doc(cfg(target_arch = "x86_64"))] #[stable(feature = "simd_x86", since = "1.27.0")] pub mod x86_64 { #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86::*; #[stable(feature = "simd_x86", since = "1.27.0")] pub use coresimd::x86_64::*; } #[cfg(any(target_arch = "arm", dox))] #[doc(cfg(target_arch = "arm"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod arm { pub use coresimd::arm::*; } #[cfg(any(target_arch = "aarch64", dox))] #[doc(cfg(target_arch = "aarch64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod aarch64 { pub use coresimd::aarch64::*; pub use coresimd::arm::*; } #[cfg(any(target_arch = "wasm32", dox))] #[doc(cfg(target_arch = "wasm32"))] #[stable(feature = "simd_wasm32", since = "1.33.0")] pub mod wasm32 { #[stable(feature = "simd_wasm32", since = "1.33.0")] pub use coresimd::wasm32::*; } #[cfg(any(target_arch = "mips", dox))] #[doc(cfg(target_arch = "mips"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod mips { pub use coresimd::mips::*; } #[cfg(any(target_arch = "mips64", dox))] #[doc(cfg(target_arch = "mips64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod mips64 { pub use coresimd::mips::*; } #[cfg(any(target_arch = "powerpc", dox))] #[doc(cfg(target_arch = "powerpc"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod powerpc { pub use coresimd::powerpc::*; } #[cfg(any(target_arch = "powerpc64", dox))] #[doc(cfg(target_arch = "powerpc64"))] #[unstable(feature = "stdsimd", issue = "27731")] pub mod powerpc64 { pub use coresimd::powerpc64::*; } #[cfg(any(target_arch = "nvptx", target_arch = "nvptx64", dox))] #[doc(cfg(any(target_arch = "nvptx", targe
random
[ { "content": "#[proc_macro_attribute]\n\npub fn simd_test(\n\n attr: proc_macro::TokenStream,\n\n item: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let tokens = TokenStream::from(attr).into_iter().collect::<Vec<_>>();\n\n if tokens.len() != 3 {\n\n panic!(\"expected #[simd_test(enable = \\\"feature\\\")]\");\n\n }\n\n match &tokens[0] {\n\n TokenTree::Ident(tt) if *tt == \"enable\" => {}\n\n _ => panic!(\"expected #[simd_test(enable = \\\"feature\\\")]\"),\n\n }\n\n match &tokens[1] {\n\n TokenTree::Punct(tt) if tt.as_char() == '=' => {}\n\n _ => panic!(\"expected #[simd_test(enable = \\\"feature\\\")]\"),\n\n }\n\n let enable_feature = match &tokens[2] {\n\n TokenTree::Literal(tt) => tt.to_string(),\n\n _ => panic!(\"expected #[simd_test(enable = \\\"feature\\\")]\"),\n\n };\n", "file_path": "crates/simd-test-macro/src/lib.rs", "rank": 0, "score": 142052.43984034887 }, { "content": "#[inline]\n\npub fn check_for(x: Feature) -> bool {\n\n cache::test(x as u32, detect_features)\n\n}\n\n\n\n/// Run-time feature detection on x86 works by using the CPUID instruction.\n\n///\n\n/// The [CPUID Wikipedia page][wiki_cpuid] contains\n\n/// all the information about which flags to set to query which values, and in\n\n/// which registers these are reported.\n\n///\n\n/// The definitive references are:\n\n/// - [Intel 64 and IA-32 Architectures Software Developer's Manual Volume 2:\n\n/// Instruction Set Reference, A-Z][intel64_ref].\n\n/// - [AMD64 Architecture Programmer's Manual, Volume 3: General-Purpose and\n\n/// System Instructions][amd64_ref].\n\n///\n\n/// [wiki_cpuid]: https://en.wikipedia.org/wiki/CPUID\n\n/// [intel64_ref]: http://www.intel.de/content/dam/www/public/us/en/documents/manuals/64-ia-32-architectures-software-developer-instruction-set-reference-manual-325383.pdf\n\n/// [amd64_ref]: http://support.amd.com/TechDocs/24594.pdf\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 1, "score": 116225.35862963414 }, { "content": "#[inline]\n\npub fn has_cpuid() -> bool {\n\n #[cfg(target_env = \"sgx\")]\n\n {\n\n false\n\n }\n\n #[cfg(all(not(target_env = \"sgx\"), target_arch = \"x86_64\"))]\n\n {\n\n true\n\n }\n\n #[cfg(all(not(target_env = \"sgx\"), target_arch = \"x86\"))]\n\n {\n\n // Optimization for i586 and i686 Rust targets which SSE enabled\n\n // and support cpuid:\n\n #[cfg(target_feature = \"sse\")]\n\n {\n\n true\n\n }\n\n\n\n // If SSE is not enabled, detect whether cpuid is available:\n\n #[cfg(not(target_feature = \"sse\"))]\n", "file_path": "coresimd/x86/cpuid.rs", "rank": 2, "score": 94770.24340901634 }, { "content": "#[proc_macro_attribute]\n\npub fn assert_instr(\n\n attr: proc_macro::TokenStream,\n\n item: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let invoc = match syn::parse::<Invoc>(attr) {\n\n Ok(s) => s,\n\n Err(e) => return e.to_compile_error().into(),\n\n };\n\n let item = match syn::parse::<syn::Item>(item) {\n\n Ok(s) => s,\n\n Err(e) => return e.to_compile_error().into(),\n\n };\n\n let func = match item {\n\n syn::Item::Fn(ref f) => f,\n\n _ => panic!(\"must be attached to a function\"),\n\n };\n\n\n\n let instr = &invoc.instr;\n\n let name = &func.ident;\n\n\n", "file_path": "crates/assert-instr-macro/src/lib.rs", "rank": 3, "score": 94047.43202566548 }, { "content": "#[stable(feature = \"simd_x86\", since = \"1.27.0\")]\n\n#[allow_internal_unstable]\n\nmacro_rules! 
is_x86_feature_detected {\n\n (\"aes\") => {\n\n cfg!(target_feature = \"aes\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::aes) };\n\n (\"pclmulqdq\") => {\n\n cfg!(target_feature = \"pclmulqdq\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::pclmulqdq) };\n\n (\"rdrand\") => {\n\n cfg!(target_feature = \"rdrand\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::rdrand) };\n\n (\"rdseed\") => {\n\n cfg!(target_feature = \"rdseed\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::rdseed) };\n\n (\"tsc\") => {\n\n cfg!(target_feature = \"tsc\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::tsc) };\n\n (\"mmx\") => {\n\n cfg!(target_feature = \"mmx\") || $crate::arch::detect::check_for(\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 4, "score": 89107.43270254978 }, { "content": "//! This module implements minimal run-time feature detection for x86.\n\n//!\n\n//! The features are detected using the `detect_features` function below.\n\n//! This function uses the CPUID instruction to read the feature flags from the\n\n//! CPU and encodes them in an `usize` where each bit position represents\n\n//! whether a feature is available (bit is set) or unavaiable (bit is cleared).\n\n//!\n\n//! The enum `Feature` is used to map bit positions to feature names, and the\n\n//! the `__crate::arch::detect::check_for!` macro is used to map string literals (e.g.\n\n//! \"avx\") to these bit positions (e.g. `Feature::avx`).\n\n//!\n\n//!\n\n//! The run-time feature detection is performed by the\n\n//! `__crate::arch::detect::check_for(Feature) -> bool` function. On its first call,\n\n//! this functions queries the CPU for the available features and stores them\n\n//! in a global `AtomicUsize` variable. The query is performed by just checking\n\n//! whether the feature bit in this global variable is set or cleared.\n\n\n\n/// A macro to test at *runtime* whether a CPU feature is available on\n\n/// x86/x86-64 platforms.\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 5, "score": 89106.48428982911 }, { "content": " (\"xsaves\") => {\n\n cfg!(target_feature = \"xsaves\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::xsaves)\n\n };\n\n (\"xsavec\") => {\n\n cfg!(target_feature = \"xsavec\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::xsavec)\n\n };\n\n ($t:tt) => {\n\n compile_error!(concat!(\"unknown target feature: \", $t))\n\n };\n\n}\n\n\n\n/// X86 CPU Feature enum. 
Each variant denotes a position in a bitset for a\n\n/// particular feature.\n\n///\n\n/// This is an unstable implementation detail subject to change.\n\n#[allow(non_camel_case_types)]\n\n#[repr(u8)]\n\n#[doc(hidden)]\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 6, "score": 89098.04719581746 }, { "content": "#[unstable(feature = \"stdsimd_internal\", issue = \"0\")]\n\npub enum Feature {\n\n /// AES (Advanced Encryption Standard New Instructions AES-NI)\n\n aes,\n\n /// CLMUL (Carry-less Multiplication)\n\n pclmulqdq,\n\n /// RDRAND\n\n rdrand,\n\n /// RDSEED\n\n rdseed,\n\n /// TSC (Time Stamp Counter)\n\n tsc,\n\n /// MMX\n\n mmx,\n\n /// SSE (Streaming SIMD Extensions)\n\n sse,\n\n /// SSE2 (Streaming SIMD Extensions 2)\n\n sse2,\n\n /// SSE3 (Streaming SIMD Extensions 3)\n\n sse3,\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 7, "score": 89097.47364526291 }, { "content": " $crate::arch::detect::Feature::mmx) };\n\n (\"sse\") => {\n\n cfg!(target_feature = \"sse\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::sse) };\n\n (\"sse2\") => {\n\n cfg!(target_feature = \"sse2\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::sse2)\n\n };\n\n (\"sse3\") => {\n\n cfg!(target_feature = \"sse3\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::sse3)\n\n };\n\n (\"ssse3\") => {\n\n cfg!(target_feature = \"ssse3\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::ssse3)\n\n };\n\n (\"sse4.1\") => {\n\n cfg!(target_feature = \"sse4.1\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::sse4_1)\n\n };\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 8, "score": 89095.54169387835 }, { "content": " (\"tbm\") => {\n\n cfg!(target_feature = \"tbm\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::tbm)\n\n };\n\n (\"popcnt\") => {\n\n cfg!(target_feature = \"popcnt\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::popcnt)\n\n };\n\n (\"fxsr\") => {\n\n cfg!(target_feature = \"fxsr\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::fxsr)\n\n };\n\n (\"xsave\") => {\n\n cfg!(target_feature = \"xsave\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::xsave)\n\n };\n\n (\"xsaveopt\") => {\n\n cfg!(target_feature = \"xsaveopt\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::xsaveopt)\n\n };\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 9, "score": 89095.50853971553 }, { "content": " (\"sse4.2\") => {\n\n cfg!(target_feature = \"sse4.2\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::sse4_2)\n\n };\n\n (\"sse4a\") => {\n\n cfg!(target_feature = \"sse4a\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::sse4a)\n\n };\n\n (\"sha\") => {\n\n cfg!(target_feature = \"sha\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::sha)\n\n };\n\n (\"avx\") => {\n\n cfg!(target_feature = \"avx\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx)\n\n };\n\n (\"avx2\") => {\n\n cfg!(target_feature = \"avx2\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx2)\n\n };\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 10, "score": 89095.50853971553 }, { "content": " (\"avx512f\") => {\n\n cfg!(target_feature = \"avx512f\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512f)\n\n };\n\n 
(\"avx512cd\") => {\n\n cfg!(target_feature = \"avx512cd\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512cd)\n\n };\n\n (\"avx512er\") => {\n\n cfg!(target_feature = \"avx512er\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512er)\n\n };\n\n (\"avx512pf\") => {\n\n cfg!(target_feature = \"avx512pf\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512pf)\n\n };\n\n (\"avx512bw\") => {\n\n cfg!(target_feature = \"avx512bw\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512bw)\n\n };\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 11, "score": 89095.50853971553 }, { "content": " (\"fma\") => {\n\n cfg!(target_feature = \"fma\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::fma)\n\n };\n\n (\"bmi1\") => {\n\n cfg!(target_feature = \"bmi1\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::bmi)\n\n };\n\n (\"bmi2\") => {\n\n cfg!(target_feature = \"bmi2\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::bmi2)\n\n };\n\n (\"abm\") => {\n\n cfg!(target_feature = \"abm\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::abm)\n\n };\n\n (\"lzcnt\") => {\n\n cfg!(target_feature = \"lzcnt\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::abm)\n\n };\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 12, "score": 89095.50853971553 }, { "content": " (\"avx512dq\") => {\n\n cfg!(target_feature = \"avx512dq\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512dq)\n\n };\n\n (\"avx512vl\") => {\n\n cfg!(target_Feature = \"avx512vl\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512vl)\n\n };\n\n (\"avx512ifma\") => {\n\n cfg!(target_feature = \"avx512ifma\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512_ifma)\n\n };\n\n (\"avx512vbmi\") => {\n\n cfg!(target_feature = \"avx512vbmi\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512_vbmi)\n\n };\n\n (\"avx512vpopcntdq\") => {\n\n cfg!(target_feature = \"avx512vpopcntdq\") || $crate::arch::detect::check_for(\n\n $crate::arch::detect::Feature::avx512_vpopcntdq)\n\n };\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 13, "score": 89095.48090821336 }, { "content": "///\n\n/// This macro is provided in the standard library and will detect at runtime\n\n/// whether the specified CPU feature is detected. This does *not* resolve at\n\n/// compile time unless the specified feature is already enabled for the entire\n\n/// crate. Runtime detection currently relies mostly on the `cpuid` instruction.\n\n///\n\n/// This macro only takes one argument which is a string literal of the feature\n\n/// being tested for. The feature names supported are the lowercase versions of\n\n/// the ones defined by Intel in [their documentation][docs].\n\n///\n\n/// ## Supported arguments\n\n///\n\n/// This macro supports the same names that `#[target_feature]` supports. Unlike\n\n/// `#[target_feature]`, however, this macro does not support names separated\n\n/// with a comma. 
Instead testing for multiple features must be done through\n\n/// separate macro invocations for now.\n\n///\n\n/// Supported arguments are:\n\n///\n\n/// * `\"aes\"`\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 14, "score": 89094.66121162091 }, { "content": " /// SSSE3 (Supplemental Streaming SIMD Extensions 3)\n\n ssse3,\n\n /// SSE4.1 (Streaming SIMD Extensions 4.1)\n\n sse4_1,\n\n /// SSE4.2 (Streaming SIMD Extensions 4.2)\n\n sse4_2,\n\n /// SSE4a (Streaming SIMD Extensions 4a)\n\n sse4a,\n\n /// SHA\n\n sha,\n\n /// AVX (Advanced Vector Extensions)\n\n avx,\n\n /// AVX2 (Advanced Vector Extensions 2)\n\n avx2,\n\n /// AVX-512 F (Foundation)\n\n avx512f,\n\n /// AVX-512 CD (Conflict Detection Instructions)\n\n avx512cd,\n\n /// AVX-512 ER (Exponential and Reciprocal Instructions)\n\n avx512er,\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 15, "score": 89094.22592577092 }, { "content": "/// * `\"avx512dq\"`\n\n/// * `\"avx512vl\"`\n\n/// * `\"avx512ifma\"`\n\n/// * `\"avx512vbmi\"`\n\n/// * `\"avx512vpopcntdq\"`\n\n/// * `\"fma\"`\n\n/// * `\"bmi1\"`\n\n/// * `\"bmi2\"`\n\n/// * `\"abm\"`\n\n/// * `\"lzcnt\"`\n\n/// * `\"tbm\"`\n\n/// * `\"popcnt\"`\n\n/// * `\"fxsr\"`\n\n/// * `\"xsave\"`\n\n/// * `\"xsaveopt\"`\n\n/// * `\"xsaves\"`\n\n/// * `\"xsavec\"`\n\n///\n\n/// [docs]: https://software.intel.com/sites/landingpage/IntrinsicsGuide\n\n#[macro_export]\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 16, "score": 89093.19715856569 }, { "content": "/// * `\"pclmulqdq\"`\n\n/// * `\"rdrand\"`\n\n/// * `\"rdseed\"`\n\n/// * `\"tsc\"`\n\n/// * `\"mmx\"`\n\n/// * `\"sse\"`\n\n/// * `\"sse2\"`\n\n/// * `\"sse3\"`\n\n/// * `\"ssse3\"`\n\n/// * `\"sse4.1\"`\n\n/// * `\"sse4.2\"`\n\n/// * `\"sse4a\"`\n\n/// * `\"sha\"`\n\n/// * `\"avx\"`\n\n/// * `\"avx2\"`\n\n/// * `\"avx512f\"`\n\n/// * `\"avx512cd\"`\n\n/// * `\"avx512er\"`\n\n/// * `\"avx512pf\"`\n\n/// * `\"avx512bw\"`\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 17, "score": 89088.75158596122 }, { "content": " /// AVX-512 PF (Prefetch Instructions)\n\n avx512pf,\n\n /// AVX-512 BW (Byte and Word Instructions)\n\n avx512bw,\n\n /// AVX-512 DQ (Doubleword and Quadword)\n\n avx512dq,\n\n /// AVX-512 VL (Vector Length Extensions)\n\n avx512vl,\n\n /// AVX-512 IFMA (Integer Fused Multiply Add)\n\n avx512_ifma,\n\n /// AVX-512 VBMI (Vector Byte Manipulation Instructions)\n\n avx512_vbmi,\n\n /// AVX-512 VPOPCNTDQ (Vector Population Count Doubleword and\n\n /// Quadword)\n\n avx512_vpopcntdq,\n\n /// FMA (Fused Multiply Add)\n\n fma,\n\n /// BMI1 (Bit Manipulation Instructions 1)\n\n bmi,\n\n /// BMI1 (Bit Manipulation Instructions 2)\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 18, "score": 89088.75158596122 }, { "content": " bmi2,\n\n /// ABM (Advanced Bit Manipulation) on AMD / LZCNT (Leading Zero\n\n /// Count) on Intel\n\n abm,\n\n /// TBM (Trailing Bit Manipulation)\n\n tbm,\n\n /// POPCNT (Population Count)\n\n popcnt,\n\n /// FXSR (Floating-point context fast save and restor)\n\n fxsr,\n\n /// XSAVE (Save Processor Extended States)\n\n xsave,\n\n /// XSAVEOPT (Save Processor Extended States Optimized)\n\n xsaveopt,\n\n /// XSAVES (Save Processor Extended States Supervisor)\n\n xsaves,\n\n /// XSAVEC (Save Processor Extended States Compacted)\n\n xsavec,\n\n}\n", "file_path": "stdsimd/arch/detect/arch/x86.rs", "rank": 19, "score": 89088.75158596122 }, { "content": "#[inline]\n\npub fn check_for(_x: Feature) -> bool {\n\n false\n\n}\n", "file_path": 
"stdsimd/arch/detect/os/other.rs", "rank": 20, "score": 88873.52456691823 }, { "content": "mod error_macros;\n\n\n\ncfg_if! {\n\n if #[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))] {\n\n #[path = \"arch/x86.rs\"]\n\n #[macro_use]\n\n mod arch;\n\n } else if #[cfg(target_arch = \"arm\")] {\n\n #[path = \"arch/arm.rs\"]\n\n #[macro_use]\n\n mod arch;\n\n } else if #[cfg(target_arch = \"aarch64\")] {\n\n #[path = \"arch/aarch64.rs\"]\n\n #[macro_use]\n\n mod arch;\n\n } else if #[cfg(target_arch = \"powerpc\")] {\n\n #[path = \"arch/powerpc.rs\"]\n\n #[macro_use]\n\n mod arch;\n\n } else if #[cfg(target_arch = \"powerpc64\")] {\n", "file_path": "stdsimd/arch/detect/mod.rs", "rank": 21, "score": 85461.23567572239 }, { "content": " #[path = \"arch/powerpc64.rs\"]\n\n #[macro_use]\n\n mod arch;\n\n } else if #[cfg(target_arch = \"mips\")] {\n\n #[path = \"arch/mips.rs\"]\n\n #[macro_use]\n\n mod arch;\n\n } else if #[cfg(target_arch = \"mips64\")] {\n\n #[path = \"arch/mips64.rs\"]\n\n #[macro_use]\n\n mod arch;\n\n } else {\n\n // Unimplemented architecture:\n\n mod arch {\n\n pub enum Feature {\n\n Null\n\n }\n\n }\n\n }\n\n}\n", "file_path": "stdsimd/arch/detect/mod.rs", "rank": 22, "score": 85459.53265885898 }, { "content": "pub use self::arch::Feature;\n\n\n\nmod bit;\n\nmod cache;\n\n\n\ncfg_if! {\n\n if #[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))] {\n\n // On x86/x86_64 no OS specific functionality is required.\n\n #[path = \"os/x86.rs\"]\n\n mod os;\n\n } else if #[cfg(target_os = \"linux\")] {\n\n #[path = \"os/linux/mod.rs\"]\n\n mod os;\n\n } else if #[cfg(target_os = \"freebsd\")] {\n\n #[cfg(target_arch = \"aarch64\")]\n\n #[path = \"os/aarch64.rs\"]\n\n mod aarch64;\n\n #[path = \"os/freebsd/mod.rs\"]\n\n mod os;\n\n } else {\n\n #[path = \"os/other.rs\"]\n\n mod os;\n\n }\n\n}\n\npub use self::os::check_for;\n", "file_path": "stdsimd/arch/detect/mod.rs", "rank": 23, "score": 85457.68104178245 }, { "content": "//! This module implements run-time feature detection.\n\n//!\n\n//! The `is_{arch}_feature_detected!(\"feature-name\")` macros take the name of a\n\n//! feature as a string-literal, and return a boolean indicating whether the\n\n//! feature is enabled at run-time or not.\n\n//!\n\n//! These macros do two things:\n\n//! * map the string-literal into an integer stored as a `Feature` enum,\n\n//! * call a `os::check_for(x: Feature)` function that returns `true` if the\n\n//! feature is enabled.\n\n//!\n\n//! The `Feature` enums are also implemented in the `arch/{target_arch}.rs`\n\n//! modules.\n\n//!\n\n//! The `check_for` functions are, in general, Operating System dependent. Most\n\n//! architectures do not allow user-space programs to query the feature bits\n\n//! due to security concerns (x86 is the big exception). These functions are\n\n//! implemented in the `os/{target_os}.rs` modules.\n\n\n\n#[macro_use]\n", "file_path": "stdsimd/arch/detect/mod.rs", "rank": 24, "score": 85447.04567149922 }, { "content": "//! Utility macros.\n\n\n\nmacro_rules! 
constify_imm6 {\n\n ($imm8:expr, $expand:ident) => {\n\n #[allow(overflowing_literals)]\n\n match ($imm8) & 0b1_1111 {\n\n 0 => $expand!(0),\n\n 1 => $expand!(1),\n\n 2 => $expand!(2),\n\n 3 => $expand!(3),\n\n 4 => $expand!(4),\n\n 5 => $expand!(5),\n\n 6 => $expand!(6),\n\n 7 => $expand!(7),\n\n 8 => $expand!(8),\n\n 9 => $expand!(9),\n\n 10 => $expand!(10),\n\n 11 => $expand!(11),\n\n 12 => $expand!(12),\n\n 13 => $expand!(13),\n", "file_path": "coresimd/x86/macros.rs", "rank": 25, "score": 84214.97178198106 }, { "content": "}\n\n\n\nmacro_rules! constify_imm2 {\n\n ($imm8:expr, $expand:ident) => {\n\n #[allow(overflowing_literals)]\n\n match ($imm8) & 0b11 {\n\n 0 => $expand!(0),\n\n 1 => $expand!(1),\n\n 2 => $expand!(2),\n\n _ => $expand!(3),\n\n }\n\n };\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! assert_approx_eq {\n\n ($a:expr, $b:expr, $eps:expr) => {{\n\n let (a, b) = (&$a, &$b);\n\n assert!(\n\n (*a - *b).abs() < $eps,\n", "file_path": "coresimd/x86/macros.rs", "rank": 26, "score": 84214.90963079827 }, { "content": " 14 => $expand!(14),\n\n _ => $expand!(15),\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! constify_imm3 {\n\n ($imm8:expr, $expand:ident) => {\n\n #[allow(overflowing_literals)]\n\n match ($imm8) & 0b111 {\n\n 0 => $expand!(0),\n\n 1 => $expand!(1),\n\n 2 => $expand!(2),\n\n 3 => $expand!(3),\n\n 4 => $expand!(4),\n\n 5 => $expand!(5),\n\n 6 => $expand!(6),\n\n _ => $expand!(7),\n\n }\n\n };\n", "file_path": "coresimd/x86/macros.rs", "rank": 27, "score": 84214.00439930036 }, { "content": "}\n\n\n\nmacro_rules! constify_imm4 {\n\n ($imm8:expr, $expand:ident) => {\n\n #[allow(overflowing_literals)]\n\n match ($imm8) & 0b1111 {\n\n 0 => $expand!(0),\n\n 1 => $expand!(1),\n\n 2 => $expand!(2),\n\n 3 => $expand!(3),\n\n 4 => $expand!(4),\n\n 5 => $expand!(5),\n\n 6 => $expand!(6),\n\n 7 => $expand!(7),\n\n 8 => $expand!(8),\n\n 9 => $expand!(9),\n\n 10 => $expand!(10),\n\n 11 => $expand!(11),\n\n 12 => $expand!(12),\n\n 13 => $expand!(13),\n", "file_path": "coresimd/x86/macros.rs", "rank": 28, "score": 84213.83879545325 }, { "content": " 14 => $expand!(14),\n\n 15 => $expand!(15),\n\n 16 => $expand!(16),\n\n 17 => $expand!(17),\n\n 18 => $expand!(18),\n\n 19 => $expand!(19),\n\n 20 => $expand!(20),\n\n 21 => $expand!(21),\n\n 22 => $expand!(22),\n\n 23 => $expand!(23),\n\n 24 => $expand!(24),\n\n 25 => $expand!(25),\n\n 26 => $expand!(26),\n\n 27 => $expand!(27),\n\n 28 => $expand!(28),\n\n 29 => $expand!(29),\n\n 30 => $expand!(30),\n\n _ => $expand!(31),\n\n }\n\n };\n", "file_path": "coresimd/x86/macros.rs", "rank": 29, "score": 84209.39322284878 }, { "content": " \"assertion failed: `(left !== right)` \\\n\n (left: `{:?}`, right: `{:?}`, expect diff: `{:?}`, real diff: `{:?}`)\",\n\n *a,\n\n *b,\n\n $eps,\n\n (*a - *b).abs()\n\n );\n\n }};\n\n}\n", "file_path": "coresimd/x86/macros.rs", "rank": 30, "score": 84209.39322284878 }, { "content": " /// #[cfg(target_arch = \"x86\")]\n\n /// use std::arch::x86::*;\n\n /// #[cfg(target_arch = \"x86_64\")]\n\n /// use std::arch::x86_64::*;\n\n ///\n\n /// # fn main() {\n\n /// # #[target_feature(enable = \"sse\")]\n\n /// # unsafe fn foo() {\n\n /// let two_zeros = _mm_setzero_pd();\n\n /// let two_ones = _mm_set1_pd(1.0);\n\n /// let two_floats = _mm_set_pd(1.0, 2.0);\n\n /// # }\n\n /// # if is_x86_feature_detected!(\"sse\") { unsafe { foo() } }\n\n /// # }\n\n /// ```\n\n #[stable(feature = \"simd_x86\", since = \"1.27.0\")]\n\n pub struct __m128d(f64, f64);\n\n\n\n /// 256-bit wide integer vector type, x86-specific\n\n ///\n", 
"file_path": "coresimd/x86/mod.rs", "rank": 31, "score": 84144.76700835838 }, { "content": " /// # #[macro_use]\n\n /// # extern crate stdsimd as std;\n\n /// #[cfg(target_arch = \"x86\")]\n\n /// use std::arch::x86::*;\n\n /// #[cfg(target_arch = \"x86_64\")]\n\n /// use std::arch::x86_64::*;\n\n ///\n\n /// # fn main() {\n\n /// # #[target_feature(enable = \"mmx\")]\n\n /// # unsafe fn foo() {\n\n /// let all_bytes_zero = _mm_setzero_si64();\n\n /// let all_bytes_one = _mm_set1_pi8(1);\n\n /// let two_i32 = _mm_set_pi32(1, 2);\n\n /// # }\n\n /// # if is_x86_feature_detected!(\"mmx\") { unsafe { foo() } }\n\n /// # }\n\n /// ```\n\n pub struct __m64(i64);\n\n\n\n /// 128-bit wide integer vector type, x86-specific\n", "file_path": "coresimd/x86/mod.rs", "rank": 32, "score": 84142.28782972285 }, { "content": "//! `x86_64` intrinsics\n\n\n\nmod fxsr;\n\npub use self::fxsr::*;\n\n\n\nmod sse;\n\npub use self::sse::*;\n\n\n\nmod sse2;\n\npub use self::sse2::*;\n\n\n\nmod sse41;\n\npub use self::sse41::*;\n\n\n\nmod sse42;\n\npub use self::sse42::*;\n\n\n\nmod xsave;\n\npub use self::xsave::*;\n\n\n", "file_path": "coresimd/x86_64/mod.rs", "rank": 33, "score": 84142.18639781092 }, { "content": "//! `x86` and `x86_64` intrinsics.\n\n\n\nuse mem;\n\nuse prelude::v1::*;\n\n\n\n#[macro_use]\n\nmod macros;\n\n\n\ntypes! {\n\n /// 64-bit wide integer vector type, x86-specific\n\n ///\n\n /// This type is the same as the `__m64` type defined by Intel,\n\n /// representing a 64-bit SIMD register. Usage of this type typically\n\n /// corresponds to the `mmx` target feature.\n\n ///\n\n /// Internally this type may be viewed as:\n\n ///\n\n /// * `i8x8` - eight `i8` variables packed together\n\n /// * `i16x4` - four `i16` variables packed together\n\n /// * `i32x2` - two `i32` variables packed together\n", "file_path": "coresimd/x86/mod.rs", "rank": 34, "score": 84140.97449419588 }, { "content": " /// \"pd\" which is used for `__m256d`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # #![cfg_attr(not(dox), feature(stdsimd))]\n\n /// # #![cfg_attr(not(dox), no_std)]\n\n /// # #[cfg(not(dox))]\n\n /// # extern crate std as real_std;\n\n /// # #[cfg(not(dox))]\n\n /// # #[macro_use]\n\n /// # extern crate stdsimd as std;\n\n /// #[cfg(target_arch = \"x86\")]\n\n /// use std::arch::x86::*;\n\n /// #[cfg(target_arch = \"x86_64\")]\n\n /// use std::arch::x86_64::*;\n\n ///\n\n /// # fn main() {\n\n /// # #[target_feature(enable = \"avx\")]\n\n /// # unsafe fn foo() {\n", "file_path": "coresimd/x86/mod.rs", "rank": 35, "score": 84139.93337798312 }, { "content": " /// together.\n\n ///\n\n /// Most intrinsics using `__m128` are prefixed with `_mm_` and are\n\n /// suffixed with \"ps\" (or otherwise contain \"ps\"). Not to be confused with\n\n /// \"pd\" which is used for `__m128d`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # #![cfg_attr(not(dox), feature(stdsimd))]\n\n /// # #![cfg_attr(not(dox), no_std)]\n\n /// # #[cfg(not(dox))]\n\n /// # extern crate std as real_std;\n\n /// # #[cfg(not(dox))]\n\n /// # #[macro_use]\n\n /// # extern crate stdsimd as std;\n\n /// #[cfg(target_arch = \"x86\")]\n\n /// use std::arch::x86::*;\n\n /// #[cfg(target_arch = \"x86_64\")]\n\n /// use std::arch::x86_64::*;\n", "file_path": "coresimd/x86/mod.rs", "rank": 36, "score": 84139.78915356667 }, { "content": " /// together.\n\n ///\n\n /// Most intrinsics using `__m256d` are prefixed with `_mm256_` and are\n\n /// suffixed with \"pd\" (or otherwise contain \"pd\"). 
Not to be confused with\n\n /// \"ps\" which is used for `__m256`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # #![cfg_attr(not(dox), feature(stdsimd))]\n\n /// # #![cfg_attr(not(dox), no_std)]\n\n /// # #[cfg(not(dox))]\n\n /// # extern crate std as real_std;\n\n /// # #[cfg(not(dox))]\n\n /// # #[macro_use]\n\n /// # extern crate stdsimd as std;\n\n /// #[cfg(target_arch = \"x86\")]\n\n /// use std::arch::x86::*;\n\n /// #[cfg(target_arch = \"x86_64\")]\n\n /// use std::arch::x86_64::*;\n", "file_path": "coresimd/x86/mod.rs", "rank": 37, "score": 84139.78915356667 }, { "content": " /// integer types tend to correspond to suffixes like \"epi8\" or \"epi32\".\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # #![cfg_attr(not(dox), feature(stdsimd))]\n\n /// # #![cfg_attr(not(dox), no_std)]\n\n /// # #[cfg(not(dox))]\n\n /// # extern crate std as real_std;\n\n /// # #[cfg(not(dox))]\n\n /// # #[macro_use]\n\n /// # extern crate stdsimd as std;\n\n /// #[cfg(target_arch = \"x86\")]\n\n /// use std::arch::x86::*;\n\n /// #[cfg(target_arch = \"x86_64\")]\n\n /// use std::arch::x86_64::*;\n\n ///\n\n /// # fn main() {\n\n /// # #[target_feature(enable = \"sse2\")]\n\n /// # unsafe fn foo() {\n", "file_path": "coresimd/x86/mod.rs", "rank": 38, "score": 84139.19998878015 }, { "content": " /// ```\n\n /// # #![cfg_attr(not(dox), feature(stdsimd))]\n\n /// # #![cfg_attr(not(dox), no_std)]\n\n /// # #[cfg(not(dox))]\n\n /// # extern crate std as real_std;\n\n /// # #[cfg(not(dox))]\n\n /// # #[macro_use]\n\n /// # extern crate stdsimd as std;\n\n /// #[cfg(target_arch = \"x86\")]\n\n /// use std::arch::x86::*;\n\n /// #[cfg(target_arch = \"x86_64\")]\n\n /// use std::arch::x86_64::*;\n\n ///\n\n /// # fn main() {\n\n /// # #[target_feature(enable = \"avx\")]\n\n /// # unsafe fn foo() {\n\n /// let all_bytes_zero = _mm256_setzero_si256();\n\n /// let all_bytes_one = _mm256_set1_epi8(1);\n\n /// let eight_i32 = _mm256_set_epi32(1, 2, 3, 4, 5, 6, 7, 8);\n\n /// # }\n", "file_path": "coresimd/x86/mod.rs", "rank": 39, "score": 84138.6248379131 }, { "content": "mod xsave;\n\npub use self::xsave::*;\n\n\n\nmod sse;\n\npub use self::sse::*;\n\nmod sse2;\n\npub use self::sse2::*;\n\nmod sse3;\n\npub use self::sse3::*;\n\nmod ssse3;\n\npub use self::ssse3::*;\n\nmod sse41;\n\npub use self::sse41::*;\n\nmod sse42;\n\npub use self::sse42::*;\n\nmod avx;\n\npub use self::avx::*;\n\nmod avx2;\n\npub use self::avx2::*;\n\nmod fma;\n", "file_path": "coresimd/x86/mod.rs", "rank": 40, "score": 84138.49838042454 }, { "content": "mod abm;\n\npub use self::abm::*;\n\n\n\nmod avx;\n\npub use self::avx::*;\n\n\n\nmod bmi;\n\npub use self::bmi::*;\n\n\n\nmod bmi2;\n\npub use self::bmi2::*;\n\n\n\nmod avx2;\n\npub use self::avx2::*;\n\n\n\nmod bswap;\n\npub use self::bswap::*;\n\n\n\nmod rdrand;\n\npub use self::rdrand::*;\n", "file_path": "coresimd/x86_64/mod.rs", "rank": 41, "score": 84138.31605609896 }, { "content": " #[inline]\n\n fn as_m512i(self) -> Self {\n\n self\n\n }\n\n}\n\n\n\nmod eflags;\n\npub use self::eflags::*;\n\n\n\nmod fxsr;\n\npub use self::fxsr::*;\n\n\n\nmod bswap;\n\npub use self::bswap::*;\n\n\n\nmod rdtsc;\n\npub use self::rdtsc::*;\n\n\n\nmod cpuid;\n\npub use self::cpuid::*;\n", "file_path": "coresimd/x86/mod.rs", "rank": 42, "score": 84137.84952984899 }, { "content": "mod mmx;\n\npub use self::mmx::*;\n\n\n\nmod pclmulqdq;\n\npub use self::pclmulqdq::*;\n\n\n\nmod aes;\n\npub use self::aes::*;\n\n\n\nmod rdrand;\n\npub use self::rdrand::*;\n\n\n\nmod sha;\n\npub use 
self::sha::*;\n\n\n\n#[cfg(test)]\n\nuse stdsimd_test::assert_instr;\n\n\n\n/// Generates the trap instruction `UD2`\n\n#[cfg_attr(test, assert_instr(ud2))]\n\n#[inline]\n\npub unsafe fn ud2() -> ! {\n\n ::intrinsics::abort()\n\n}\n\n\n\nmod avx512f;\n\npub use self::avx512f::*;\n", "file_path": "coresimd/x86/mod.rs", "rank": 43, "score": 84137.46724969962 }, { "content": "pub use self::fma::*;\n\n\n\nmod abm;\n\npub use self::abm::*;\n\nmod bmi1;\n\npub use self::bmi1::*;\n\n\n\nmod bmi2;\n\npub use self::bmi2::*;\n\n\n\n#[cfg(not(stdsimd_intel_sde))]\n\nmod sse4a;\n\n#[cfg(not(stdsimd_intel_sde))]\n\npub use self::sse4a::*;\n\n\n\n#[cfg(not(stdsimd_intel_sde))]\n\nmod tbm;\n\n#[cfg(not(stdsimd_intel_sde))]\n\npub use self::tbm::*;\n\n\n", "file_path": "coresimd/x86/mod.rs", "rank": 44, "score": 84137.37769076135 }, { "content": " /// # if is_x86_feature_detected!(\"avx\") { unsafe { foo() } }\n\n /// # }\n\n /// ```\n\n #[stable(feature = \"simd_x86\", since = \"1.27.0\")]\n\n pub struct __m256i(i64, i64, i64, i64);\n\n\n\n /// 256-bit wide set of eight `f32` types, x86-specific\n\n ///\n\n /// This type is the same as the `__m256` type defined by Intel,\n\n /// representing a 256-bit SIMD register which internally is consisted of\n\n /// eight packed `f32` instances. Usage of this type typically corresponds\n\n /// to the `avx` and up target features for x86/x86_64.\n\n ///\n\n /// Note that unlike `__m256i`, the integer version of the 256-bit\n\n /// registers, this `__m256` type has *one* interpretation. Each instance\n\n /// of `__m256` always corresponds to `f32x8`, or eight `f32` types packed\n\n /// together.\n\n ///\n\n /// Most intrinsics using `__m256` are prefixed with `_mm256_` and are\n\n /// suffixed with \"ps\" (or otherwise contain \"ps\"). Not to be confused with\n", "file_path": "coresimd/x86/mod.rs", "rank": 45, "score": 84136.84608518695 }, { "content": " /// to the `avx` and up target features for x86/x86_64.\n\n ///\n\n /// Note that unlike `__m512i`, the integer version of the 512-bit\n\n /// registers, this `__m512d` type has *one* interpretation. Each instance\n\n /// of `__m512d` always corresponds to `f64x4`, or eight `f64` types packed\n\n /// together.\n\n ///\n\n /// Most intrinsics using `__m512d` are prefixed with `_mm512_` and are\n\n /// suffixed with \"pd\" (or otherwise contain \"pd\"). Not to be confused with\n\n /// \"ps\" which is used for `__m512`.\n\n pub struct __m512d(f64, f64, f64, f64, f64, f64, f64, f64);\n\n}\n\n\n\n/// The `__mmask16` type used in AVX-512 intrinsics, a 16-bit integer\n\n#[allow(non_camel_case_types)]\n\npub type __mmask16 = i16;\n\n\n\n#[cfg(test)]\n\nmod test;\n\n#[cfg(test)]\n", "file_path": "coresimd/x86/mod.rs", "rank": 46, "score": 84135.34513894208 }, { "content": " ///\n\n /// # fn main() {\n\n /// # #[target_feature(enable = \"avx\")]\n\n /// # unsafe fn foo() {\n\n /// let four_zeros = _mm256_setzero_pd();\n\n /// let four_ones = _mm256_set1_pd(1.0);\n\n /// let four_floats = _mm256_set_pd(1.0, 2.0, 3.0, 4.0);\n\n /// # }\n\n /// # if is_x86_feature_detected!(\"avx\") { unsafe { foo() } }\n\n /// # }\n\n /// ```\n\n #[stable(feature = \"simd_x86\", since = \"1.27.0\")]\n\n pub struct __m256d(f64, f64, f64, f64);\n\n\n\n /// 512-bit wide integer vector type, x86-specific\n\n ///\n\n /// This type is the same as the `__m512i` type defined by Intel,\n\n /// representing a 512-bit SIMD register. 
Usage of this type typically\n\n /// corresponds to the `avx512*` and up target features for x86/x86_64.\n\n ///\n", "file_path": "coresimd/x86/mod.rs", "rank": 47, "score": 84134.88752616753 }, { "content": " ///\n\n /// # fn main() {\n\n /// # #[target_feature(enable = \"sse\")]\n\n /// # unsafe fn foo() {\n\n /// let four_zeros = _mm_setzero_ps();\n\n /// let four_ones = _mm_set1_ps(1.0);\n\n /// let four_floats = _mm_set_ps(1.0, 2.0, 3.0, 4.0);\n\n /// # }\n\n /// # if is_x86_feature_detected!(\"sse\") { unsafe { foo() } }\n\n /// # }\n\n /// ```\n\n #[stable(feature = \"simd_x86\", since = \"1.27.0\")]\n\n pub struct __m128(f32, f32, f32, f32);\n\n\n\n /// 128-bit wide set of two `f64` types, x86-specific\n\n ///\n\n /// This type is the same as the `__m128d` type defined by Intel,\n\n /// representing a 128-bit SIMD register which internally is consisted of\n\n /// two packed `f64` instances. Usage of this type typically corresponds\n\n /// to the `sse` and up target features for x86/x86_64.\n", "file_path": "coresimd/x86/mod.rs", "rank": 48, "score": 84134.3396131615 }, { "content": " /// let all_bytes_zero = _mm_setzero_si128();\n\n /// let all_bytes_one = _mm_set1_epi8(1);\n\n /// let four_i32 = _mm_set_epi32(1, 2, 3, 4);\n\n /// # }\n\n /// # if is_x86_feature_detected!(\"sse2\") { unsafe { foo() } }\n\n /// # }\n\n /// ```\n\n #[stable(feature = \"simd_x86\", since = \"1.27.0\")]\n\n pub struct __m128i(i64, i64);\n\n\n\n /// 128-bit wide set of four `f32` types, x86-specific\n\n ///\n\n /// This type is the same as the `__m128` type defined by Intel,\n\n /// representing a 128-bit SIMD register which internally is consisted of\n\n /// four packed `f32` instances. Usage of this type typically corresponds\n\n /// to the `sse` and up target features for x86/x86_64.\n\n ///\n\n /// Note that unlike `__m128i`, the integer version of the 128-bit\n\n /// registers, this `__m128` type has *one* interpretation. Each instance\n\n /// of `__m128` always corresponds to `f32x4`, or four `f32` types packed\n", "file_path": "coresimd/x86/mod.rs", "rank": 49, "score": 84133.6926161255 }, { "content": " /// let eight_zeros = _mm256_setzero_ps();\n\n /// let eight_ones = _mm256_set1_ps(1.0);\n\n /// let eight_floats = _mm256_set_ps(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0);\n\n /// # }\n\n /// # if is_x86_feature_detected!(\"avx\") { unsafe { foo() } }\n\n /// # }\n\n /// ```\n\n #[stable(feature = \"simd_x86\", since = \"1.27.0\")]\n\n pub struct __m256(f32, f32, f32, f32, f32, f32, f32, f32);\n\n\n\n /// 256-bit wide set of four `f64` types, x86-specific\n\n ///\n\n /// This type is the same as the `__m256d` type defined by Intel,\n\n /// representing a 256-bit SIMD register which internally is consisted of\n\n /// four packed `f64` instances. Usage of this type typically corresponds\n\n /// to the `avx` and up target features for x86/x86_64.\n\n ///\n\n /// Note that unlike `__m256i`, the integer version of the 256-bit\n\n /// registers, this `__m256d` type has *one* interpretation. Each instance\n\n /// of `__m256d` always corresponds to `f64x4`, or four `f64` types packed\n", "file_path": "coresimd/x86/mod.rs", "rank": 50, "score": 84133.29297398035 }, { "content": " /// to the `avx512*` and up target features for x86/x86_64.\n\n ///\n\n /// Note that unlike `__m512i`, the integer version of the 512-bit\n\n /// registers, this `__m512` type has *one* interpretation. 
Each instance\n\n /// of `__m512` always corresponds to `f32x16`, or sixteen `f32` types\n\n /// packed together.\n\n ///\n\n /// Most intrinsics using `__m512` are prefixed with `_mm512_` and are\n\n /// suffixed with \"ps\" (or otherwise contain \"ps\"). Not to be confused with\n\n /// \"pd\" which is used for `__m512d`.\n\n pub struct __m512(\n\n f32, f32, f32, f32, f32, f32, f32, f32,\n\n f32, f32, f32, f32, f32, f32, f32, f32,\n\n );\n\n\n\n /// 512-bit wide set of eight `f64` types, x86-specific\n\n ///\n\n /// This type is the same as the `__m512d` type defined by Intel,\n\n /// representing a 512-bit SIMD register which internally is consisted of\n\n /// eight packed `f64` instances. Usage of this type typically corresponds\n", "file_path": "coresimd/x86/mod.rs", "rank": 51, "score": 84132.90172471717 }, { "content": "pub use self::test::*;\n\n\n\n#[allow(non_camel_case_types)]\n\n#[unstable(feature = \"stdimd_internal\", issue = \"0\")]\n\npub(crate) trait m128iExt: Sized {\n\n fn as_m128i(self) -> __m128i;\n\n\n\n #[inline]\n\n fn as_u8x16(self) -> ::coresimd::simd::u8x16 {\n\n unsafe { mem::transmute(self.as_m128i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_u16x8(self) -> ::coresimd::simd::u16x8 {\n\n unsafe { mem::transmute(self.as_m128i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_u32x4(self) -> ::coresimd::simd::u32x4 {\n\n unsafe { mem::transmute(self.as_m128i()) }\n", "file_path": "coresimd/x86/mod.rs", "rank": 52, "score": 84131.72690328675 }, { "content": " ///\n\n /// This type is the same as the `__m128i` type defined by Intel,\n\n /// representing a 128-bit SIMD register. Usage of this type typically\n\n /// corresponds to the `sse` and up target features for x86/x86_64.\n\n ///\n\n /// Internally this type may be viewed as:\n\n ///\n\n /// * `i8x16` - sixteen `i8` variables packed together\n\n /// * `i16x8` - eight `i16` variables packed together\n\n /// * `i32x4` - four `i32` variables packed together\n\n /// * `i64x2` - two `i64` variables packed together\n\n ///\n\n /// (as well as unsigned versions). Each intrinsic may interpret the\n\n /// internal bits differently, check the documentation of the intrinsic\n\n /// to see how it's being used.\n\n ///\n\n /// Note that this means that an instance of `__m128i` typically just means\n\n /// a \"bag of bits\" which is left up to interpretation at the point of use.\n\n ///\n\n /// Most intrinsics using `__m128i` are prefixed with `_mm_` and the\n", "file_path": "coresimd/x86/mod.rs", "rank": 53, "score": 84130.33213354294 }, { "content": " /// Internally this type may be viewed as:\n\n ///\n\n /// * `i8x64` - sixty-four `i8` variables packed together\n\n /// * `i16x32` - thirty-two `i16` variables packed together\n\n /// * `i32x16` - sixteen `i32` variables packed together\n\n /// * `i64x8` - eight `i64` variables packed together\n\n ///\n\n /// (as well as unsigned versions). Each intrinsic may interpret the\n\n /// internal bits differently, check the documentation of the intrinsic\n\n /// to see how it's being used.\n\n ///\n\n /// Note that this means that an instance of `__m512i` typically just means\n\n /// a \"bag of bits\" which is left up to interpretation at the point of use.\n\n pub struct __m512i(i64, i64, i64, i64, i64, i64, i64, i64);\n\n\n\n /// 512-bit wide set of sixteen `f32` types, x86-specific\n\n ///\n\n /// This type is the same as the `__m512` type defined by Intel,\n\n /// representing a 512-bit SIMD register which internally is consisted of\n\n /// eight packed `f32` instances. 
Usage of this type typically corresponds\n", "file_path": "coresimd/x86/mod.rs", "rank": 54, "score": 84130.26109459955 }, { "content": " /// This type is the same as the `__m256i` type defined by Intel,\n\n /// representing a 256-bit SIMD register. Usage of this type typically\n\n /// corresponds to the `avx` and up target features for x86/x86_64.\n\n ///\n\n /// Internally this type may be viewed as:\n\n ///\n\n /// * `i8x32` - thirty two `i8` variables packed together\n\n /// * `i16x16` - sixteen `i16` variables packed together\n\n /// * `i32x8` - eight `i32` variables packed together\n\n /// * `i64x4` - four `i64` variables packed together\n\n ///\n\n /// (as well as unsigned versions). Each intrinsic may interpret the\n\n /// internal bits differently, check the documentation of the intrinsic\n\n /// to see how it's being used.\n\n ///\n\n /// Note that this means that an instance of `__m256i` typically just means\n\n /// a \"bag of bits\" which is left up to interpretation at the point of use.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "coresimd/x86/mod.rs", "rank": 55, "score": 84129.78621612748 }, { "content": " ///\n\n /// Note that unlike `__m128i`, the integer version of the 128-bit\n\n /// registers, this `__m128d` type has *one* interpretation. Each instance\n\n /// of `__m128d` always corresponds to `f64x2`, or two `f64` types packed\n\n /// together.\n\n ///\n\n /// Most intrinsics using `__m128d` are prefixed with `_mm_` and are\n\n /// suffixed with \"pd\" (or otherwise contain \"pd\"). Not to be confused with\n\n /// \"ps\" which is used for `__m128`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # #![cfg_attr(not(dox), feature(stdsimd))]\n\n /// # #![cfg_attr(not(dox), no_std)]\n\n /// # #[cfg(not(dox))]\n\n /// # extern crate std as real_std;\n\n /// # #[cfg(not(dox))]\n\n /// # #[macro_use]\n\n /// # extern crate stdsimd as std;\n", "file_path": "coresimd/x86/mod.rs", "rank": 56, "score": 84127.2372255046 }, { "content": " }\n\n\n\n #[inline]\n\n fn as_i64x2(self) -> ::coresimd::simd::i64x2 {\n\n unsafe { mem::transmute(self.as_m128i()) }\n\n }\n\n}\n\n\n\nimpl m128iExt for __m128i {\n\n #[inline]\n\n fn as_m128i(self) -> Self {\n\n self\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\n#[unstable(feature = \"stdimd_internal\", issue = \"0\")]\n\npub(crate) trait m256iExt: Sized {\n\n fn as_m256i(self) -> __m256i;\n\n\n", "file_path": "coresimd/x86/mod.rs", "rank": 57, "score": 84125.86458734705 }, { "content": "\n\nimpl m256iExt for __m256i {\n\n #[inline]\n\n fn as_m256i(self) -> Self {\n\n self\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\n#[unstable(feature = \"stdimd_internal\", issue = \"0\")]\n\npub(crate) trait m512iExt: Sized {\n\n fn as_m512i(self) -> __m512i;\n\n\n\n #[inline]\n\n fn as_i32x16(self) -> ::coresimd::simd::i32x16 {\n\n unsafe { mem::transmute(self.as_m512i()) }\n\n }\n\n}\n\n\n\nimpl m512iExt for __m512i {\n", "file_path": "coresimd/x86/mod.rs", "rank": 58, "score": 84125.60924105975 }, { "content": " ///\n\n /// (as well as unsigned versions). 
Each intrinsic may interpret the\n\n /// internal bits differently, check the documentation of the intrinsic\n\n /// to see how it's being used.\n\n ///\n\n /// Note that this means that an instance of `__m64` typically just means\n\n /// a \"bag of bits\" which is left up to interpretation at the point of use.\n\n ///\n\n /// Most intrinsics using `__m64` are prefixed with `_mm_` and the\n\n /// integer types tend to correspond to suffixes like \"pi8\" or \"pi32\" (not\n\n /// to be confused with \"epiXX\", used for `__m128i`).\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # #![feature(stdsimd, mmx_target_feature)]\n\n /// # #![cfg_attr(not(dox), no_std)]\n\n /// # #[cfg(not(dox))]\n\n /// # extern crate std as real_std;\n\n /// # #[cfg(not(dox))]\n", "file_path": "coresimd/x86/mod.rs", "rank": 59, "score": 84123.93553915776 }, { "content": " }\n\n\n\n #[inline]\n\n fn as_u64x2(self) -> ::coresimd::simd::u64x2 {\n\n unsafe { mem::transmute(self.as_m128i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_i8x16(self) -> ::coresimd::simd::i8x16 {\n\n unsafe { mem::transmute(self.as_m128i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_i16x8(self) -> ::coresimd::simd::i16x8 {\n\n unsafe { mem::transmute(self.as_m128i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_i32x4(self) -> ::coresimd::simd::i32x4 {\n\n unsafe { mem::transmute(self.as_m128i()) }\n", "file_path": "coresimd/x86/mod.rs", "rank": 60, "score": 84123.81879154855 }, { "content": " #[inline]\n\n fn as_i8x32(self) -> ::coresimd::simd::i8x32 {\n\n unsafe { mem::transmute(self.as_m256i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_i16x16(self) -> ::coresimd::simd::i16x16 {\n\n unsafe { mem::transmute(self.as_m256i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_i32x8(self) -> ::coresimd::simd::i32x8 {\n\n unsafe { mem::transmute(self.as_m256i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_i64x4(self) -> ::coresimd::simd::i64x4 {\n\n unsafe { mem::transmute(self.as_m256i()) }\n\n }\n\n}\n", "file_path": "coresimd/x86/mod.rs", "rank": 61, "score": 84123.81879154855 }, { "content": " #[inline]\n\n fn as_u8x32(self) -> ::coresimd::simd::u8x32 {\n\n unsafe { mem::transmute(self.as_m256i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_u16x16(self) -> ::coresimd::simd::u16x16 {\n\n unsafe { mem::transmute(self.as_m256i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_u32x8(self) -> ::coresimd::simd::u32x8 {\n\n unsafe { mem::transmute(self.as_m256i()) }\n\n }\n\n\n\n #[inline]\n\n fn as_u64x4(self) -> ::coresimd::simd::u64x4 {\n\n unsafe { mem::transmute(self.as_m256i()) }\n\n }\n\n\n", "file_path": "coresimd/x86/mod.rs", "rank": 62, "score": 84123.81879154855 }, { "content": "#[cfg(stage0)]\n\npub fn simd_select_bitmask<M, T>(m: M, a: T, b: T) -> T {\n\n drop((m, b));\n\n a\n\n}\n", "file_path": "coresimd/simd_llvm.rs", "rank": 63, "score": 83812.18115987709 }, { "content": "#[inline]\n\npub fn check_for(x: Feature) -> bool {\n\n cache::test(x as u32, detect_features)\n\n}\n\n\n", "file_path": "stdsimd/arch/detect/os/linux/aarch64.rs", "rank": 64, "score": 83548.5334610744 }, { "content": "#[inline]\n\npub fn check_for(x: Feature) -> bool {\n\n cache::test(x as u32, detect_features)\n\n}\n\n\n", "file_path": "stdsimd/arch/detect/os/linux/arm.rs", "rank": 65, "score": 83548.5334610744 }, { "content": "#[inline]\n\npub fn check_for(x: Feature) -> bool {\n\n cache::test(x as u32, detect_features)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn dump() {\n\n println!(\"asimd: {:?}\", is_aarch64_feature_detected!(\"asimd\"));\n\n println!(\"pmull: {:?}\", 
is_aarch64_feature_detected!(\"pmull\"));\n\n println!(\"fp: {:?}\", is_aarch64_feature_detected!(\"fp\"));\n\n println!(\"fp16: {:?}\", is_aarch64_feature_detected!(\"fp16\"));\n\n println!(\"sve: {:?}\", is_aarch64_feature_detected!(\"sve\"));\n\n println!(\"crc: {:?}\", is_aarch64_feature_detected!(\"crc\"));\n\n println!(\"crypto: {:?}\", is_aarch64_feature_detected!(\"crypto\"));\n\n println!(\"lse: {:?}\", is_aarch64_feature_detected!(\"lse\"));\n\n println!(\"rdm: {:?}\", is_aarch64_feature_detected!(\"rdm\"));\n\n println!(\"rcpc: {:?}\", is_aarch64_feature_detected!(\"rcpc\"));\n\n println!(\"dotprod: {:?}\", is_aarch64_feature_detected!(\"dotprod\"));\n\n }\n\n}\n", "file_path": "stdsimd/arch/detect/os/freebsd/aarch64.rs", "rank": 66, "score": 83548.5334610744 }, { "content": "#[inline]\n\npub fn check_for(x: Feature) -> bool {\n\n cache::test(x as u32, detect_features)\n\n}\n\n\n", "file_path": "stdsimd/arch/detect/os/linux/mips.rs", "rank": 67, "score": 83548.5334610744 }, { "content": "#[inline]\n\npub fn check_for(x: Feature) -> bool {\n\n cache::test(x as u32, detect_features)\n\n}\n\n\n", "file_path": "stdsimd/arch/detect/os/linux/powerpc.rs", "rank": 68, "score": 83548.5334610744 }, { "content": "//! The `is_{target_arch}_feature_detected!` macro are only available on their\n\n//! architecture. These macros provide a better error messages when the user\n\n//! attempts to call them in a different architecture.\n\n\n\n/// Prevents compilation if `is_x86_feature_detected` is used somewhere\n\n/// else than `x86` and `x86_64` targets.\n\n#[cfg(not(any(target_arch = \"x86\", target_arch = \"x86_64\")))]\n\n#[macro_export]\n\n#[unstable(feature = \"stdsimd\", issue = \"27731\")]\n\nmacro_rules! is_x86_feature_detected {\n\n ($t: tt) => {\n\n compile_error!(\n\n r#\"\n\n is_x86_feature_detected can only be used on x86 and x86_64 targets.\n\n You can prevent it from being used in other architectures by\n\n guarding it behind a cfg(target_arch) as follows:\n\n\n\n #[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))] {\n\n if is_x86_feature_detected(...) { ... }\n\n }\n", "file_path": "stdsimd/arch/detect/error_macros.rs", "rank": 69, "score": 81632.98265050992 }, { "content": " }\n\n \"#\n\n )\n\n };\n\n}\n\n\n\n/// Prevents compilation if `is_aarch64_feature_detected` is used somewhere else\n\n/// than `aarch64` targets.\n\n#[cfg(not(target_arch = \"aarch64\"))]\n\n#[macro_export]\n\n#[unstable(feature = \"stdsimd\", issue = \"27731\")]\n\nmacro_rules! is_aarch64_feature_detected {\n\n ($t: tt) => {\n\n compile_error!(\n\n r#\"\n\n is_aarch64_feature_detected can only be used on AArch64 targets.\n\n You can prevent it from being used in other architectures by\n\n guarding it behind a cfg(target_arch) as follows:\n\n\n\n #[cfg(target_arch = \"aarch64\")] {\n", "file_path": "stdsimd/arch/detect/error_macros.rs", "rank": 70, "score": 81625.95292790631 }, { "content": " \"#\n\n )\n\n };\n\n}\n\n\n\n/// Prevents compilation if `is_arm_feature_detected` is used somewhere else\n\n/// than `ARM` targets.\n\n#[cfg(not(target_arch = \"arm\"))]\n\n#[macro_export]\n\n#[unstable(feature = \"stdsimd\", issue = \"27731\")]\n\nmacro_rules! is_arm_feature_detected {\n\n ($t:tt) => {\n\n compile_error!(\n\n r#\"\n\n is_arm_feature_detected can only be used on ARM targets.\n\n You can prevent it from being used in other architectures by\n\n guarding it behind a cfg(target_arch) as follows:\n\n\n\n #[cfg(target_arch = \"arm\")] {\n\n if is_arm_feature_detected(...) { ... 
}\n", "file_path": "stdsimd/arch/detect/error_macros.rs", "rank": 71, "score": 81625.74676797837 }, { "content": " #[cfg(target_arch = \"powerpc\")] {\n\n if is_powerpc_feature_detected(...) { ... }\n\n }\n\n\"#\n\n )\n\n };\n\n}\n\n\n\n/// Prevents compilation if `is_powerpc64_feature_detected` is used somewhere\n\n/// else than `PowerPC64` targets.\n\n#[cfg(not(target_arch = \"powerpc64\"))]\n\n#[macro_export]\n\n#[unstable(feature = \"stdsimd\", issue = \"27731\")]\n\nmacro_rules! is_powerpc64_feature_detected {\n\n ($t:tt) => {\n\n compile_error!(\n\n r#\"\n\nis_powerpc64_feature_detected can only be used on PowerPC64 targets.\n\nYou can prevent it from being used in other architectures by\n\nguarding it behind a cfg(target_arch) as follows:\n", "file_path": "stdsimd/arch/detect/error_macros.rs", "rank": 72, "score": 81625.6122329989 }, { "content": "\n\n #[cfg(target_arch = \"powerpc64\")] {\n\n if is_powerpc64_feature_detected(...) { ... }\n\n }\n\n\"#\n\n )\n\n };\n\n}\n\n\n\n/// Prevents compilation if `is_mips_feature_detected` is used somewhere else\n\n/// than `MIPS` targets.\n\n#[cfg(not(target_arch = \"mips\"))]\n\n#[macro_export]\n\n#[unstable(feature = \"stdsimd\", issue = \"27731\")]\n\nmacro_rules! is_mips_feature_detected {\n\n ($t:tt) => {\n\n compile_error!(\n\n r#\"\n\n is_mips_feature_detected can only be used on MIPS targets.\n\n You can prevent it from being used in other architectures by\n", "file_path": "stdsimd/arch/detect/error_macros.rs", "rank": 73, "score": 81625.52264434264 }, { "content": " guarding it behind a cfg(target_arch) as follows:\n\n\n\n #[cfg(target_arch = \"mips\")] {\n\n if is_mips_feature_detected(...) { ... }\n\n }\n\n \"#\n\n )\n\n };\n\n}\n\n\n\n/// Prevents compilation if `is_mips64_feature_detected` is used somewhere else\n\n/// than `MIPS64` targets.\n\n#[cfg(not(target_arch = \"mips64\"))]\n\n#[macro_export]\n\n#[unstable(feature = \"stdsimd\", issue = \"27731\")]\n\nmacro_rules! is_mips64_feature_detected {\n\n ($t:tt) => {\n\n compile_error!(\n\n r#\"\n\n is_mips64_feature_detected can only be used on MIPS64 targets.\n", "file_path": "stdsimd/arch/detect/error_macros.rs", "rank": 74, "score": 81625.3796533953 }, { "content": " if is_aarch64_feature_detected(...) { ... }\n\n }\n\n \"#\n\n )\n\n };\n\n}\n\n\n\n/// Prevents compilation if `is_powerpc_feature_detected` is used somewhere else\n\n/// than `PowerPC` targets.\n\n#[cfg(not(target_arch = \"powerpc\"))]\n\n#[macro_export]\n\n#[unstable(feature = \"stdsimd\", issue = \"27731\")]\n\nmacro_rules! is_powerpc_feature_detected {\n\n ($t:tt) => {\n\n compile_error!(\n\n r#\"\n\nis_powerpc_feature_detected can only be used on PowerPC targets.\n\nYou can prevent it from being used in other architectures by\n\nguarding it behind a cfg(target_arch) as follows:\n\n\n", "file_path": "stdsimd/arch/detect/error_macros.rs", "rank": 75, "score": 81625.225920566 }, { "content": " You can prevent it from being used in other architectures by\n\n guarding it behind a cfg(target_arch) as follows:\n\n\n\n #[cfg(target_arch = \"mips64\")] {\n\n if is_mips64_feature_detected(...) { ... }\n\n }\n\n \"#\n\n )\n\n };\n\n}\n", "file_path": "stdsimd/arch/detect/error_macros.rs", "rank": 76, "score": 81620.40812425928 }, { "content": "//! Implementation of the `#[simd_test]` macro\n\n//!\n\n//! This macro expands to a `#[test]` function which tests the local machine\n\n//! 
for the appropriate cfg before calling the inner test function.\n\n\n\nextern crate proc_macro;\n\nextern crate proc_macro2;\n\n#[macro_use]\n\nextern crate quote;\n\n\n\nuse proc_macro2::{Ident, Literal, Span, TokenStream, TokenTree};\n\nuse quote::ToTokens;\n\nuse std::env;\n\n\n", "file_path": "crates/simd-test-macro/src/lib.rs", "rank": 77, "score": 78277.02971981723 }, { "content": " force_test = true;\n\n \"is_mips64_feature_detected\"\n\n }\n\n t => panic!(\"unknown target: {}\", t),\n\n };\n\n let macro_test = Ident::new(macro_test, Span::call_site());\n\n\n\n let mut cfg_target_features = TokenStream::new();\n\n for feature in target_features {\n\n let q = quote_spanned! {\n\n proc_macro2::Span::call_site() =>\n\n #macro_test!(#feature) &&\n\n };\n\n q.to_tokens(&mut cfg_target_features);\n\n }\n\n let q = quote! { true };\n\n q.to_tokens(&mut cfg_target_features);\n\n\n\n let test_norun = std::env::var(\"STDSIMD_TEST_NORUN\").is_ok();\n\n let maybe_ignore = if test_norun {\n", "file_path": "crates/simd-test-macro/src/lib.rs", "rank": 78, "score": 78266.79481853997 }, { "content": " .next()\n\n .unwrap_or_else(|| panic!(\"target triple contained no \\\"-\\\": {}\", target))\n\n {\n\n \"i686\" | \"x86_64\" | \"i586\" => \"is_x86_feature_detected\",\n\n \"arm\" | \"armv7\" => \"is_arm_feature_detected\",\n\n \"aarch64\" => \"is_aarch64_feature_detected\",\n\n \"powerpc\" | \"powerpcle\" => \"is_powerpc_feature_detected\",\n\n \"powerpc64\" | \"powerpc64le\" => \"is_powerpc64_feature_detected\",\n\n \"mips\" | \"mipsel\" => {\n\n // FIXME:\n\n // On MIPS CI run-time feature detection always returns false due\n\n // to this qemu bug: https://bugs.launchpad.net/qemu/+bug/1754372\n\n //\n\n // This is a workaround to force the MIPS tests to always run on\n\n // CI.\n\n force_test = true;\n\n \"is_mips_feature_detected\"\n\n }\n\n \"mips64\" | \"mips64el\" => {\n\n // FIXME: see above\n", "file_path": "crates/simd-test-macro/src/lib.rs", "rank": 79, "score": 78265.70600675419 }, { "content": " let enable_feature = enable_feature.trim_start_matches('\"').trim_end_matches('\"');\n\n let target_features: Vec<String> = enable_feature\n\n .replace('+', \"\")\n\n .split(',')\n\n .map(String::from)\n\n .collect();\n\n\n\n let enable_feature = string(enable_feature);\n\n let item = TokenStream::from(item);\n\n let name = find_name(item.clone());\n\n\n\n let name: TokenStream = name\n\n .to_string()\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"failed to parse name: {}\", name.to_string()));\n\n\n\n let target = env::var(\"TARGET\").expect(\"TARGET environment variable should be set for rustc\");\n\n let mut force_test = false;\n\n let macro_test = match target\n\n .split('-')\n", "file_path": "crates/simd-test-macro/src/lib.rs", "rank": 80, "score": 78264.65932526485 }, { "content": " quote! { #[ignore] }\n\n } else {\n\n TokenStream::new()\n\n };\n\n\n\n let ret: TokenStream = quote_spanned! {\n\n proc_macro2::Span::call_site() =>\n\n #[allow(non_snake_case)]\n\n #[test]\n\n #maybe_ignore\n\n fn #name() {\n\n if #force_test | (#cfg_target_features) {\n\n return unsafe { #name() };\n\n } else {\n\n ::stdsimd_test::assert_skip_test_ok(stringify!(#name));\n\n }\n\n\n\n #[target_feature(enable = #enable_feature)]\n\n #item\n\n }\n\n };\n\n ret.into()\n\n}\n\n\n", "file_path": "crates/simd-test-macro/src/lib.rs", "rank": 81, "score": 78261.36669239208 }, { "content": "//! Run-time feature detection on Linux\n\n\n\nmod auxvec;\n\nmod cpuinfo;\n\n\n\ncfg_if! 
{\n\n if #[cfg(target_arch = \"aarch64\")] {\n\n mod aarch64;\n\n pub use self::aarch64::check_for;\n\n } else if #[cfg(target_arch = \"arm\")] {\n\n mod arm;\n\n pub use self::arm::check_for;\n\n } else if #[cfg(any(target_arch = \"mips\", target_arch = \"mips64\"))] {\n\n mod mips;\n\n pub use self::mips::check_for;\n\n } else if #[cfg(any(target_arch = \"powerpc\", target_arch = \"powerpc64\"))] {\n\n mod powerpc;\n\n pub use self::powerpc::check_for;\n\n } else {\n\n use arch::detect::Feature;\n\n /// Performs run-time feature detection.\n\n pub fn check_for(_x: Feature) -> bool {\n\n false\n\n }\n\n }\n\n}\n", "file_path": "stdsimd/arch/detect/os/linux/mod.rs", "rank": 82, "score": 77988.15988211661 }, { "content": "//! Run-time feature detection on FreeBSD\n\n\n\ncfg_if! {\n\n if #[cfg(target_arch = \"aarch64\")] {\n\n mod aarch64;\n\n pub use self::aarch64::check_for;\n\n } else {\n\n use arch::detect::Feature;\n\n /// Performs run-time feature detection.\n\n pub fn check_for(_x: Feature) -> bool {\n\n false\n\n }\n\n }\n\n}\n", "file_path": "stdsimd/arch/detect/os/freebsd/mod.rs", "rank": 83, "score": 77984.37109383829 }, { "content": "#[inline]\n\n#[target_feature(enable = \"tbm\")]\n\npub fn _bextr2_u32(a: u32, control: u32) -> u32 {\n\n unsafe { x86_tbm_bextri_u32(a, control) }\n\n}\n\n\n\n/// Extracts bits of `a` specified by `control` into\n\n/// the least significant bits of the result.\n\n///\n\n/// Bits `[7,0]` of `control` specify the index to the first bit in the range to\n\n/// be extracted, and bits `[15,8]` specify the length of the range.\n", "file_path": "coresimd/x86/tbm.rs", "rank": 84, "score": 76433.51617419429 }, { "content": "#[inline]\n\n#[target_feature(enable = \"tbm\")]\n\npub fn _bextr2_u64(a: u64, control: u64) -> u64 {\n\n unsafe { x86_tbm_bextri_u64(a, control) }\n\n}\n\n*/\n\n\n\n/// Clears all bits below the least significant zero bit of `x`.\n\n///\n\n/// If there is no zero bit in `x`, it returns zero.\n\n#[inline]\n\n#[target_feature(enable = \"tbm\")]\n\n#[cfg_attr(test, assert_instr(blcfill))]\n\n#[stable(feature = \"simd_x86\", since = \"1.27.0\")]\n\npub unsafe fn _blcfill_u32(x: u32) -> u32 {\n\n x & (x.wrapping_add(1))\n\n}\n\n\n\n/// Clears all bits below the least significant zero bit of `x`.\n\n///\n\n/// If there is no zero bit in `x`, it returns zero.\n\n#[inline]\n", "file_path": "coresimd/x86/tbm.rs", "rank": 85, "score": 76433.51617419429 }, { "content": "//! 
x86 run-time feature detection is OS independent.\n\n\n\nuse core::prelude::v1::*;\n\nuse core::mem;\n\n#[cfg(target_arch = \"x86\")]\n\nuse coresimd::arch::x86::*;\n\n#[cfg(target_arch = \"x86_64\")]\n\nuse coresimd::arch::x86_64::*;\n\n\n\nuse arch::detect::Feature;\n\nuse arch::detect::cache;\n\nuse arch::detect::bit;\n\n\n\n/// Performs run-time feature detection.\n\n#[inline]\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 86, "score": 76360.79949924757 }, { "content": " // These features are only available on AMD CPUs:\n\n enable(extended_proc_info_ecx, 6, Feature::sse4a);\n\n enable(extended_proc_info_ecx, 21, Feature::tbm);\n\n }\n\n }\n\n\n\n value\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n extern crate cupid;\n\n\n\n #[test]\n\n fn dump() {\n\n println!(\"aes: {:?}\", is_x86_feature_detected!(\"aes\"));\n\n println!(\"pclmulqdq: {:?}\", is_x86_feature_detected!(\"pclmulqdq\"));\n\n println!(\"rdrand: {:?}\", is_x86_feature_detected!(\"rdrand\"));\n\n println!(\"rdseed: {:?}\", is_x86_feature_detected!(\"rdseed\"));\n\n println!(\"tsc: {:?}\", is_x86_feature_detected!(\"tsc\"));\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 87, "score": 76350.81445899805 }, { "content": " enable(extended_features_ecx, 1, Feature::avx512_vbmi);\n\n enable(\n\n extended_features_ecx,\n\n 14,\n\n Feature::avx512_vpopcntdq,\n\n );\n\n }\n\n }\n\n }\n\n }\n\n\n\n // This detects ABM on AMD CPUs and LZCNT on Intel CPUs.\n\n // On intel CPUs with popcnt, lzcnt implements the\n\n // \"missing part\" of ABM, so we map both to the same\n\n // internal feature.\n\n //\n\n // The `is_x86_feature_detected!(\"lzcnt\")` macro then\n\n // internally maps to Feature::abm.\n\n enable(extended_proc_info_ecx, 5, Feature::abm);\n\n if vendor_id == *b\"AuthenticAMD\" {\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 88, "score": 76348.38627150853 }, { "content": " println!(\"sse: {:?}\", is_x86_feature_detected!(\"sse\"));\n\n println!(\"sse2: {:?}\", is_x86_feature_detected!(\"sse2\"));\n\n println!(\"sse3: {:?}\", is_x86_feature_detected!(\"sse3\"));\n\n println!(\"ssse3: {:?}\", is_x86_feature_detected!(\"ssse3\"));\n\n println!(\"sse4.1: {:?}\", is_x86_feature_detected!(\"sse4.1\"));\n\n println!(\"sse4.2: {:?}\", is_x86_feature_detected!(\"sse4.2\"));\n\n println!(\"sse4a: {:?}\", is_x86_feature_detected!(\"sse4a\"));\n\n println!(\"sha: {:?}\", is_x86_feature_detected!(\"sha\"));\n\n println!(\"avx: {:?}\", is_x86_feature_detected!(\"avx\"));\n\n println!(\"avx2: {:?}\", is_x86_feature_detected!(\"avx2\"));\n\n println!(\"avx512f {:?}\", is_x86_feature_detected!(\"avx512f\"));\n\n println!(\"avx512cd {:?}\", is_x86_feature_detected!(\"avx512cd\"));\n\n println!(\"avx512er {:?}\", is_x86_feature_detected!(\"avx512er\"));\n\n println!(\"avx512pf {:?}\", is_x86_feature_detected!(\"avx512pf\"));\n\n println!(\"avx512bw {:?}\", is_x86_feature_detected!(\"avx512bw\"));\n\n println!(\"avx512dq {:?}\", is_x86_feature_detected!(\"avx512dq\"));\n\n println!(\"avx512vl {:?}\", is_x86_feature_detected!(\"avx512vl\"));\n\n println!(\"avx512_ifma {:?}\", is_x86_feature_detected!(\"avx512ifma\"));\n\n println!(\"avx512_vbmi {:?}\", is_x86_feature_detected!(\"avx512vbmi\"));\n\n println!(\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 89, "score": 76348.01591077576 }, { "content": " \"avx512_vpopcntdq {:?}\",\n\n is_x86_feature_detected!(\"avx512vpopcntdq\")\n\n );\n\n println!(\"fma: {:?}\", is_x86_feature_detected!(\"fma\"));\n\n println!(\"abm: {:?}\", 
is_x86_feature_detected!(\"abm\"));\n\n println!(\"bmi: {:?}\", is_x86_feature_detected!(\"bmi1\"));\n\n println!(\"bmi2: {:?}\", is_x86_feature_detected!(\"bmi2\"));\n\n println!(\"tbm: {:?}\", is_x86_feature_detected!(\"tbm\"));\n\n println!(\"popcnt: {:?}\", is_x86_feature_detected!(\"popcnt\"));\n\n println!(\"lzcnt: {:?}\", is_x86_feature_detected!(\"lzcnt\"));\n\n println!(\"fxsr: {:?}\", is_x86_feature_detected!(\"fxsr\"));\n\n println!(\"xsave: {:?}\", is_x86_feature_detected!(\"xsave\"));\n\n println!(\"xsaveopt: {:?}\", is_x86_feature_detected!(\"xsaveopt\"));\n\n println!(\"xsaves: {:?}\", is_x86_feature_detected!(\"xsaves\"));\n\n println!(\"xsavec: {:?}\", is_x86_feature_detected!(\"xsavec\"));\n\n }\n\n\n\n #[test]\n\n fn compare_with_cupid() {\n\n let information = cupid::master().unwrap();\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 90, "score": 76347.93813394282 }, { "content": " assert_eq!(is_x86_feature_detected!(\"aes\"), information.aesni());\n\n assert_eq!(is_x86_feature_detected!(\"pclmulqdq\"), information.pclmulqdq());\n\n assert_eq!(is_x86_feature_detected!(\"rdrand\"), information.rdrand());\n\n assert_eq!(is_x86_feature_detected!(\"rdseed\"), information.rdseed());\n\n assert_eq!(is_x86_feature_detected!(\"tsc\"), information.tsc());\n\n assert_eq!(is_x86_feature_detected!(\"sse\"), information.sse());\n\n assert_eq!(is_x86_feature_detected!(\"sse2\"), information.sse2());\n\n assert_eq!(is_x86_feature_detected!(\"sse3\"), information.sse3());\n\n assert_eq!(is_x86_feature_detected!(\"ssse3\"), information.ssse3());\n\n assert_eq!(is_x86_feature_detected!(\"sse4.1\"), information.sse4_1());\n\n assert_eq!(is_x86_feature_detected!(\"sse4.2\"), information.sse4_2());\n\n assert_eq!(is_x86_feature_detected!(\"sse4a\"), information.sse4a());\n\n assert_eq!(is_x86_feature_detected!(\"sha\"), information.sha());\n\n assert_eq!(is_x86_feature_detected!(\"avx\"), information.avx());\n\n assert_eq!(is_x86_feature_detected!(\"avx2\"), information.avx2());\n\n assert_eq!(is_x86_feature_detected!(\"avx512f\"), information.avx512f());\n\n assert_eq!(is_x86_feature_detected!(\"avx512cd\"), information.avx512cd());\n\n assert_eq!(is_x86_feature_detected!(\"avx512er\"), information.avx512er());\n\n assert_eq!(is_x86_feature_detected!(\"avx512pf\"), information.avx512pf());\n\n assert_eq!(is_x86_feature_detected!(\"avx512bw\"), information.avx512bw());\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 91, "score": 76347.85371465897 }, { "content": " assert_eq!(is_x86_feature_detected!(\"avx512dq\"), information.avx512dq());\n\n assert_eq!(is_x86_feature_detected!(\"avx512vl\"), information.avx512vl());\n\n assert_eq!(\n\n is_x86_feature_detected!(\"avx512ifma\"),\n\n information.avx512_ifma()\n\n );\n\n assert_eq!(\n\n is_x86_feature_detected!(\"avx512vbmi\"),\n\n information.avx512_vbmi()\n\n );\n\n assert_eq!(\n\n is_x86_feature_detected!(\"avx512vpopcntdq\"),\n\n information.avx512_vpopcntdq()\n\n );\n\n assert_eq!(is_x86_feature_detected!(\"fma\"), information.fma());\n\n assert_eq!(is_x86_feature_detected!(\"bmi1\"), information.bmi1());\n\n assert_eq!(is_x86_feature_detected!(\"bmi2\"), information.bmi2());\n\n assert_eq!(is_x86_feature_detected!(\"popcnt\"), information.popcnt());\n\n assert_eq!(is_x86_feature_detected!(\"abm\"), information.lzcnt());\n\n assert_eq!(is_x86_feature_detected!(\"tbm\"), information.tbm());\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 92, "score": 76347.76144594529 }, { "content": " 
assert_eq!(is_x86_feature_detected!(\"lzcnt\"), information.lzcnt());\n\n assert_eq!(is_x86_feature_detected!(\"xsave\"), information.xsave());\n\n assert_eq!(is_x86_feature_detected!(\"xsaveopt\"), information.xsaveopt());\n\n assert_eq!(\n\n is_x86_feature_detected!(\"xsavec\"),\n\n information.xsavec_and_xrstor()\n\n );\n\n assert_eq!(\n\n is_x86_feature_detected!(\"xsaves\"),\n\n information.xsaves_xrstors_and_ia32_xss()\n\n );\n\n }\n\n}\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 93, "score": 76347.54378347754 }, { "content": " ebx,\n\n ecx,\n\n edx,\n\n } = __cpuid(0);\n\n let vendor_id: [[u8; 4]; 3] = [\n\n mem::transmute(ebx),\n\n mem::transmute(edx),\n\n mem::transmute(ecx),\n\n ];\n\n let vendor_id: [u8; 12] = mem::transmute(vendor_id);\n\n (max_basic_leaf, vendor_id)\n\n };\n\n\n\n if max_basic_leaf < 1 {\n\n // Earlier Intel 486, CPUID not implemented\n\n return value;\n\n }\n\n\n\n // EAX = 1, ECX = 0: Queries \"Processor Info and Feature Bits\";\n\n // Contains information about most x86 features.\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 94, "score": 76345.01737122395 }, { "content": " }\n\n\n\n // FMA (uses 256-bit wide registers):\n\n enable(proc_info_ecx, 12, Feature::fma);\n\n\n\n // And AVX/AVX2:\n\n enable(proc_info_ecx, 28, Feature::avx);\n\n enable(extended_features_ebx, 5, Feature::avx2);\n\n\n\n // For AVX-512 the OS also needs to support saving/restoring\n\n // the extended state, only then we enable AVX-512 support:\n\n if os_avx512_support {\n\n enable(extended_features_ebx, 16, Feature::avx512f);\n\n enable(extended_features_ebx, 17, Feature::avx512dq);\n\n enable(extended_features_ebx, 21, Feature::avx512_ifma);\n\n enable(extended_features_ebx, 26, Feature::avx512pf);\n\n enable(extended_features_ebx, 27, Feature::avx512er);\n\n enable(extended_features_ebx, 28, Feature::avx512cd);\n\n enable(extended_features_ebx, 30, Feature::avx512bw);\n\n enable(extended_features_ebx, 31, Feature::avx512vl);\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 95, "score": 76344.23163618386 }, { "content": " eax: extended_max_basic_leaf,\n\n ..\n\n } = unsafe { __cpuid(0x8000_0000_u32) };\n\n\n\n // EAX = 0x8000_0001, ECX=0: Queries \"Extended Processor Info and Feature\n\n // Bits\"\n\n let extended_proc_info_ecx = if extended_max_basic_leaf >= 1 {\n\n let CpuidResult { ecx, .. } = unsafe { __cpuid(0x8000_0001_u32) };\n\n ecx\n\n } else {\n\n 0\n\n };\n\n\n\n {\n\n // borrows value till the end of this scope:\n\n let mut enable = |r, rb, f| {\n\n if bit::test(r as usize, rb) {\n\n value.set(f as u32);\n\n }\n\n };\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 96, "score": 76341.39764303918 }, { "content": " let CpuidResult {\n\n ecx: proc_info_ecx,\n\n edx: proc_info_edx,\n\n ..\n\n } = unsafe { __cpuid(0x0000_0001_u32) };\n\n\n\n // EAX = 7, ECX = 0: Queries \"Extended Features\";\n\n // Contains information about bmi,bmi2, and avx2 support.\n\n let (extended_features_ebx, extended_features_ecx) = if max_basic_leaf >= 7\n\n {\n\n let CpuidResult { ebx, ecx, .. 
} = unsafe { __cpuid(0x0000_0007_u32) };\n\n (ebx, ecx)\n\n } else {\n\n (0, 0) // CPUID does not support \"Extended Features\"\n\n };\n\n\n\n // EAX = 0x8000_0000, ECX = 0: Get Highest Extended Function Supported\n\n // - EAX returns the max leaf value for extended information, that is,\n\n // `cpuid` calls in range [0x8000_0000; u32::MAX]:\n\n let CpuidResult {\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 97, "score": 76341.39764303918 }, { "content": "\n\n enable(proc_info_ecx, 0, Feature::sse3);\n\n enable(proc_info_ecx, 9, Feature::ssse3);\n\n enable(proc_info_ecx, 19, Feature::sse4_1);\n\n enable(proc_info_ecx, 20, Feature::sse4_2);\n\n enable(proc_info_ecx, 23, Feature::popcnt);\n\n enable(proc_info_ecx, 25, Feature::aes);\n\n enable(proc_info_ecx, 1, Feature::pclmulqdq);\n\n enable(proc_info_ecx, 30, Feature::rdrand);\n\n enable(extended_features_ebx, 18, Feature::rdseed);\n\n enable(proc_info_edx, 4, Feature::tsc);\n\n enable(proc_info_edx, 23, Feature::mmx);\n\n enable(proc_info_edx, 24, Feature::fxsr);\n\n enable(proc_info_edx, 25, Feature::sse);\n\n enable(proc_info_edx, 26, Feature::sse2);\n\n enable(extended_features_ebx, 29, Feature::sha);\n\n\n\n enable(extended_features_ebx, 3, Feature::bmi);\n\n enable(extended_features_ebx, 8, Feature::bmi2);\n\n\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 98, "score": 76341.39764303918 }, { "content": " // `XSAVE` and `AVX` support:\n\n let cpu_xsave = bit::test(proc_info_ecx as usize, 26);\n\n if cpu_xsave {\n\n // 0. Here the CPU supports `XSAVE`.\n\n\n\n // 1. Detect `OSXSAVE`, that is, whether the OS is AVX enabled and\n\n // supports saving the state of the AVX/AVX2 vector registers on\n\n // context-switches, see:\n\n //\n\n // - [intel: is avx enabled?][is_avx_enabled],\n\n // - [mozilla: sse.cpp][mozilla_sse_cpp].\n\n //\n\n // [is_avx_enabled]: https://software.intel.com/en-us/blogs/2011/04/14/is-avx-enabled\n\n // [mozilla_sse_cpp]: https://hg.mozilla.org/mozilla-central/file/64bab5cbb9b6/mozglue/build/SSE.cpp#l190\n\n let cpu_osxsave = bit::test(proc_info_ecx as usize, 27);\n\n\n\n if cpu_osxsave {\n\n // 2. The OS must have signaled the CPU that it supports saving and\n\n // restoring the:\n\n //\n", "file_path": "stdsimd/arch/detect/os/x86.rs", "rank": 99, "score": 76341.39764303918 } ]
Rust
nalgebra-sparse/src/ops/impl_std_ops.rs
ThatGeoGuy/nalgebra
10deb03b71793c5d98de04267a99d7af69a715d5
use super::serial::{scalar::*, spadd::*, spmm::*, spsub::*};
use crate::cs::{
    CompressedColumnStorage, CompressedRowStorage, Compression, CsMatrix, CscMatrix, CsrMatrix,
};
use nalgebra::{Dim, Matrix, RawStorage, RawStorageMut, Scalar};
use num_traits::Zero;
use std::{
    borrow::Borrow,
    ops::{Add, AddAssign, Div, Mul, Neg, Sub},
};

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage>
where
    T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>,
    T2: Scalar + Into<<T1 as Add<T2>>::Output>,
    <T1 as Add<T2>>::Output: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T1 as Add<T2>>::Output>;
    fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output {
        spadd_csr_csc(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage>
where
    T1: Scalar + Into<<T2 as Add<T1>>::Output>,
    T2: Scalar + Into<<T2 as Add<T1>>::Output> + Add<T1>,
    <T2 as Add<T1>>::Output: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T2 as Add<T1>>::Output>;
    fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output {
        spadd_csc_csr(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage>
where
    T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>,
    T2: Scalar + Into<<T1 as Add<T2>>::Output>,
    <T1 as Add<T2>>::Output: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CscMatrix<<T1 as Add<T2>>::Output>;
    fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output {
        spadd_csc_csc(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Add<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage>
where
    T1: Scalar + Into<<T1 as Add<T2>>::Output> + Add<T2>,
    T2: Scalar + Into<<T1 as Add<T2>>::Output>,
    <T1 as Add<T2>>::Output: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T1 as Add<T2>>::Output>;
    fn add(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output {
        spadd_csr_csr(self, rhs).unwrap()
    }
}

impl<T1, T2, R, C, S, MO, MI, D> Add<Matrix<T2, R, C, S>>
    for CsMatrix<T1, MO, MI, D, CompressedColumnStorage>
where
    T2: Scalar + Add<T1, Output = T2>,
    R: Dim,
    C: Dim,
    S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>,
    T1: Scalar,
    MO: Borrow<[usize]>,
    MI: Borrow<[usize]>,
    D: Borrow<[T1]>,
{
    type Output = Matrix<T2, R, C, S>;
    fn add(self, rhs: Matrix<T2, R, C, S>) -> Self::Output {
        spadd_csc_dense(self, rhs).unwrap()
    }
}

impl<T1, T2, R, C, S, MO, MI, D> Add<Matrix<T2, R, C, S>>
    for CsMatrix<T1, MO, MI, D, CompressedRowStorage>
where
    T2: Scalar + Add<T1, Output = T2>,
    R: Dim,
    C: Dim,
    S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>,
    T1: Scalar,
    MO: Borrow<[usize]>,
    MI: Borrow<[usize]>,
    D: Borrow<[T1]>,
{
    type Output = Matrix<T2, R, C, S>;
    fn add(self, rhs: Matrix<T2, R, C, S>) -> Self::Output {
        spadd_csr_dense(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage>
where
    T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero,
    T2: Scalar + Into<<T1 as Sub<T2>>::Output>,
    <T1 as Sub<T2>>::Output: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T1 as Sub<T2>>::Output>;
    fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output {
        spsub_csr_csc(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage>
where
    T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero,
    T2: Scalar + Into<<T1 as Sub<T2>>::Output>,
    <T1 as Sub<T2>>::Output: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CscMatrix<<T1 as Sub<T2>>::Output>;
    fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output {
        spsub_csc_csr(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage>
where
    T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero,
    T2: Scalar + Into<<T1 as Sub<T2>>::Output>,
    <T1 as Sub<T2>>::Output: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CscMatrix<<T1 as Sub<T2>>::Output>;
    fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output {
        spsub_csc_csc(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Sub<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage>
where
    T1: Scalar + Into<<T1 as Sub<T2>>::Output> + Sub<T2> + Zero,
    T2: Scalar + Into<<T1 as Sub<T2>>::Output>,
    <T1 as Sub<T2>>::Output: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T1 as Sub<T2>>::Output>;
    fn sub(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output {
        spsub_csr_csr(self, rhs).unwrap()
    }
}

impl<T1, T2, R, C, S, MO, MI, D> Sub<Matrix<T2, R, C, S>>
    for CsMatrix<T1, MO, MI, D, CompressedColumnStorage>
where
    T2: Scalar + Neg<Output = T2> + Add<T1, Output = T2>,
    R: Dim,
    C: Dim,
    S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>,
    T1: Scalar,
    MO: Borrow<[usize]>,
    MI: Borrow<[usize]>,
    D: Borrow<[T1]>,
{
    type Output = Matrix<T2, R, C, S>;
    fn sub(self, rhs: Matrix<T2, R, C, S>) -> Self::Output {
        spsub_csc_dense(self, rhs).unwrap()
    }
}

impl<T1, T2, R, C, S, MO, MI, D> Sub<Matrix<T2, R, C, S>>
    for CsMatrix<T1, MO, MI, D, CompressedRowStorage>
where
    T2: Scalar + Neg<Output = T2> + Add<T1, Output = T2>,
    R: Dim,
    C: Dim,
    S: RawStorage<T2, R, C> + RawStorageMut<T2, R, C>,
    T1: Scalar,
    MO: Borrow<[usize]>,
    MI: Borrow<[usize]>,
    D: Borrow<[T1]>,
{
    type Output = Matrix<T2, R, C, S>;
    fn sub(self, rhs: Matrix<T2, R, C, S>) -> Self::Output {
        spsub_csr_dense(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage>
where
    T1: Scalar + Mul<T2>,
    <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero,
    T2: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T1 as Mul<T2>>::Output>;
    fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output {
        spmm_csr_csc(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage>
where
    T1: Scalar + Mul<T2>,
    <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero,
    T2: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T1 as Mul<T2>>::Output>;
    fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output {
        spmm_csc_csr(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedColumnStorage>
where
    T1: Scalar + Mul<T2>,
    <T1 as Mul<T2>>::Output: Scalar + AddAssign + Zero,
    T2: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T1 as Mul<T2>>::Output>;
    fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedColumnStorage>) -> Self::Output {
        spmm_csc_csc(self, rhs).unwrap()
    }
}

impl<T1, T2, MO1, MO2, MI1, MI2, D1, D2> Mul<CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>>
    for CsMatrix<T1, MO1, MI1, D1, CompressedRowStorage>
where
    T2: Scalar + Mul<T1>,
    <T2 as Mul<T1>>::Output: Scalar + AddAssign + Zero,
    T1: Scalar,
    MO1: Borrow<[usize]>,
    MO2: Borrow<[usize]>,
    MI1: Borrow<[usize]>,
    MI2: Borrow<[usize]>,
    D1: Borrow<[T1]>,
    D2: Borrow<[T2]>,
{
    type Output = CscMatrix<<T2 as Mul<T1>>::Output>;
    fn mul(self, rhs: CsMatrix<T2, MO2, MI2, D2, CompressedRowStorage>) -> Self::Output {
        spmm_csr_csr(self, rhs).unwrap()
    }
}

impl<T1, T2, R, C, S, MO, MI, D> Mul<Matrix<T2, R, C, S>>
    for CsMatrix<T1, MO, MI, D, CompressedRowStorage>
where
    T2: Scalar,
    R: Dim,
    C: Dim,
    S: RawStorage<T2, R, C>,
    T1: Scalar + Mul<T2>,
    <T1 as Mul<T2>>::Output: Scalar + Add + Zero,
    MO: Borrow<[usize]>,
    MI: Borrow<[usize]>,
    D: Borrow<[T1]>,
{
    type Output = CscMatrix<<T1 as Mul<T2>>::Output>;
    fn mul(self, rhs: Matrix<T2, R, C, S>) -> Self::Output {
        spmm_csr_dense(self, rhs).unwrap()
    }
}

impl<T1, T2, R, C, S, MO, MI, D> Mul<CsMatrix<T2, MO, MI, D, CompressedRowStorage>>
    for Matrix<T1, R, C, S>
where
    T1: Scalar,
    R: Dim,
    C: Dim,
    S: RawStorage<T1, R, C>,
    T2: Scalar + Mul<T1>,
    <T2 as Mul<T1>>::Output: Scalar + Add + Zero,
    MO: Borrow<[usize]>,
    MI: Borrow<[usize]>,
    D: Borrow<[T2]>,
{
    type Output = CscMatrix<<T2 as Mul<T1>>::Output>;
    fn mul(self, rhs: CsMatrix<T2, MO, MI, D, CompressedRowStorage>) -> Self::Output {
        spmm_dense_csr(self, rhs).unwrap()
    }
}

impl<T1, T2, R, C, S, MO, MI, D> Mul<Matrix<T2, R, C, S>>
    for CsMatrix<T1, MO, MI, D, CompressedColumnStorage>
where
    T2: Scalar,
    R: Dim,
    C: Dim,
    S: RawStorage<T2, R, C>,
    T1: Scalar + Mul<T2>,
    <T1 as Mul<T2>>::Output: Scalar + Add + Zero,
    MO: Borrow<[usize]>,
    MI: Borrow<[usize]>,
    D: Borrow<[T1]>,
{
    type Output = CsrMatrix<<T1 as Mul<T2>>::Output>;
    fn mul(self, rhs: Matrix<T2, R, C, S>) -> Self::Output {
        spmm_csc_dense(self, rhs).unwrap()
    }
}

impl<T1, T2, R, C, S, MO, MI, D> Mul<CsMatrix<T2, MO, MI, D, CompressedColumnStorage>>
    for Matrix<T1, R, C, S>
where
    T1: Scalar,
    R: Dim,
    C: Dim,
    S: RawStorage<T1, R, C>,
    T2: Scalar + Mul<T1>,
    <T2 as Mul<T1>>::Output: Scalar + Add + Zero,
    MO: Borrow<[usize]>,
    MI: Borrow<[usize]>,
    D: Borrow<[T2]>,
{
    type Output = CsrMatrix<<T2 as Mul<T1>>::Output>;
    fn mul(self, rhs: CsMatrix<T2, MO, MI, D, CompressedColumnStorage>) -> Self::Output {
        spmm_dense_csc(self, rhs).unwrap()
    }
}

macro_rules! impl_sparse_scalar_product_and_div {
    ($($t:ty)*) => ($(
        impl<T1, MO, MI, D, C> Mul<$t> for CsMatrix<T1, MO, MI, D, C>
        where
            T1: Scalar + Mul<$t>,
            <T1 as Mul<$t>>::Output: Scalar,
            MO: Borrow<[usize]>,
            MI: Borrow<[usize]>,
            D: Borrow<[T1]>,
            C: Compression,
        {
            type Output = CsMatrix<<T1 as Mul<$t>>::Output, MO, MI, Vec<<T1 as Mul<$t>>::Output>, C>;
            fn mul(self, rhs: $t) -> Self::Output {
                sp_cs_scalar_prod(self, rhs)
            }
        }

        impl<T1, MO, MI, D, C> Mul<CsMatrix<T1, MO, MI, D, C>> for $t
        where
            T1: Scalar + Mul<$t>,
            <T1 as Mul<$t>>::Output: Scalar,
            MO: Borrow<[usize]>,
            MI: Borrow<[usize]>,
            D: Borrow<[T1]>,
            C: Compression,
        {
            type Output = CsMatrix<<T1 as Mul<$t>>::Output, MO, MI, Vec<<T1 as Mul<$t>>::Output>, C>;
            fn mul(self, rhs: CsMatrix<T1, MO, MI, D, C>) -> Self::Output {
                sp_cs_scalar_prod(rhs, self)
            }
        }

        impl<T1, MO, MI, D, C> Div<$t> for CsMatrix<T1, MO, MI, D, C>
        where
            T1: Scalar + Div<$t>,
            <T1 as Div<$t>>::Output: Scalar,
            MO: Borrow<[usize]>,
            MI: Borrow<[usize]>,
            D: Borrow<[T1]>,
            C: Compression,
        {
            type Output = CsMatrix<<T1 as Div<$t>>::Output, MO, MI, Vec<<T1 as Div<$t>>::Output>, C>;
            fn div(self, rhs: $t) -> Self::Output {
                sp_cs_scalar_div(self, rhs)
            }
        }
    )*)
}

impl_sparse_scalar_product_and_div!(isize usize u8 i8 u16 i16 u32 i32 u64 i64 f32 f64);
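
// --- Illustrative usage sketch (added by this edit; not part of the original module) ---
//
// The helpers below are a minimal, hedged sketch of how the operator impls above are meant to be
// used. The module name and the function names (`add_csr_csc`, `mul_csr_csc`, `scale_csr`) are
// assumptions introduced purely for illustration; everything else (the operator impls, the
// `CsrMatrix`/`CscMatrix` output aliases, and the trait bounds) is taken directly from this file.
#[cfg(test)]
#[allow(dead_code)]
mod operator_usage_sketch {
    use super::*;

    // `CSR + CSC` resolves to the `Add` impl above, which forwards to `spadd_csr_csc` and
    // returns an owned CSR matrix.
    fn add_csr_csc<T, MO1, MO2, MI1, MI2, D1, D2>(
        lhs: CsMatrix<T, MO1, MI1, D1, CompressedRowStorage>,
        rhs: CsMatrix<T, MO2, MI2, D2, CompressedColumnStorage>,
    ) -> CsrMatrix<T>
    where
        T: Scalar + Add<T, Output = T>,
        MO1: Borrow<[usize]>,
        MO2: Borrow<[usize]>,
        MI1: Borrow<[usize]>,
        MI2: Borrow<[usize]>,
        D1: Borrow<[T]>,
        D2: Borrow<[T]>,
    {
        lhs + rhs
    }

    // `CSR * CSC` is the fast-path sparse-sparse product; the `Mul` impl above forwards to
    // `spmm_csr_csc`.
    fn mul_csr_csc<T, MO1, MO2, MI1, MI2, D1, D2>(
        lhs: CsMatrix<T, MO1, MI1, D1, CompressedRowStorage>,
        rhs: CsMatrix<T, MO2, MI2, D2, CompressedColumnStorage>,
    ) -> CsrMatrix<T>
    where
        T: Scalar + Mul<T, Output = T> + AddAssign + Zero,
        MO1: Borrow<[usize]>,
        MO2: Borrow<[usize]>,
        MI1: Borrow<[usize]>,
        MI2: Borrow<[usize]>,
        D1: Borrow<[T]>,
        D2: Borrow<[T]>,
    {
        lhs * rhs
    }

    // Scalar scaling uses the `Mul<f64>` impl generated by `impl_sparse_scalar_product_and_div!`
    // above; note that the data buffer of the result is always an owned `Vec`.
    fn scale_csr<MO, MI, D>(
        m: CsMatrix<f64, MO, MI, D, CompressedRowStorage>,
        factor: f64,
    ) -> CsMatrix<f64, MO, MI, Vec<f64>, CompressedRowStorage>
    where
        MO: Borrow<[usize]>,
        MI: Borrow<[usize]>,
        D: Borrow<[f64]>,
    {
        m * factor
    }
}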
matrix.columns_range_pair_mut(icol, icol + 1..);\n\n let mut axis = left.rows_range_mut(icol + shift..);\n\n\n\n let (reflection_norm, not_zero) = reflection_axis_mut(&mut axis);\n\n\n\n if not_zero {\n\n let refl = Reflection::new(Unit::new_unchecked(axis), T::zero());\n\n let sign = reflection_norm.clone().signum();\n\n if let Some(mut work) = bilateral {\n\n refl.reflect_rows_with_sign(&mut right, &mut work, sign.clone());\n\n }\n\n refl.reflect_with_sign(&mut right.rows_range_mut(icol + shift..), sign.conjugate());\n\n }\n\n\n\n reflection_norm\n\n}\n\n\n\n/// Uses an householder reflection to zero out the `irow`-th row, ending before the `shift + 1`-th\n\n/// superdiagonal element.\n\n///\n\n/// Returns the signed norm of the column.\n", "file_path": "src/linalg/householder.rs", "rank": 44, "score": 307348.4656777985 }, { "content": "#[doc(hidden)]\n\n#[must_use]\n\npub fn clear_row_unchecked<T: ComplexField, R: Dim, C: Dim>(\n\n matrix: &mut OMatrix<T, R, C>,\n\n axis_packed: &mut OVector<T, C>,\n\n work: &mut OVector<T, R>,\n\n irow: usize,\n\n shift: usize,\n\n) -> T\n\nwhere\n\n DefaultAllocator: Allocator<T, R, C> + Allocator<T, R> + Allocator<T, C>,\n\n{\n\n let (mut top, mut bottom) = matrix.rows_range_pair_mut(irow, irow + 1..);\n\n let mut axis = axis_packed.rows_range_mut(irow + shift..);\n\n axis.tr_copy_from(&top.columns_range(irow + shift..));\n\n\n\n let (reflection_norm, not_zero) = reflection_axis_mut(&mut axis);\n\n axis.conjugate_mut(); // So that reflect_rows actually cancels the first row.\n\n\n\n if not_zero {\n\n let refl = Reflection::new(Unit::new_unchecked(axis), T::zero());\n\n refl.reflect_rows_with_sign(\n", "file_path": "src/linalg/householder.rs", "rank": 45, "score": 307348.4656777985 }, { "content": "/// The `index`-th column of the matrix `m`.\n\n///\n\n/// # See also:\n\n///\n\n/// * [`row`](fn.row.html)\n\n/// * [`set_column`](fn.set_column.html)\n\n/// * [`set_row`](fn.set_row.html)\n\npub fn column<T: Scalar, const R: usize, const C: usize>(\n\n m: &TMat<T, R, C>,\n\n index: usize,\n\n) -> TVec<T, R> {\n\n m.column(index).into_owned()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtc/matrix_access.rs", "rank": 46, "score": 303858.31910207344 }, { "content": "/// The `index`-th row of the matrix `m`.\n\n///\n\n/// # See also:\n\n///\n\n/// * [`column`](fn.column.html)\n\n/// * [`set_column`](fn.set_column.html)\n\n/// * [`set_row`](fn.set_row.html)\n\npub fn row<T: Scalar, const R: usize, const C: usize>(\n\n m: &TMat<T, R, C>,\n\n index: usize,\n\n) -> TVec<T, C> {\n\n m.row(index).into_owned().transpose()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtc/matrix_access.rs", "rank": 47, "score": 303858.31910207344 }, { "content": "/// Sets to `x` the `index`-th column of the matrix `m`.\n\n///\n\n/// # See also:\n\n///\n\n/// * [`column`](fn.column.html)\n\n/// * [`row`](fn.row.html)\n\n/// * [`set_row`](fn.set_row.html)\n\npub fn set_column<T: Scalar, const R: usize, const C: usize>(\n\n m: &TMat<T, R, C>,\n\n index: usize,\n\n x: &TVec<T, R>,\n\n) -> TMat<T, R, C> {\n\n let mut res = m.clone();\n\n res.set_column(index, x);\n\n res\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtc/matrix_access.rs", "rank": 48, "score": 299732.46374366345 }, { "content": "/// Sets to `x` the `index`-th row of the matrix `m`.\n\n///\n\n/// # See also:\n\n///\n\n/// * [`column`](fn.column.html)\n\n/// * [`row`](fn.row.html)\n\n/// * [`set_column`](fn.set_column.html)\n\npub fn set_row<T: Scalar, const R: usize, const C: usize>(\n\n m: &TMat<T, R, C>,\n\n index: usize,\n\n 
x: &TVec<T, C>,\n\n) -> TMat<T, R, C> {\n\n let mut res = m.clone();\n\n res.set_row(index, &x.transpose());\n\n res\n\n}\n", "file_path": "nalgebra-glm/src/gtc/matrix_access.rs", "rank": 49, "score": 299732.46374366345 }, { "content": "/// Converts a dense matrix to [`CooMatrix`].\n\npub fn convert_dense_coo<T, R, C, S>(dense: &Matrix<T, R, C, S>) -> CooMatrix<T>\n\nwhere\n\n T: Scalar + Zero,\n\n R: Dim,\n\n C: Dim,\n\n S: RawStorage<T, R, C>,\n\n{\n\n let mut coo = CooMatrix::new(dense.nrows(), dense.ncols());\n\n\n\n for (index, v) in dense.iter().enumerate() {\n\n if v != &T::zero() {\n\n // We use the fact that matrix iteration is guaranteed to be column-major\n\n let i = index % dense.nrows();\n\n let j = index / dense.nrows();\n\n coo.push(i, j, v.clone());\n\n }\n\n }\n\n\n\n coo\n\n}\n\n\n", "file_path": "nalgebra-sparse/src/convert/serial.rs", "rank": 50, "score": 298210.1628926874 }, { "content": "/// A matrix reallocator. Changes the size of the memory buffer that initially contains (`RFrom` ×\n\n/// `CFrom`) elements to a smaller or larger size (`RTo`, `CTo`).\n\npub trait Reallocator<T: Scalar, RFrom: Dim, CFrom: Dim, RTo: Dim, CTo: Dim>:\n\n Allocator<T, RFrom, CFrom> + Allocator<T, RTo, CTo>\n\n{\n\n /// Reallocates a buffer of shape `(RTo, CTo)`, possibly reusing a previously allocated buffer\n\n /// `buf`. Data stored by `buf` are linearly copied to the output:\n\n ///\n\n /// # Safety\n\n /// The following invariants must be respected by the implementors of this method:\n\n /// * The copy is performed as if both were just arrays (without taking into account the matrix structure).\n\n /// * If the underlying buffer is being shrunk, the removed elements must **not** be dropped\n\n /// by this method. Dropping them is the responsibility of the caller.\n\n unsafe fn reallocate_copy(\n\n nrows: RTo,\n\n ncols: CTo,\n\n buf: <Self as Allocator<T, RFrom, CFrom>>::Buffer,\n\n ) -> <Self as Allocator<T, RTo, CTo>>::BufferUninit;\n\n}\n\n\n\n/// The number of rows of the result of a componentwise operation on two matrices.\n\npub type SameShapeR<R1, R2> = <ShapeConstraint as SameNumberOfRows<R1, R2>>::Representative;\n\n\n\n/// The number of columns of the result of a componentwise operation on two matrices.\n\npub type SameShapeC<C1, C2> = <ShapeConstraint as SameNumberOfColumns<C1, C2>>::Representative;\n\n\n", "file_path": "src/base/allocator.rs", "rank": 51, "score": 295274.5802473736 }, { "content": "/// Converts a matrix or vector to a mutable slice arranged in column-major order.\n\npub fn value_ptr_mut<T: Scalar, const R: usize, const C: usize>(x: &mut TMat<T, R, C>) -> &mut [T] {\n\n x.as_mut_slice()\n\n}\n", "file_path": "nalgebra-glm/src/gtc/type_ptr.rs", "rank": 52, "score": 295263.3980280218 }, { "content": "/// Constraints `D1` and `D2` to be equivalent.\n\npub trait DimEq<D1: Dim, D2: Dim> {\n\n /// This is either equal to `D1` or `D2`, always choosing the one (if any) which is a type-level\n\n /// constant.\n\n type Representative: Dim;\n\n}\n\n\n\nimpl<D: Dim> DimEq<D, D> for ShapeConstraint {\n\n type Representative = D;\n\n}\n\n\n\nimpl<D: DimName> DimEq<D, Dynamic> for ShapeConstraint {\n\n type Representative = D;\n\n}\n\n\n\nimpl<D: DimName> DimEq<Dynamic, D> for ShapeConstraint {\n\n type Representative = D;\n\n}\n\n\n\nmacro_rules! 
equality_trait_decl(\n\n ($($doc: expr, $Trait: ident),* $(,)*) => {$(\n", "file_path": "src/base/constraint.rs", "rank": 53, "score": 275796.6028990877 }, { "content": "/// Constraints D1 and D2 to be equivalent, where they both designate dimensions of algebraic\n\n/// entities (e.g. square matrices).\n\npub trait SameDimension<D1: Dim, D2: Dim>:\n\n SameNumberOfRows<D1, D2> + SameNumberOfColumns<D1, D2>\n\n{\n\n /// This is either equal to `D1` or `D2`, always choosing the one (if any) which is a type-level\n\n /// constant.\n\n type Representative: Dim;\n\n}\n\n\n\nimpl<D: Dim> SameDimension<D, D> for ShapeConstraint {\n\n type Representative = D;\n\n}\n\n\n\nimpl<D: DimName> SameDimension<D, Dynamic> for ShapeConstraint {\n\n type Representative = D;\n\n}\n\n\n\nimpl<D: DimName> SameDimension<Dynamic, D> for ShapeConstraint {\n\n type Representative = D;\n\n}\n", "file_path": "src/base/constraint.rs", "rank": 54, "score": 272557.4109196385 }, { "content": "/// The product of every component of the given matrix or vector.\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// # use nalgebra_glm as glm;\n\n/// let vec = glm::vec2(3.0, 4.0);\n\n/// assert_eq!(12.0, glm::comp_mul(&vec));\n\n///\n\n/// let mat = glm::mat2(1.0, 1.0, -3.0, 3.0);\n\n/// assert_eq!(-9.0, glm::comp_mul(&mat));\n\n/// ```\n\n///\n\n/// # See also:\n\n///\n\n/// * [`comp_add`](fn.comp_add.html)\n\n/// * [`comp_max`](fn.comp_max.html)\n\n/// * [`comp_min`](fn.comp_min.html)\n\npub fn comp_mul<T: Number, const R: usize, const C: usize>(m: &TMat<T, R, C>) -> T {\n\n m.iter().fold(T::one(), |x, y| x * *y)\n\n}\n\n\n\n//pub fn vec< L, floatType, Q > compNormalize (vec< L, T, Q > const &v)\n\n//pub fn vec< L, T, Q > compScale (vec< L, floatType, Q > const &v)\n", "file_path": "nalgebra-glm/src/gtx/component_wise.rs", "rank": 55, "score": 251895.1858163366 }, { "content": "/// Component-wise multiplication of two matrices.\n\npub fn matrix_comp_mult<T: Number, const R: usize, const C: usize>(\n\n x: &TMat<T, R, C>,\n\n y: &TMat<T, R, C>,\n\n) -> TMat<T, R, C> {\n\n x.component_mul(y)\n\n}\n\n\n", "file_path": "nalgebra-glm/src/matrix.rs", "rank": 56, "score": 248713.76236089703 }, { "content": "/// A matrix allocator of a memory buffer that may contain `R::to_usize() * C::to_usize()`\n\n/// elements of type `T`.\n\n///\n\n/// An allocator is said to be:\n\n/// − static: if `R` and `C` both implement `DimName`.\n\n/// − dynamic: if either one (or both) of `R` or `C` is equal to `Dynamic`.\n\n///\n\n/// Every allocator must be both static and dynamic. Though not all implementations may share the\n\n/// same `Buffer` type.\n\npub trait Allocator<T, R: Dim, C: Dim = U1>: Any + Sized {\n\n /// The type of buffer this allocator can instanciate.\n\n type Buffer: StorageMut<T, R, C> + IsContiguous + Clone + Debug;\n\n /// The type of buffer with uninitialized components this allocator can instanciate.\n\n type BufferUninit: RawStorageMut<MaybeUninit<T>, R, C> + IsContiguous;\n\n\n\n /// Allocates a buffer with the given number of rows and columns without initializing its content.\n\n fn allocate_uninit(nrows: R, ncols: C) -> Self::BufferUninit;\n\n\n\n /// Assumes a data buffer to be initialized.\n\n ///\n\n /// # Safety\n\n /// The user must make sure that every single entry of the buffer has been initialized,\n\n /// or Undefined Behavior will immediately occur. 
\n\n unsafe fn assume_init(uninit: Self::BufferUninit) -> Self::Buffer;\n\n\n\n /// Allocates a buffer initialized with the content of the given iterator.\n\n fn allocate_from_iterator<I: IntoIterator<Item = T>>(\n\n nrows: R,\n\n ncols: C,\n\n iter: I,\n\n ) -> Self::Buffer;\n\n}\n\n\n", "file_path": "src/base/allocator.rs", "rank": 57, "score": 245614.51725291932 }, { "content": "/// For each matrix or vector component `x` if `x >= 0`; otherwise, it returns `-x`.\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// # use nalgebra_glm as glm;\n\n/// let vec = glm::vec3(-1.0, 0.0, 2.0);\n\n/// assert_eq!(glm::vec3(1.0, 0.0, 2.0), glm::abs(&vec));\n\n///\n\n/// let mat = glm::mat2(-0.0, 1.0, -3.0, 2.0);\n\n/// assert_eq!(glm::mat2(0.0, 1.0, 3.0, 2.0), glm::abs(&mat));\n\n/// ```\n\n///\n\n/// # See also:\n\n///\n\n/// * [`sign`](fn.sign.html)\n\npub fn abs<T: Number, const R: usize, const C: usize>(x: &TMat<T, R, C>) -> TMat<T, R, C> {\n\n x.abs()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/common.rs", "rank": 58, "score": 238986.68128116944 }, { "content": "/// Treats the first parameter `c` as a column vector and the second parameter `r` as a row vector and does a linear algebraic matrix multiply `c * r`.\n\npub fn outer_product<T: Number, const R: usize, const C: usize>(\n\n c: &TVec<T, R>,\n\n r: &TVec<T, C>,\n\n) -> TMat<T, R, C> {\n\n c * r.transpose()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/matrix.rs", "rank": 59, "score": 238101.24028940362 }, { "content": "/// Perform a component-wise equal-to comparison of two matrices.\n\n///\n\n/// Return a boolean vector which components value is True if this expression is satisfied per column of the matrices.\n\npub fn equal_columns<T: Number, const R: usize, const C: usize>(\n\n x: &TMat<T, R, C>,\n\n y: &TMat<T, R, C>,\n\n) -> TVec<bool, C> {\n\n let mut res = TVec::<_, C>::repeat(false);\n\n\n\n for i in 0..C {\n\n res[i] = x.column(i) == y.column(i)\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "nalgebra-glm/src/ext/matrix_relationnal.rs", "rank": 60, "score": 231363.6624702662 }, { "content": "/// Perform a component-wise not-equal-to comparison of two matrices.\n\n///\n\n/// Return a boolean vector which components value is True if this expression is satisfied per column of the matrices.\n\npub fn not_equal_columns<T: Number, const R: usize, const C: usize>(\n\n x: &TMat<T, R, C>,\n\n y: &TMat<T, R, C>,\n\n) -> TVec<bool, C> {\n\n let mut res = TVec::<_, C>::repeat(false);\n\n\n\n for i in 0..C {\n\n res[i] = x.column(i) != y.column(i)\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "nalgebra-glm/src/ext/matrix_relationnal.rs", "rank": 61, "score": 231363.6624702662 }, { "content": "#[test]\n\nfn simple_scalar_mul() {\n\n let a = Matrix2x3::new(1.0, 2.0, 3.0, 4.0, 5.0, 6.0);\n\n\n\n let expected = Matrix2x3::new(10.0, 20.0, 30.0, 40.0, 50.0, 60.0);\n\n\n\n assert_eq!(expected, a * 10.0);\n\n assert_eq!(expected, &a * 10.0);\n\n assert_eq!(expected, 10.0 * a);\n\n assert_eq!(expected, 10.0 * &a);\n\n}\n\n\n", "file_path": "tests/core/matrix.rs", "rank": 62, "score": 228889.86294599844 }, { "content": "/// Returns the component-wise comparison of `|x - y| < epsilon`.\n\n///\n\n/// True if this expression is satisfied.\n\npub fn equal_columns_eps<T: Number, const R: usize, const C: usize>(\n\n x: &TMat<T, R, C>,\n\n y: &TMat<T, R, C>,\n\n epsilon: T,\n\n) -> TVec<bool, C> {\n\n equal_columns_eps_vec(x, y, &TVec::<_, C>::repeat(epsilon))\n\n}\n\n\n", "file_path": "nalgebra-glm/src/ext/matrix_relationnal.rs", "rank": 63, "score": 
228202.95242022967 }, { "content": "/// Returns the component-wise comparison of `|x - y| < epsilon`.\n\n///\n\n/// True if this expression is not satisfied.\n\npub fn not_equal_columns_eps<T: Number, const R: usize, const C: usize>(\n\n x: &TMat<T, R, C>,\n\n y: &TMat<T, R, C>,\n\n epsilon: T,\n\n) -> TVec<bool, C> {\n\n not_equal_columns_eps_vec(x, y, &TVec::<_, C>::repeat(epsilon))\n\n}\n\n\n", "file_path": "nalgebra-glm/src/ext/matrix_relationnal.rs", "rank": 64, "score": 228202.95242022967 }, { "content": "/// Returns the component-wise comparison on each matrix column `|x - y| < epsilon`.\n\n///\n\n/// True if this expression is satisfied.\n\npub fn equal_columns_eps_vec<T: Number, const R: usize, const C: usize>(\n\n x: &TMat<T, R, C>,\n\n y: &TMat<T, R, C>,\n\n epsilon: &TVec<T, C>,\n\n) -> TVec<bool, C> {\n\n let mut res = TVec::<_, C>::repeat(false);\n\n\n\n for i in 0..C {\n\n res[i] = (x.column(i) - y.column(i)).abs() < TVec::<_, R>::repeat(epsilon[i])\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "nalgebra-glm/src/ext/matrix_relationnal.rs", "rank": 65, "score": 225173.73828998615 }, { "content": "/// Returns the component-wise comparison of `|x - y| >= epsilon`.\n\n///\n\n/// True if this expression is not satisfied.\n\npub fn not_equal_columns_eps_vec<T: Number, const R: usize, const C: usize>(\n\n x: &TMat<T, R, C>,\n\n y: &TMat<T, R, C>,\n\n epsilon: &TVec<T, C>,\n\n) -> TVec<bool, C> {\n\n let mut res = TVec::<_, C>::repeat(false);\n\n\n\n for i in 0..C {\n\n res[i] = (x.column(i) - y.column(i)).abs() >= TVec::<_, R>::repeat(epsilon[i])\n\n }\n\n\n\n res\n\n}\n", "file_path": "nalgebra-glm/src/ext/matrix_relationnal.rs", "rank": 66, "score": 225168.25820585553 }, { "content": "#[test]\n\nfn test_matrix_output_types() {\n\n // Test that the dimension types are correct for the given inputs\n\n let _: MatrixStrategy<_, U3, U4> = matrix(-5..5, Const::<3>, Const::<4>);\n\n let _: MatrixStrategy<_, U3, U3> = matrix(-5..5, Const::<3>, Const::<3>);\n\n let _: MatrixStrategy<_, U3, Dynamic> = matrix(-5..5, Const::<3>, 1..=5);\n\n let _: MatrixStrategy<_, Dynamic, U3> = matrix(-5..5, 1..=5, Const::<3>);\n\n let _: MatrixStrategy<_, Dynamic, Dynamic> = matrix(-5..5, 1..=5, 1..=5);\n\n}\n\n\n\n// Below we have some tests to ensure that specific instances of OMatrix are usable\n\n// in a typical proptest scenario where we (implicitly) use the `Arbitrary` trait\n\nproptest! 
{\n\n #[test]\n\n fn ensure_arbitrary_test_compiles_matrix3(_: Matrix3<i32>) {}\n\n\n\n #[test]\n\n fn ensure_arbitrary_test_compiles_matrixmn_u3_dynamic(_: OMatrix<i32, U3, Dynamic>) {}\n\n\n\n #[test]\n\n fn ensure_arbitrary_test_compiles_matrixmn_dynamic_u3(_: OMatrix<i32, Dynamic, U3>) {}\n", "file_path": "tests/proptest/mod.rs", "rank": 67, "score": 223752.0655521622 }, { "content": "/// Trait for compressed column sparse matrix storage.\n\npub trait CsStorage<T, R, C = U1>: for<'a> CsStorageIter<'a, T, R, C> {\n\n /// The shape of the stored matrix.\n\n fn shape(&self) -> (R, C);\n\n /// Retrieve the i-th row index of the underlying row index buffer.\n\n ///\n\n /// # Safety\n\n /// No bound-checking is performed.\n\n unsafe fn row_index_unchecked(&self, i: usize) -> usize;\n\n /// The i-th value on the contiguous value buffer of this storage.\n\n ///\n\n /// # Safety\n\n /// No bound-checking is performed.\n\n unsafe fn get_value_unchecked(&self, i: usize) -> &T;\n\n /// The i-th value on the contiguous value buffer of this storage.\n\n fn get_value(&self, i: usize) -> &T;\n\n /// Retrieve the i-th row index of the underlying row index buffer.\n\n fn row_index(&self, i: usize) -> usize;\n\n /// The value indices for the `i`-th column.\n\n fn column_range(&self, i: usize) -> Range<usize>;\n\n /// The size of the value buffer (i.e. the entries known as possibly being non-zero).\n\n fn len(&self) -> usize;\n\n}\n\n\n", "file_path": "src/sparse/cs_matrix.rs", "rank": 68, "score": 221985.00450127496 }, { "content": "/// Converts a [`CsrMatrix`] to a dense matrix.\n\npub fn convert_csr_dense<T, MO, MI, D>(\n\n csr: &CsMatrix<T, MO, MI, D, CompressedRowStorage>,\n\n) -> DMatrix<T>\n\nwhere\n\n T: Scalar + ClosedAdd + Zero,\n\n MO: Borrow<[usize]>,\n\n MI: Borrow<[usize]>,\n\n D: Borrow<[T]>,\n\n{\n\n let mut output = DMatrix::zeros(csr.nrows(), csr.ncols());\n\n\n\n for (i, j, v) in csr.triplet_iter() {\n\n output[(i, j)] += v.clone();\n\n }\n\n\n\n output\n\n}\n\n\n", "file_path": "nalgebra-sparse/src/convert/serial.rs", "rank": 69, "score": 218675.0229154413 }, { "content": "/// Converts a [`CsrMatrix`] to a [`CscMatrix`].\n\npub fn convert_csr_csc<T, MO, MI, D>(\n\n csr: &CsMatrix<T, MO, MI, D, CompressedRowStorage>,\n\n) -> CscMatrix<T>\n\nwhere\n\n T: Scalar,\n\n MO: Borrow<[usize]>,\n\n MI: Borrow<[usize]>,\n\n D: Borrow<[T]>,\n\n{\n\n let (nrows, ncols) = csr.shape();\n\n\n\n let (counts, indices_and_data) = csr\n\n .minor_lane_iter()\n\n .map(|lane| {\n\n let (indices, data) = lane\n\n .map(|(i, v)| (i, v.clone()))\n\n .unzip::<_, _, Vec<_>, Vec<_>>();\n\n\n\n (indices.len(), (indices, data))\n\n })\n", "file_path": "nalgebra-sparse/src/convert/serial.rs", "rank": 70, "score": 218675.0229154413 }, { "content": "/// Converts a [`CsrMatrix`] to a [`CooMatrix`].\n\npub fn convert_csr_coo<T, MO, MI, D>(\n\n csr: &CsMatrix<T, MO, MI, D, CompressedRowStorage>,\n\n) -> CooMatrix<T>\n\nwhere\n\n T: Scalar,\n\n MO: Borrow<[usize]>,\n\n MI: Borrow<[usize]>,\n\n D: Borrow<[T]>,\n\n{\n\n let mut result = CooMatrix::new(csr.nrows(), csr.ncols());\n\n for (i, j, v) in csr.triplet_iter() {\n\n result.push(i, j, v.clone());\n\n }\n\n result\n\n}\n\n\n", "file_path": "nalgebra-sparse/src/convert/serial.rs", "rank": 71, "score": 218675.0229154413 }, { "content": "/// Converts a [`CscMatrix`] to a [`CooMatrix`].\n\npub fn convert_csc_coo<T, MO, MI, D>(\n\n csc: &CsMatrix<T, MO, MI, D, CompressedColumnStorage>,\n\n) -> CooMatrix<T>\n\nwhere\n\n T: Scalar,\n\n MO: Borrow<[usize]>,\n\n MI: 
Borrow<[usize]>,\n\n D: Borrow<[T]>,\n\n{\n\n let mut coo = CooMatrix::new(csc.nrows(), csc.ncols());\n\n for (i, j, v) in csc.triplet_iter() {\n\n coo.push(j, i, v.clone());\n\n }\n\n coo\n\n}\n\n\n", "file_path": "nalgebra-sparse/src/convert/serial.rs", "rank": 72, "score": 218675.0229154413 }, { "content": "/// Converts a [`CscMatrix`] to a [`CsrMatrix`].\n\npub fn convert_csc_csr<T, MO, MI, D>(\n\n csc: &CsMatrix<T, MO, MI, D, CompressedColumnStorage>,\n\n) -> CsrMatrix<T>\n\nwhere\n\n T: Scalar,\n\n MO: Borrow<[usize]>,\n\n MI: Borrow<[usize]>,\n\n D: Borrow<[T]>,\n\n{\n\n let (nrows, ncols) = csc.shape();\n\n\n\n let (counts, indices_and_data) = csc\n\n .minor_lane_iter()\n\n .map(|lane| {\n\n let (indices, data) = lane\n\n .map(|(i, v)| (i, v.clone()))\n\n .unzip::<_, _, Vec<_>, Vec<_>>();\n\n\n\n (indices.len(), (indices, data))\n\n })\n", "file_path": "nalgebra-sparse/src/convert/serial.rs", "rank": 73, "score": 218675.0229154413 }, { "content": "/// Converts a [`CscMatrix`] to a dense matrix.\n\npub fn convert_csc_dense<T, MO, MI, D>(\n\n csc: &CsMatrix<T, MO, MI, D, CompressedColumnStorage>,\n\n) -> DMatrix<T>\n\nwhere\n\n T: Scalar + ClosedAdd + Zero,\n\n MO: Borrow<[usize]>,\n\n MI: Borrow<[usize]>,\n\n D: Borrow<[T]>,\n\n{\n\n let mut output = DMatrix::zeros(csc.nrows(), csc.ncols());\n\n\n\n for (i, j, v) in csc.triplet_iter() {\n\n output[(j, i)] += v.clone();\n\n }\n\n\n\n output\n\n}\n\n\n", "file_path": "nalgebra-sparse/src/convert/serial.rs", "rank": 74, "score": 218675.0229154413 }, { "content": "/// The sum of every component of the given matrix or vector.\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// # use nalgebra_glm as glm;\n\n/// let vec = glm::vec2(3.0, 4.0);\n\n/// assert_eq!(7.0, glm::comp_add(&vec));\n\n///\n\n/// let mat = glm::mat2(0.0, 1.0, -3.0, 3.0);\n\n/// assert_eq!(1.0, glm::comp_add(&mat));\n\n/// ```\n\n///\n\n/// # See also:\n\n///\n\n/// * [`comp_max`](fn.comp_max.html)\n\n/// * [`comp_min`](fn.comp_min.html)\n\n/// * [`comp_mul`](fn.comp_mul.html)\n\npub fn comp_add<T: Number, const R: usize, const C: usize>(m: &TMat<T, R, C>) -> T {\n\n m.iter().fold(T::zero(), |x, y| x + *y)\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/component_wise.rs", "rank": 75, "score": 216931.57264362293 }, { "content": "/// The minimum of every component of the given matrix or vector.\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// # use nalgebra_glm as glm;\n\n/// let vec = glm::vec2(3.0, 4.0);\n\n/// assert_eq!(3.0, glm::comp_min(&vec));\n\n///\n\n/// let mat = glm::mat2(0.0, 1.0, -3.0, 3.0);\n\n/// assert_eq!(-3.0, glm::comp_min(&mat));\n\n/// ```\n\n///\n\n/// # See also:\n\n///\n\n/// * [`comp_add`](fn.comp_add.html)\n\n/// * [`comp_max`](fn.comp_max.html)\n\n/// * [`comp_mul`](fn.comp_mul.html)\n\n/// * [`min`](fn.min.html)\n\n/// * [`min2`](fn.min2.html)\n\n/// * [`min3`](fn.min3.html)\n\n/// * [`min4`](fn.min4.html)\n\npub fn comp_min<T: Number, const R: usize, const C: usize>(m: &TMat<T, R, C>) -> T {\n\n m.iter()\n\n .fold(T::max_value(), |x, y| crate::min2_scalar(x, *y))\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/component_wise.rs", "rank": 76, "score": 216931.2858801433 }, { "content": "/// The maximum of every component of the given matrix or vector.\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// # use nalgebra_glm as glm;\n\n/// let vec = glm::vec2(3.0, 4.0);\n\n/// assert_eq!(4.0, glm::comp_max(&vec));\n\n///\n\n/// let mat = glm::mat2(0.0, 1.0, -3.0, 3.0);\n\n/// assert_eq!(3.0, glm::comp_max(&mat));\n\n/// ```\n\n///\n\n/// # See 
also:\n\n///\n\n/// * [`comp_add`](fn.comp_add.html)\n\n/// * [`comp_max`](fn.comp_max.html)\n\n/// * [`comp_min`](fn.comp_min.html)\n\n/// * [`max`](fn.max.html)\n\n/// * [`max2`](fn.max2.html)\n\n/// * [`max3`](fn.max3.html)\n\n/// * [`max4`](fn.max4.html)\n\npub fn comp_max<T: Number, const R: usize, const C: usize>(m: &TMat<T, R, C>) -> T {\n\n m.iter()\n\n .fold(T::min_value(), |x, y| crate::max2_scalar(x, *y))\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/component_wise.rs", "rank": 77, "score": 216925.8514128753 }, { "content": "fn mat_div_scalar(b: &mut criterion::Criterion) {\n\n let a = DMatrix::from_row_slice(1000, 1000, &vec![2.0; 1000000]);\n\n let n = 42.0;\n\n\n\n b.bench_function(\"mat_div_scalar\", move |bh| {\n\n bh.iter(|| {\n\n let mut aa = a.clone();\n\n let mut b = aa.slice_mut((0, 0), (1000, 1000));\n\n b /= n\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/core/matrix.rs", "rank": 78, "score": 208124.36702737104 }, { "content": "/// Trait for compressed column sparse matrix mutable storage.\n\npub trait CsStorageMut<T, R, C = U1>:\n\n CsStorage<T, R, C> + for<'a> CsStorageIterMut<'a, T, R, C>\n\n{\n\n}\n\n\n\n/// A storage of column-compressed sparse matrix based on a Vec.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct CsVecStorage<T: Scalar, R: Dim, C: Dim>\n\nwhere\n\n DefaultAllocator: Allocator<usize, C>,\n\n{\n\n pub(crate) shape: (R, C),\n\n pub(crate) p: OVector<usize, C>,\n\n pub(crate) i: Vec<usize>,\n\n pub(crate) vals: Vec<T>,\n\n}\n\n\n\nimpl<T: Scalar, R: Dim, C: Dim> CsVecStorage<T, R, C>\n\nwhere\n\n DefaultAllocator: Allocator<usize, C>,\n", "file_path": "src/sparse/cs_matrix.rs", "rank": 79, "score": 201210.18936609966 }, { "content": "// TODO: this structure exists for now only because impl trait\n\n// cannot be used for trait method return types.\n\n/// Trait for iterable compressed-column matrix storage.\n\npub trait CsStorageIter<'a, T, R, C = U1> {\n\n /// Iterator through all the rows of a specific columns.\n\n ///\n\n /// The elements are given as a tuple (`row_index`, value).\n\n type ColumnEntries: Iterator<Item = (usize, T)>;\n\n /// Iterator through the row indices of a specific column.\n\n type ColumnRowIndices: Iterator<Item = usize>;\n\n\n\n /// Iterates through all the row indices of the j-th column.\n\n fn column_row_indices(&'a self, j: usize) -> Self::ColumnRowIndices;\n\n /// Iterates through all the entries of the j-th column.\n\n fn column_entries(&'a self, j: usize) -> Self::ColumnEntries;\n\n}\n\n\n", "file_path": "src/sparse/cs_matrix.rs", "rank": 80, "score": 197898.573900857 }, { "content": "/// Trait for mutably iterable compressed-column sparse matrix storage.\n\npub trait CsStorageIterMut<'a, T: 'a, R, C = U1> {\n\n /// Mutable iterator through all the values of the sparse matrix.\n\n type ValuesMut: Iterator<Item = &'a mut T>;\n\n /// Mutable iterator through all the rows of a specific columns.\n\n ///\n\n /// The elements are given as a tuple (`row_index`, value).\n\n type ColumnEntriesMut: Iterator<Item = (usize, &'a mut T)>;\n\n\n\n /// A mutable iterator through the values buffer of the sparse matrix.\n\n fn values_mut(&'a mut self) -> Self::ValuesMut;\n\n /// Iterates mutably through all the entries of the j-th column.\n\n fn column_entries_mut(&'a mut self, j: usize) -> Self::ColumnEntriesMut;\n\n}\n\n\n", "file_path": "src/sparse/cs_matrix.rs", "rank": 81, "score": 191902.73086076992 }, { "content": "#[test]\n\nfn empty_matrix_mul_matrix() {\n\n let m1 = DMatrix::<f32>::zeros(3, 0);\n\n let m2 = 
DMatrix::<f32>::zeros(0, 4);\n\n assert_eq!(m1 * m2, DMatrix::zeros(3, 4));\n\n\n\n // Still works with larger matrices.\n\n let m1 = DMatrix::<f32>::zeros(13, 0);\n\n let m2 = DMatrix::<f32>::zeros(0, 14);\n\n assert_eq!(m1 * m2, DMatrix::zeros(13, 14));\n\n}\n\n\n", "file_path": "tests/core/empty.rs", "rank": 82, "score": 186830.93115718005 }, { "content": "#[test]\n\nfn empty_matrix_tr_mul_matrix() {\n\n let m1 = DMatrix::<f32>::zeros(0, 3);\n\n let m2 = DMatrix::<f32>::zeros(0, 4);\n\n assert_eq!(m1.tr_mul(&m2), DMatrix::zeros(3, 4));\n\n}\n\n\n", "file_path": "tests/core/empty.rs", "rank": 83, "score": 183336.7509466903 }, { "content": "#[test]\n\nfn simple_mul() {\n\n let a = Matrix2x3::new(1.0, 2.0, 3.0, 4.0, 5.0, 6.0);\n\n\n\n let b = Matrix3x4::new(\n\n 10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0,\n\n );\n\n\n\n let expected = Matrix2x4::new(380.0, 440.0, 500.0, 560.0, 830.0, 980.0, 1130.0, 1280.0);\n\n\n\n assert_eq!(expected, &a * &b);\n\n assert_eq!(expected, a * &b);\n\n assert_eq!(expected, &a * b);\n\n assert_eq!(expected, a * b);\n\n}\n\n\n", "file_path": "tests/core/matrix.rs", "rank": 84, "score": 182247.94620781322 }, { "content": "#[inline]\n\npub fn zero<T: Zero>() -> T {\n\n T::zero()\n\n}\n\n\n\n/*\n\n *\n\n * Ordering\n\n *\n\n */\n\n// XXX: this is very naive and could probably be optimized for specific types.\n\n// XXX: also, we might just want to use divisions, but assuming `val` is usually not far from `min`\n\n// or `max`, would it still be more efficient?\n\n/// Wraps `val` into the range `[min, max]` using modular arithmetics.\n\n///\n\n/// The range must not be empty.\n", "file_path": "src/lib.rs", "rank": 85, "score": 181180.68711296236 }, { "content": "/// Applies in-place a modified Parlett and Reinsch matrix balancing with 2-norm to the matrix and returns\n\n/// the corresponding diagonal transformation.\n\n///\n\n/// See <https://arxiv.org/pdf/1401.5766.pdf>\n\npub fn balance_parlett_reinsch<T: RealField, D: Dim>(matrix: &mut OMatrix<T, D, D>) -> OVector<T, D>\n\nwhere\n\n DefaultAllocator: Allocator<T, D, D> + Allocator<T, D>,\n\n{\n\n assert!(matrix.is_square(), \"Unable to balance a non-square matrix.\");\n\n\n\n let dim = matrix.shape_generic().0;\n\n let radix: T = crate::convert(2.0f64);\n\n let mut d = OVector::from_element_generic(dim, Const::<1>, T::one());\n\n\n\n let mut converged = false;\n\n\n\n while !converged {\n\n converged = true;\n\n\n\n for i in 0..dim.value() {\n\n let mut n_col = matrix.column(i).norm_squared();\n\n let mut n_row = matrix.row(i).norm_squared();\n\n let mut f = T::one();\n\n\n", "file_path": "src/linalg/balancing.rs", "rank": 86, "score": 180754.1087147325 }, { "content": "fn use_dedicated_types() {\n\n let iso = Isometry2::new(Vector2::new(1.0, 1.0), f32::consts::PI);\n\n let pt = Point2::new(1.0, 0.0);\n\n let vec = Vector2::x();\n\n\n\n let transformed_pt = iso * pt;\n\n let transformed_vec = iso * vec;\n\n\n\n assert_relative_eq!(transformed_pt, Point2::new(0.0, 1.0));\n\n assert_relative_eq!(transformed_vec, Vector2::new(-1.0, 0.0));\n\n}\n\n\n", "file_path": "examples/homogeneous_coordinates.rs", "rank": 87, "score": 178831.8645793533 }, { "content": "#[test]\n\nfn simple_scalar_conversion() {\n\n let a = Matrix2x3::new(1.0, 2.0, 3.0, 4.0, 5.0, 6.0);\n\n let expected = Matrix2x3::new(1, 2, 3, 4, 5, 6);\n\n\n\n let a_u32: Matrix2x3<u32> = na::try_convert(a).unwrap(); // f32 -> u32\n\n let a_f32: Matrix2x3<f32> = na::convert(a_u32); // u32 -> f32\n\n\n\n assert_eq!(a, a_f32);\n\n 
assert_eq!(expected, a_u32);\n\n}\n\n\n", "file_path": "tests/core/matrix.rs", "rank": 88, "score": 177932.64622289568 }, { "content": "pub fn eulerAngleXZX<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 89, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleXYZ<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 90, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleYXY<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 91, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleZYX<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 92, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleYZY<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 93, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleZXY<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 94, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleZXZ<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 95, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleYZX<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 96, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleZYZ<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 97, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleXZY<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 98, "score": 176054.79451086617 }, { "content": "pub fn eulerAngleXYX<T: RealNumber>(t1: T, t2: T, t3: T) -> TMat4<T> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "nalgebra-glm/src/gtx/euler_angles.rs", "rank": 99, "score": 176054.79451086617 } ]
Rust
smart_contracts/interpreter/iflow/lib.rs
Liqum/rust-onchain-execution-engine
9db88f17283a320ac896b4431a7579384ae22f8f
#![feature(proc_macro_hygiene)]
#![cfg_attr(not(feature = "std"), no_std)]

use ink_lang as ink;

#[ink::contract(version = "0.1.0")]
mod iflow {
    use ink_core::storage::{self, Flush};
    use ink_prelude::vec::Vec;

    #[ink(storage)]
    struct Iflow {
        start_event: storage::Value<u128>,
        factory: storage::Value<AccountId>,
        interpreter: storage::Value<AccountId>,
        cond_table: storage::HashMap<u128, [u128; 3]>,
        next_elem: storage::HashMap<u128, Vec<u128>>,
        subprocesses: storage::Value<Vec<u128>>,
        events: storage::Value<Vec<u128>>,
        attached_to: storage::HashMap<u128, u128>,
        event_code: storage::HashMap<u128, [u8; 32]>,
        parent_references: storage::HashMap<u128, AccountId>,
        instance_count: storage::HashMap<u128, u128>,
    }

    impl Iflow {
        #[ink(constructor)]
        fn new(&mut self) {
            self.start_event.set(0);
            self.factory.set(AccountId::default());
            self.interpreter.set(AccountId::default());
            self.events.set(Vec::new());
        }

        #[ink(message)]
        fn get_pre_condition(&self, element_index: u128) -> u128 {
            self.cond_table
                .get(&element_index)
                .map_or(0, |cond| cond[0])
        }

        #[ink(message)]
        fn get_post_condition(&self, element_index: u128) -> u128 {
            self.cond_table
                .get(&element_index)
                .map_or(0, |cond| cond[1])
        }

        #[ink(message)]
        fn get_type_info(&self, element_index: u128) -> u128 {
            self.cond_table
                .get(&element_index)
                .map_or(0, |cond| cond[2])
        }

        #[ink(message)]
        fn get_first_element(&self) -> u128 {
            *self.start_event
        }

        #[ink(message)]
        fn get_element_info(&self, element_index: u128) -> ([u128; 3], Vec<u128>) {
            (
                *self.cond_table.get(&element_index).unwrap_or(&[0; 3]),
                self.next_elem
                    .get(&element_index)
                    .unwrap_or(&Vec::default())
                    .clone(),
            )
        }

        #[ink(message)]
        fn get_ady_elements(&self, element_index: u128) -> Vec<u128> {
            self.next_elem
                .get(&element_index)
                .unwrap_or(&Vec::default())
                .clone()
        }

        #[ink(message)]
        fn get_subprocess_list(&self) -> Vec<u128> {
            self.subprocesses.clone()
        }

        #[ink(message)]
        fn get_instance_count(&self, element_index: u128) -> u128 {
            *self.instance_count.get(&element_index).unwrap_or(&0)
        }

        #[ink(message)]
        fn get_event_code(&self, element_index: u128) -> [u8; 32] {
            *self.event_code.get(&element_index).unwrap_or(&[0; 32])
        }

        #[ink(message)]
        fn get_event_list(&self) -> Vec<u128> {
            self.events.clone()
        }

        #[ink(message)]
        fn get_attached_to(&self, element_index: u128) -> u128 {
            *self.attached_to.get(&element_index).unwrap_or(&0)
        }

        #[ink(message)]
        fn get_subprocess_instance(&self, element_index: u128) -> AccountId {
            *self
                .parent_references
                .get(&element_index)
                .unwrap_or(&AccountId::default())
        }

        #[ink(message)]
        fn get_factory_instance(&self) -> AccountId {
            *self.factory
        }

        #[ink(message)]
        fn set_factory_instance(&mut self, _factory: AccountId) {
            self.factory.set(_factory)
        }

        #[ink(message)]
        fn get_interpreter_instance(&self) -> AccountId {
            *self.interpreter
        }

        #[ink(message)]
        fn set_interpreter_instance(&mut self, _inerpreter: AccountId) {
            self.interpreter.set(_inerpreter)
        }

        #[ink(message)]
        fn set_element(
            &mut self,
            element_index: u128,
            pre_condition: u128,
            post_condition: u128,
            type_info: u128,
            event_code: [u8; 32],
            _next_elem: Vec<u128>,
        ) {
            let _type_info = self.get_type_info(element_index);
            match _type_info {
                0 => {
                    if type_info & 4 == 4 {
                        self.events.push(element_index);
                        if type_info & 36 == 36 {
                            self.start_event.set(element_index);
                        }
                        self.event_code.insert(element_index, event_code);
                    } else if type_info & 33 == 33 {
                        self.subprocesses.push(element_index);
                    }
                }
                _ => {
                    if type_info != _type_info {
                        return;
                    }
                }
            }
            self.cond_table
                .insert(element_index, [pre_condition, post_condition, type_info]);
            self.next_elem.insert(element_index, _next_elem);
        }

        #[ink(message)]
        fn link_sub_process(
            &mut self,
            parent_index: u128,
            child_flow_inst: AccountId,
            attached_events: Vec<u128>,
            count_instances: u128,
        ) {
            if self.get_type_info(parent_index) & 33 != 33 {
                return;
            }
            self.parent_references.insert(parent_index, child_flow_inst);
            for attached_event in attached_events.iter() {
                if self.get_type_info(parent_index) & 4 == 4 {
                    self.attached_to.insert(*attached_event, parent_index);
                }
            }
            self.instance_count.insert(parent_index, count_instances);
        }
    }
}
#![feature(proc_macro_hygiene)]
#![cfg_attr(not(feature = "std"), no_std)]

use ink_lang as ink;

#[ink::contract(version = "0.1.0")]
mod iflow {
    use ink_core::storage::{self, Flush};
    use ink_prelude::vec::Vec;

    #[ink(storage)]
    struct Iflow {
        start_event: storage::Value<u128>,
        factory: storage::Value<AccountId>,
        interpreter: storage::Value<AccountId>,
        cond_table: storage::HashMap<u128, [u128; 3]>,
        next_elem: storage::HashMap<u128, Vec<u128>>,
        subprocesses: storage::Value<Vec<u128>>,
        events: storage::Value<Vec<u128>>,
        attached_to: storage::HashMap<u128, u128>,
        event_code: storage::HashMap<u128, [u8; 32]>,
        parent_references: storage::HashMap<u128, AccountId>,
        instance_count: storage::HashMap<u128, u128>,
    }

    impl Iflow {
        #[ink(constructor)]
        fn new(&mut self) {
            self.start_event.set(0);
            self.factory.set(AccountId::default());
            self.interpreter.set(AccountId::default());
            self.events.set(Vec::new());
        }

        #[ink(message)]
        fn get_pre_condition(&self, element_index: u128) -> u128 {
            self.cond_ta
33 {
                        self.subprocesses.push(element_index);
                    }
                }
                _ => {
                    if type_info != _type_info {
                        return;
                    }
                }
            }
            self.cond_table
                .insert(element_index, [pre_condition, post_condition, type_info]);
            self.next_elem.insert(element_index, _next_elem);
        }

        #[ink(message)]
        fn link_sub_process(
            &mut self,
            parent_index: u128,
            child_flow_inst: AccountId,
            attached_events: Vec<u128>,
            count_instances: u128,
        ) {
            if self.get_type_info(parent_index) & 33 != 33 {
                return;
            }
            self.parent_references.insert(parent_index, child_flow_inst);
            for attached_event in attached_events.iter() {
                if self.get_type_info(parent_index) & 4 == 4 {
                    self.attached_to.insert(*attached_event, parent_index);
                }
            }
            self.instance_count.insert(parent_index, count_instances);
        }
    }
}
ble
                .get(&element_index)
                .map_or(0, |cond| cond[0])
        }

        #[ink(message)]
        fn get_post_condition(&self, element_index: u128) -> u128 {
            self.cond_table
                .get(&element_index)
                .map_or(0, |cond| cond[1])
        }

        #[ink(message)]
        fn get_type_info(&self, element_index: u128) -> u128 {
            self.cond_table
                .get(&element_index)
                .map_or(0, |cond| cond[2])
        }

        #[ink(message)]
        fn get_first_element(&self) -> u128 {
            *self.start_event
        }

        #[ink(message)]
        fn get_element_info(&self, element_index: u128) -> ([u128; 3], Vec<u128>) {
            (
                *self.cond_table.get(&element_index).unwrap_or(&[0; 3]),
                self.next_elem
                    .get(&element_index)
                    .unwrap_or(&Vec::default())
                    .clone(),
            )
        }

        #[ink(message)]
        fn get_ady_elements(&self, element_index: u128) -> Vec<u128> {
            self.next_elem
                .get(&element_index)
                .unwrap_or(&Vec::default())
                .clone()
        }

        #[ink(message)]
        fn get_subprocess_list(&self) -> Vec<u128> {
            self.subprocesses.clone()
        }

        #[ink(message)]
        fn get_instance_count(&self, element_index: u128) -> u128 {
            *self.instance_count.get(&element_index).unwrap_or(&0)
        }

        #[ink(message)]
        fn get_event_code(&self, element_index: u128) -> [u8; 32] {
            *self.event_code.get(&element_index).unwrap_or(&[0; 32])
        }

        #[ink(message)]
        fn get_event_list(&self) -> Vec<u128> {
            self.events.clone()
        }

        #[ink(message)]
        fn get_attached_to(&self, element_index: u128) -> u128 {
            *self.attached_to.get(&element_index).unwrap_or(&0)
        }

        #[ink(message)]
        fn get_subprocess_instance(&self, element_index: u128) -> AccountId {
            *self
                .parent_references
                .get(&element_index)
                .unwrap_or(&AccountId::default())
        }

        #[ink(message)]
        fn get_factory_instance(&self) -> AccountId {
            *self.factory
        }

        #[ink(message)]
        fn set_factory_instance(&mut self, _factory: AccountId) {
            self.factory.set(_factory)
        }

        #[ink(message)]
        fn get_interpreter_instance(&self) -> AccountId {
            *self.interpreter
        }

        #[ink(message)]
        fn set_interpreter_instance(&mut self, _inerpreter: AccountId) {
            self.interpreter.set(_inerpreter)
        }

        #[ink(message)]
        fn set_element(
            &mut self,
            element_index: u128,
            pre_condition: u128,
            post_condition: u128,
            type_info: u128,
            event_code: [u8; 32],
            _next_elem: Vec<u128>,
        ) {
            let _type_info = self.get_type_info(element_index);
            match _type_info {
                0 => {
                    if type_info & 4 == 4 {
                        self.events.push(element_index);
                        if type_info & 36 == 36 {
                            self.start_event.set(element_index);
                        }
                        self.event_code.insert(element_index, event_code);
                    } else if type_info & 33 ==
random
[ { "content": "/// Construct a `BpmnInterpreter::continue_execution` call\n\npub fn continue_execution(instance_id: u64, element_index: u128) -> Call {\n\n BpmnInterpreter::continue_execution(instance_id, element_index).into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{calls, NodeRuntimeTypes};\n\n use super::Call;\n\n\n\n use node_runtime::{self, Runtime, InstanceId};\n\n use pallet_indices::address;\n\n use scale::{Decode, Encode};\n\n\n\n\n\n #[test]\n\n fn call_continue_execution() {\n\n let element_index = 0;\n\n let instance_id: u64 = 0;\n\n\n\n let contract_continue_execution =\n", "file_path": "liqum-node/ink-types-node-runtime/src/calls.rs", "rank": 0, "score": 94901.42617377616 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(config: Configuration) -> Result<impl AbstractService, ServiceError> {\n\n let role = config.role.clone();\n\n let force_authoring = config.force_authoring;\n\n let name = config.network.node_name.clone();\n\n let disable_grandpa = config.disable_grandpa;\n\n\n\n let (builder, mut import_setup, inherent_data_providers) = new_full_start!(config);\n\n\n\n let (block_import, grandpa_link) = import_setup.take().expect(\n\n \"Link Half and Block Import are present for Full Services or setup failed before. qed\",\n\n );\n\n\n\n let service = builder\n\n .with_finality_proof_provider(|client, backend| {\n\n // GenesisAuthoritySetProvider is implemented for StorageAndProofProvider\n\n let provider = client as Arc<dyn StorageAndProofProvider<_, _>>;\n\n Ok(Arc::new(GrandpaFinalityProofProvider::new(backend, provider)) as _)\n\n })?\n\n .build()?;\n\n\n", "file_path": "liqum-node/node/src/service.rs", "rank": 1, "score": 62432.35545859572 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(config: Configuration) -> Result<impl AbstractService, ServiceError> {\n\n let inherent_data_providers = InherentDataProviders::new();\n\n\n\n ServiceBuilder::new_light::<Block, RuntimeApi, Executor>(config)?\n\n .with_select_chain(|_config, backend| Ok(LongestChain::new(backend.clone())))?\n\n .with_transaction_pool(|builder| {\n\n let fetcher = builder\n\n .fetcher()\n\n .ok_or_else(|| \"Trying to start light transaction pool without active fetcher\")?;\n\n\n\n let pool_api =\n\n sc_transaction_pool::LightChainApi::new(builder.client().clone(), fetcher.clone());\n\n let pool = sc_transaction_pool::BasicPool::with_revalidation_type(\n\n builder.config().transaction_pool.clone(),\n\n Arc::new(pool_api),\n\n builder.prometheus_registry(),\n\n sc_transaction_pool::RevalidationType::Light,\n\n );\n\n Ok(pool)\n\n })?\n", "file_path": "liqum-node/node/src/service.rs", "rank": 2, "score": 62432.35545859572 }, { "content": "fn main() {\n\n generate_cargo_keys();\n\n\n\n rerun_if_git_head_changed();\n\n}\n", "file_path": "liqum-node/node/build.rs", "rank": 13, "score": 41066.0653305315 }, { "content": "fn main() {\n\n WasmBuilder::new()\n\n .with_current_project()\n\n .with_wasm_builder_from_crates(\"1.0.11\")\n\n .export_heap_base()\n\n .import_memory()\n\n .build()\n\n}\n", "file_path": "liqum-node/runtime/build.rs", "rank": 14, "score": 41066.0653305315 }, { "content": "/// Helper function to generate an account ID from seed\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n AccountPublic: From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "liqum-node/node/src/chain_spec.rs", "rank": 
15, "score": 40094.897735844315 }, { "content": "fn testnet_genesis(\n\n initial_authorities: Vec<(AuraId, GrandpaId)>,\n\n root_key: AccountId,\n\n endowed_accounts: Vec<AccountId>,\n\n enable_println: bool,\n\n) -> GenesisConfig {\n\n GenesisConfig {\n\n system: Some(SystemConfig {\n\n code: WASM_BINARY.to_vec(),\n\n changes_trie_config: Default::default(),\n\n }),\n\n balances: Some(BalancesConfig {\n\n balances: endowed_accounts\n\n .iter()\n\n .cloned()\n\n .map(|k| (k, 1 << 60))\n\n .collect(),\n\n }),\n\n aura: Some(AuraConfig {\n\n authorities: initial_authorities.iter().map(|x| (x.0.clone())).collect(),\n", "file_path": "liqum-node/node/src/chain_spec.rs", "rank": 16, "score": 38162.511524445494 }, { "content": "fn main() -> sc_cli::Result<()> {\n\n command::run()\n\n}\n", "file_path": "liqum-node/node/src/main.rs", "rank": 17, "score": 35788.37091307872 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn native_version() -> NativeVersion {\n\n NativeVersion {\n\n runtime_version: VERSION,\n\n can_author_with: Default::default(),\n\n }\n\n}\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: BlockNumber = 2400;\n\n /// We allow for 2 seconds of compute with a 6 second average block time.\n\n pub const MaximumBlockWeight: Weight = 2 * WEIGHT_PER_SECOND;\n\n pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);\n\n /// Assume 10% of weight for average on_initialize calls.\n\n pub MaximumExtrinsicWeight: Weight = AvailableBlockRatio::get()\n\n .saturating_sub(Perbill::from_percent(10)) * MaximumBlockWeight::get();\n\n pub const MaximumBlockLength: u32 = 5 * 1024 * 1024;\n\n pub const Version: RuntimeVersion = VERSION;\n\n}\n\n\n\nimpl system::Trait for Runtime {\n", "file_path": "liqum-node/runtime/src/lib.rs", "rank": 18, "score": 34992.55686350366 }, { "content": "/// Parse and run command line arguments\n\npub fn run() -> sc_cli::Result<()> {\n\n let cli = Cli::from_args();\n\n\n\n match &cli.subcommand {\n\n Some(subcommand) => {\n\n let runner = cli.create_runner(subcommand)?;\n\n runner.run_subcommand(subcommand, |config| Ok(new_full_start!(config).0))\n\n }\n\n None => {\n\n let runner = cli.create_runner(&cli.run)?;\n\n runner.run_node(\n\n service::new_light,\n\n service::new_full,\n\n liqum_node_runtime::VERSION,\n\n )\n\n }\n\n }\n\n}\n", "file_path": "liqum-node/node/src/command.rs", "rank": 19, "score": 34384.61709819901 }, { "content": "pub fn development_config() -> ChainSpec {\n\n ChainSpec::from_genesis(\n\n \"Development\",\n\n \"dev\",\n\n ChainType::Development,\n\n || {\n\n testnet_genesis(\n\n vec![authority_keys_from_seed(\"Alice\")],\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n vec![\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Bob\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice//stash\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Bob//stash\"),\n\n ],\n\n true,\n\n )\n\n },\n\n vec![],\n\n None,\n\n None,\n\n None,\n\n None,\n\n )\n\n}\n\n\n", "file_path": "liqum-node/node/src/chain_spec.rs", "rank": 20, "score": 34234.99047620362 }, { "content": "pub fn local_testnet_config() -> ChainSpec {\n\n ChainSpec::from_genesis(\n\n \"Local Testnet\",\n\n \"local_testnet\",\n\n ChainType::Local,\n\n || {\n\n testnet_genesis(\n\n vec![\n\n authority_keys_from_seed(\"Alice\"),\n\n authority_keys_from_seed(\"Bob\"),\n\n ],\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n vec![\n\n 
get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Bob\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Charlie\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Dave\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Eve\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Ferdie\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice//stash\"),\n", "file_path": "liqum-node/node/src/chain_spec.rs", "rank": 21, "score": 33523.34221241018 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse ink_lang as ink;\n\n\n\n#[ink::contract(version = \"0.1.0\")]\n\nmod bpmn_interpreter {\n\n use ink_core::env::call::*;\n\n use ink_core::env::EnvError;\n\n use ink_core::storage::{self, Flush};\n\n use ink_prelude::vec::Vec;\n\n use lazy_static::lazy_static;\n\n\n\n lazy_static! {\n\n // ifactory\n\n static ref NEW_INST: Selector = Selector::from([0xE8, 0xF9, 0xD4, 0xF7]);\n\n // iflow\n\n static ref GET_FACTORY_INSTANCE: Selector = Selector::from([0x17, 0x8E, 0x4F, 0x8E]);\n\n static ref GET_SUB_PROC_INSTANCE: Selector = Selector::from([0xF0,0xF9,0xF5,0xB7]);\n\n static ref GET_FIRST_ELEMENT: Selector = Selector::from([0x4E,0x9D,0xBE,0x0E]);\n\n static ref GET_POST_CONDITION: Selector = Selector::from([0xB6,0xCF,0xED,0xE5]);\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 22, "score": 33331.28082387999 }, { "content": "\n\n #[cfg_attr(feature = \"ink-generate-abi\", derive(type_metadata::Metadata))]\n\n #[derive(scale::Encode, scale::Decode)]\n\n pub enum Errors {\n\n EnviromentError,\n\n Other,\n\n }\n\n\n\n impl From<EnvError> for Errors {\n\n fn from(_: EnvError) -> Self {\n\n Errors::EnviromentError\n\n }\n\n }\n\n\n\n impl BpmnInterpreter {\n\n #[ink(constructor)]\n\n fn new(&mut self) {}\n\n\n\n /// Instantiation of Root-Process\n\n #[ink(message)]\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 23, "score": 33327.08827579976 }, { "content": " static ref GET_CHILD_PROC_INSTANCE: Selector = Selector::from([0x1D,0x2F,0xC3,0x66]);\n\n static ref SET_INSTANCE_COUNT: Selector = Selector::from([0x9B,0x70,0x40,0x9A]);\n\n // data & scripts\n\n static ref EXECUTE_SCRIPT: Selector = Selector::from([0xAC,0x52,0xC8,0xD3]);\n\n }\n\n\n\n #[ink(storage)]\n\n struct BpmnInterpreter {}\n\n\n\n #[ink(event)]\n\n struct MessageSent {\n\n #[ink(topic)]\n\n event_code: [u8; 32],\n\n }\n\n\n\n #[ink(event)]\n\n struct NewCaseCreated {\n\n #[ink(topic)]\n\n parent_case: AccountId,\n\n }\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 24, "score": 33326.36621326082 }, { "content": " &self,\n\n parent_case: AccountId,\n\n event_code: [u8; 32],\n\n event_info: u128,\n\n ) -> Result<(), Errors> {\n\n // This function only receive THROW EVENTS (throw event verification made in function executeElement)\n\n let mut parent_state: [u128; 2] = [0; 2];\n\n parent_state[0] =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_MARKING).fire()?;\n\n parent_state[1] =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_STARTED_ACTIVITIES).fire()?;\n\n match event_info {\n\n event_info if event_info & 4096 == 4096 => {\n\n // Message (BIT 15), to publish a Message in the Ethereum Event Log\n\n self.env().emit_event(MessageSent { event_code });\n\n }\n\n event_info if event_info & 5632 == 5632 => {\n\n // 9- End, 10- Default, 12- Message\n\n // If there are not tokens to consume nor started activities in any subprocess\n\n if 
parent_state[0] | parent_state[1] == 0 {\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 25, "score": 33325.79832029431 }, { "content": " while catch_case != AccountId::default() {\n\n parent_case = catch_case;\n\n catch_case =\n\n CallParams::<EnvTypes, AccountId>::eval(parent_case, *GET_PARENT)\n\n .fire()?;\n\n }\n\n self.broadcast_signal(parent_case)?;\n\n return Ok(());\n\n }\n\n\n\n let events =\n\n CallParams::<EnvTypes, Vec<u128>>::eval(child_flow, *GET_EVENT_LIST).fire()?;\n\n // The event can be catched only once, unless it is a signal where a broadcast must happen.\n\n // Precondition: Event-subprocess must appear before boundary events on the event list.\n\n for event in events {\n\n let ev_code =\n\n CallParams::<EnvTypes, [u8; 32]>::eval(child_flow, *GET_EVENT_CODE)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n if ev_code == event_code {\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 26, "score": 33325.35954000013 }, { "content": " .push_arg::<u128>(&(parent_state[1] | (1 << attached_to)))\n\n .fire()?;\n\n return Ok(());\n\n } else if catch_event_info & 256 == 256 && attached_to == sub_process_index\n\n {\n\n // Boundary (BIT 6) of the subproces propagating the event\n\n if catch_event_info & 16 == 16 {\n\n // Interrupting (BIT 4 must be 1, 0 if non-interrupting)\n\n self.kill_process(parent_case)?;\n\n }\n\n // The subprocess propagating the event must be interrupted\n\n let post_condition =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_POST_CONDITION)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n let first_ady_element = CallParams::<EnvTypes, Vec<u128>>::eval(\n\n child_flow,\n\n *GET_ADY_ELEMENTS,\n\n )\n\n .push_arg::<u128>(&event)\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 27, "score": 33324.34448249621 }, { "content": " // Verifiying there is a match with the throw-cath events.\n\n let catch_event_info =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_TYPE_INFO)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n let attached_to =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_ATTACHED_TO)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n if catch_event_info & 6 == 6 {\n\n // Start event-sub-process (BIT 6)\n\n if catch_event_info & 16 == 16 {\n\n // Interrupting (BIT 4 must be 1, 0 if non-interrupting)\n\n // Before starting the event subprocess, the parent is killed\n\n self.kill_process(catch_case)?;\n\n }\n\n // Starting event sub-process\n\n self.create_instance(attached_to, parent_case)?;\n\n // Marking the event-sub-process as started\n\n CallParams::<EnvTypes, ()>::invoke(catch_case, *SET_ACTIVITY_MARKING)\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 28, "score": 33324.16420772351 }, { "content": " if catch_event_info & 16 == 16 {\n\n // Interrupting (BIT 4 must be 1, 0 if non-interrupting)\n\n // Before starting the event subprocess, the current process-instance is killed\n\n self.kill_process(parent_case)?;\n\n }\n\n self.create_instance(attached_to, parent_case)?;\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_ACTIVITY_MARKING)\n\n .push_arg::<u128>(&(1 << attached_to))\n\n .fire()?;\n\n } else if catch_event_info & 256 == 256 {\n\n // Boundary (BIT 6) of the subproces propagating the event\n\n if catch_event_info & 16 == 16 {\n\n // Interrupting (BIT 4 must be 1, 0 if non-interrupting)\n\n // The subprocess propagating the event must be interrupted\n\n let child_proc_inst = 
CallParams::<EnvTypes, Vec<AccountId>>::eval(\n\n parent_case,\n\n *GET_CHILD_PROC_INSTANCE,\n\n )\n\n .push_arg::<u128>(&attached_to)\n\n .fire()?;\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 29, "score": 33323.99499878532 }, { "content": " self.kill_processes(child_proc_inst)?;\n\n }\n\n let marking =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_MARKING).fire()?;\n\n let post_condition =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_POST_CONDITION)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n // Update the marking with the output of the boundary event\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_MARKING)\n\n .push_arg::<u128>(&(marking & !post_condition))\n\n .fire()?;\n\n let first_ady_element =\n\n CallParams::<EnvTypes, Vec<u128>>::eval(child_flow, *GET_ADY_ELEMENTS)\n\n .push_arg::<u128>(&event)\n\n .fire()?[0];\n\n // Continue the execution of possible internal elements\n\n self.execute_elements(parent_case, first_ady_element)?;\n\n } else if event_info & 160 == 160 {\n\n // Start (not Event Subprocess) OR Intermediate Event\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 30, "score": 33323.819206728505 }, { "content": " .fire()?;\n\n let event_code =\n\n CallParams::<EnvTypes, [u8; 32]>::eval(child_flow, *GET_EVENT_CODE)\n\n .push_arg::<u128>(&element_index)\n\n .fire()?;\n\n self.throw_event(parent_case, event_code, type_info)?;\n\n let marking =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_MARKING).fire()?;\n\n let started_activities = CallParams::<EnvTypes, u128>::eval(\n\n parent_case,\n\n *GET_STARTED_ACTIVITIES,\n\n )\n\n .fire()?;\n\n if marking | started_activities == 0 {\n\n // By throwing the event, a kill was performed so the current instance was terminated\n\n return Ok(());\n\n }\n\n parent_state[0] = marking;\n\n parent_state[1] = started_activities;\n\n if type_info & 128 == 128 {\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 31, "score": 33323.70588131854 }, { "content": " }\n\n\n\n fn try_catch_event(\n\n &self,\n\n parent_case: AccountId,\n\n event_code: [u8; 32],\n\n event_info: u128,\n\n instance_completed: bool,\n\n ) -> Result<(), Errors> {\n\n let mut catch_case =\n\n CallParams::<EnvTypes, AccountId>::eval(parent_case, *GET_PARENT).fire()?;\n\n let mut parent_case = parent_case;\n\n if catch_case == AccountId::default() {\n\n // No Parent exist, root node\n\n if event_info & 8192 == 8192 {\n\n // Error event (BIT 13), only END EVENT from standard, in the root process.\n\n self.kill_process(parent_case)?;\n\n }\n\n return Ok(());\n\n }\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 32, "score": 33323.0581682878 }, { "content": " // parent_state[1] =\n\n // CallParams::<EnvTypes, u128>::eval(parent_case, *GET_STARTED_ACTIVITIES).fire()?;\n\n for event in events {\n\n let event_info = CallParams::<EnvTypes, u128>::eval(child_flow, *GET_TYPE_INFO)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n\n\n if event_info & 32780 == 32772 {\n\n // Event Catch Signal (BITs 2, 3 [0-catch, 1-throw], 15)\n\n let catch_event_info =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_TYPE_INFO)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n let attached_to =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_ATTACHED_TO)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n\n\n if catch_event_info & 6 == 6 {\n\n // Start event-sub-process (BIT 6)\n", "file_path": 
"smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 33, "score": 33322.95637220906 }, { "content": " let child_instances =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_INSTANCE_COUNT)\n\n .push_arg::<u128>(&element_index)\n\n .fire()?;\n\n for _ in 0..child_instances {\n\n self.create_instance(element_index, parent_case)?;\n\n }\n\n parent_state[1] |= 1 << element_index;\n\n }\n\n type_info\n\n if (type_info & 129 == 129\n\n || (type_info & 1 == 1\n\n && type_info & 48 != 0\n\n && type_info & 4096 == 0)) =>\n\n {\n\n // If (0- Activity, 7- Sequential Multi-Instance) ||\n\n // Sub-process(0- Activity, 5- Sub-process) or Call-Activity(0- Activity, 4- Call-Activity)\n\n // but NOT Event Sub-process(12- Event Subprocess)\n\n let instance = self.create_instance(element_index, parent_case)?;\n\n let instance_count =\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 34, "score": 33322.55243345276 }, { "content": " fn create_root_instance(&self, child_flow: AccountId) -> Result<(), Errors> {\n\n let factory =\n\n CallParams::<EnvTypes, AccountId>::eval(child_flow, *GET_FACTORY_INSTANCE)\n\n .fire()?;\n\n if factory == AccountId::default() {\n\n Err(Errors::Other)\n\n } else {\n\n let parent_case =\n\n CallParams::<EnvTypes, AccountId>::eval(factory, *NEW_INST).fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_PARENT)\n\n .push_arg::<AccountId>(&AccountId::default())\n\n .push_arg::<AccountId>(&child_flow)\n\n .push_arg::<u128>(&0)\n\n .fire()?;\n\n self.env().emit_event(NewCaseCreated { parent_case });\n\n self.execution_required(child_flow, parent_case)?;\n\n Ok(())\n\n }\n\n }\n\n\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 35, "score": 33321.91097822219 }, { "content": " let marking =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_MARKING).fire()?;\n\n let post_condition =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_POST_CONDITION)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n let pre_condition =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_PRE_CONDITION)\n\n .push_arg::<u128>(&event)\n\n .fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_MARKING)\n\n .push_arg::<u128>(&(marking & !pre_condition | post_condition))\n\n .fire()?;\n\n let first_ady_element =\n\n CallParams::<EnvTypes, Vec<u128>>::eval(child_flow, *GET_ADY_ELEMENTS)\n\n .push_arg::<u128>(&event)\n\n .fire()?[0];\n\n self.execute_elements(parent_case, first_ady_element)?;\n\n }\n\n }\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 36, "score": 33321.68833255239 }, { "content": " .fire()?[0];\n\n // Update the marking with the output of the boundary event\n\n CallParams::<EnvTypes, ()>::invoke(catch_case, *SET_MARKING)\n\n .push_arg::<u128>(&(parent_state[0] & !post_condition))\n\n .fire()?;\n\n // Continue the execution of possible internal elements\n\n self.execute_elements(catch_case, first_ady_element)?;\n\n return Ok(());\n\n }\n\n }\n\n }\n\n // If the event was not caught the propagation continues to the parent unless it's the root process\n\n self.throw_event(catch_case, event_code, event_info)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn kill_process(&self, parent_case: AccountId) -> Result<(), Errors> {\n\n let started_activities =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_STARTED_ACTIVITIES).fire()?;\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 37, "score": 33321.29400257494 }, { "content": " fn 
create_instance(\n\n &self,\n\n element_index: u128,\n\n parent_case: AccountId,\n\n ) -> Result<AccountId, Errors> {\n\n let parent_flow =\n\n CallParams::<EnvTypes, AccountId>::eval(parent_case, *GET_CHILD_FLOW_INSTANCE)\n\n .fire()?;\n\n let child_flow =\n\n CallParams::<EnvTypes, AccountId>::eval(parent_flow, *GET_SUB_PROC_INSTANCE)\n\n .push_arg::<u128>(&element_index)\n\n .fire()?;\n\n let factory =\n\n CallParams::<EnvTypes, AccountId>::eval(child_flow, *GET_FACTORY_INSTANCE)\n\n .fire()?;\n\n if factory == AccountId::default() {\n\n Err(Errors::Other)\n\n } else {\n\n let child_case =\n\n CallParams::<EnvTypes, AccountId>::eval(factory, *NEW_INST).fire()?;\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 38, "score": 33321.01543003725 }, { "content": " CallParams::<EnvTypes, u128>::eval(child_flow, *GET_FIRST_ELEMENT).fire()?;\n\n let post_condition =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_POST_CONDITION)\n\n .push_arg::<u128>(&first_element)\n\n .fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_MARKING)\n\n .push_arg::<u128>(&post_condition)\n\n .fire()?;\n\n let next: Vec<u128> =\n\n CallParams::<EnvTypes, Vec<u128>>::eval(child_flow, *GET_ADY_ELEMENTS)\n\n .push_arg::<u128>(&first_element)\n\n .fire()?;\n\n if next.len() != 0 {\n\n self.execute_elements(parent_case, next[0])\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n fn throw_event(\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 39, "score": 33320.60331345984 }, { "content": " };\n\n\n\n if run_inst_count == 0 {\n\n // Update the corresponding sub-process, call activity as completed\n\n CallParams::<EnvTypes, ()>::invoke(catch_case, *SET_ACTIVITY_MARKING)\n\n .push_arg(&(parent_state[1] & !(1 << 1 << sub_process_index)))\n\n .fire()?\n\n }\n\n\n\n let sub_process_info =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_INSTANCE_COUNT)\n\n .push_arg(&sub_process_index)\n\n .fire()?;\n\n\n\n if event_info & 7168 != 0 {\n\n // If receiving 10- Default, 11- Terminate or 12- Message\n\n if run_inst_count == 0 && sub_process_info & 4096 != 4096 {\n\n // No Instances of the sub-process propagating the event and The sub-process isn't an event-sub-process (BIT 12)\n\n let post_condition =\n\n CallParams::<EnvTypes, u128>::eval(child_flow, *GET_POST_CONDITION)\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 40, "score": 33320.446783556734 }, { "content": " self.kill_processes(child_proc_inst)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n fn kill_processes(&self, parent_cases: Vec<AccountId>) -> Result<(), Errors> {\n\n for parent_case in parent_cases {\n\n self.kill_process(parent_case)?\n\n }\n\n Ok(())\n\n }\n\n\n\n fn broadcast_signal(&self, parent_case: AccountId) -> Result<(), Errors> {\n\n let child_flow =\n\n CallParams::<EnvTypes, AccountId>::eval(parent_case, *GET_CHILD_FLOW_INSTANCE)\n\n .fire()?;\n\n let events =\n\n CallParams::<EnvTypes, Vec<u128>>::eval(child_flow, *GET_EVENT_LIST).fire()?;\n\n // let mut parent_state: [u128; 2] = [0; 2];\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 41, "score": 33320.43804704676 }, { "content": " CallParams::<EnvTypes, u128>::eval(parent_case, *EXECUTE_SCRIPT)\n\n .push_arg::<u128>(&element_index)\n\n .fire()?;\n\n }\n\n type_info\n\n if ((type_info & 9 == 9 && type_info & 27657 != 0)\n\n || type_info & 2 == 2) =>\n\n {\n\n // If (User(11), Service(13), Receive(14) or Default(10) Task || Gateways(1) not XOR/OR Split)\n\n // The 
execution of User/Service/Receive is triggered off-chain,\n\n // Thus the starting point would be the data contract which executes any script/data-update related to the task.\n\n parent_state[0] |= post_condition;\n\n }\n\n type_info if type_info & 12 == 12 => {\n\n // If (2- Event, 3- Throw(1))\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_MARKING)\n\n .push_arg::<u128>(&parent_state[0])\n\n .fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_ACTIVITY_MARKING)\n\n .push_arg::<u128>(&parent_state[1])\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 42, "score": 33319.948362161005 }, { "content": " // If Intermediate event (BIT 7)\n\n parent_state[0] |= post_condition;\n\n }\n\n }\n\n _ => (),\n\n }\n\n\n\n // Adding the possible candidates to be executed to the queue.\n\n // The enablement of the element is checked at the moment it gets out of the queue.\n\n for next_elem in next {\n\n queue[count] = next_elem;\n\n count = (count + 1) % 100;\n\n }\n\n }\n\n\n\n // Updating the state (storage) after the execution of each internal element.\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_MARKING)\n\n .push_arg::<u128>(&parent_state[0])\n\n .fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_ACTIVITY_MARKING)\n\n .push_arg::<u128>(&parent_state[1])\n\n .fire()?;\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 43, "score": 33319.62963688244 }, { "content": " .push_arg::<u128>(&sub_process_index)\n\n .fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(catch_case, *SET_MARKING)\n\n .push_arg(&(parent_state[0] & !post_condition))\n\n .fire()?;\n\n let first_ady_element =\n\n CallParams::<EnvTypes, Vec<u128>>::eval(child_flow, *GET_ADY_ELEMENTS)\n\n .push_arg::<u128>(&sub_process_info)\n\n .fire()?[0];\n\n self.execute_elements(catch_case, first_ady_element)?;\n\n } else if sub_process_info & 128 == 128 {\n\n // Multi-Instance Sequential (BIT 7), with pending instances to be started.\n\n self.create_instance(sub_process_index, parent_case)?;\n\n }\n\n } else {\n\n // Signal, Error or Escalation\n\n\n\n // Signals are only handled from the Root-Process by Broadcast, thus the propagation must reach the Root-Process.\n\n if event_info & 32768 == 32768 {\n\n // Propagating the Signal to the Root-Process\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 44, "score": 33319.547009017435 }, { "content": " // Sub-process ended, thus continue execution on parent\n\n self.try_catch_event(parent_case, event_code, event_info, true)?;\n\n }\n\n }\n\n event_info => {\n\n if event_info & 2048 == 2048 {\n\n // Terminate Event (BIT 11), only END EVENT from standard,\n\n // Terminate the execution in the current Sub-process and each children\n\n self.kill_process(parent_case)?;\n\n }\n\n // Continue the execution on parent\n\n self.try_catch_event(\n\n parent_case,\n\n event_code,\n\n event_info,\n\n parent_state[0] | parent_state[1] == 0,\n\n )?;\n\n }\n\n }\n\n Ok(())\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 45, "score": 33319.336359819186 }, { "content": " Ok(())\n\n }\n\n\n\n fn broadcast_signals(&self, parent_cases: Vec<AccountId>) -> Result<(), Errors> {\n\n for parent_case in parent_cases {\n\n self.broadcast_signal(parent_case)?\n\n }\n\n Ok(())\n\n }\n\n\n\n #[ink(message)]\n\n fn execute_elements(\n\n &self,\n\n parent_case: AccountId,\n\n element_index: u128,\n\n ) -> Result<(), Errors> {\n\n let mut 
element_index = element_index;\n\n let child_flow =\n\n CallParams::<EnvTypes, AccountId>::eval(parent_case, *GET_CHILD_FLOW_INSTANCE)\n\n .fire()?;\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 46, "score": 33319.26104812712 }, { "content": "\n\n // 0- tokensOnEdges\n\n // 1- startedActivities\n\n let mut parent_state: [u128; 2] = [0; 2];\n\n parent_state[0] =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_MARKING).fire()?;\n\n parent_state[1] =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_STARTED_ACTIVITIES).fire()?;\n\n\n\n // Execution queue and pointers to the first & last element (i.e. basic circular queue implementation)\n\n let mut queue: [u128; 100] = [0; 100];\n\n let mut i: usize = 0;\n\n let mut count: usize = 0;\n\n queue[count] = element_index;\n\n count += 1;\n\n while i < count {\n\n element_index = queue[i];\n\n i += 1;\n\n let ([pre_condition, post_condition, type_info], next) =\n\n CallParams::<EnvTypes, ([u128; 3], Vec<u128>)>::eval(\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 47, "score": 33318.69976078627 }, { "content": " CallParams::<EnvTypes, u128>::eval(child_flow, *GET_INSTANCE_COUNT)\n\n .push_arg::<u128>(&element_index)\n\n .fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(instance, *SET_INSTANCE_COUNT)\n\n .push_arg::<u128>(&element_index)\n\n .push_arg::<u128>(&instance_count)\n\n .fire()?;\n\n parent_state[1] |= 1 << element_index;\n\n }\n\n type_info\n\n if (type_info & 4105 == 4105\n\n || (type_info & 10 == 2 && type_info & 80 != 0)) =>\n\n {\n\n // (0- Activity, 3- Task, 12- Script) ||\n\n // Exclusive(XOR) Split (1- Gateway, 3- Split(0), 4- Exclusive) ||\n\n // Inclusive(OR) Split (1- Gateway, 3- Split(0), 6- Inclusive)\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *EXECUTE_SCRIPT)\n\n .push_arg::<u128>(&element_index)\n\n .fire()?;\n\n parent_state[0] |=\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 48, "score": 33318.69070410072 }, { "content": " let child_flow =\n\n CallParams::<EnvTypes, AccountId>::eval(catch_case, *GET_CHILD_FLOW_INSTANCE)\n\n .fire()?;\n\n\n\n let mut parent_state: [u128; 2] = [0; 2];\n\n parent_state[0] =\n\n CallParams::<EnvTypes, u128>::eval(catch_case, *GET_MARKING).fire()?;\n\n parent_state[1] =\n\n CallParams::<EnvTypes, u128>::eval(catch_case, *GET_STARTED_ACTIVITIES).fire()?;\n\n\n\n let sub_process_index =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_INDEX_IN_PARENT).fire()?;\n\n let run_inst_count = if instance_completed {\n\n CallParams::<EnvTypes, u128>::eval(catch_case, *DECREASE_INSTANCE_COUNT)\n\n .push_arg(&sub_process_index)\n\n .fire()?\n\n } else {\n\n CallParams::<EnvTypes, u128>::eval(catch_case, *GET_INSTANCE_COUNT)\n\n .push_arg(&sub_process_index)\n\n .fire()?\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 49, "score": 33318.52668688909 }, { "content": " CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_MARKING)\n\n .push_arg::<u128>(&0)\n\n .fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_ACTIVITY_MARKING)\n\n .push_arg::<u128>(&0)\n\n .fire()?;\n\n let child_flow_instance =\n\n CallParams::<EnvTypes, AccountId>::eval(parent_case, *GET_CHILD_FLOW_INSTANCE)\n\n .fire()?;\n\n let children =\n\n CallParams::<EnvTypes, Vec<u128>>::eval(child_flow_instance, *GET_SUB_PROC_LIST)\n\n .fire()?;\n\n for child in children {\n\n if started_activities & (1 << child) != 0 {\n\n let child_proc_inst = CallParams::<EnvTypes, 
Vec<AccountId>>::eval(\n\n parent_case,\n\n *GET_CHILD_PROC_INSTANCE,\n\n )\n\n .push_arg::<u128>(&child)\n\n .fire()?;\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 50, "score": 33318.20867655239 }, { "content": " }\n\n let child_flow_instance =\n\n CallParams::<EnvTypes, AccountId>::eval(parent_case, *GET_CHILD_FLOW_INSTANCE)\n\n .fire()?;\n\n let children =\n\n CallParams::<EnvTypes, Vec<u128>>::eval(child_flow_instance, *GET_SUB_PROC_LIST)\n\n .fire()?;\n\n let started_activities =\n\n CallParams::<EnvTypes, u128>::eval(parent_case, *GET_STARTED_ACTIVITIES).fire()?;\n\n for child in children {\n\n if started_activities & (1 << child) != 0 {\n\n let child_proc_inst = CallParams::<EnvTypes, Vec<AccountId>>::eval(\n\n parent_case,\n\n *GET_CHILD_PROC_INSTANCE,\n\n )\n\n .push_arg::<u128>(&child)\n\n .fire()?;\n\n self.broadcast_signals(child_proc_inst)?;\n\n }\n\n }\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 51, "score": 33317.984091241175 }, { "content": " CallParams::<EnvTypes, ()>::invoke(parent_case, *SET_PARENT)\n\n .push_arg::<AccountId>(&parent_case)\n\n .push_arg::<AccountId>(&child_flow)\n\n .push_arg::<u128>(&element_index)\n\n .fire()?;\n\n CallParams::<EnvTypes, ()>::invoke(parent_case, *ADD_CHILD)\n\n .push_arg::<u128>(&element_index)\n\n .push_arg::<AccountId>(&child_case)\n\n .fire()?;\n\n self.execution_required(child_flow, child_case)?;\n\n Ok(child_case)\n\n }\n\n }\n\n\n\n fn execution_required(\n\n &self,\n\n child_flow: AccountId,\n\n parent_case: AccountId,\n\n ) -> Result<(), Errors> {\n\n let first_element =\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 52, "score": 33317.61776037692 }, { "content": " if (type_info & 1 == 1\n\n || (type_info & 4 == 4 && type_info & 640 != 0)\n\n || type_info & 2 == 2) =>\n\n {\n\n // If (Activity || Intermediate/End Event || Gateway != AND/OR Join)\n\n if parent_state[0] & pre_condition == 0 {\n\n continue;\n\n }\n\n // Removing tokens from input arcs\n\n parent_state[0] &= !pre_condition;\n\n }\n\n _ => {\n\n continue;\n\n }\n\n }\n\n\n\n // Executing current element (If enabled)\n\n match type_info {\n\n type_info if type_info & 65 == 65 => {\n\n // (0- Activity, 6- Parallel Multi-Instance)\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 53, "score": 33317.46806781683 }, { "content": " child_flow,\n\n *GET_ELEMENT_INFO,\n\n )\n\n .push_arg::<u128>(&element_index)\n\n .fire()?;\n\n\n\n // Verifying Preconditions (i.e. 
Is the element enabled?)\n\n match type_info {\n\n type_info if type_info & 42 == 42 => {\n\n // else if (AND Join)\n\n if parent_state[0] & pre_condition != pre_condition {\n\n continue;\n\n }\n\n parent_state[0] &= !pre_condition;\n\n }\n\n type_info if type_info & 74 == 74 => {\n\n // else if (OR Join)\n\n ///// OR Join Implementation //////\n\n }\n\n type_info\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 54, "score": 33317.38496594948 }, { "content": " static ref GET_ADY_ELEMENTS: Selector = Selector::from([0xE5,0x71,0xE7,0x01]);\n\n static ref GET_INSTANCE_COUNT: Selector = Selector::from([0xCB,0x60,0x17,0xBC]);\n\n static ref GET_TYPE_INFO: Selector = Selector::from([0x64,0x2F,0x2F,0xF9]);\n\n static ref GET_ELEMENT_INFO: Selector = Selector::from([0xED,0x21,0x9E,0x57]);\n\n static ref GET_ATTACHED_TO: Selector = Selector::from([0xD7,0x96,0xA9,0x47]);\n\n static ref GET_EVENT_LIST: Selector = Selector::from([0x39,0x1B,0x79,0x46]);\n\n static ref GET_PRE_CONDITION: Selector = Selector::from([0x6B,0xE9,0x5D,0x46]);\n\n static ref GET_EVENT_CODE: Selector = Selector::from([0x9E,0x8F,0x22,0xC3]);\n\n static ref GET_SUB_PROC_LIST: Selector = Selector::from([0xEF,0xA0,0x8B,0x35]);\n\n // idata\n\n static ref SET_PARENT: Selector = Selector::from([0x09, 0x86, 0x1D,0xD5]);\n\n static ref ADD_CHILD: Selector = Selector::from([0x77,0x01,0x66,0x39]);\n\n static ref GET_CHILD_FLOW_INSTANCE: Selector = Selector::from([0xB7, 0x69, 0xE4, 0x9D]);\n\n static ref GET_MARKING: Selector = Selector::from([0xE1,0x7D,0x66,0x77]);\n\n static ref GET_STARTED_ACTIVITIES: Selector = Selector::from([0xDC,0x98,0x88,0x81]);\n\n static ref GET_PARENT: Selector = Selector::from([0xC8,0x79,0x9A,0x47]);\n\n static ref GET_INDEX_IN_PARENT: Selector = Selector::from([0x8F,0x31,0x80,0xBB]);\n\n static ref DECREASE_INSTANCE_COUNT: Selector = Selector::from([0x2B,0x87,0x57,0x63]);\n\n static ref SET_ACTIVITY_MARKING: Selector = Selector::from([0x91,0x0A,0x14,0xC2]);\n\n static ref SET_MARKING: Selector = Selector::from([0x5A,0x1D,0x86,0x60]);\n", "file_path": "smart_contracts/interpreter/bpmn_interpreter/lib.rs", "rank": 55, "score": 33316.50790477964 }, { "content": "/// Helper function to generate an authority key for Aura\n\npub fn authority_keys_from_seed(s: &str) -> (AuraId, GrandpaId) {\n\n (get_from_seed::<AuraId>(s), get_from_seed::<GrandpaId>(s))\n\n}\n\n\n", "file_path": "liqum-node/node/src/chain_spec.rs", "rank": 56, "score": 30016.139917937486 }, { "content": " trait Store for Module<T: Trait> as BpmnInterpreter {\n\n pub IflowById get(fn iflow_by_id): map hasher(blake2_128_concat) T::InstanceId => Iflow<T>;\n\n\n\n pub IdataById get(fn idata_by_id): map hasher(blake2_128_concat) T::InstanceId => Idata<T>;\n\n\n\n InstanceIdCount get(fn instance_id_count): T::InstanceId;\n\n }\n\n}\n\n\n\n// The pallet's dispatchable functions.\n\ndecl_module! 
{\n\n pub struct Module<T: Trait> for enum Call where origin: T::Origin {\n\n\n\n // Initializing events\n\n fn deposit_event() = default;\n\n\n\n #[weight = 10_000]\n\n pub fn set_element(\n\n origin,\n\n iflow_index: T::InstanceId,\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 57, "score": 28965.130293999897 }, { "content": "use crate::{AccountId, AccountIndex, Balance, NodeRuntimeTypes};\n\n\n\n/// Default runtime Call type, a subset of the runtime Call module variants\n\n///\n\n/// The codec indices of the modules *MUST* match those in the concrete runtime.\n\n#[derive(Encode, Decode)]\n\n#[cfg_attr(feature = \"std\", derive(Clone, PartialEq, Eq))]\n\npub enum Call {\n\n #[codec(index = \"6\")]\n\n BpmnInterpreter(BpmnInterpreter),\n\n}\n\n\n\nimpl From<BpmnInterpreter> for Call {\n\n fn from(bpmn_interpreter_call: BpmnInterpreter) -> Call {\n\n Call::BpmnInterpreter(bpmn_interpreter_call)\n\n }\n\n}\n\n/// Generic Balance Call, could be used with other runtimes\n\n#[derive(Encode, Decode, Clone, PartialEq, Eq)]\n\npub enum BpmnInterpreter\n\n{\n\n #[allow(non_camel_case_types)]\n\n continue_execution(u64, u128),\n\n}\n\n\n\n/// Construct a `BpmnInterpreter::continue_execution` call\n", "file_path": "liqum-node/ink-types-node-runtime/src/calls.rs", "rank": 58, "score": 27855.656323180832 }, { "content": "impl AsRef<[u8]> for Hash {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.0[..]\n\n }\n\n}\n\n\n\nimpl AsMut<[u8]> for Hash {\n\n fn as_mut(&mut self) -> &mut [u8] {\n\n &mut self.0[..]\n\n }\n\n}\n\n\n\nimpl Clear for Hash {\n\n fn is_clear(&self) -> bool {\n\n self.as_ref().iter().all(|&byte| byte == 0x00)\n\n }\n\n\n\n fn clear() -> Self {\n\n Self([0x00; 32])\n\n }\n", "file_path": "liqum-node/ink-types-node-runtime/src/lib.rs", "rank": 59, "score": 27855.624303435798 }, { "content": "\n\nuse core::{array::TryFromSliceError, convert::TryFrom};\n\nuse ink_core::env::Clear;\n\nuse scale::{Decode, Encode};\n\npub use sp_core::crypto::AccountId32;\n\n#[cfg(feature = \"ink-generate-abi\")]\n\nuse type_metadata::{HasTypeId, HasTypeDef, Metadata, MetaType, TypeId, TypeDef, TypeIdArray};\n\n\n\npub mod calls;\n\n\n\n/// Contract environment types defined in substrate node-runtime\n\n#[cfg_attr(feature = \"ink-generate-abi\", derive(Metadata))]\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum NodeRuntimeTypes {}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Encode, Decode)]\n\npub struct AccountId (AccountId32);\n\n\n\nimpl From<AccountId32> for AccountId {\n\n fn from(account: AccountId32) -> Self {\n", "file_path": "liqum-node/ink-types-node-runtime/src/lib.rs", "rank": 60, "score": 27855.22851580852 }, { "content": " AccountId(account)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"ink-generate-abi\")]\n\nimpl HasTypeId for AccountId {\n\n fn type_id() -> TypeId {\n\n TypeIdArray::new(32, MetaType::new::<u8>()).into()\n\n }\n\n}\n\n\n\n#[cfg(feature = \"ink-generate-abi\")]\n\nimpl HasTypeDef for AccountId {\n\n fn type_def() -> TypeDef {\n\n TypeDef::builtin()\n\n }\n\n}\n\n\n\n/// The default SRML balance type.\n\npub type Balance = u128;\n", "file_path": "liqum-node/ink-types-node-runtime/src/lib.rs", "rank": 61, "score": 27854.76787695334 }, { "content": "// Copyright 2018-2019 Parity Technologies (UK) Ltd.\n\n// This file is part of ink!.\n\n//\n\n// ink! 
is free software: you can redistribute it and/or modify\n\n// it under the terms of the GNU General Public License as published by\n\n// the Free Software Foundation, either version 3 of the License, or\n\n// (at your option) any later version.\n\n//\n\n// ink! is distributed in the hope that it will be useful,\n\n// but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\n// GNU General Public License for more details.\n\n//\n\n// You should have received a copy of the GNU General Public License\n\n// along with ink!. If not, see <http://www.gnu.org/licenses/>.\n\n\n\n//! Definitions for environment types for contracts targeted at a\n\n//! substrate chain with the default `node-runtime` configuration.\n\n\n\n#![cfg_attr(not(feature = \"std\"), no_std)]\n", "file_path": "liqum-node/ink-types-node-runtime/src/lib.rs", "rank": 62, "score": 27851.839379812252 }, { "content": "\n\n/// The default SRML hash type.\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Encode, Decode)]\n\npub struct Hash([u8; 32]);\n\n\n\nimpl From<[u8; 32]> for Hash {\n\n fn from(hash: [u8; 32]) -> Hash {\n\n Hash(hash)\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<&'a [u8]> for Hash {\n\n type Error = TryFromSliceError;\n\n\n\n fn try_from(bytes: &'a [u8]) -> Result<Hash, TryFromSliceError> {\n\n let hash = <[u8; 32]>::try_from(bytes)?;\n\n Ok(Hash(hash))\n\n }\n\n}\n\n\n", "file_path": "liqum-node/ink-types-node-runtime/src/lib.rs", "rank": 63, "score": 27851.27420298841 }, { "content": "// Copyright 2019 Parity Technologies (UK) Ltd.\n\n// This file is part of ink!.\n\n//\n\n// ink! is free software: you can redistribute it and/or modify\n\n// it under the terms of the GNU General Public License as published by\n\n// the Free Software Foundation, either version 3 of the License, or\n\n// (at your option) any later version.\n\n//\n\n// ink! is distributed in the hope that it will be useful,\n\n// but WITHOUT ANY WARRANTY; without even the implied warranty of\n\n// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n\n// GNU General Public License for more details.\n\n//\n\n// You should have received a copy of the GNU General Public License\n\n// along with ink!. 
If not, see <http://www.gnu.org/licenses/>.\n\n\n\nuse ink_core::env::EnvTypes;\n\nuse scale::{Codec, Decode, Encode};\n\nuse pallet_indices::address::Address;\n\nuse sp_runtime::traits::Member;\n", "file_path": "liqum-node/ink-types-node-runtime/src/calls.rs", "rank": 64, "score": 27851.13225378215 }, { "content": "}\n\n\n\n/// The default SRML moment type.\n\npub type Moment = u64;\n\n\n\n/// The default SRML blocknumber type.\n\npub type BlockNumber = u64;\n\n\n\n/// The default SRML AccountIndex type.\n\npub type AccountIndex = u32;\n\n\n\n/// The default timestamp type.\n\npub type Timestamp = u64;\n\n\n\nimpl ink_core::env::EnvTypes for NodeRuntimeTypes {\n\n type AccountId = AccountId;\n\n type Balance = Balance;\n\n type Hash = Hash;\n\n type Timestamp = Timestamp;\n\n type BlockNumber = BlockNumber;\n\n type Call = calls::Call;\n\n}\n\n\n", "file_path": "liqum-node/ink-types-node-runtime/src/lib.rs", "rank": 65, "score": 27848.238710361333 }, { "content": " calls::BpmnInterpreter::continue_execution(instance_id, element_index);\n\n let contract_call = Call::BpmnInterpreter(contract_continue_execution);\n\n\n\n // let srml_continue_execution = node_runtime::BpmnInterpreterCall::<Runtime>::continue_execution(instance_id, element_index);\n\n let srml_continue_execution = node_runtime::BpmnInterpreterCall::continue_execution(instance_id, element_index);\n\n let srml_call = node_runtime::Call::BpmnInterpreter(srml_continue_execution);\n\n\n\n let contract_call_encoded = contract_call.encode();\n\n let srml_call_encoded = srml_call.encode();\n\n\n\n //assert_eq!(srml_call_encoded, contract_call_encoded);\n\n\n\n let srml_call_decoded: node_runtime::Call =\n\n Decode::decode(&mut contract_call_encoded.as_slice())\n\n .expect(\"BpmnInterpreter continue_execution call decodes to srml type\");\n\n let srml_call_encoded = srml_call_decoded.encode();\n\n let contract_call_decoded: Call = Decode::decode(&mut contract_call_encoded.as_slice())\n\n .expect(\"BpmnInterpreter continue_execution call decodes back to contract type\");\n\n assert!(contract_call == contract_call_decoded);\n\n }\n\n}\n", "file_path": "liqum-node/ink-types-node-runtime/src/calls.rs", "rank": 66, "score": 27847.535317988633 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse ink_lang as ink;\n\n\n\n#[ink::contract(version = \"0.1.0\")]\n\nmod idata {\n\n use ink_core::env::call::*;\n\n use ink_core::env::EnvError;\n\n #[cfg(not(feature = \"ink-as-dependency\"))]\n\n use ink_core::storage;\n\n use ink_prelude::vec::Vec;\n\n //iflow\n\n const GET_INTERPRETER: [u8; 4] = [0x54, 0xBC, 0xAE, 0x12];\n\n //interpreter\n\n const EXECUTE_ELEMENTS: [u8; 4] = [0xB8, 0x66, 0x1E, 0xE4];\n\n\n\n #[cfg_attr(feature = \"ink-generate-abi\", derive(type_metadata::Metadata))]\n\n #[derive(scale::Encode, scale::Decode)]\n\n pub enum Errors {\n\n EnviromentError,\n", "file_path": "smart_contracts/interpreter/idata/lib.rs", "rank": 67, "score": 26955.555476310466 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse ink_lang as ink;\n\n\n\n#[ink::contract(version = \"0.1.0\")]\n\nmod ifactory {\n\n use ink_core::env::call::*;\n\n use ink_core::storage;\n\n\n\n const CONSTRUCTOR: [u8; 4] = [0x5E, 0xBD, 0x88, 0xD6];\n\n\n\n #[ink(storage)]\n\n struct Ifactory {\n\n idata_hash: storage::Value<Hash>,\n\n data_hash: storage::Value<Hash>,\n\n }\n\n\n\n struct NewIdata {\n\n instance: AccountId,\n\n }\n", "file_path": "smart_contracts/interpreter/ifactory/lib.rs", "rank": 68, "score": 26952.868282962212 }, { 
"content": " Other,\n\n }\n\n\n\n impl From<EnvError> for Errors {\n\n fn from(_: EnvError) -> Self {\n\n Errors::EnviromentError\n\n }\n\n }\n\n\n\n #[ink(storage)]\n\n struct Idata {\n\n tokens_on_edges: storage::Value<u128>,\n\n started_activities: storage::Value<u128>,\n\n idata_parent: storage::Value<AccountId>,\n\n iflow_node: storage::Value<AccountId>,\n\n index_in_parent: storage::Value<u128>,\n\n children: storage::HashMap<u128, Vec<AccountId>>,\n\n instance_count: storage::HashMap<u128, u128>,\n\n }\n\n\n", "file_path": "smart_contracts/interpreter/idata/lib.rs", "rank": 69, "score": 26947.49687821158 }, { "content": " }\n\n\n\n #[ink(message)]\n\n fn get_parent(&self) -> AccountId {\n\n *self.idata_parent\n\n }\n\n\n\n #[ink(message)]\n\n fn continue_execution(&self, element_index: u128) -> Result<(), Errors> {\n\n let get_interpreter_selector = Selector::from(GET_INTERPRETER);\n\n let execute_elements_selector = Selector::from(EXECUTE_ELEMENTS);\n\n let interpreter = CallParams::<EnvTypes, AccountId>::eval(\n\n self.get_child_flow_instance(),\n\n get_interpreter_selector,\n\n )\n\n .fire()?;\n\n CallParams::<EnvTypes, Result<(), Errors>>::eval(\n\n self.get_child_flow_instance(),\n\n execute_elements_selector,\n\n )\n\n .push_arg::<AccountId>(&self.env().caller())\n\n .push_arg::<u128>(&element_index)\n\n .fire()?\n\n }\n\n }\n\n}\n", "file_path": "smart_contracts/interpreter/idata/lib.rs", "rank": 70, "score": 26946.163503192307 }, { "content": " impl Idata {\n\n /// Initializes the value to the initial value.\n\n #[ink(constructor)]\n\n fn new(&mut self) {\n\n self.tokens_on_edges.set(0);\n\n self.started_activities.set(0);\n\n self.idata_parent.set(AccountId::default());\n\n self.iflow_node.set(AccountId::default());\n\n self.index_in_parent.set(0);\n\n }\n\n\n\n #[ink(message)]\n\n fn set_activity_marking(&mut self, n_marking: u128) {\n\n self.started_activities.set(n_marking);\n\n }\n\n\n\n #[ink(message)]\n\n fn set_marking(&mut self, n_marking: u128) {\n\n self.tokens_on_edges.set(n_marking);\n\n }\n", "file_path": "smart_contracts/interpreter/idata/lib.rs", "rank": 71, "score": 26945.655340289177 }, { "content": "\n\n impl NewIdata {\n\n fn get_instance(&self) -> AccountId {\n\n self.instance\n\n }\n\n }\n\n\n\n impl Default for NewIdata {\n\n fn default() -> Self {\n\n Self {\n\n instance: AccountId::default(),\n\n }\n\n }\n\n }\n\n\n\n impl FromAccountId<EnvTypes> for NewIdata {\n\n fn from_account_id(new_instance: AccountId) -> Self {\n\n Self {\n\n instance: new_instance,\n\n }\n", "file_path": "smart_contracts/interpreter/ifactory/lib.rs", "rank": 72, "score": 26945.00609935433 }, { "content": " }\n\n\n\n #[ink(message)]\n\n fn get_started_activities(&self) -> u128 {\n\n *self.started_activities\n\n }\n\n\n\n #[ink(message)]\n\n fn get_instance_count(&self, element_index: u128) -> u128 {\n\n *self.instance_count.get(&element_index).unwrap_or(&0)\n\n }\n\n\n\n #[ink(message)]\n\n fn decrease_instance_count(&mut self, element_index: u128) -> u128 {\n\n self.instance_count\n\n .mutate_with(&element_index, |count| *count -= 1);\n\n self.get_instance_count(element_index)\n\n }\n\n\n\n #[ink(message)]\n", "file_path": "smart_contracts/interpreter/idata/lib.rs", "rank": 73, "score": 26944.833916067895 }, { "content": " fn set_instance_count(&mut self, element_index: u128, instance_count: u128) {\n\n self.instance_count.insert(element_index, instance_count);\n\n }\n\n\n\n #[ink(message)]\n\n fn get_index_in_parent(&self) -> u128 {\n\n *self.index_in_parent\n\n }\n\n\n\n 
#[ink(message)]\n\n fn get_child_process_instance(&self, element_index: u128) -> Vec<AccountId> {\n\n self.children\n\n .get(&element_index)\n\n .unwrap_or(&Vec::default())\n\n .clone()\n\n }\n\n\n\n #[ink(message)]\n\n fn get_child_flow_instance(&self) -> AccountId {\n\n *self.iflow_node\n", "file_path": "smart_contracts/interpreter/idata/lib.rs", "rank": 74, "score": 26944.148306466694 }, { "content": "\n\n #[ink(message)]\n\n fn set_parent(&mut self, parent: AccountId, child_flow: AccountId, element_index: u128) {\n\n self.index_in_parent.set(element_index);\n\n self.idata_parent.set(parent);\n\n self.iflow_node.set(child_flow);\n\n }\n\n\n\n #[ink(message)]\n\n fn add_child(&mut self, element_index: u128, child: AccountId) {\n\n self.children\n\n .mutate_with(&element_index, |children| children.push(child));\n\n self.instance_count\n\n .mutate_with(&element_index, |count| *count += 1);\n\n }\n\n\n\n /// Returns the current state.\n\n #[ink(message)]\n\n fn get_marking(&self) -> u128 {\n\n *self.tokens_on_edges\n", "file_path": "smart_contracts/interpreter/idata/lib.rs", "rank": 75, "score": 26943.587337024655 }, { "content": " }\n\n }\n\n\n\n impl Ifactory {\n\n #[ink(constructor)]\n\n fn new(&mut self, idata_hash: Hash, data_hash: Hash) {\n\n self.idata_hash.set(idata_hash);\n\n self.data_hash.set(data_hash);\n\n }\n\n\n\n #[ink(message)]\n\n fn change_idata_hash(&mut self, idata_new_hash: Hash) {\n\n self.idata_hash.set(idata_new_hash);\n\n }\n\n\n\n #[ink(message)]\n\n fn change_data_hash(&mut self, data_new_hash: Hash) {\n\n self.data_hash.set(data_new_hash);\n\n }\n\n\n", "file_path": "smart_contracts/interpreter/ifactory/lib.rs", "rank": 76, "score": 26942.788135886698 }, { "content": " #[ink(message)]\n\n fn get_idata_hash(&self) -> Hash {\n\n *self.idata_hash\n\n }\n\n\n\n #[ink(message)]\n\n fn get_data_hash(&self) -> Hash {\n\n *self.data_hash\n\n }\n\n\n\n #[ink(message)]\n\n fn new_instance(&self) -> AccountId {\n\n let total_balance = self.env().balance();\n\n let selector = Selector::from(CONSTRUCTOR);\n\n InstantiateParams::<EnvTypes, NewIdata>::build(selector)\n\n .endowment(total_balance / 5)\n\n .using_code(*self.data_hash)\n\n .push_arg::<Hash>(&*self.idata_hash)\n\n .instantiate()\n\n .unwrap_or(NewIdata::default())\n\n .get_instance()\n\n }\n\n }\n\n}\n", "file_path": "smart_contracts/interpreter/ifactory/lib.rs", "rank": 77, "score": 26942.71452457932 }, { "content": "/// Helper function to generate a crypto pair from seed\n\npub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {\n\n TPublic::Pair::from_string(&format!(\"//{}\", seed), None)\n\n .expect(\"static values are valid; qed\")\n\n .public()\n\n}\n\n\n", "file_path": "liqum-node/node/src/chain_spec.rs", "rank": 78, "score": 26215.335047983575 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse ink_lang as ink;\n\n\n\n#[ink::contract(version = \"0.1.0\", env = NodeRuntimeTypes)]\n\nmod invoice_managing_data {\n\n use ink_core::env;\n\n use ink_prelude::*;\n\n use ink_types_node_runtime::{calls as runtime_calls, NodeRuntimeTypes};\n\n use ink_core::storage;\n\n\n\n #[cfg_attr(feature = \"ink-generate-abi\", derive(type_metadata::Metadata))]\n\n #[derive(scale::Encode, scale::Decode)]\n\n pub enum Errors {\n\n CheckInError,\n\n CheckOutError,\n\n }\n\n\n\n #[ink(storage)]\n\n struct InvoiceManagingData {\n", "file_path": "liqum-node/ink-types-node-runtime/examples/invoice_managing_data/lib.rs", "rank": 79, "score": 25458.263137491893 }, { "content": " 
self.continue_execution(*self.idata_instance_id.get(), element_index);\n\n Ok(())\n\n } else {\n\n Err(Errors::CheckInError)\n\n }\n\n }\n\n\n\n /// Dispatches a `continue_execution` call to the BpmnInterpreter srml module\n\n #[ink(message)]\n\n fn continue_execution(&self, instance_id: u64, element_index: u128) {\n\n // create the BpmnInterpreter::continue_execution Call\n\n let continue_execution_call = runtime_calls::continue_execution(instance_id, element_index);\n\n // dispatch the call to the runtime\n\n let result = self.env().invoke_runtime(&continue_execution_call);\n\n\n\n // report result to console\n\n // NOTE: println should only be used on a development chain)\n\n env::println(&format!(\"continue_execution invoke_runtime result {:?}\", result));\n\n }\n\n }\n", "file_path": "liqum-node/ink-types-node-runtime/examples/invoice_managing_data/lib.rs", "rank": 80, "score": 25453.454478551703 }, { "content": " /// Stores a single `bool` value on the storage.\n\n accepted: storage::Value<bool>,\n\n /// Strores respective pallet idata instance id\n\n idata_instance_id: storage::Value<u64>\n\n }\n\n\n\n impl InvoiceManagingData {\n\n #[ink(constructor)]\n\n fn new(&mut self, idata_instance_id: u64) {\n\n self.accepted.set(false);\n\n self.idata_instance_id.set(idata_instance_id);\n\n }\n\n\n\n #[ink(message)]\n\n fn execute_script(&mut self, element_index: u128) -> u128 {\n\n if element_index == 5 {\n\n if *self.accepted {\n\n return 32;\n\n } else {\n\n return 64;\n", "file_path": "liqum-node/ink-types-node-runtime/examples/invoice_managing_data/lib.rs", "rank": 81, "score": 25453.183403550698 }, { "content": " }\n\n } else {\n\n return 0;\n\n }\n\n }\n\n\n\n #[ink(message)]\n\n fn check_in1(&self, element_index: u128) -> Result<(), Errors> {\n\n if 132 & (1 << element_index) != 0 {\n\n self.continue_execution(*self.idata_instance_id.get(), element_index);\n\n Ok(())\n\n } else {\n\n Err(Errors::CheckInError)\n\n }\n\n }\n\n\n\n #[ink(message)]\n\n fn check_in2(&mut self, element_index: u128, i1: bool) -> Result<(), Errors> {\n\n if 8 & (1 << element_index) != 0 {\n\n self.accepted.set(i1);\n", "file_path": "liqum-node/ink-types-node-runtime/examples/invoice_managing_data/lib.rs", "rank": 82, "score": 25451.515684994592 }, { "content": "\n\n #[cfg(test)]\n\n mod tests {\n\n use super::*;\n\n use sp_keyring::AccountKeyring;\n\n\n\n #[test]\n\n fn dispatches_continue_execution_call() {\n\n let calls = InvoiceManagingData::new(0);\n\n //assert_eq!(calls.env().dispatched_calls().into_iter().count(), 0);\n\n calls.continue_execution(0, 0);\n\n //assert_eq!(calls.env().dispatched_calls().into_iter().count(), 1);\n\n }\n\n }\n\n}\n", "file_path": "liqum-node/ink-types-node-runtime/examples/invoice_managing_data/lib.rs", "rank": 83, "score": 25451.009704360164 }, { "content": " start_evt: u128,\n\n /// elemIndex => [preC, postC, type]\n\n cond_table: BTreeMap<u128, [u128; 3]>,\n\n /// Element Index => List of elements that can be enabled with the completion of the key element\n\n next_elem: BTreeMap<u128, Vec<u128>>,\n\n /// List of Indexes of the subprocesses\n\n subprocesses: Vec<u128>,\n\n /// List of Event Indexes defined in the current Subprocess\n\n events: Vec<u128>,\n\n /// Event Index => Index of the element where event is attachedTo\n\n attached_to: BTreeMap<u128, u128>,\n\n\n\n event_code: BTreeMap<u128, [u8; 32]>,\n\n parent_references: BTreeMap<u128, T::InstanceId>,\n\n instance_count: BTreeMap<u128, u128>,\n\n factory: Ifactory<T>,\n\n}\n\n\n\nimpl<T: Trait> Default for 
Iflow<T> {\n\n fn default() -> Self {\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 84, "score": 25208.267207438683 }, { "content": " Self {\n\n start_evt: 0,\n\n cond_table: BTreeMap::new(),\n\n next_elem: BTreeMap::new(),\n\n subprocesses: vec![],\n\n events: vec![],\n\n attached_to: BTreeMap::new(),\n\n event_code: BTreeMap::new(),\n\n parent_references: BTreeMap::new(),\n\n instance_count: BTreeMap::new(),\n\n factory: Ifactory::<T>::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Trait> Iflow<T> {\n\n fn get_pre_condition(&self, element_index: u128) -> u128 {\n\n if let Some(cond_table) = self.cond_table.get(&element_index) {\n\n cond_table[0]\n\n } else {\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 85, "score": 25204.73354466741 }, { "content": " self.parent_references[&element_index]\n\n }\n\n\n\n fn get_sub_process_list(&self) -> &[u128] {\n\n &self.subprocesses\n\n }\n\n\n\n fn get_event_code(&self, element_index: u128) -> [u8; 32] {\n\n self.event_code[&element_index]\n\n }\n\n\n\n fn get_event_list(&self) -> &[u128] {\n\n &self.events\n\n }\n\n\n\n fn get_instance_count(&self, element_index: u128) -> u128 {\n\n self.instance_count[&element_index]\n\n }\n\n\n\n fn get_factory_instance_mut(&mut self) -> &mut Ifactory<T> {\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 86, "score": 25204.180880342796 }, { "content": " fn ensure_subprocess_to_link_in_data_structure(\n\n iflow: &Iflow<T>,\n\n parent_index: u128,\n\n ) -> DispatchResult {\n\n //BITs (0, 5) Veryfing the subprocess to link is already in the data structure\n\n ensure!(\n\n iflow.get_type_info(parent_index) & 33 != 33,\n\n SUBPROCESS_TO_LINK_NOT_FOUND\n\n );\n\n Ok(())\n\n }\n\n}\n\n\n\ndecl_event!(\n\n pub enum Event<T>\n\n where\n\n InstanceId = <T as Trait>::InstanceId,\n\n Hash = <T as frame_system::Trait>::Hash,\n\n AccountId = <T as frame_system::Trait>::AccountId,\n\n {\n\n FactorySet(InstanceId, Hash),\n\n NewCaseCreated(AccountId),\n\n MessageSent(Vec<u8>),\n\n }\n\n);\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 87, "score": 25202.997087323947 }, { "content": " &mut self.factory\n\n }\n\n\n\n fn get_factory_instance(&self) -> &Ifactory<T> {\n\n &self.factory\n\n }\n\n\n\n fn set_factory_instance(&mut self, factory: Ifactory<T>) {\n\n self.factory = factory;\n\n }\n\n\n\n fn set_element(\n\n &mut self,\n\n element_index: u128,\n\n pre_condition: u128,\n\n post_condition: u128,\n\n type_info: u128,\n\n event_code: [u8; 32],\n\n _next_elem: Vec<u128>,\n\n ) {\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 88, "score": 25202.7813566943 }, { "content": " element_index: u128,\n\n pre_condition: u128,\n\n post_condition: u128,\n\n type_info: u128,\n\n event_code: [u8; 32],\n\n _next_elem: Vec<u128>\n\n ) -> DispatchResult {\n\n ensure_signed(origin)?;\n\n let iflow = Self::ensure_iflow_instance_exists(iflow_index)?;\n\n\n\n let _type_info = iflow.get_type_info(element_index);\n\n if _type_info != 0 {\n\n ensure!(_type_info == type_info, \"Should be equal\");\n\n }\n\n\n\n //\n\n // == MUTATION SAFE ==\n\n //\n\n\n\n if <IflowById<T>>::contains_key(iflow_index) {\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 89, "score": 25202.636836635465 }, { "content": " ensure_signed(origin)?;\n\n let factory = Ifactory::new(data_hash, instantiate_selector, execute_script_selector);\n\n\n\n //\n\n // == MUTATION SAFE ==\n\n //\n\n\n\n <IflowById<T>>::mutate(instance_id, 
|iflow| iflow.set_factory_instance(factory));\n\n Self::deposit_event(RawEvent::FactorySet(instance_id, data_hash));\n\n Ok(())\n\n }\n\n\n\n #[weight = 10_000]\n\n pub fn continue_execution(origin, instance_id: T::InstanceId, element_index: u128) -> DispatchResult {\n\n ensure_signed(origin)?;\n\n let idata = Self::ensure_idata_instance_exists(instance_id)?;\n\n\n\n //\n\n // == MUTATION SAFE ==\n\n //\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 90, "score": 25202.518501867977 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n #[weight = 10_000]\n\n pub fn link_sub_process(\n\n origin,\n\n iflow_index: T::InstanceId,\n\n parent_index: u128,\n\n child_flow_inst: T::InstanceId,\n\n attached_events: Vec<u128>,\n\n count_instances: u128,\n\n ) -> DispatchResult {\n\n ensure_signed(origin)?;\n\n\n\n let iflow = Self::ensure_iflow_instance_exists(iflow_index)?;\n\n Self::ensure_subprocess_to_link_in_data_structure(&iflow, parent_index)?;\n\n\n\n //\n\n // == MUTATION SAFE ==\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 91, "score": 25202.41854619882 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse codec::{Codec, Decode, Encode};\n\nuse frame_support::{\n\n decl_event, decl_module, decl_storage, dispatch::DispatchResult, ensure, Parameter,\n\n};\n\nuse frame_system::{self as system, ensure_signed, RawOrigin};\n\nuse sp_runtime::traits::{CheckedAdd, MaybeSerialize, Member};\n\nuse sp_std::{collections::btree_map::BTreeMap, prelude::*};\n\n\n\nmod errors;\n\nuse contracts::{CodeHash, ContractAddressFor};\n\nuse errors::*;\n\n\n\nconst ENDOWMENT: u32 = 1000;\n\nconst GAS: u32 = 500_000;\n\n\n\n#[cfg_attr(feature = \"std\", derive(Debug))]\n\n#[derive(Encode, Decode, Clone, PartialEq)]\n\npub struct Iflow<T: Trait> {\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 92, "score": 25202.298044179977 }, { "content": " parent_case: T::InstanceId,\n\n idata: &Idata<T>,\n\n event_code: [u8; 32],\n\n event_info: u128,\n\n ) -> Result<(), &'static str> {\n\n let mut parent_state: [u128; 2] = [0; 2];\n\n parent_state[0] = idata.get_marking();\n\n parent_state[0] = idata.get_started_activities();\n\n match event_info {\n\n event_info if event_info & 4096 == 4096 => {\n\n // Message (BIT 15), to publish a Message in the Event Log\n\n Self::deposit_event(RawEvent::MessageSent(event_code.to_vec()));\n\n }\n\n event_info if event_info & 5632 == 5632 => {\n\n // 9- End, 10- Default, 12- Message\n\n // If there are not tokens to consume nor started activities in any subprocess\n\n if parent_state[0] | parent_state[1] == 0 {\n\n // Sub-process ended, thus continue execution on parent\n\n Self::try_catch_event(parent_case, idata, event_code, event_info, true)?;\n\n }\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 93, "score": 25201.983179253995 }, { "content": " //\n\n\n\n <IflowById<T>>::mutate(iflow_index, |inner_iflow|\n\n inner_iflow.link_sub_process(\n\n parent_index,\n\n child_flow_inst,\n\n attached_events,\n\n count_instances,\n\n ));\n\n Ok(())\n\n }\n\n\n\n #[weight = 10_000]\n\n pub fn set_factory_instance(\n\n origin,\n\n instance_id: T::InstanceId,\n\n data_hash: T::Hash,\n\n instantiate_selector: Vec<u8>,\n\n execute_script_selector: Vec<u8>\n\n ) -> DispatchResult {\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 94, "score": 25200.717074705244 }, { "content": "\n\n\n\n let mut idata = Idata::default();\n\n\n\n idata.set_parent(None, parent_case, 
0);\n\n\n\n <IdataById<T>>::insert(parent_case, idata);\n\n\n\n Self::deposit_event(RawEvent::NewCaseCreated(contract_id));\n\n\n\n Self::execution_required(parent_case, &iflow)?;\n\n\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Trait> Module<T> {\n\n /// BPMN Interpreter logic\n\n\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 95, "score": 25200.67113478822 }, { "content": " }\n\n}\n\n\n\n#[cfg_attr(feature = \"std\", derive(Debug))]\n\n#[derive(Encode, Decode, Clone, PartialEq)]\n\npub struct Ifactory<T: Trait> {\n\n /// Data & scripts hash\n\n data_hash: T::Hash,\n\n address: Option<T::AccountId>,\n\n instantiate_selector: Vec<u8>,\n\n execute_script_selector: Vec<u8>,\n\n}\n\n\n\nimpl<T: Trait> Default for Ifactory<T> {\n\n fn default() -> Self {\n\n Self {\n\n data_hash: T::Hash::default(),\n\n address: None,\n\n instantiate_selector: vec![],\n\n execute_script_selector: vec![],\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 96, "score": 25199.495842669443 }, { "content": " parent_case: T::InstanceId,\n\n idata: &Idata<T>,\n\n event_code: [u8; 32],\n\n event_info: u128,\n\n instance_completed: bool,\n\n ) -> Result<(), &'static str> {\n\n if let Some(catch_case) = idata.get_idata_parent() {\n\n let mut catch_case_data = Self::ensure_idata_instance_exists(catch_case)?;\n\n let child_flow = idata.get_flow_node();\n\n let child_flow_instance = Self::ensure_iflow_instance_exists(child_flow)?;\n\n let mut parent_state: [u128; 2] = [0; 2];\n\n parent_state[0] = catch_case_data.get_marking();\n\n parent_state[1] = catch_case_data.get_started_activities();\n\n let sub_process_index = idata.get_index_in_parent();\n\n let run_inst_count = if instance_completed {\n\n <IdataById<T>>::mutate(catch_case, |catch_case_data| {\n\n catch_case_data.decrement_instance_count(sub_process_index)\n\n });\n\n catch_case_data.get_instance_count(sub_process_index) - 1\n\n } else {\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 97, "score": 25199.467931392242 }, { "content": " if catch_event_info & 6 == 6 {\n\n // Start event-sub-process (BIT 6)\n\n if catch_event_info & 16 == 16 {\n\n // Interrupting (BIT 4 must be 1, 0 if non-interrupting)\n\n // Before starting the event subprocess, the current process-instance is killed\n\n Self::kill_process(parent_case)?;\n\n Self::create_instance(attached_to, parent_case)?;\n\n <IdataById<T>>::mutate(parent_case, |parent_case_instance| {\n\n parent_case_instance.set_activity_marking(1 << attached_to);\n\n });\n\n } else if catch_event_info & 256 == 256 {\n\n // Boundary (BIT 6) of the subproces propagating the event\n\n if catch_event_info & 16 == 16 {\n\n // Interrupting (BIT 4 must be 1, 0 if non-interrupting)\n\n // The subprocess propagating the event must be interrupted\n\n let child_process_instances =\n\n parent_case_instance.get_child_process_instances(attached_to);\n\n Self::kill_processes(child_process_instances)?;\n\n }\n\n let marking = parent_case_instance.get_marking();\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 98, "score": 25198.84645518261 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<T: Trait> Ifactory<T> {\n\n fn new(\n\n data_hash: T::Hash,\n\n instantiate_selector: Vec<u8>,\n\n execute_script_selector: Vec<u8>,\n\n ) -> Self {\n\n Self {\n\n data_hash,\n\n address: None,\n\n instantiate_selector,\n\n execute_script_selector,\n\n }\n\n }\n\n\n\n pub fn set_instantiate_selector(&mut self, instantiate_selector: Vec<u8>) {\n\n 
self.instantiate_selector = instantiate_selector\n", "file_path": "liqum-node/bpmn-interpreter-pallet/src/lib.rs", "rank": 99, "score": 25198.751603554716 } ]
Rust
src/main.rs
jameslahm/convertio
a11f96b9af7e3ae9e6dad7d0b41b8cce2a4c07d1
use base64::{decode, encode}; use clap::{load_yaml, App}; use futures::future::try_join_all; use serde::{Deserialize, Deserializer}; use std::{ collections::HashMap, fs::File, io::{Read, Write}, path::PathBuf, time::Duration, u64, }; const API_BASE_URL: &str = "http://api.convertio.co/convert"; use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; #[derive(Deserialize)] struct NewConversionResp { code: i32, error: Option<String>, data: Option<ConvertioData>, } #[derive(Deserialize)] struct StatusConversionResp { code: i32, error: Option<String>, data: Option<ConvertioData>, } #[derive(Deserialize, Clone)] struct ConvertioData { id: String, step: Option<String>, #[serde(default)] #[serde(deserialize_with = "deserialize_u64_or_empty_string")] step_percent: Option<u64>, } #[derive(Deserialize)] struct FileData { content: String, } #[derive(Deserialize)] struct FileDownloadResp { code: i32, data: FileData, } fn deserialize_u64_or_empty_string<'de, D>(deserializer: D) -> Result<Option<u64>, D::Error> where D: Deserializer<'de>, { let s: Option<u64> = Option::deserialize(deserializer).unwrap_or(Some(0)); Ok(s) } struct ConversionTask { conversion_id: String, done: bool, input_file_name: String, output_format: String, progress: u64, } async fn start_conversion( input_file_name: &str, output_format: &str, api_key: &str, ) -> Result<ConversionTask, Box<dyn std::error::Error>> { let mut map = HashMap::new(); map.insert("apikey", api_key); map.insert("input", "base64"); let mut file = File::open(input_file_name).expect("file open failed"); let mut buf = vec![]; file.read_to_end(&mut buf).expect("file read failed"); let encode_buf = encode(&buf); map.insert("file", encode_buf.as_str()); map.insert("filename", input_file_name); map.insert("outputformat", output_format); let client = reqwest::Client::new(); let resp = client .post(API_BASE_URL) .json(&map) .send() .await? .json::<NewConversionResp>() .await?; if resp.code != 200 { return Err(resp.error.unwrap().into()); } let conversion_id = resp.data.unwrap().id; Ok(ConversionTask { conversion_id, done: false, input_file_name: input_file_name.to_owned(), output_format: output_format.to_owned(), progress: 0, }) } async fn wait_for_status(task: &mut ConversionTask) -> Result<(), Box<dyn std::error::Error>> { let client = reqwest::Client::new(); let resp = client .get(format!("{}/{}/status", API_BASE_URL, task.conversion_id).as_str()) .send() .await? .json::<StatusConversionResp>() .await?; if resp.code == 200 { if resp.data.clone().unwrap().step.as_deref().unwrap() == "finish" { let client = reqwest::Client::new(); let resp = client .get(format!("{}/{}/dl/base64", API_BASE_URL, task.conversion_id).as_str()) .send() .await? 
.json::<FileDownloadResp>() .await?; if resp.code == 200 { let mut output_path = PathBuf::from(task.input_file_name.as_str()); output_path.set_extension(task.output_format.as_str()); let mut file = File::create(output_path).expect("create file failed"); let decode_buf = decode(&resp.data.content).unwrap(); file.write_all(&decode_buf).expect("write file failed"); } task.done = true; task.progress = 100; } else { task.progress = *resp.data.clone().unwrap().step_percent.as_ref().unwrap(); } } if resp.code != 200 { task.done = true; println!("{}", resp.error.unwrap()) } Ok(()) } #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { let yaml = load_yaml!("cli.yml"); let matches = App::from(yaml).get_matches(); let output_format = matches.value_of("format").unwrap(); let input_file_names = matches.values_of("input").unwrap(); let api_key = match std::env::var("CONVERTIO_API_KEY") { Ok(s) => s, Err(_) => "0750521d6eee3603ac222d0422891eea".to_owned(), }; let mut conversions: Vec<ConversionTask> = try_join_all(input_file_names.map(|input_file_name| { start_conversion(input_file_name, output_format, api_key.as_str()) })) .await .unwrap(); let sty = ProgressStyle::default_bar() .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}") .progress_chars("##-"); let mut progress_bars = vec![]; let m = MultiProgress::new(); conversions.iter().for_each(|conversion| { let pb = m.add(ProgressBar::new(100)); pb.set_style(sty.clone()); pb.set_position(0); pb.set_message(&conversion.input_file_name); progress_bars.push(pb); }); tokio::spawn(async move { m.join().unwrap(); }); loop { if conversions.is_empty() { break; } try_join_all( conversions .iter_mut() .map(|conversion| wait_for_status(conversion)), ) .await .unwrap(); for (index, e) in conversions.iter().enumerate() { progress_bars[index].set_position(e.progress); progress_bars[index].set_message(&e.input_file_name); if e.progress == 100 { progress_bars[index].finish_and_clear(); } } conversions.retain(|conversion| !conversion.done); progress_bars.retain(|progress_bar| !progress_bar.position() != 100); tokio::time::sleep(Duration::from_secs(2)).await; } Ok(()) }
use base64::{decode, encode}; use clap::{load_yaml, App}; use futures::future::try_join_all; use serde::{Deserialize, Deserializer}; use std::{ collections::HashMap, fs::File, io::{Read, Write}, path::PathBuf, time::Duration, u64, }; const API_BASE_URL: &str = "http://api.convertio.co/convert"; use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; #[derive(Deserialize)] struct NewConversionResp { code: i32, error: Option<String>, data: Option<ConvertioData>, } #[derive(Deserialize)] struct StatusConversionResp { code: i32, error: Option<String>, data: Option<ConvertioData>, } #[derive(Deserialize, Clone)] struct ConvertioData { id: String, step: Option<String>, #[serde(default)] #[serde(deserialize_with = "deserialize_u64_or_empty_string")] step_percent: Option<u64>, } #[derive(Deserialize)] struct FileData { content: String, } #[derive(Deserialize)] struct FileDownloadResp { code: i32, data: FileData, } fn deserialize_u64_or_empty_string<'de, D>(deserializer: D) -> Result<Option<u64>, D::Error> where D: Deserializer<'de>, { let s: Option<u64> = Option::deserialize(deserializer).unwrap_or(Some(0)); Ok(s) } struct ConversionTask { conversion_id: String, done: bool, input_file_name: String, output_format: String, progress: u64, }
async fn wait_for_status(task: &mut ConversionTask) -> Result<(), Box<dyn std::error::Error>> { let client = reqwest::Client::new(); let resp = client .get(format!("{}/{}/status", API_BASE_URL, task.conversion_id).as_str()) .send() .await? .json::<StatusConversionResp>() .await?; if resp.code == 200 { if resp.data.clone().unwrap().step.as_deref().unwrap() == "finish" { let client = reqwest::Client::new(); let resp = client .get(format!("{}/{}/dl/base64", API_BASE_URL, task.conversion_id).as_str()) .send() .await? .json::<FileDownloadResp>() .await?; if resp.code == 200 { let mut output_path = PathBuf::from(task.input_file_name.as_str()); output_path.set_extension(task.output_format.as_str()); let mut file = File::create(output_path).expect("create file failed"); let decode_buf = decode(&resp.data.content).unwrap(); file.write_all(&decode_buf).expect("write file failed"); } task.done = true; task.progress = 100; } else { task.progress = *resp.data.clone().unwrap().step_percent.as_ref().unwrap(); } } if resp.code != 200 { task.done = true; println!("{}", resp.error.unwrap()) } Ok(()) } #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { let yaml = load_yaml!("cli.yml"); let matches = App::from(yaml).get_matches(); let output_format = matches.value_of("format").unwrap(); let input_file_names = matches.values_of("input").unwrap(); let api_key = match std::env::var("CONVERTIO_API_KEY") { Ok(s) => s, Err(_) => "0750521d6eee3603ac222d0422891eea".to_owned(), }; let mut conversions: Vec<ConversionTask> = try_join_all(input_file_names.map(|input_file_name| { start_conversion(input_file_name, output_format, api_key.as_str()) })) .await .unwrap(); let sty = ProgressStyle::default_bar() .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}") .progress_chars("##-"); let mut progress_bars = vec![]; let m = MultiProgress::new(); conversions.iter().for_each(|conversion| { let pb = m.add(ProgressBar::new(100)); pb.set_style(sty.clone()); pb.set_position(0); pb.set_message(&conversion.input_file_name); progress_bars.push(pb); }); tokio::spawn(async move { m.join().unwrap(); }); loop { if conversions.is_empty() { break; } try_join_all( conversions .iter_mut() .map(|conversion| wait_for_status(conversion)), ) .await .unwrap(); for (index, e) in conversions.iter().enumerate() { progress_bars[index].set_position(e.progress); progress_bars[index].set_message(&e.input_file_name); if e.progress == 100 { progress_bars[index].finish_and_clear(); } } conversions.retain(|conversion| !conversion.done); progress_bars.retain(|progress_bar| !progress_bar.position() != 100); tokio::time::sleep(Duration::from_secs(2)).await; } Ok(()) }
async fn start_conversion( input_file_name: &str, output_format: &str, api_key: &str, ) -> Result<ConversionTask, Box<dyn std::error::Error>> { let mut map = HashMap::new(); map.insert("apikey", api_key); map.insert("input", "base64"); let mut file = File::open(input_file_name).expect("file open failed"); let mut buf = vec![]; file.read_to_end(&mut buf).expect("file read failed"); let encode_buf = encode(&buf); map.insert("file", encode_buf.as_str()); map.insert("filename", input_file_name); map.insert("outputformat", output_format); let client = reqwest::Client::new(); let resp = client .post(API_BASE_URL) .json(&map) .send() .await? .json::<NewConversionResp>() .await?; if resp.code != 200 { return Err(resp.error.unwrap().into()); } let conversion_id = resp.data.unwrap().id; Ok(ConversionTask { conversion_id, done: false, input_file_name: input_file_name.to_owned(), output_format: output_format.to_owned(), progress: 0, }) }
function_block-full_function
[ { "content": "## Convertio\n\nConvert anything from one format to another using [convertio.co](https://convertio.co/)\n\n\n\n### example\n\n```bash\n\ncargo run -- -f pdf \"Chp 2.pptx\" \"Chp 1.ppt\"\n\n```\n", "file_path": "README.md", "rank": 14, "score": 1.2168407813751934 } ]
Rust
ltn/src/select_boundary.rs
aclk/abstreet
611ab9fce155ae2ed37a224cbbd417fcebaa0536
use std::collections::BTreeSet; use anyhow::Result; use geom::Distance; use map_model::Block; use widgetry::mapspace::ToggleZoomed; use widgetry::mapspace::{World, WorldOutcome}; use widgetry::{ Color, EventCtx, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Panel, State, Text, TextExt, VerticalAlignment, Widget, }; use crate::partition::BlockID; use crate::{App, NeighborhoodID, Partitioning, Transition}; pub struct SelectBoundary { panel: Panel, id: NeighborhoodID, world: World<BlockID>, draw_outline: ToggleZoomed, frontier: BTreeSet<BlockID>, orig_partitioning: Partitioning, last_failed_change: Option<(BlockID, bool)>, } impl SelectBoundary { pub fn new_state(ctx: &mut EventCtx, app: &App, id: NeighborhoodID) -> Box<dyn State<App>> { let mut state = SelectBoundary { panel: make_panel(ctx, app), id, world: World::bounded(app.map.get_bounds()), draw_outline: ToggleZoomed::empty(ctx), frontier: BTreeSet::new(), orig_partitioning: app.session.partitioning.clone(), last_failed_change: None, }; let initial_boundary = app.session.partitioning.neighborhood_block(id); state.frontier = app .session .partitioning .calculate_frontier(&initial_boundary.perimeter); for id in app.session.partitioning.all_block_ids() { state.add_block(ctx, app, id); } state.redraw_outline(ctx, initial_boundary); state.world.initialize_hover(ctx); Box::new(state) } fn add_block(&mut self, ctx: &mut EventCtx, app: &App, id: BlockID) { let neighborhood = app.session.partitioning.block_to_neighborhood(id); let color = app.session.partitioning.neighborhood_color(neighborhood); if self.frontier.contains(&id) { let have_block = self.currently_have_block(app, id); let mut obj = self .world .add(id) .hitbox(app.session.partitioning.get_block(id).polygon.clone()) .draw_color(color.alpha(0.5)) .hover_alpha(0.8) .clickable(); if have_block { obj = obj .hotkey(Key::Space, "remove") .hotkey(Key::LeftShift, "remove") } else { obj = obj .hotkey(Key::Space, "add") .hotkey(Key::LeftControl, "add") } obj.build(ctx); } else { let alpha = if self.id == neighborhood { 0.5 } else { 0.1 }; self.world .add(id) .hitbox(app.session.partitioning.get_block(id).polygon.clone()) .draw_color(color.alpha(alpha)) .build(ctx); } } fn redraw_outline(&mut self, ctx: &mut EventCtx, block: &Block) { let mut batch = ToggleZoomed::builder(); if let Ok(outline) = block.polygon.to_outline(Distance::meters(10.0)) { batch.unzoomed.push(Color::RED, outline); } if let Ok(outline) = block.polygon.to_outline(Distance::meters(5.0)) { batch.zoomed.push(Color::RED.alpha(0.5), outline); } self.draw_outline = batch.build(ctx); } fn toggle_block(&mut self, ctx: &mut EventCtx, app: &mut App, id: BlockID) -> Transition { if self.last_failed_change == Some((id, self.currently_have_block(app, id))) { return Transition::Keep; } self.last_failed_change = None; match self.try_toggle_block(app, id) { Ok(Some(new_neighborhood)) => { app.session.partitioning.recalculate_coloring(); return Transition::Replace(SelectBoundary::new_state(ctx, app, new_neighborhood)); } Ok(None) => { let old_frontier = std::mem::take(&mut self.frontier); self.frontier = app.session.partitioning.calculate_frontier( &app.session .partitioning .neighborhood_block(self.id) .perimeter, ); let mut changed_blocks: Vec<BlockID> = old_frontier .symmetric_difference(&self.frontier) .cloned() .collect(); changed_blocks.push(id); if app.session.partitioning.recalculate_coloring() { changed_blocks.clear(); changed_blocks.extend(app.session.partitioning.all_block_ids()); } for changed in changed_blocks { 
self.world.delete_before_replacement(changed); self.add_block(ctx, app, changed); } self.redraw_outline(ctx, app.session.partitioning.neighborhood_block(self.id)); self.panel = make_panel(ctx, app); } Err(err) => { self.last_failed_change = Some((id, self.currently_have_block(app, id))); let label = err.to_string().text_widget(ctx); self.panel.replace(ctx, "warning", label); } } Transition::Keep } fn try_toggle_block(&mut self, app: &mut App, id: BlockID) -> Result<Option<NeighborhoodID>> { if self.currently_have_block(app, id) { app.session .partitioning .remove_block_from_neighborhood(&app.map, id, self.id) } else { let old_owner = app.session.partitioning.block_to_neighborhood(id); app.session .partitioning .transfer_block(&app.map, id, old_owner, self.id)?; Ok(None) } } fn currently_have_block(&self, app: &App, id: BlockID) -> bool { app.session.partitioning.block_to_neighborhood(id) == self.id } } impl State<App> for SelectBoundary { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition { if let Outcome::Clicked(x) = self.panel.event(ctx) { match x.as_ref() { "Cancel" => { app.session.partitioning = self.orig_partitioning.clone(); return Transition::Replace(super::connectivity::Viewer::new_state( ctx, app, self.id, )); } "Confirm" => { return Transition::Replace(super::connectivity::Viewer::new_state( ctx, app, self.id, )); } x => { return crate::handle_app_header_click(ctx, app, x).unwrap(); } } } match self.world.event(ctx) { WorldOutcome::Keypress("add" | "remove", id) | WorldOutcome::ClickedObject(id) => { return self.toggle_block(ctx, app, id); } _ => {} } if ctx.redo_mouseover() { if let Some(id) = self.world.get_hovering() { if ctx.is_key_down(Key::LeftControl) { if !self.currently_have_block(app, id) { return self.toggle_block(ctx, app, id); } } else if ctx.is_key_down(Key::LeftShift) { if self.currently_have_block(app, id) { return self.toggle_block(ctx, app, id); } } } } Transition::Keep } fn draw(&self, g: &mut GfxCtx, _: &App) { self.world.draw(g); self.draw_outline.draw(g); self.panel.draw(g); } } fn make_panel(ctx: &mut EventCtx, app: &App) -> Panel { Panel::new_builder(Widget::col(vec![ crate::app_header(ctx, app), "Draw a custom boundary for a neighborhood" .text_widget(ctx) .centered_vert(), Text::from_all(vec![ Line("Click").fg(ctx.style().text_hotkey_color), Line(" to add/remove a block"), ]) .into_widget(ctx), Text::from_all(vec![ Line("Hold "), Line(Key::LeftControl.describe()).fg(ctx.style().text_hotkey_color), Line(" and paint over blocks to add"), ]) .into_widget(ctx), Text::from_all(vec![ Line("Hold "), Line(Key::LeftShift.describe()).fg(ctx.style().text_hotkey_color), Line(" and paint over blocks to remove"), ]) .into_widget(ctx), Widget::row(vec![ ctx.style() .btn_solid_primary .text("Confirm") .hotkey(Key::Enter) .build_def(ctx), ctx.style() .btn_solid_destructive .text("Cancel") .hotkey(Key::Escape) .build_def(ctx), ]), Text::new().into_widget(ctx).named("warning"), ])) .aligned(HorizontalAlignment::Left, VerticalAlignment::Top) .build(ctx) }
use std::collections::BTreeSet; use anyhow::Result; use geom::Distance; use map_model::Block; use widgetry::mapspace::ToggleZoomed; use widgetry::mapspace::{World, WorldOutcome}; use widgetry::{ Color, EventCtx, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Panel, State, Text, TextExt, VerticalAlignment, Widget, }; use crate::partition::BlockID; use crate::{App, NeighborhoodID, Partitioning, Transition}; pub struct SelectBoundary { panel: Panel, id: NeighborhoodID, world: World<BlockID>, draw_outline: ToggleZoomed, frontier: BTreeSet<BlockID>, orig_partitioning: Partitioning, last_failed_change: Option<(BlockID, bool)>, } impl SelectBoundary { pub fn new_state(ctx: &mut EventCtx, app: &App, id: NeighborhoodID) -> Box<dyn State<App>> { let mut state = SelectBoundary { panel: make_panel(ctx, app), id, world: World::bounded(app.map.get_bounds()), draw_outline: ToggleZoomed::empty(ctx), frontier: BTreeSet::new(), orig_partitioning: app.session.partitioning.clone(), last_failed_change: None, }; let initial_boundary = app.session.partitioning.neighborhood_block(id); state.frontier = app .session .partitioning .calculate_frontier(&initial_boundary.perimeter); for id in app.session.partitioning.all_block_ids() { state.add_block(ctx, app, id); } state.redraw_outline(ctx, initial_boundary); state.world.initialize_hover(ctx); Box::new(state) } fn add_block(&mut self, ctx: &mut EventCtx, app: &App, id: BlockID) { let neighborhood = app.session.partitioning.block_to_neighborhood(id); let color = app.session.partitioning.neighborhood_color(neighborhood); if self.frontier.contains(&id) { let have_block = self.currently_have_block(app, id); let mut obj = self .world .add(id) .hitbox(app.session.partitioning.get_block(id).polygon.clone()) .draw_color(color.alpha(0.5)) .hover_alpha(0.8) .clickable(); if have_block { obj = obj .hotkey(Key::Space, "remove") .hotkey(Key::LeftShift, "remove") } else { obj = obj .hotkey(Key::Space, "add") .hotkey(Key::LeftControl, "add") } obj.build(ctx); } else { let alpha = if self.id == neighborhood { 0.5 } else { 0.1 }; self.world .add(id) .hitbox(app.session.partitioning.get_block(id).polygon.clone()) .draw_color(color.alpha(alpha)) .build(ctx); } } fn redraw_outline(&mut self, ctx: &mut EventCtx, block: &Block) { let mut batch = ToggleZoomed::builder(); if let Ok(outline) = block.polygon.to_outline(Distance::meters(10.0)) { batch.unzoomed.push(Color::RED, outline); } if let Ok(outline) = block.polygon.to_outline(Distance::meters(5.0)) { batch.zoomed.push(Color::RED.alpha(0.5), outline); } self.draw_outline = batch.build(ctx); } fn toggle_block(&mut self, ctx: &mut EventCtx, app: &mut App, id: BlockID) -> Transition { if self.last_failed_change == Some((id, self.currently_have_block(app, id))) { return Transition::Keep; } self.last_failed_change = None; match self.try_toggle_block(app, id) { Ok(Some(new_neighborhood)) => { app.session.partitioning.recalculate_coloring(); return Transition::Replace(SelectBoundary::new_state(ctx, app, new_neighborhood)); } Ok(None) => { let old_frontier = std::mem::take(&mut self.frontier); self.frontier = app.session.partitioning.calculate_frontier( &app.session .partitioning .neighborhood_block(self.id) .perimeter, ); let mut changed_blocks: Vec<BlockID> = old_frontier .symmetric_difference(&self.frontier) .cloned() .collect(); changed_blocks.push(id); if app.session.partitioning.recalculate_coloring() { changed_blocks.clear(); changed_blocks.extend(app.session.partitioning.all_block_ids()); } for changed in changed_blocks { 
self.world.delete_before_replacement(changed); self.add_block(ctx, app, changed); } self.redraw_outline(ctx, app.session.partitioning.neighborhood_block(self.id)); self.panel = make_panel(ctx, app); } Err(err) => { self.last_failed_change = Some((id, self.currently_have_block(app, id))); let label = err.to_string().text_widget(ctx); self.panel.replace(ctx, "warning", label); } } Transition::Keep } fn try_toggle_block(&mut self, app: &mut App, id: BlockID) -> Result<Option<NeighborhoodID>> { if self.currently_have_block(app, id) { app.session .partitioning .remove_block_from_neighborhood(&app.map, id, self.id) } else { let ol
fn currently_have_block(&self, app: &App, id: BlockID) -> bool { app.session.partitioning.block_to_neighborhood(id) == self.id } } impl State<App> for SelectBoundary { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition { if let Outcome::Clicked(x) = self.panel.event(ctx) { match x.as_ref() { "Cancel" => { app.session.partitioning = self.orig_partitioning.clone(); return Transition::Replace(super::connectivity::Viewer::new_state( ctx, app, self.id, )); } "Confirm" => { return Transition::Replace(super::connectivity::Viewer::new_state( ctx, app, self.id, )); } x => { return crate::handle_app_header_click(ctx, app, x).unwrap(); } } } match self.world.event(ctx) { WorldOutcome::Keypress("add" | "remove", id) | WorldOutcome::ClickedObject(id) => { return self.toggle_block(ctx, app, id); } _ => {} } if ctx.redo_mouseover() { if let Some(id) = self.world.get_hovering() { if ctx.is_key_down(Key::LeftControl) { if !self.currently_have_block(app, id) { return self.toggle_block(ctx, app, id); } } else if ctx.is_key_down(Key::LeftShift) { if self.currently_have_block(app, id) { return self.toggle_block(ctx, app, id); } } } } Transition::Keep } fn draw(&self, g: &mut GfxCtx, _: &App) { self.world.draw(g); self.draw_outline.draw(g); self.panel.draw(g); } } fn make_panel(ctx: &mut EventCtx, app: &App) -> Panel { Panel::new_builder(Widget::col(vec![ crate::app_header(ctx, app), "Draw a custom boundary for a neighborhood" .text_widget(ctx) .centered_vert(), Text::from_all(vec![ Line("Click").fg(ctx.style().text_hotkey_color), Line(" to add/remove a block"), ]) .into_widget(ctx), Text::from_all(vec![ Line("Hold "), Line(Key::LeftControl.describe()).fg(ctx.style().text_hotkey_color), Line(" and paint over blocks to add"), ]) .into_widget(ctx), Text::from_all(vec![ Line("Hold "), Line(Key::LeftShift.describe()).fg(ctx.style().text_hotkey_color), Line(" and paint over blocks to remove"), ]) .into_widget(ctx), Widget::row(vec![ ctx.style() .btn_solid_primary .text("Confirm") .hotkey(Key::Enter) .build_def(ctx), ctx.style() .btn_solid_destructive .text("Cancel") .hotkey(Key::Escape) .build_def(ctx), ]), Text::new().into_widget(ctx).named("warning"), ])) .aligned(HorizontalAlignment::Left, VerticalAlignment::Top) .build(ctx) }
d_owner = app.session.partitioning.block_to_neighborhood(id); app.session .partitioning .transfer_block(&app.map, id, old_owner, self.id)?; Ok(None) } }
function_block-function_prefixed
[ { "content": "pub fn stop(ctx: &mut EventCtx, app: &App, details: &mut Details, id: TransitStopID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(\"Bus stop\").small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, stop_body(ctx, app, details, id).tab_body(ctx)])\n\n}\n\n\n", "file_path": "game/src/info/transit.rs", "rank": 0, "score": 603888.0935188932 }, { "content": "pub fn route(ctx: &mut EventCtx, app: &App, details: &mut Details, id: TransitRouteID) -> Widget {\n\n let header = {\n\n let map = &app.primary.map;\n\n let route = map.get_tr(id);\n\n\n\n Widget::row(vec![\n\n Line(format!(\"Route {}\", route.short_name))\n\n .small_heading()\n\n .into_widget(ctx),\n\n header_btns(ctx),\n\n ])\n\n };\n\n\n\n Widget::custom_col(vec![\n\n header,\n\n route_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/transit.rs", "rank": 1, "score": 603888.0935188932 }, { "content": "pub fn bus_status(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Widget {\n\n Widget::custom_col(vec![\n\n bus_header(ctx, app, details, id, Tab::TransitVehicleStatus(id)),\n\n bus_status_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/transit.rs", "rank": 2, "score": 588885.3348676967 }, { "content": "fn make_world(ctx: &mut EventCtx, app: &App, timer: &mut Timer) -> World<NeighborhoodID> {\n\n let mut world = World::bounded(app.map.get_bounds());\n\n let map = &app.map;\n\n for (id, (block, color)) in app.session.partitioning.all_neighborhoods() {\n\n match app.session.draw_neighborhood_style {\n\n Style::SimpleColoring => {\n\n world\n\n .add(*id)\n\n .hitbox(block.polygon.clone())\n\n .draw_color(color.alpha(0.3))\n\n .hover_outline(Color::BLACK, Distance::meters(5.0))\n\n .clickable()\n\n .build(ctx);\n\n }\n\n Style::Cells => {\n\n // TODO The cell colors are confusing alongside the other neighborhood colors. 
I\n\n // tried greying out everything else, but then the view is too jumpy.\n\n let neighborhood = Neighborhood::new(ctx, app, *id);\n\n let render_cells = super::draw_cells::RenderCells::new(map, &neighborhood);\n\n let hovered_batch = render_cells.draw();\n", "file_path": "ltn/src/browse.rs", "rank": 3, "score": 557301.8932214582 }, { "content": "pub fn people(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::BldgPeople(id)),\n\n people_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/building.rs", "rank": 4, "score": 556386.2244373902 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::BldgInfo(id)),\n\n info_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/building.rs", "rank": 5, "score": 556386.2244373902 }, { "content": "pub fn area(ctx: &EventCtx, app: &App, _: &mut Details, id: AreaID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(id.to_string()).small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, area_body(ctx, app, id).tab_body(ctx)])\n\n}\n\n\n", "file_path": "game/src/info/debug.rs", "rank": 6, "score": 548753.6805142069 }, { "content": "fn stop_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: TransitStopID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let ts = app.primary.map.get_ts(id);\n\n let sim = &app.primary.sim;\n\n\n\n rows.push(Line(&ts.name).into_widget(ctx));\n\n\n\n let all_arrivals = &sim.get_analytics().bus_arrivals;\n\n for r in app.primary.map.get_routes_serving_stop(id) {\n\n // Full names can overlap, so include the ID\n\n let label = format!(\"{} ({})\", r.long_name, r.id);\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(format!(\"Route {}\", r.short_name))\n\n .build_widget(ctx, &label),\n\n );\n\n details.hyperlinks.insert(label, Tab::TransitRoute(r.id));\n\n\n", "file_path": "game/src/info/transit.rs", "rank": 7, "score": 547762.8179303164 }, { "content": "fn route_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: TransitRouteID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let route = map.get_tr(id);\n\n rows.push(\n\n Text::from(&route.long_name)\n\n .wrap_to_pct(ctx, 20)\n\n .into_widget(ctx),\n\n );\n\n\n\n let buses = app.primary.sim.status_of_buses(id, map);\n\n let mut bus_locations = Vec::new();\n\n if buses.is_empty() {\n\n rows.push(format!(\"No {} running\", route.plural_noun()).text_widget(ctx));\n\n } else {\n\n for (bus, _, _, pt) in buses {\n\n rows.push(ctx.style().btn_outline.text(bus.to_string()).build_def(ctx));\n\n details\n\n .hyperlinks\n", "file_path": "game/src/info/transit.rs", "rank": 8, "score": 547762.8179303163 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: ParkingLotID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, details, id, Tab::ParkingLot(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/parking_lot.rs", "rank": 9, "score": 546017.6179994497 }, { "content": "pub fn debug(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::LaneDebug(id)),\n\n debug_body(ctx, app, id).tab_body(ctx),\n\n 
])\n\n}\n\n\n", "file_path": "game/src/info/lane.rs", "rank": 10, "score": 543067.1974352914 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::LaneInfo(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/lane.rs", "rank": 11, "score": 543067.1974352914 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: IntersectionID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::IntersectionInfo(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/intersection.rs", "rank": 12, "score": 543067.1974352914 }, { "content": "pub fn execute(ctx: &mut EventCtx, app: &mut App, id: ID, action: &str) -> Transition {\n\n let mut tut = app.session.tutorial.as_mut().unwrap();\n\n let response = match (id, action) {\n\n (ID::Car(c), \"draw WASH ME\") => {\n\n let is_parked = app\n\n .primary\n\n .sim\n\n .agent_to_trip(AgentID::Car(ESCORT))\n\n .is_none();\n\n if c == ESCORT {\n\n if is_parked {\n\n tut.prank_done = true;\n\n PopupMsg::new_state(\n\n ctx,\n\n \"Prank in progress\",\n\n vec![\"You quickly scribble on the window...\"],\n\n )\n\n } else {\n\n PopupMsg::new_state(\n\n ctx,\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 13, "score": 542329.986093848 }, { "content": "pub fn execute(ctx: &mut EventCtx, app: &mut App, id: ID, action: &str) -> Transition {\n\n match (id, action) {\n\n (ID::Building(b), \"start a trip here\") => {\n\n Transition::Push(spawner::AgentSpawner::new_state(ctx, app, Some(b)))\n\n }\n\n (ID::Intersection(id), \"spawn agents here\") => {\n\n spawn_agents_around(id, app);\n\n Transition::Keep\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "game/src/sandbox/gameplay/freeform/mod.rs", "rank": 14, "score": 537055.3381548569 }, { "content": "fn bus_status_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let route = app\n\n .primary\n\n .map\n\n .get_tr(app.primary.sim.bus_route_id(id).unwrap());\n\n\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(format!(\"Serves route {}\", route.short_name))\n\n .build_def(ctx),\n\n );\n\n details.hyperlinks.insert(\n\n format!(\"Serves route {}\", route.short_name),\n\n Tab::TransitRoute(route.id),\n\n );\n\n\n\n rows.push(\n\n Line(format!(\n\n \"Currently has {} passengers\",\n\n app.primary.sim.num_transit_passengers(id),\n\n ))\n\n .into_widget(ctx),\n\n );\n\n\n\n Widget::col(rows)\n\n}\n\n\n", "file_path": "game/src/info/transit.rs", "rank": 15, "score": 532760.0592791198 }, { "content": "fn bus_header(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID, tab: Tab) -> Widget {\n\n let route = app.primary.sim.bus_route_id(id).unwrap();\n\n\n\n if let Some(pt) = app\n\n .primary\n\n .sim\n\n .canonical_pt_for_agent(AgentID::Car(id), &app.primary.map)\n\n {\n\n ctx.canvas.center_on_map_pt(pt);\n\n }\n\n\n\n let mut rows = vec![];\n\n rows.push(Widget::row(vec![\n\n Line(format!(\n\n \"{} (route {})\",\n\n id,\n\n app.primary.map.get_tr(route).short_name\n\n ))\n\n .small_heading()\n\n .into_widget(ctx),\n", "file_path": "game/src/info/transit.rs", "rank": 16, "score": 514278.5008184706 }, { "content": "pub fn warp_to_id(ctx: &mut EventCtx, app: &mut App, input: &str) -> Transition {\n\n if let Some(t) = inner_warp_to_id(ctx, app, input) {\n\n t\n\n } else {\n\n 
Transition::Replace(PopupMsg::new_state(\n\n ctx,\n\n \"Bad warp ID\",\n\n vec![format!(\"{} isn't a valid ID\", input)],\n\n ))\n\n }\n\n}\n\n\n", "file_path": "game/src/common/warp.rs", "rank": 17, "score": 512402.668396467 }, { "content": "pub fn custom_bar(ctx: &mut EventCtx, filled_color: Color, pct_full: f64, txt: Text) -> Widget {\n\n let total_width = 300.0;\n\n let height = 32.0;\n\n let radius = 4.0;\n\n\n\n let mut batch = GeomBatch::new();\n\n // Background\n\n batch.push(\n\n Color::hex(\"#666666\"),\n\n Polygon::rounded_rectangle(total_width, height, radius),\n\n );\n\n // Foreground\n\n if let Some(poly) = Polygon::maybe_rounded_rectangle(pct_full * total_width, height, radius) {\n\n batch.push(filled_color, poly);\n\n }\n\n // Text\n\n let label = txt.render_autocropped(ctx);\n\n let dims = label.get_dims();\n\n batch.append(label.translate(10.0, height / 2.0 - dims.height / 2.0));\n\n batch.into_widget(ctx)\n\n}\n\n\n", "file_path": "santa/src/meters.rs", "rank": 18, "score": 511565.2768796651 }, { "content": "fn schedule_body(ctx: &mut EventCtx, app: &App, id: PersonID) -> Widget {\n\n let mut rows = vec![];\n\n let person = app.primary.sim.get_person(id);\n\n let mut rng = XorShiftRng::seed_from_u64(id.0 as u64);\n\n\n\n // TODO Proportional 24-hour timeline would be easier to understand\n\n let mut last_t = Time::START_OF_DAY;\n\n for t in &person.trips {\n\n let trip = app.primary.sim.trip_info(*t);\n\n let at = match trip.start {\n\n TripEndpoint::Building(b) => {\n\n let b = app.primary.map.get_b(b);\n\n if b.amenities.is_empty() {\n\n b.address.clone()\n\n } else {\n\n let list = b\n\n .amenities\n\n .iter()\n\n .map(|a| a.names.get(app.opts.language.as_ref()))\n\n .collect::<Vec<_>>();\n", "file_path": "game/src/info/person.rs", "rank": 19, "score": 511365.8820485416 }, { "content": "fn inner_warp_to_id(ctx: &mut EventCtx, app: &mut App, line: &str) -> Option<Transition> {\n\n if line.is_empty() {\n\n return None;\n\n }\n\n if line == \"j\" {\n\n if let Some((pt, zoom)) = app.primary.last_warped_from {\n\n return Some(Transition::Replace(Warping::new_state(\n\n ctx,\n\n pt,\n\n Some(zoom),\n\n None,\n\n &mut app.primary,\n\n )));\n\n }\n\n return None;\n\n }\n\n\n\n let id = match (&line[1..line.len()]).parse::<usize>() {\n\n Ok(idx) => match line.chars().next().unwrap() {\n\n 'r' => {\n", "file_path": "game/src/common/warp.rs", "rank": 20, "score": 508020.5502227529 }, { "content": "fn current_demand_body(ctx: &mut EventCtx, app: &App, id: IntersectionID) -> Widget {\n\n let mut rows = vec![];\n\n let mut total_demand = 0;\n\n let mut demand_per_movement: Vec<(&PolyLine, usize)> = Vec::new();\n\n for m in app.primary.map.get_i(id).movements.values() {\n\n let demand = app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .demand\n\n .get(&m.id)\n\n .cloned()\n\n .unwrap_or(0);\n\n if demand > 0 {\n\n total_demand += demand;\n\n demand_per_movement.push((&m.geom, demand));\n\n }\n\n }\n\n\n\n let mut batch = GeomBatch::new();\n", "file_path": "game/src/info/intersection.rs", "rank": 21, "score": 506333.6773495529 }, { "content": "fn traffic_signal_body(ctx: &mut EventCtx, app: &App, id: IntersectionID) -> Widget {\n\n let mut rows = vec![];\n\n // Slightly inaccurate -- the turn rendering may slightly exceed the intersection polygon --\n\n // but this is close enough.\n\n let bounds = app.primary.map.get_i(id).polygon.get_bounds();\n\n // Pick a zoom so that we fit a fixed width in pixels\n\n let zoom = 150.0 / bounds.width();\n\n let bbox = Polygon::rectangle(zoom * 
bounds.width(), zoom * bounds.height());\n\n\n\n let signal = app.primary.map.get_traffic_signal(id);\n\n {\n\n let mut txt = Text::new();\n\n txt.add_line(Line(format!(\"{} stages\", signal.stages.len())).small_heading());\n\n txt.add_line(format!(\"Signal offset: {}\", signal.offset));\n\n {\n\n let mut total = Duration::ZERO;\n\n for s in &signal.stages {\n\n total += s.stage_type.simple_duration();\n\n }\n\n // TODO Say \"normally\" or something?\n", "file_path": "game/src/info/intersection.rs", "rank": 22, "score": 506333.6773495529 }, { "content": "fn info_body(ctx: &mut EventCtx, app: &App, id: ParkingLotID) -> Widget {\n\n let mut rows = vec![];\n\n let pl = app.primary.map.get_pl(id);\n\n let capacity = pl.capacity();\n\n\n\n rows.push(\n\n format!(\n\n \"{} / {} spots available\",\n\n prettyprint_usize(app.primary.sim.get_free_lot_spots(pl.id).len()),\n\n prettyprint_usize(capacity)\n\n )\n\n .text_widget(ctx),\n\n );\n\n\n\n let mut series = vec![Series {\n\n label: format!(\"After \\\"{}\\\"\", app.primary.map.get_edits().edits_name),\n\n color: app.cs.after_changes,\n\n pts: app.primary.sim.get_analytics().parking_lot_availability(\n\n app.primary.sim.time(),\n\n pl.id,\n", "file_path": "game/src/info/parking_lot.rs", "rank": 23, "score": 501479.32325273164 }, { "content": "pub fn crowd(ctx: &EventCtx, app: &App, details: &mut Details, members: &[PedestrianID]) -> Widget {\n\n let header = Widget::custom_col(vec![\n\n Line(\"Pedestrian crowd\").small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n Widget::custom_col(vec![\n\n header,\n\n crowd_body(ctx, app, details, members).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/person.rs", "rank": 24, "score": 500803.61842974625 }, { "content": "fn people_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n // Two caveats about these counts:\n\n // 1) A person might use multiple modes through the day, but this just picks a single category.\n\n // 2) Only people currently in the building currently are counted, whether or not that's their\n\n // home.\n\n let mut drivers = 0;\n\n let mut cyclists = 0;\n\n let mut others = 0;\n\n\n\n let mut ppl: Vec<(Time, Widget)> = Vec::new();\n\n for p in app.primary.sim.bldg_to_people(id) {\n\n let person = app.primary.sim.get_person(p);\n\n\n\n let mut has_car = false;\n\n let mut has_bike = false;\n\n for vehicle in &person.vehicles {\n\n if vehicle.vehicle_type == VehicleType::Car {\n\n has_car = true;\n", "file_path": "game/src/info/building.rs", "rank": 25, "score": 499494.587084507 }, { "content": "fn info_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let b = app.primary.map.get_b(id);\n\n\n\n let mut kv = vec![(\"Address\", b.address.clone())];\n\n if let Some(ref names) = b.name {\n\n kv.push((\"Name\", names.get(app.opts.language.as_ref()).to_string()));\n\n }\n\n if app.opts.dev {\n\n kv.push((\"OSM ID\", format!(\"{}\", b.orig_id.inner())));\n\n }\n\n\n\n let num_spots = b.num_parking_spots();\n\n if app.primary.sim.infinite_parking() {\n\n kv.push((\n\n \"Parking\",\n\n format!(\n\n \"Unlimited, currently {} cars inside\",\n\n app.primary.sim.bldg_to_parked_cars(b.id).len()\n", "file_path": "game/src/info/building.rs", "rank": 26, "score": 499494.587084507 }, { "content": "fn bio_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: PersonID) -> Widget {\n\n let mut rows = vec![];\n\n let person = 
app.primary.sim.get_person(id);\n\n let mut rng = XorShiftRng::seed_from_u64(id.0 as u64);\n\n\n\n let mut svg_data = Vec::new();\n\n svg_face::generate_face(&mut svg_data, &mut rng).unwrap();\n\n let batch = GeomBatch::load_svg_bytes_uncached(&svg_data).autocrop();\n\n let dims = batch.get_dims();\n\n let batch = batch.scale((200.0 / dims.width).min(200.0 / dims.height));\n\n rows.push(batch.into_widget(ctx).centered_horiz());\n\n\n\n let nickname = petname::Petnames::default().generate(&mut rng, 2, \" \");\n\n let age = rng.gen_range(5..100);\n\n\n\n let mut table = vec![(\"Nickname\", nickname), (\"Age\", age.to_string())];\n\n if app.opts.dev {\n\n table.push((\"Debug ID\", format!(\"{:?}\", person.orig_id)));\n\n }\n\n rows.extend(make_table(ctx, table));\n", "file_path": "game/src/info/person.rs", "rank": 27, "score": 499494.587084507 }, { "content": "fn parked_car_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Widget {\n\n // TODO prev trips, next trips, etc\n\n let mut rows = vec![];\n\n\n\n let p = app.primary.sim.get_owner_of_car(id).unwrap();\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(format!(\"Owned by {}\", p))\n\n .build_def(ctx),\n\n );\n\n details.hyperlinks.insert(\n\n format!(\"Owned by {}\", p),\n\n Tab::PersonTrips(p, BTreeMap::new()),\n\n );\n\n\n\n if let Some(p) = app.primary.sim.lookup_parked_car(id) {\n\n match p.spot {\n\n ParkingSpot::Onstreet(_, _) | ParkingSpot::Lot(_, _) => {\n\n ctx.canvas.center_on_map_pt(\n", "file_path": "game/src/info/person.rs", "rank": 28, "score": 494990.83669566124 }, { "content": "fn make_top_panel(ctx: &mut EventCtx, app: &App, can_undo: bool, can_redo: bool) -> Panel {\n\n let row = vec![\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(\"Finish\")\n\n .hotkey(Key::Enter)\n\n .build_def(ctx),\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Preview\")\n\n .hotkey(lctrl(Key::P))\n\n .build_def(ctx),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/undo.svg\")\n\n .disabled(!can_undo)\n\n .hotkey(lctrl(Key::Z))\n\n .build_widget(ctx, \"undo\"),\n\n ctx.style()\n\n .btn_plain\n", "file_path": "game/src/edit/traffic_signals/mod.rs", "rank": 29, "score": 494756.84462469653 }, { "content": "fn make_tool_panel(ctx: &mut EventCtx, app: &App) -> Widget {\n\n let buttons = ctx\n\n .style()\n\n .btn_floating\n\n .btn()\n\n .image_dims(ScreenDims::square(20.0))\n\n // the default transparent button background is jarring for these buttons which are floating\n\n // in a transparent panel.\n\n .bg_color(app.cs.inner_panel_bg, ControlState::Default)\n\n .padding(8);\n\n\n\n Widget::col(vec![\n\n (if ctx.canvas.is_zoomed() {\n\n buttons\n\n .clone()\n\n .image_path(\"system/assets/minimap/zoom_out_fully.svg\")\n\n .build_widget(ctx, \"zoom out fully\")\n\n } else {\n\n buttons\n\n .clone()\n", "file_path": "game/src/sandbox/minimap.rs", "rank": 30, "score": 493199.1941556323 }, { "content": "fn make_world(ctx: &mut EventCtx, app: &dyn AppLike) -> World<Obj> {\n\n let mut world = World::bounded(app.map().get_bounds());\n\n for r in app.map().all_roads() {\n\n world\n\n .add(Obj::Road(r.id))\n\n .hitbox(r.get_thick_polygon())\n\n .drawn_in_master_batch()\n\n .invisibly_hoverable()\n\n .build(ctx);\n\n }\n\n for i in app.map().all_intersections() {\n\n world\n\n .add(Obj::Intersection(i.id))\n\n .hitbox(i.polygon.clone())\n\n .drawn_in_master_batch()\n\n .invisibly_hoverable()\n\n .build(ctx);\n\n }\n\n world\n\n}\n", "file_path": "map_gui/src/tools/compare_counts.rs", "rank": 31, "score": 
490327.1267001308 }, { "content": "fn traffic_body(ctx: &mut EventCtx, app: &App, id: LaneID, opts: &DataOptions) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let r = id.road;\n\n\n\n // Since this applies to the entire road, ignore lane type.\n\n let mut txt = Text::from(\"Traffic over entire road, not just this lane\");\n\n txt.add_line(format!(\n\n \"Since midnight: {} commuters and vehicles crossed\",\n\n prettyprint_usize(app.primary.sim.get_analytics().road_thruput.total_for(r))\n\n ));\n\n rows.push(txt.into_widget(ctx));\n\n\n\n rows.push(opts.to_controls(ctx, app));\n\n\n\n let time = if opts.show_end_of_day {\n\n app.primary.sim.get_end_of_day()\n\n } else {\n\n app.primary.sim.time()\n\n };\n", "file_path": "game/src/info/lane.rs", "rank": 32, "score": 480195.8701379684 }, { "content": "fn traffic_body(ctx: &mut EventCtx, app: &App, id: IntersectionID, opts: &DataOptions) -> Widget {\n\n let mut rows = vec![];\n\n let mut txt = Text::new();\n\n\n\n txt.add_line(format!(\n\n \"Since midnight: {} commuters and vehicles crossed\",\n\n prettyprint_usize(\n\n app.primary\n\n .sim\n\n .get_analytics()\n\n .intersection_thruput\n\n .total_for(id)\n\n )\n\n ));\n\n rows.push(txt.into_widget(ctx));\n\n\n\n rows.push(opts.to_controls(ctx, app));\n\n\n\n let time = if opts.show_end_of_day {\n\n app.primary.sim.get_end_of_day()\n", "file_path": "game/src/info/intersection.rs", "rank": 33, "score": 480195.87013796845 }, { "content": "pub fn import_all(ctx: &mut EventCtx, app: &mut App, path: &str) -> Box<dyn State<App>> {\n\n let all_signals: Vec<IntersectionID> = app\n\n .primary\n\n .map\n\n .all_intersections()\n\n .iter()\n\n .filter_map(|i| {\n\n if i.is_traffic_signal() {\n\n Some(i.id)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n\n let mut successes = 0;\n\n let mut failures_no_match = 0;\n\n let mut failures_other = 0;\n\n let mut edits = app.primary.map.get_edits().clone();\n\n\n\n ctx.loading_screen(\"import signal timing\", |_, timer| {\n", "file_path": "game/src/edit/traffic_signals/gmns.rs", "rank": 34, "score": 476645.06010187743 }, { "content": "/// `is_enabled`: are (car, bike, bus, pedestrian) toggles enabled\n\n/// returns Widgets for (car, bike, bus, pedestrian)\n\nfn make_agent_toggles(ctx: &mut EventCtx, app: &App, is_enabled: [bool; 4]) -> Vec<Widget> {\n\n use widgetry::{include_labeled_bytes, Color, GeomBatchStack, RewriteColor, Toggle};\n\n let [is_car_enabled, is_bike_enabled, is_bus_enabled, is_pedestrian_enabled] = is_enabled;\n\n\n\n pub fn colored_checkbox(\n\n ctx: &EventCtx,\n\n action: &str,\n\n is_enabled: bool,\n\n color: Color,\n\n icon: &str,\n\n label: &str,\n\n tooltip: Text,\n\n ) -> Widget {\n\n let buttons = ctx\n\n .style()\n\n .btn_plain\n\n .btn()\n\n .label_text(label)\n\n .padding(4.0)\n\n .tooltip(tooltip)\n", "file_path": "game/src/sandbox/minimap.rs", "rank": 35, "score": 474985.73414478905 }, { "content": "fn preview_route(g: &mut GfxCtx, app: &App, id: TripID, batch: &mut GeomBatch) {\n\n for p in app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .get_trip_phases(id, &app.primary.map)\n\n {\n\n if let Some(path) = &p.path {\n\n if let Some(trace) = path.trace(&app.primary.map) {\n\n batch.push(\n\n color_for_trip_phase(app, p.phase_type),\n\n trace.make_polygons(Distance::meters(20.0)),\n\n );\n\n }\n\n }\n\n }\n\n\n\n let trip = app.primary.sim.trip_info(id);\n\n batch.append(map_gui::tools::start_marker(\n\n g,\n", "file_path": "game/src/sandbox/dashboards/generic_trip_table.rs", "rank": 36, "score": 470886.28400290024 }, { 
"content": "fn make_vehicle_panel(ctx: &mut EventCtx, app: &App) -> Panel {\n\n let mut buttons = Vec::new();\n\n for name in &app.session.vehicles_unlocked {\n\n let vehicle = Vehicle::get(name);\n\n let batch = vehicle\n\n .animate(ctx.prerender, Time::START_OF_DAY)\n\n .scale(10.0);\n\n\n\n buttons.push(\n\n if name == &app.session.current_vehicle {\n\n batch\n\n .into_widget(ctx)\n\n .container()\n\n .padding(5)\n\n .outline((2.0, Color::WHITE))\n\n } else {\n\n let normal = batch.clone().color(RewriteColor::MakeGrayscale);\n\n let hovered = batch;\n\n ButtonBuilder::new()\n\n .custom_batch(normal, ControlState::Default)\n", "file_path": "santa/src/before_level.rs", "rank": 37, "score": 470093.96137215523 }, { "content": "fn greedy(ctx: &EventCtx, app: &mut App, neighborhood: &Neighborhood, timer: &mut Timer) {\n\n let rat_runs = find_rat_runs(app, &neighborhood, timer);\n\n // TODO How should we break ties? Some rat-runs are worse than others; use that weight?\n\n // TODO Should this operation be per cell instead? We could hover on a road belonging to that\n\n // cell to select it\n\n if let Some((r, _)) = rat_runs\n\n .count_per_road\n\n .borrow()\n\n .iter()\n\n .max_by_key(|pair| pair.1)\n\n {\n\n if try_to_filter_road(ctx, app, neighborhood, *r).is_none() {\n\n warn!(\"Filtering {} disconnects a cell, never mind\", r);\n\n // TODO Try the next choice\n\n }\n\n }\n\n}\n\n\n", "file_path": "ltn/src/auto.rs", "rank": 39, "score": 468824.17190072837 }, { "content": "fn header(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID, tab: Tab) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(id.road);\n\n\n\n let label = if l.is_shoulder() {\n\n \"Shoulder\"\n\n } else if l.is_sidewalk() {\n\n \"Sidewalk\"\n\n } else {\n\n \"Lane\"\n\n };\n\n\n\n // Navbar\n\n rows.push(Widget::row(vec![\n\n Line(format!(\"{} #{}\", label, id.encode_u32()))\n\n .small_heading()\n\n .into_widget(ctx),\n", "file_path": "game/src/info/lane.rs", "rank": 40, "score": 465939.32571923203 }, { "content": "fn header(ctx: &EventCtx, app: &App, details: &mut Details, id: BuildingID, tab: Tab) -> Widget {\n\n let rows = vec![\n\n Widget::row(vec![\n\n Line(id.to_string()).small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]),\n\n make_tabs(\n\n ctx,\n\n &mut details.hyperlinks,\n\n tab,\n\n vec![(\"Info\", Tab::BldgInfo(id)), (\"People\", Tab::BldgPeople(id))],\n\n ),\n\n ];\n\n\n\n draw_occupants(details, app, id, None);\n\n // TODO Draw cars parked inside?\n\n\n\n Widget::custom_col(rows)\n\n}\n\n\n", "file_path": "game/src/info/building.rs", "rank": 41, "score": 465939.3257192321 }, { "content": "fn make_top_panel(ctx: &mut EventCtx, app: &App) -> Panel {\n\n let map_name = app.primary.map.get_name().clone();\n\n let change_key = app.primary.map.get_edits_change_key();\n\n let col;\n\n\n\n if app.session.mode_shift.key().as_ref() == Some(&(map_name.clone(), change_key)) {\n\n let data = app.session.mode_shift.value().unwrap();\n\n\n\n col = vec![\n\n ctx.style()\n\n .btn_plain\n\n .icon_text(\n\n \"system/assets/tools/info.svg\",\n\n \"How many drivers might switch to biking?\",\n\n )\n\n .build_widget(ctx, \"read about how this prediction works\"),\n\n percentage_bar(\n\n ctx,\n\n Text::from(Line(format!(\n\n \"{} total driving trips in this area\",\n", "file_path": "game/src/ungap/predict.rs", "rank": 42, "score": 465451.6127855603 }, { "content": "fn make_panel(ctx: &mut EventCtx, app: &App) -> Panel {\n\n 
Panel::new_builder(Widget::col(vec![\n\n DashTab::CommuterPatterns.picker(ctx, app),\n\n Toggle::choice(ctx, \"from / to this block\", \"from\", \"to\", Key::Space, true),\n\n Toggle::switch(ctx, \"include borders\", None, true),\n\n Widget::row(vec![\n\n \"Departing from:\".text_widget(ctx).margin_right(20),\n\n Slider::area(ctx, 0.15 * ctx.canvas.window_width, 0.0, \"depart from\"),\n\n ]),\n\n Widget::row(vec![\n\n \"Departing until:\".text_widget(ctx).margin_right(20),\n\n Slider::area(ctx, 0.15 * ctx.canvas.window_width, 1.0, \"depart until\"),\n\n ]),\n\n checkbox_per_mode(ctx, app, &TripMode::all().into_iter().collect()),\n\n ColorLegend::gradient(ctx, &app.cs.good_to_bad_red, vec![\"0\", \"0\"]).named(\"scale\"),\n\n \"None selected\".text_widget(ctx).named(\"current\"),\n\n ]))\n\n .aligned(HorizontalAlignment::Right, VerticalAlignment::Top)\n\n .build(ctx)\n\n}\n", "file_path": "game/src/sandbox/dashboards/commuter.rs", "rank": 43, "score": 465451.6127855603 }, { "content": "fn brute_force(ctx: &EventCtx, app: &mut App, neighborhood: &Neighborhood, timer: &mut Timer) {\n\n // Which road leads to the fewest rat-runs?\n\n let mut best: Option<(RoadID, usize)> = None;\n\n\n\n let orig_filters = app.session.modal_filters.roads.len();\n\n timer.start_iter(\n\n \"evaluate candidate filters\",\n\n neighborhood.orig_perimeter.interior.len(),\n\n );\n\n for r in &neighborhood.orig_perimeter.interior {\n\n timer.next();\n\n if app.session.modal_filters.roads.contains_key(r) {\n\n continue;\n\n }\n\n if let Some(new) = try_to_filter_road(ctx, app, neighborhood, *r) {\n\n let num_rat_runs =\n\n // This spams too many logs, and can't be used within a start_iter anyway\n\n find_rat_runs(app, &new, &mut Timer::throwaway())\n\n .paths\n\n .len();\n", "file_path": "ltn/src/auto.rs", "rank": 44, "score": 464579.6092636449 }, { "content": "pub fn after_edit(ctx: &EventCtx, app: &mut App) {\n\n app.session.draw_all_filters = app.session.modal_filters.draw(ctx, &app.map);\n\n}\n", "file_path": "ltn/src/lib.rs", "rank": 45, "score": 464085.84540746955 }, { "content": "fn make_controls(ctx: &mut EventCtx, app: &App, opts: &Options, legend: Option<Widget>) -> Panel {\n\n let model = app.primary.sim.get_pandemic_model().unwrap();\n\n let pct = 100.0 / (model.count_total() as f64);\n\n\n\n let mut col = vec![\n\n header(ctx, \"Pandemic model\"),\n\n Text::from_multiline(vec![\n\n Line(format!(\n\n \"{} Sane ({:.1}%)\",\n\n prettyprint_usize(model.count_sane()),\n\n (model.count_sane() as f64) * pct\n\n )),\n\n Line(format!(\n\n \"{} Exposed ({:.1}%)\",\n\n prettyprint_usize(model.count_exposed()),\n\n (model.count_exposed() as f64) * pct\n\n )),\n\n Line(format!(\n\n \"{} Infected ({:.1}%)\",\n\n prettyprint_usize(model.count_infected()),\n", "file_path": "game/src/layer/pandemic.rs", "rank": 46, "score": 462725.46216773055 }, { "content": "fn make_controls(ctx: &mut EventCtx, app: &App, opts: &Options, legend: Option<Widget>) -> Panel {\n\n let (total_ppl, ppl_in_bldg, ppl_off_map) = app.primary.sim.num_ppl();\n\n\n\n let mut col = vec![\n\n header(\n\n ctx,\n\n &format!(\"Population: {}\", prettyprint_usize(total_ppl)),\n\n ),\n\n Widget::row(vec![\n\n Widget::row(vec![\n\n Image::from_path(\"system/assets/tools/home.svg\").into_widget(ctx),\n\n Line(prettyprint_usize(ppl_in_bldg))\n\n .small()\n\n .into_widget(ctx),\n\n ]),\n\n Line(format!(\"Off-map: {}\", prettyprint_usize(ppl_off_map)))\n\n .small()\n\n .into_widget(ctx),\n\n ])\n\n .centered(),\n", "file_path": "game/src/layer/population.rs", 
"rank": 47, "score": 462725.46216773055 }, { "content": "// TODO Kinda misnomer\n\npub fn tool_panel(ctx: &mut EventCtx) -> Panel {\n\n Panel::new_builder(Widget::row(vec![\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/home.svg\")\n\n .hotkey(Key::Escape)\n\n .build_widget(ctx, \"back\"),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/settings.svg\")\n\n .build_widget(ctx, \"settings\"),\n\n ]))\n\n .aligned(HorizontalAlignment::Left, VerticalAlignment::BottomAboveOSD)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 48, "score": 457626.06062608236 }, { "content": "/// A button to change maps, with default keybindings\n\npub fn change_map_btn(ctx: &EventCtx, app: &dyn AppLike) -> Widget {\n\n ctx.style()\n\n .btn_popup_icon_text(\n\n \"system/assets/tools/map.svg\",\n\n nice_map_name(app.map().get_name()),\n\n )\n\n .hotkey(lctrl(Key::L))\n\n .build_widget(ctx, \"change map\")\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 49, "score": 456437.73222367675 }, { "content": "fn make_topcenter(ctx: &mut EventCtx, app: &App) -> Panel {\n\n Panel::new_builder(Widget::col(vec![\n\n Line(\"Editing map\")\n\n .small_heading()\n\n .into_widget(ctx)\n\n .centered_horiz(),\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(format!(\n\n \"Finish & resume from {}\",\n\n app.primary\n\n .suspended_sim\n\n .as_ref()\n\n .unwrap()\n\n .time()\n\n .ampm_tostring()\n\n ))\n\n .hotkey(Key::Escape)\n\n .build_widget(ctx, \"finish editing\"),\n\n ]))\n\n .aligned(HorizontalAlignment::Center, VerticalAlignment::Top)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "game/src/edit/mod.rs", "rank": 50, "score": 455508.57432741876 }, { "content": "fn make_changelist(ctx: &mut EventCtx, app: &App) -> Panel {\n\n // TODO Support redo. 
Bit harder here to reset the redo_stack when the edits\n\n // change, because nested other places modify it too.\n\n let edits = app.primary.map.get_edits();\n\n let mut col = vec![\n\n Widget::row(vec![\n\n ctx.style()\n\n .btn_outline\n\n .popup(&edits.edits_name)\n\n .hotkey(lctrl(Key::P))\n\n .build_widget(ctx, \"manage proposals\"),\n\n \"autosaved\"\n\n .text_widget(ctx)\n\n .container()\n\n .padding(10)\n\n .bg(Color::hex(\"#5D9630\")),\n\n ]),\n\n ColorLegend::row(\n\n ctx,\n\n app.cs.edits_layer,\n", "file_path": "game/src/edit/mod.rs", "rank": 51, "score": 455508.5743274187 }, { "content": "fn before_after_button(ctx: &mut EventCtx, app: &App) -> Widget {\n\n let edits = app.primary.map.get_edits();\n\n if app.secondary.is_none() {\n\n return Widget::nothing();\n\n }\n\n let (txt, label) = if edits.commands.is_empty() {\n\n (\n\n Text::from_all(vec![\n\n Line(\"After / \").secondary(),\n\n Line(\"Before\"),\n\n Line(\" proposal\"),\n\n ]),\n\n \"show edited map\",\n\n )\n\n } else {\n\n (\n\n Text::from_all(vec![\n\n Line(\"After\"),\n\n Line(\" / Before\").secondary(),\n\n Line(\" proposal\"),\n", "file_path": "game/src/ungap/trip/mod.rs", "rank": 52, "score": 455006.8821280669 }, { "content": "fn area_body(ctx: &EventCtx, app: &App, id: AreaID) -> Widget {\n\n let mut rows = vec![];\n\n let area = app.primary.map.get_a(id);\n\n\n\n if let Some(osm_id) = area.osm_id {\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Open in OSM\")\n\n .build_widget(ctx, format!(\"open {}\", osm_id)),\n\n );\n\n }\n\n\n\n rows.extend(make_table(\n\n ctx,\n\n area.osm_tags\n\n .inner()\n\n .iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string()))\n\n .collect(),\n\n ));\n\n\n\n Widget::col(rows)\n\n}\n", "file_path": "game/src/info/debug.rs", "rank": 53, "score": 452656.77340746636 }, { "content": "fn info_body(ctx: &EventCtx, app: &App, id: IntersectionID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let i = app.primary.map.get_i(id);\n\n\n\n let mut txt = Text::from(\"Connecting\");\n\n let mut road_names = BTreeSet::new();\n\n for r in &i.roads {\n\n road_names.insert(\n\n app.primary\n\n .map\n\n .get_r(*r)\n\n .get_name(app.opts.language.as_ref()),\n\n );\n\n }\n\n for r in road_names {\n\n txt.add_line(format!(\" {}\", r));\n\n }\n\n rows.push(txt.into_widget(ctx));\n\n\n", "file_path": "game/src/info/intersection.rs", "rank": 54, "score": 452656.77340746636 }, { "content": "fn info_body(ctx: &EventCtx, app: &App, id: LaneID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(id.road);\n\n\n\n let mut kv = Vec::new();\n\n\n\n if !l.is_walkable() {\n\n kv.push((\"Type\", l.lane_type.describe().to_string()));\n\n }\n\n if r.is_private() {\n\n let mut ban = Vec::new();\n\n for p in PathConstraints::all() {\n\n if !r.access_restrictions.allow_through_traffic.contains(p) {\n\n ban.push(format!(\"{:?}\", p).to_ascii_lowercase());\n\n }\n\n }\n\n if !ban.is_empty() {\n", "file_path": "game/src/info/lane.rs", "rank": 55, "score": 452656.77340746636 }, { "content": "fn debug_body(ctx: &EventCtx, app: &App, id: LaneID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(id.road);\n\n\n\n let mut kv = vec![(\"Parent\".to_string(), r.id.to_string())];\n\n\n\n if l.lane_type.is_for_moving_vehicles() {\n\n kv.push((\n\n \"Driving blackhole\".to_string(),\n\n l.driving_blackhole.to_string(),\n\n ));\n\n kv.push((\n\n \"Biking 
blackhole\".to_string(),\n\n l.biking_blackhole.to_string(),\n\n ));\n\n }\n\n\n", "file_path": "game/src/info/lane.rs", "rank": 56, "score": 452656.77340746636 }, { "content": "fn proposal_management(ctx: &mut EventCtx, app: &App) -> Widget {\n\n let mut col = Vec::new();\n\n let edits = app.primary.map.get_edits();\n\n\n\n let total_mileage = {\n\n // Look for the new lanes...\n\n let mut total = Distance::ZERO;\n\n // TODO We're assuming the edits have been compressed.\n\n for cmd in &edits.commands {\n\n if let EditCmd::ChangeRoad { r, old, new } = cmd {\n\n let num_before = old\n\n .lanes_ltr\n\n .iter()\n\n .filter(|spec| spec.lt == LaneType::Biking)\n\n .count();\n\n let num_after = new\n\n .lanes_ltr\n\n .iter()\n\n .filter(|spec| spec.lt == LaneType::Biking)\n\n .count();\n", "file_path": "game/src/ungap/quick_sketch.rs", "rank": 57, "score": 450265.291397262 }, { "content": "pub fn make_bar(ctx: &mut EventCtx, filled_color: Color, value: usize, max: usize) -> Widget {\n\n let pct_full = if max == 0 {\n\n 0.0\n\n } else {\n\n (value as f64) / (max as f64)\n\n };\n\n let txt = Text::from(format!(\n\n \"{} / {}\",\n\n prettyprint_usize(value),\n\n prettyprint_usize(max)\n\n ));\n\n custom_bar(ctx, filled_color, pct_full, txt)\n\n}\n", "file_path": "santa/src/meters.rs", "rank": 58, "score": 450235.4434080934 }, { "content": "// Draw two planters on each end of a line. They'll be offset so that they don't exceed the\n\n// endpoints.\n\nfn draw_zoomed_planters(ctx: &EventCtx, batch: &mut GeomBatch, line: Line) {\n\n let planter = GeomBatch::load_svg(ctx, \"system/assets/map/planter.svg\");\n\n let planter_width = planter.get_dims().width;\n\n let scaled_planter = planter.scale(0.3 * line.length().inner_meters() / planter_width);\n\n\n\n batch.append(\n\n scaled_planter\n\n .clone()\n\n .centered_on(line.must_dist_along(0.15 * line.length()))\n\n .rotate(line.angle()),\n\n );\n\n batch.append(\n\n scaled_planter\n\n .centered_on(line.must_dist_along(0.85 * line.length()))\n\n .rotate(line.angle()),\n\n );\n\n}\n\n\n\n/// Depending on the canvas zoom level, draws one of 3 things.\n\npub struct Toggle3Zoomed {\n", "file_path": "ltn/src/filters/mod.rs", "rank": 59, "score": 449007.6459729152 }, { "content": "fn make_upzone_panel(ctx: &mut EventCtx, app: &App, num_picked: usize) -> Panel {\n\n // Don't overwhelm players on the very first level.\n\n if app.session.upzones_unlocked == 0 {\n\n return Panel::new_builder(\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(\"Start game\")\n\n .hotkey(Key::Enter)\n\n .build_def(ctx)\n\n .container(),\n\n )\n\n .aligned(\n\n HorizontalAlignment::RightInset,\n\n VerticalAlignment::BottomInset,\n\n )\n\n .build(ctx);\n\n }\n\n\n\n Panel::new_builder(Widget::col(vec![\n\n Widget::row(vec![\n", "file_path": "santa/src/before_level.rs", "rank": 60, "score": 439969.78272034135 }, { "content": "/// Detect roads that're modelled in OSM as cycleways, but really are regular roads with modal\n\n/// filters. 
Transform them into normal roads, and instead use this tool's explicit representation\n\n/// for filters.\n\npub fn transform_existing_filters(ctx: &EventCtx, app: &mut App, timer: &mut Timer) {\n\n let mut edits = app.map.get_edits().clone();\n\n for r in detect_filters(&app.map) {\n\n edits.commands.push(app.map.edit_road_cmd(r.id, |new| {\n\n // Use a fixed [sidewalk, driving, driving, sidewalk] configuration\n\n let tags = Tags::empty();\n\n let fwd = vec![\n\n LaneSpec {\n\n lt: LaneType::Driving,\n\n dir: Direction::Fwd,\n\n width: LaneSpec::typical_lane_widths(LaneType::Driving, &tags)[0].0,\n\n },\n\n LaneSpec {\n\n lt: LaneType::Sidewalk,\n\n dir: Direction::Fwd,\n\n width: LaneSpec::typical_lane_widths(LaneType::Sidewalk, &tags)[0].0,\n\n },\n\n ];\n\n let back = vec![\n\n LaneSpec {\n", "file_path": "ltn/src/filters/existing.rs", "rank": 61, "score": 438843.34656964603 }, { "content": "pub fn apply_map_edits(ctx: &mut EventCtx, app: &mut App, edits: MapEdits) {\n\n ctx.loading_screen(\"apply map edits\", |ctx, timer| {\n\n if !app.store_unedited_map_in_secondary && app.primary.unedited_map.is_none() {\n\n timer.start(\"save unedited map\");\n\n assert!(app.primary.map.get_edits().commands.is_empty());\n\n app.primary.unedited_map = Some(app.primary.map.clone());\n\n timer.stop(\"save unedited map\");\n\n }\n\n if app.store_unedited_map_in_secondary && app.secondary.is_none() {\n\n timer.start(\"save unedited map for toggling\");\n\n assert!(app.primary.map.get_edits().commands.is_empty());\n\n let mut per_map = crate::app::PerMap::map_loaded(\n\n app.primary.map.clone(),\n\n app.primary.sim.clone(),\n\n app.primary.current_flags.clone(),\n\n &app.opts,\n\n &app.cs,\n\n ctx,\n\n timer,\n\n );\n", "file_path": "game/src/edit/mod.rs", "rank": 62, "score": 434816.3842717882 }, { "content": "fn enter_state(ctx: &mut EventCtx, app: &mut App, args: Vec<&str>) -> Box<dyn State<App>> {\n\n match args[0] {\n\n \"--tutorial-intro\" => Tutorial::start(ctx, app),\n\n \"--challenges\" => ChallengesPicker::new_state(ctx, app),\n\n \"--sandbox\" => SandboxMode::simple_new(\n\n app,\n\n GameplayMode::PlayScenario(\n\n app.primary.map.get_name().clone(),\n\n Scenario::default_scenario_for_map(app.primary.map.get_name()),\n\n Vec::new(),\n\n ),\n\n ),\n\n \"--proposals\" => proposals::Proposals::new_state(ctx, None),\n\n \"--ungap\" => {\n\n let layers = crate::ungap::Layers::new(ctx, app);\n\n crate::ungap::ExploreMap::new_state(ctx, app, layers)\n\n }\n\n \"--devtools\" => crate::devtools::DevToolsMode::new_state(ctx, app),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "game/src/pregame/mod.rs", "rank": 63, "score": 433819.3111750051 }, { "content": "fn draw_zone(ctx: &mut EventCtx, app: &App, members: &BTreeSet<RoadID>) -> (ToggleZoomed, Widget) {\n\n let mut colorer = ColorDiscrete::new(\n\n app,\n\n vec![\n\n (\"restricted road\", Color::CYAN),\n\n (\"entrance/exit\", Color::BLUE),\n\n ],\n\n );\n\n let map = &app.primary.map;\n\n for r in members {\n\n let r = map.get_r(*r);\n\n colorer.add_r(r.id, \"restricted road\");\n\n for next in map.get_next_roads(r.id) {\n\n if !members.contains(&next) {\n\n if let CommonEndpoint::One(i) = r.common_endpoint(map.get_r(next)) {\n\n colorer.add_i(i, \"entrance/exit\");\n\n }\n\n }\n\n }\n\n }\n\n for i in intersections_from_roads(members, &app.primary.map) {\n\n colorer.add_i(i, \"restricted road\");\n\n }\n\n colorer.build(ctx)\n\n}\n\n\n", "file_path": "game/src/edit/zones.rs", "rank": 64, "score": 427878.0935800837 }, { "content": "fn 
setup_app(ctx: &mut EventCtx, mut setup: Setup) -> (App, Vec<Box<dyn State<App>>>) {\n\n let title = !setup.opts.dev\n\n && !setup.flags.sim_flags.load.contains(\"player/save\")\n\n && !setup.flags.sim_flags.load.contains(\"/scenarios/\")\n\n && setup.mode == Mode::SomethingElse;\n\n\n\n // Load the map used previously if we're starting on the title screen without any overrides.\n\n if title && setup.flags.sim_flags.load == MapName::seattle(\"montlake\").path() {\n\n if let Ok(default) = abstio::maybe_read_json::<map_gui::tools::DefaultMap>(\n\n abstio::path_player(\"maps.json\"),\n\n &mut Timer::throwaway(),\n\n ) {\n\n setup.flags.sim_flags.load = default.last_map.path();\n\n }\n\n }\n\n\n\n // If we're starting directly in a challenge mode, the tutorial, or by playing a scenario,\n\n // usually time is midnight, so save some effort and start with the correct color scheme. If\n\n // we're loading a savestate and it's actually daytime, we'll pay a small penalty to switch\n\n // colors.\n", "file_path": "game/src/lib.rs", "rank": 65, "score": 424964.67976419 }, { "content": "fn build_panel(ctx: &mut EventCtx, app: &App, start: &Building, isochrone: &Isochrone) -> Panel {\n\n let mut rows = vec![\n\n map_gui::tools::app_header(ctx, app, \"15-minute neighborhood explorer\"),\n\n Text::from_all(vec![\n\n Line(\"Starting from: \").secondary(),\n\n Line(&start.address),\n\n ])\n\n .into_widget(ctx),\n\n Text::from_all(vec![\n\n Line(\"Estimated population: \").secondary(),\n\n Line(prettyprint_usize(isochrone.population)),\n\n ])\n\n .into_widget(ctx),\n\n Text::from_all(vec![\n\n Line(\"Estimated street parking spots: \").secondary(),\n\n Line(prettyprint_usize(isochrone.onstreet_parking_spots)),\n\n ])\n\n .into_widget(ctx),\n\n ColorLegend::categories(\n\n ctx,\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 66, "score": 421633.84389677923 }, { "content": "fn pick_file(ctx: &mut EventCtx, app: &App) -> Transition {\n\n Transition::Push(FilePicker::new_state(\n\n ctx,\n\n Some(app.primary.map.get_city_name().input_path(\"\")),\n\n Box::new(|ctx, app, maybe_path| {\n\n if let Ok(Some(path)) = maybe_path {\n\n Transition::Multi(vec![\n\n Transition::Pop,\n\n Transition::Replace(ViewKML::new_state(ctx, app, Some(path))),\n\n ])\n\n } else {\n\n Transition::Pop\n\n }\n\n }),\n\n ))\n\n}\n", "file_path": "game/src/devtools/kml.rs", "rank": 67, "score": 421570.2720705557 }, { "content": "fn scatter_plot(ctx: &mut EventCtx, app: &App, filter: &Filter) -> Widget {\n\n let points = filter.get_trips(app);\n\n if points.is_empty() {\n\n return Widget::nothing();\n\n }\n\n\n\n Widget::col(vec![\n\n Line(\"Trip time before vs. 
after\")\n\n .small_heading()\n\n .into_widget(ctx),\n\n CompareTimes::new_widget(\n\n ctx,\n\n format!(\n\n \"Trip time before \\\"{}\\\"\",\n\n app.primary.map.get_edits().edits_name\n\n ),\n\n format!(\n\n \"Trip time after \\\"{}\\\"\",\n\n app.primary.map.get_edits().edits_name\n\n ),\n\n points,\n\n ),\n\n ])\n\n}\n\n\n", "file_path": "game/src/sandbox/dashboards/travel_times.rs", "rank": 68, "score": 420434.383190035 }, { "content": "fn contingency_table(ctx: &mut EventCtx, app: &App, filter: &Filter) -> Widget {\n\n let total_width = 500.0;\n\n let total_height = 300.0;\n\n\n\n let points = filter.get_trips(app);\n\n if points.is_empty() {\n\n return Widget::nothing();\n\n }\n\n\n\n // bucket by trip duration _before_ changes\n\n let duration_buckets = vec![\n\n Duration::ZERO,\n\n Duration::minutes(5),\n\n Duration::minutes(15),\n\n Duration::minutes(30),\n\n Duration::hours(1),\n\n ];\n\n let num_buckets = duration_buckets.len();\n\n\n\n let mut batch = GeomBatch::new();\n", "file_path": "game/src/sandbox/dashboards/travel_times.rs", "rank": 69, "score": 420434.383190035 }, { "content": "fn summary_boxes(ctx: &mut EventCtx, app: &App, filter: &Filter) -> Widget {\n\n let mut num_same = 0;\n\n let mut num_faster = 0;\n\n let mut num_slower = 0;\n\n let mut sum_faster = Duration::ZERO;\n\n let mut sum_slower = Duration::ZERO;\n\n for (_, b, a, mode) in app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .both_finished_trips(app.primary.sim.time(), app.prebaked())\n\n {\n\n if !filter.modes.contains(&mode) {\n\n continue;\n\n }\n\n let same = if let Some(pct) = filter.changes_pct {\n\n pct_diff(a, b) <= pct\n\n } else {\n\n a == b\n\n };\n", "file_path": "game/src/sandbox/dashboards/travel_times.rs", "rank": 70, "score": 420434.383190035 }, { "content": "/// Creates the top row for any layer panel.\n\npub fn header(ctx: &mut EventCtx, name: &str) -> Widget {\n\n Widget::row(vec![\n\n Image::from_path(\"system/assets/tools/layers.svg\")\n\n .into_widget(ctx)\n\n .centered_vert(),\n\n name.text_widget(ctx).centered_vert(),\n\n ctx.style().btn_close_widget(ctx),\n\n ])\n\n}\n\n\n\npub const PANEL_PLACEMENT: (HorizontalAlignment, VerticalAlignment) = (\n\n HorizontalAlignment::Percent(0.02),\n\n VerticalAlignment::Percent(0.2),\n\n);\n", "file_path": "game/src/layer/mod.rs", "rank": 71, "score": 419192.29927573906 }, { "content": "pub fn actions(app: &App, id: ID) -> Vec<(Key, String)> {\n\n match (app.session.tutorial.as_ref().unwrap().interaction(), id) {\n\n (Task::LowParking, ID::Lane(_)) => {\n\n vec![(Key::C, \"check the parking occupancy\".to_string())]\n\n }\n\n (Task::Escort, ID::Car(_)) => vec![(Key::C, \"draw WASH ME\".to_string())],\n\n _ => Vec::new(),\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 72, "score": 415630.4991618082 }, { "content": "fn search_osm(filter: String, ctx: &mut EventCtx, app: &mut App) -> Transition {\n\n let mut num_matches = 0;\n\n let mut batch = GeomBatch::new();\n\n\n\n // TODO Case insensitive\n\n let map = &app.primary.map;\n\n let color = Color::RED.alpha(0.8);\n\n for r in map.all_roads() {\n\n if r.osm_tags\n\n .inner()\n\n .iter()\n\n .any(|(k, v)| format!(\"{} = {}\", k, v).contains(&filter))\n\n {\n\n num_matches += 1;\n\n batch.push(color, r.get_thick_polygon());\n\n }\n\n }\n\n for a in map.all_areas() {\n\n if a.osm_tags\n\n .inner()\n", "file_path": "game/src/debug/mod.rs", "rank": 73, "score": 414597.0989505884 }, { "content": "fn link(ctx: &mut EventCtx, label: &str, url: &str) -> Widget {\n\n 
ctx.style()\n\n .btn_plain\n\n .text(label)\n\n .build_widget(ctx, format!(\"open {}\", url))\n\n}\n\n\n\nimpl SimpleState<App> for Credits {\n\n fn on_click(&mut self, _: &mut EventCtx, _: &mut App, x: &str, _: &mut Panel) -> Transition {\n\n match x {\n\n \"close\" | \"Back\" => Transition::Pop,\n\n x => {\n\n if let Some(url) = x.strip_prefix(\"open \") {\n\n open_browser(url);\n\n return Transition::Keep;\n\n }\n\n\n\n unreachable!()\n\n }\n\n }\n", "file_path": "santa/src/title.rs", "rank": 74, "score": 412656.7404447043 }, { "content": "pub fn draw_unwalkable_roads(ctx: &mut EventCtx, app: &App, opts: &Options) -> Drawable {\n\n let allow_shoulders = match opts {\n\n Options::Walking(ref opts) => opts.allow_shoulders,\n\n Options::Biking => {\n\n return Drawable::empty(ctx);\n\n }\n\n };\n\n\n\n let mut batch = GeomBatch::new();\n\n 'ROADS: for road in app.map.all_roads() {\n\n if road.is_light_rail() {\n\n continue;\n\n }\n\n for l in &road.lanes {\n\n if l.lane_type == LaneType::Sidewalk\n\n || (l.lane_type == LaneType::Shoulder && allow_shoulders)\n\n {\n\n continue 'ROADS;\n\n }\n\n }\n\n // TODO Skip highways\n\n batch.push(Color::BLUE.alpha(0.5), road.get_thick_polygon());\n\n }\n\n ctx.upload(batch)\n\n}\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 75, "score": 412197.4682497296 }, { "content": "fn intro_story(ctx: &mut EventCtx) -> Box<dyn State<App>> {\n\n CutsceneBuilder::new(\"Introduction\")\n\n .boss(\n\n \"Argh, the mayor's on my case again about the West Seattle bridge. This day couldn't \\\n\n get any worse.\",\n\n )\n\n .player(\"Er, hello? Boss? I'm --\")\n\n .boss(\"Yet somehow it did.. You're the new recruit. Yeah, yeah. Come in.\")\n\n .boss(\n\n \"Due to budget cuts, we couldn't hire a real traffic engineer, so we just called some \\\n\n know-it-all from Reddit who seems to think they can fix Seattle traffic.\",\n\n )\n\n .player(\"Yes, hi, my name is --\")\n\n .boss(\"We can't afford name-tags, didn't you hear, budget cuts? Your name doesn't matter.\")\n\n .player(\"What about my Insta handle?\")\n\n .boss(\"-glare-\")\n\n .boss(\n\n \"Look, you think fixing traffic is easy? Hah! 
You can't fix one intersection without \\\n\n breaking ten more.\",\n\n )\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 76, "score": 410983.48441200925 }, { "content": "// TODO Preview the map, add padding, add the linear gradient...\n\nfn locked_level(ctx: &mut EventCtx, app: &App, level: &Level, idx: usize) -> Widget {\n\n let mut batch = level_btn(ctx, app, level, idx);\n\n let hitbox = batch.get_bounds().get_rectangle();\n\n let center = hitbox.center();\n\n batch.push(app.cs.fade_map_dark, hitbox);\n\n batch.append(GeomBatch::load_svg(ctx, \"system/assets/tools/locked.svg\").centered_on(center));\n\n batch.into_widget(ctx)\n\n}\n\n\n", "file_path": "santa/src/title.rs", "rank": 77, "score": 410507.19483277644 }, { "content": "fn unlocked_level(ctx: &mut EventCtx, app: &App, level: &Level, idx: usize) -> Widget {\n\n let normal = level_btn(ctx, app, level, idx);\n\n let hovered = normal\n\n .clone()\n\n .color(RewriteColor::Change(Color::WHITE, Color::WHITE.alpha(0.6)));\n\n\n\n ButtonBuilder::new()\n\n .custom_batch(normal, ControlState::Default)\n\n .custom_batch(hovered, ControlState::Hovered)\n\n .build_widget(ctx, &level.title)\n\n}\n\n\n", "file_path": "santa/src/title.rs", "rank": 78, "score": 410499.9710478276 }, { "content": "fn impact_widget(ctx: &EventCtx, app: &App) -> Widget {\n\n let map_name = app.map.get_name();\n\n\n\n if &app.session.impact.map != map_name {\n\n // Starting from scratch\n\n let scenario_name = Scenario::default_scenario_for_map(map_name);\n\n if scenario_name == \"home_to_work\" {\n\n return \"This city doesn't have travel demand model data available\".text_widget(ctx);\n\n }\n\n let size = abstio::Manifest::load()\n\n .get_entry(&abstio::path_scenario(map_name, &scenario_name))\n\n .map(|entry| abstutil::prettyprint_bytes(entry.compressed_size_bytes))\n\n .unwrap_or_else(|| \"???\".to_string());\n\n return Widget::col(vec![\n\n Text::from_multiline(vec![\n\n Line(\"Predicting impact of your proposal may take a moment.\"),\n\n Line(\"The application may freeze up during that time.\"),\n\n Line(format!(\"We need to load a {} file\", size)),\n\n ])\n\n .into_widget(ctx),\n", "file_path": "ltn/src/browse.rs", "rank": 79, "score": 408341.9433785867 }, { "content": "fn make_controls(ctx: &mut EventCtx, tabs: &mut TabController) -> Panel {\n\n Panel::new_builder(Widget::col(vec![\n\n Text::from(Line(\"widgetry demo\").big_heading_styled()).into_widget(ctx),\n\n Widget::col(vec![\n\n Text::from(\n\n \"Click and drag the background to pan, use touchpad or scroll wheel to zoom\",\n\n )\n\n .into_widget(ctx),\n\n Widget::row(vec![\n\n ctx.style()\n\n .btn_outline\n\n .text(\"New faces\")\n\n .hotkey(Key::F)\n\n .build_widget(ctx, \"generate new faces\"),\n\n Toggle::switch(ctx, \"Draw scrollable canvas\", None, true),\n\n Toggle::switch(ctx, \"Show timeseries\", lctrl(Key::T), false),\n\n ]),\n\n \"Stopwatch: ...\"\n\n .text_widget(ctx)\n\n .named(\"stopwatch\")\n", "file_path": "widgetry_demo/src/lib.rs", "rank": 80, "score": 405082.924576118 }, { "content": "/// A standard way to group a home button back to the title screen, the title of the current app,\n\n/// and a button to change maps. 
Callers must handle the `change map` and `home` click events.\n\npub fn app_header(ctx: &EventCtx, app: &dyn AppLike, title: &str) -> Widget {\n\n Widget::col(vec![\n\n Widget::row(vec![\n\n home_btn(ctx),\n\n Line(title).small_heading().into_widget(ctx).centered_vert(),\n\n ]),\n\n change_map_btn(ctx, app),\n\n ])\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 81, "score": 404594.9230135578 }, { "content": "pub fn maybe_exit_sandbox(ctx: &mut EventCtx) -> Transition {\n\n Transition::Push(ChooseSomething::new_state(\n\n ctx,\n\n \"Are you ready to leave this mode?\",\n\n vec![\n\n Choice::string(\"keep playing\"),\n\n Choice::string(\"quit to main screen\").key(Key::Q),\n\n ],\n\n Box::new(|resp, ctx, app| {\n\n if resp == \"keep playing\" {\n\n return Transition::Pop;\n\n }\n\n\n\n if app.primary.map.unsaved_edits() {\n\n return Transition::Multi(vec![\n\n Transition::Push(Box::new(BackToTitleScreen)),\n\n Transition::Push(SaveEdits::new_state(\n\n ctx,\n\n app,\n\n \"Do you want to save your proposal first?\",\n", "file_path": "game/src/sandbox/mod.rs", "rank": 82, "score": 402483.42427088466 }, { "content": "// Like map_gui::tools::app_header, but squeezing in a search button\n\nfn app_header(ctx: &EventCtx, app: &App) -> Widget {\n\n Widget::col(vec![\n\n Widget::row(vec![\n\n map_gui::tools::home_btn(ctx),\n\n Line(\"Low traffic neighborhoods\")\n\n .small_heading()\n\n .into_widget(ctx)\n\n .centered_vert(),\n\n ]),\n\n Widget::row(vec![\n\n map_gui::tools::change_map_btn(ctx, app),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/search.svg\")\n\n .hotkey(lctrl(Key::F))\n\n .build_widget(ctx, \"search\")\n\n .align_right(),\n\n ]),\n\n ])\n\n}\n\n\n", "file_path": "ltn/src/lib.rs", "rank": 83, "score": 401075.03677300195 }, { "content": "fn make_btn(ctx: &EventCtx, label: &str, tooltip: &str, is_persisten_split: bool) -> Button {\n\n // If we want to make Dropdown configurable, pass in or expose its button builder?\n\n let builder = if is_persisten_split {\n\n // Quick hacks to make PersistentSplit's dropdown look a little better.\n\n // It's not ideal, but we only use one persistent split in the whole app\n\n // and it's front and center - we'll notice if something breaks.\n\n ctx.style()\n\n .btn_solid\n\n .dropdown()\n\n .padding(EdgeInsets {\n\n top: 15.0,\n\n bottom: 15.0,\n\n left: 8.0,\n\n right: 8.0,\n\n })\n\n .corner_rounding(CornerRounding::CornerRadii(CornerRadii {\n\n top_left: 0.0,\n\n bottom_left: 0.0,\n\n bottom_right: 2.0,\n\n top_right: 2.0,\n\n }))\n\n // override any outline element within persistent split\n\n .outline((0.0, Color::CLEAR), ControlState::Default)\n\n } else {\n\n ctx.style().btn_outline.dropdown().label_text(label)\n\n };\n\n\n\n builder.build(ctx, tooltip)\n\n}\n", "file_path": "widgetry/src/widgets/dropdown.rs", "rank": 84, "score": 398080.33297627664 }, { "content": "pub fn import_json(ctx: &mut EventCtx) -> Transition {\n\n Transition::Push(FilePicker::new_state(\n\n ctx,\n\n None,\n\n Box::new(|ctx, app, maybe_path| {\n\n if let Ok(Some(path)) = maybe_path {\n\n let result = ctx.loading_screen(\"import JSON scenario\", |_, timer| {\n\n import_json_scenario(&app.primary.map, path, timer)\n\n });\n\n match result {\n\n Ok(scenario_name) => {\n\n // Clear out the cached scenario. 
If we repeatedly use this import, the\n\n // scenario name is always the same, but the file is changing.\n\n app.primary.scenario = None;\n\n Transition::Replace(SandboxMode::simple_new(\n\n app,\n\n GameplayMode::PlayScenario(\n\n app.primary.map.get_name().clone(),\n\n scenario_name,\n\n Vec::new(),\n", "file_path": "game/src/sandbox/gameplay/freeform/importers.rs", "rank": 85, "score": 397631.058314156 }, { "content": "pub fn import_grid2demand(ctx: &mut EventCtx) -> Transition {\n\n Transition::Push(FilePicker::new_state(\n\n ctx,\n\n None,\n\n Box::new(|ctx, app, maybe_path| {\n\n if let Ok(Some(path)) = maybe_path {\n\n Transition::Replace(RunCommand::new_state(\n\n ctx,\n\n true,\n\n vec![\n\n find_exe(\"cli\"),\n\n \"import-grid2-demand\".to_string(),\n\n format!(\"--map={}\", app.primary.map.get_name().path()),\n\n format!(\"--input={}\", path),\n\n ],\n\n Box::new(|_, app, success, _| {\n\n if success {\n\n // Clear out the cached scenario. If we repeatedly use this import, the\n\n // scenario name is always the same, but the file is changing.\n\n app.primary.scenario = None;\n", "file_path": "game/src/sandbox/gameplay/freeform/importers.rs", "rank": 86, "score": 397631.058314156 }, { "content": "fn mouseover_unzoomed_agent_circle(ctx: &mut EventCtx, app: &mut App) {\n\n let cursor = if let Some(pt) = ctx.canvas.get_cursor_in_map_space() {\n\n pt\n\n } else {\n\n return;\n\n };\n\n\n\n for (id, _, _) in app\n\n .primary\n\n .agents\n\n .borrow_mut()\n\n .calculate_unzoomed_agents(ctx, &app.primary.map, &app.primary.sim, &app.cs)\n\n .query(\n\n Circle::new(cursor, Distance::meters(3.0))\n\n .get_bounds()\n\n .as_bbox(),\n\n )\n\n {\n\n if let Some(pt) = app\n\n .primary\n\n .sim\n\n .canonical_pt_for_agent(*id, &app.primary.map)\n\n {\n\n if Circle::new(pt, unzoomed_agent_radius(id.to_vehicle_type())).contains_pt(cursor) {\n\n app.primary.current_selection = Some(ID::from_agent(*id));\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/mod.rs", "rank": 87, "score": 396616.76383364253 }, { "content": "fn handle_app_header_click(ctx: &mut EventCtx, app: &App, x: &str) -> Option<Transition> {\n\n match x {\n\n \"Home\" => Some(Transition::Clear(vec![\n\n map_gui::tools::TitleScreen::new_state(\n\n ctx,\n\n app,\n\n map_gui::tools::Executable::LTN,\n\n Box::new(|ctx, app, _| BrowseNeighborhoods::new_state(ctx, app)),\n\n ),\n\n ])),\n\n \"change map\" => Some(Transition::Push(map_gui::tools::CityPicker::new_state(\n\n ctx,\n\n app,\n\n Box::new(|ctx, app| Transition::Replace(BrowseNeighborhoods::new_state(ctx, app))),\n\n ))),\n\n \"search\" => Some(Transition::Push(map_gui::tools::Navigator::new_state(\n\n ctx, app,\n\n ))),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "ltn/src/lib.rs", "rank": 88, "score": 395648.4900699114 }, { "content": "pub fn find_rat_runs(app: &App, neighborhood: &Neighborhood, timer: &mut Timer) -> RatRuns {\n\n let map = &app.map;\n\n let modal_filters = &app.session.modal_filters;\n\n // The overall approach: look for all possible paths from an entrance to an exit, only if they\n\n // connect to different major roads.\n\n //\n\n // But an entrance and exit to _what_? If we try to route from the entrance to one cell to the\n\n // exit of another, then the route will make strange U-turns and probably use the perimeter. By\n\n // definition, two cells aren't reachable without using the perimeter. 
So restrict our search\n\n // to pairs of entrances/exits in the _same_ cell.\n\n let mut requests = Vec::new();\n\n\n\n for cell in &neighborhood.cells {\n\n let entrances = find_entrances(map, neighborhood, cell);\n\n let exits = find_exits(map, neighborhood, cell);\n\n\n\n for entrance in &entrances {\n\n for exit in &exits {\n\n if entrance.major_road_name != exit.major_road_name {\n\n requests.push(PathRequest::vehicle(\n", "file_path": "ltn/src/rat_runs.rs", "rank": 89, "score": 395638.38761529024 }, { "content": "fn launch(ctx: &mut EventCtx, app: &App, edits: PermanentMapEdits) -> Transition {\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n {\n\n if !abstio::file_exists(edits.map_name.path()) {\n\n return map_gui::tools::prompt_to_download_missing_data(ctx, edits.map_name);\n\n }\n\n }\n\n\n\n Transition::Push(MapLoader::new_state(\n\n ctx,\n\n app,\n\n edits.map_name.clone(),\n\n Box::new(move |ctx, app| {\n\n // Apply edits before setting up the sandbox, for simplicity\n\n let maybe_err = ctx.loading_screen(\"apply edits\", |ctx, timer| {\n\n match edits.into_edits(&app.primary.map) {\n\n Ok(edits) => {\n\n apply_map_edits(ctx, app, edits);\n\n app.primary.map.recalculate_pathfinding_after_edits(timer);\n\n None\n", "file_path": "game/src/pregame/proposals.rs", "rank": 90, "score": 394098.8481861467 }, { "content": "pub fn percentage_bar(ctx: &EventCtx, txt: Text, pct_green: f64) -> Widget {\n\n let bad_color = Color::RED;\n\n let good_color = Color::GREEN;\n\n\n\n let total_width = 450.0;\n\n let height = 32.0;\n\n let radius = 4.0;\n\n\n\n let mut batch = GeomBatch::new();\n\n // Background\n\n batch.push(\n\n bad_color,\n\n Polygon::rounded_rectangle(total_width, height, radius),\n\n );\n\n // Foreground\n\n if let Some(poly) = Polygon::maybe_rounded_rectangle(pct_green * total_width, height, radius) {\n\n batch.push(good_color, poly);\n\n }\n\n // Text\n\n let label = txt.render_autocropped(ctx);\n\n let dims = label.get_dims();\n\n batch.append(label.translate(10.0, height / 2.0 - dims.height / 2.0));\n\n batch.into_widget(ctx)\n\n}\n\n\n", "file_path": "map_gui/src/tools/ui.rs", "rank": 91, "score": 394018.87404376577 }, { "content": "fn make_panel(ctx: &mut EventCtx) -> Panel {\n\n Panel::new_builder(Widget::col(vec![\n\n Widget::row(vec![\n\n Line(\"Blockfinder\").small_heading().into_widget(ctx),\n\n ctx.style().btn_close_widget(ctx),\n\n ]),\n\n \"Click a block to examine.\".text_widget(ctx),\n\n \"Press space to mark/unmark for merging\".text_widget(ctx),\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Merge\")\n\n .hotkey(Key::M)\n\n .build_def(ctx),\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Collapse dead-ends\")\n\n .hotkey(Key::D)\n\n .build_def(ctx),\n\n ctx.style()\n\n .btn_outline\n", "file_path": "game/src/debug/blockfinder.rs", "rank": 92, "score": 391580.33046054014 }, { "content": "pub fn actions(_: &App, id: ID) -> Vec<(Key, String)> {\n\n match id {\n\n ID::Building(_) => vec![(Key::Z, \"start a trip here\".to_string())],\n\n ID::Intersection(_) => vec![(Key::Z, \"spawn agents here\".to_string())],\n\n _ => Vec::new(),\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/freeform/mod.rs", "rank": 93, "score": 389089.8662937186 }, { "content": "fn header(ctx: &EventCtx, details: &mut Details, id: ParkingLotID, tab: Tab) -> Widget {\n\n Widget::custom_col(vec![\n\n Widget::row(vec![\n\n Line(id.to_string()).small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]),\n\n make_tabs(\n\n ctx,\n\n &mut details.hyperlinks,\n\n tab,\n\n vec![(\"Info\", 
Tab::ParkingLot(id))],\n\n ),\n\n ])\n\n}\n", "file_path": "game/src/info/parking_lot.rs", "rank": 94, "score": 386848.3413702502 }, { "content": "fn make_pagination(ctx: &mut EventCtx, total: usize, skip: usize) -> Widget {\n\n let next = ctx\n\n .style()\n\n .btn_next()\n\n .disabled(skip + 1 + ROWS >= total)\n\n .hotkey(Key::RightArrow);\n\n let prev = ctx\n\n .style()\n\n .btn_prev()\n\n .disabled(skip == 0)\n\n .hotkey(Key::LeftArrow);\n\n\n\n Widget::row(vec![\n\n prev.build_widget(ctx, \"previous\"),\n\n format!(\n\n \"{}-{} of {}\",\n\n if total > 0 {\n\n prettyprint_usize(skip + 1)\n\n } else {\n\n \"0\".to_string()\n\n },\n\n prettyprint_usize((skip + 1 + ROWS).min(total)),\n\n prettyprint_usize(total)\n\n )\n\n .text_widget(ctx)\n\n .centered_vert(),\n\n next.build_widget(ctx, \"next\"),\n\n ])\n\n}\n\n\n", "file_path": "widgetry/src/widgets/table.rs", "rank": 95, "score": 386192.7303273221 }, { "content": "pub fn draw_star(ctx: &mut EventCtx, b: &Building) -> GeomBatch {\n\n GeomBatch::load_svg(ctx, \"system/assets/tools/star.svg\")\n\n .centered_on(b.polygon.center())\n\n .color(RewriteColor::ChangeAll(Color::BLACK))\n\n}\n\n\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 96, "score": 382460.0218816633 }, { "content": "fn level_btn(ctx: &mut EventCtx, app: &App, level: &Level, idx: usize) -> GeomBatch {\n\n let mut txt = Text::new();\n\n txt.add_line(Line(format!(\"LEVEL {}\", idx + 1)).small_heading());\n\n txt.add_line(Line(&level.title).small_heading());\n\n txt.add_line(&level.description);\n\n let batch = txt.wrap_to_pct(ctx, 15).render_autocropped(ctx);\n\n\n\n // Add padding\n\n let (mut batch, hitbox) = batch\n\n .batch()\n\n .container()\n\n .padding(EdgeInsets {\n\n top: 20.0,\n\n bottom: 20.0,\n\n left: 10.0,\n\n right: 10.0,\n\n })\n\n .into_geom(ctx, None);\n\n batch.unshift(app.cs.unzoomed_bike, hitbox);\n\n batch\n\n}\n\n\n", "file_path": "santa/src/title.rs", "rank": 97, "score": 378205.86219800345 }, { "content": "/// Make it clear the map can't be interacted with right now.\n\npub fn grey_out_map(g: &mut GfxCtx, app: &dyn AppLike) {\n\n g.fork_screenspace();\n\n // TODO - OSD height\n\n g.draw_polygon(\n\n app.cs().fade_map_dark,\n\n Polygon::rectangle(g.canvas.window_width, g.canvas.window_height),\n\n );\n\n g.unfork();\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 98, "score": 378054.96555775695 }, { "content": "fn only_one_border(app: &mut App, neighborhood: &Neighborhood) {\n\n for cell in &neighborhood.cells {\n\n if cell.borders.len() > 1 {\n\n // TODO How to pick which one to leave open?\n\n for i in cell.borders.iter().skip(1) {\n\n // Find the road in this cell connected to this border\n\n for r in cell.roads.keys() {\n\n let road = app.map.get_r(*r);\n\n if road.src_i == *i {\n\n app.session\n\n .modal_filters\n\n .roads\n\n .insert(road.id, 0.1 * road.length());\n\n break;\n\n } else if road.dst_i == *i {\n\n app.session\n\n .modal_filters\n\n .roads\n\n .insert(road.id, 0.9 * road.length());\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "ltn/src/auto.rs", "rank": 99, "score": 375087.20231228624 } ]
Rust
src/lib/worker.rs
seb-odessa/workers-pool
dd093585771d86c6295123edc64c0b84a497d1f3
use std::sync::mpsc; use std::sync::mpsc::{Sender, Receiver}; use super::types::{Message, Processor, HasName, HasTarget}; pub struct WorkerHandler<T: HasName + HasTarget, W: HasName + Processor> { gate: Sender<Message<T>>, input: Receiver<Message<T>>, output: Sender<Message<T>>, received: usize, processed: usize, worker: W, } impl<T: HasName + HasTarget, W: HasName + Processor> Drop for WorkerHandler<T, W> { fn drop(&mut self) { trace!("{} dropped. Received {} tasks, processed {} tasks.", self.worker.name(), self.received, self.processed); } } impl<T: HasName + HasTarget, W: HasName + Processor> WorkerHandler<T, W> { pub fn new(worker: W, output: Sender<Message<T>>) -> Self { trace!("WorkerHandler::new({}, ...)", &worker.name()); let (tx, rx) = mpsc::channel(); WorkerHandler { gate: tx, input: rx, output: output, received: 0, processed: 0, worker: worker, } } pub fn gate(&self) -> Sender<Message<T>> { self.gate.clone() } fn say(&self, msg: Message<T>) -> bool { return self.output.send(msg).is_ok(); } pub fn run(&mut self) { while let Ok(msg) = self.input.recv() { self.received += 1; match msg { Message::Quit => { trace!("{} <= Message::Quit", self.worker.name()); break; } Message::Event(request) => { let name = request.name(); trace!("{} <= Message::Request({})", self.worker.name(), name); let succ = self.say(Message::Busy(name.clone())) && self.say(Message::Event(self.worker.process(request))) && self.say(Message::Free(name.clone())); if !succ { break; } self.processed += 1; trace!("{} <= Message::Request({}); processed: {}", self.worker.name(), name, self.processed); } _ => { warn!("{} <= Unexpected message!!!", self.worker.name()); } } } trace!("{} Has finished run()", self.worker.name()); } } #[cfg(test)] mod tests { use std::sync::mpsc; use std::sync::mpsc::{Sender, Receiver}; use std::thread; use types::{Message, Processor, HasName, HasTarget}; use super::WorkerHandler; #[derive(Debug, PartialEq)] struct EventFake; impl HasName for EventFake { fn name(&self) -> String { String::from("EventFake") } } impl HasTarget for EventFake { fn target(&self) -> String { String::from("EventFakeTarget") } } #[derive(Debug, PartialEq)] struct TaskFake; impl HasName for TaskFake { fn name(&self) -> String { String::from("EventFake") } } impl Processor for TaskFake { fn process<T>(&self, event: T) -> T { event } } #[test] fn message_quit() { let task = TaskFake; let (pipe, _): (Sender<Message<EventFake>>, Receiver<Message<EventFake>>) = mpsc::channel(); let mut handler = WorkerHandler::new(task, pipe.clone()); let gate = handler.gate(); let thread = thread::spawn(move || handler.run()); gate.send(Message::Quit).unwrap(); thread.join().unwrap(); } #[test] fn message_event() { let task = TaskFake; let taskname = task.name(); let (pipe, results): (Sender<Message<EventFake>>, Receiver<Message<EventFake>>) = mpsc::channel(); let mut handler = WorkerHandler::new(task, pipe.clone()); let gate = handler.gate(); let thread = thread::spawn(move || handler.run()); gate.send(Message::Event(EventFake)).unwrap(); assert!(results.recv().unwrap() == Message::Busy(taskname.clone())); assert!(results.recv().unwrap() == Message::Event(EventFake)); assert!(results.recv().unwrap() == Message::Free(taskname.clone())); gate.send(Message::Quit).unwrap(); thread.join().unwrap(); } }
use std::sync::mpsc; use std::sync::mpsc::{Sender, Receiver}; use super::types::{Message, Processor, HasName, HasTarget}; pub struct WorkerHandler<T: HasName + HasTarget, W: HasName + Processor> { gate: Sender<Message<T>>, input: Receiver<Message<T>>, output: Sender<Message<T>>, received: usize, processed: usize, worker: W, } impl<T: HasName + HasTarget, W: HasName + Processor> Drop for WorkerHandler<T, W> { fn drop(&mut self) { trace!("{} dropped. Received {} tasks, processed {} tasks.", self.worker.name(), self.received, self.processed); } }
trace!("{} Has finished run()", self.worker.name()); } } #[cfg(test)] mod tests { use std::sync::mpsc; use std::sync::mpsc::{Sender, Receiver}; use std::thread; use types::{Message, Processor, HasName, HasTarget}; use super::WorkerHandler; #[derive(Debug, PartialEq)] struct EventFake; impl HasName for EventFake { fn name(&self) -> String { String::from("EventFake") } } impl HasTarget for EventFake { fn target(&self) -> String { String::from("EventFakeTarget") } } #[derive(Debug, PartialEq)] struct TaskFake; impl HasName for TaskFake { fn name(&self) -> String { String::from("EventFake") } } impl Processor for TaskFake { fn process<T>(&self, event: T) -> T { event } } #[test] fn message_quit() { let task = TaskFake; let (pipe, _): (Sender<Message<EventFake>>, Receiver<Message<EventFake>>) = mpsc::channel(); let mut handler = WorkerHandler::new(task, pipe.clone()); let gate = handler.gate(); let thread = thread::spawn(move || handler.run()); gate.send(Message::Quit).unwrap(); thread.join().unwrap(); } #[test] fn message_event() { let task = TaskFake; let taskname = task.name(); let (pipe, results): (Sender<Message<EventFake>>, Receiver<Message<EventFake>>) = mpsc::channel(); let mut handler = WorkerHandler::new(task, pipe.clone()); let gate = handler.gate(); let thread = thread::spawn(move || handler.run()); gate.send(Message::Event(EventFake)).unwrap(); assert!(results.recv().unwrap() == Message::Busy(taskname.clone())); assert!(results.recv().unwrap() == Message::Event(EventFake)); assert!(results.recv().unwrap() == Message::Free(taskname.clone())); gate.send(Message::Quit).unwrap(); thread.join().unwrap(); } }
impl<T: HasName + HasTarget, W: HasName + Processor> WorkerHandler<T, W> { pub fn new(worker: W, output: Sender<Message<T>>) -> Self { trace!("WorkerHandler::new({}, ...)", &worker.name()); let (tx, rx) = mpsc::channel(); WorkerHandler { gate: tx, input: rx, output: output, received: 0, processed: 0, worker: worker, } } pub fn gate(&self) -> Sender<Message<T>> { self.gate.clone() } fn say(&self, msg: Message<T>) -> bool { return self.output.send(msg).is_ok(); } pub fn run(&mut self) { while let Ok(msg) = self.input.recv() { self.received += 1; match msg { Message::Quit => { trace!("{} <= Message::Quit", self.worker.name()); break; } Message::Event(request) => { let name = request.name(); trace!("{} <= Message::Request({})", self.worker.name(), name); let succ = self.say(Message::Busy(name.clone())) && self.say(Message::Event(self.worker.process(request))) && self.say(Message::Free(name.clone())); if !succ { break; } self.processed += 1; trace!("{} <= Message::Request({}); processed: {}", self.worker.name(), name, self.processed); } _ => { warn!("{} <= Unexpected message!!!", self.worker.name()); } } }
random
[ { "content": "/// This trait guaranty that it's implementer has a process<T>() function\n\npub trait Processor {\n\n /// Takes arg of type <T>\n\n /// Returns result of the same type\n\n fn process<T>(&self, arg: T) -> T;\n\n}\n\n\n\n/// Message type used as container for all Supevisors/Workers communications\n\n#[derive(Debug, PartialEq)]\n\npub enum Message<T: HasName + HasTarget> {\n\n /// Finish the work (function run()) and be ready to thread join\n\n Quit,\n\n /// Contains a portion of data for processing by the Worker\n\n /// T must has name and target\n\n Event(T),\n\n /// Inform Supervisor that worker become busy\n\n Busy(String),\n\n /// Inform Supervisor that worker become free for next event\n\n Free(String),\n\n}\n", "file_path": "src/lib/types.rs", "rank": 0, "score": 46948.19295558127 }, { "content": "/// This trait guaranty that it's implementer has a target() function\n\npub trait HasTarget {\n\n /// Returns target of the implementer\n\n fn target(&self) -> String;\n\n}\n\n\n", "file_path": "src/lib/types.rs", "rank": 1, "score": 29461.16782807465 }, { "content": "fn main() {}\n", "file_path": "src/main.rs", "rank": 2, "score": 27793.758510801978 }, { "content": "/// This trait guaranty that it's implementer has a name() function\n\npub trait HasName {\n\n /// Returns name of the implementer\n\n fn name(&self) -> String;\n\n}\n\n\n", "file_path": "src/lib/types.rs", "rank": 3, "score": 22495.7840903451 }, { "content": "# workers-pool\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any\n", "file_path": "README.md", "rank": 12, "score": 11071.632905836672 }, { "content": "// use std::thread;\n\n// use std::thread::{JoinHandle};\n\n// use std::sync::mpsc;\n\n// use std::sync::mpsc::{Sender, Receiver, TryRecvError};\n\n// use std::collections::HashMap;\n\n//\n\n// use super::worker::Worker;\n\n// use super::types::{Message, Worker};\n\n//\n\n// pub struct Supervisor<Obj:Task+Send + 'static> {\n\n// name : String, /// The name of the Pool\n\n// workers : usize, /// The number of threads\n\n// gate : Sender<Message<Obj>>, /// The external side of the INPUT channel\n\n// input : Receiver<Message<Obj>>, /// The internal side of the INPUT channel\n\n// output : Sender<Message<Obj>>, /// The internal part of the OUTPUT channel\n\n// worker_gate:HashMap<String, Sender<Message<Obj>>>,\n\n// worker_ready:HashMap<String, bool>,\n\n// worker_thread:Vec<JoinHandle<()>>,\n\n// worker_result:Receiver<Message<Obj>>,\n\n// active:usize,\n", "file_path": "src/lib/supervisor.rs", "rank": 13, "score": 12.92344947049238 }, { "content": "// wait_quit:bool,\n\n// }\n\n//\n\n// impl <Obj:Task+Send> Drop for Supervisor<Obj> {\n\n// fn drop(&mut self) {\n\n// while !self.worker_thread.is_empty() {\n\n// if let Ok(_) = self.worker_thread.pop().unwrap().join() {\n\n// trace!(\"{} successful join a thread.\", self.name);\n\n// }\n\n// }\n\n// trace!(\"{} was dropped.\", self.name);\n\n// }\n\n//\n\n// }\n\n// impl <Obj:Task+Send> Supervisor <Obj> {\n\n// #[allow(dead_code)]\n\n// pub fn new<Name : Into<String>>(name : Name, workers : usize, results : Sender<Message<Obj>>) -> 
Self {\n\n// let name = name.into();\n\n// trace!(\"{} created.\", &name);\n\n// let (gate, input) = mpsc::channel();\n", "file_path": "src/lib/supervisor.rs", "rank": 14, "score": 12.663692324095935 }, { "content": "// *self.worker_ready.get_mut(&worker).unwrap() = true;\n\n// self.output.send(Message::Done(worker, task)).unwrap();\n\n// self.active -= 1;\n\n// }\n\n// Message::Exited(worker) => {\n\n// trace!(\"{} <= Message::Exited({})\", self.name, worker);\n\n// self.worker_gate.remove(&worker);\n\n// self.active -= 1;\n\n// }\n\n// _ => {\n\n// panic!(\"{} has received unexpected command.\", self.name);\n\n// }\n\n// }\n\n// }\n\n//\n\n// fn process_input(&mut self) -> () {\n\n// if self.is_pool_empty() {\n\n// match self.input.recv(){\n\n// Ok(msg) => self.handle_input(msg),\n\n// Err(err) => panic!(\"{} has found {}\", self.name, err),\n", "file_path": "src/lib/supervisor.rs", "rank": 15, "score": 10.993389432881461 }, { "content": "// *self.worker_ready.get_mut(&name).unwrap() = false;\n\n// self.worker_gate[&name].send(Message::Invoke(task)).unwrap();\n\n// self.active += 1;\n\n// }\n\n// None => {\n\n// self.output.send(Message::Resend(task)).unwrap()\n\n// }\n\n// }\n\n// }\n\n// _ => {\n\n// panic!(\"{} has received unexpected command.\", self.name);\n\n// }\n\n// }\n\n//\n\n// }\n\n//\n\n// fn handle_results(&mut self, msg:Message<Obj>) -> () {\n\n// match msg {\n\n// Message::Done(worker, task) => {\n\n// trace!(\"{} <= Message::Done({},{})\", self.name, worker, task.name());\n", "file_path": "src/lib/supervisor.rs", "rank": 16, "score": 8.805577768776258 }, { "content": "// Err(TryRecvError::Empty) => {},\n\n// Err(TryRecvError::Disconnected) => panic!(\"{} has found disconnected channel\", self.name)\n\n// }\n\n// }\n\n// }\n\n//\n\n// #[allow(dead_code)]\n\n// pub fn run(&mut self) {\n\n// while !self.worker_gate.is_empty() {\n\n// self.process_results();\n\n// self.process_input();\n\n// }\n\n// self.output.send(Message::Exited(self.name.clone())).unwrap();\n\n// }\n\n// }\n", "file_path": "src/lib/supervisor.rs", "rank": 17, "score": 8.692112214187976 }, { "content": "// return Some(k.clone());\n\n// }\n\n// }\n\n// return None;\n\n// }\n\n//\n\n// fn handle_input(&mut self, msg:Message<Obj>) -> (){\n\n// match msg {\n\n// Message::Quit => {\n\n// trace!(\"{} <= Message::Quit\", self.name);\n\n// for (_, gate) in &self.worker_gate {\n\n// gate.send(Message::Quit).unwrap();\n\n// self.active += 1;\n\n// }\n\n// self.wait_quit = true;\n\n// },\n\n// Message::Invoke(task) => {\n\n// match self.get_ready_worker() {\n\n// Some(name) => {\n\n// trace!(\"{} <= Message::Invoke({})\", self.name, task.name());\n", "file_path": "src/lib/supervisor.rs", "rank": 18, "score": 8.298812541793772 }, { "content": "// let (worker_gate, worker_result) : (Sender<Message<Obj>>, Receiver<Message<Obj>>) = mpsc::channel();\n\n//\n\n// let mut pool = Supervisor {\n\n// name:name,\n\n// workers:workers,\n\n// gate:gate,\n\n// input:input,\n\n// output:results,\n\n// worker_gate:HashMap::new(),\n\n// worker_ready:HashMap::new(),\n\n// worker_thread:Vec::new(),\n\n// worker_result:worker_result,\n\n// active:0,\n\n// wait_quit:false,\n\n// };\n\n// for idx in 0..pool.workers {\n\n// let name = format!(\"Agent_{}\", (idx+1));\n\n// let mut worker = Worker::new(name.clone(), worker_gate.clone());\n\n// pool.worker_gate.insert(name.clone(), worker.gate());\n\n// pool.worker_ready.insert(name.clone(), true);\n", "file_path": "src/lib/supervisor.rs", "rank": 19, "score": 7.737900370238901 }, { 
"content": "// fn drop(&mut self) {\n\n// trace!(\"{} was dropped.\", self.name);\n\n// }\n\n// }\n\n// impl Work {\n\n// pub fn new<Name : Into<String>>(name : Name) -> Self{\n\n// let name = name.into();\n\n// trace!(\"Work::new({})\", name);\n\n// Work { name : name.into(), value : 0 }\n\n// }\n\n// }\n\n// impl Task for Work {\n\n// fn run(&mut self) {\n\n// self.value = rand::random::<u32>() % 3000;\n\n// trace!(\"{}.invoke() started. ETA: {} ms\", self.name, self.value);\n\n// sleep(Duration::new(0,self.value));\n\n// trace!(\"{}.invoke() was completed!\", self.name);\n\n// }\n\n// fn name(&self)->String {\n\n// self.name.clone()\n", "file_path": "src/main.rs", "rank": 20, "score": 7.571543440919875 }, { "content": "// fn main() {\n\n// let _ = env_logger::init().unwrap();\n\n// const THREAD_MAX :usize = 4;\n\n// let (pipe, results) : (Sender<Message<Work>>, Receiver<Message<Work>>) = mpsc::channel();\n\n// let mut sup = Supervisor::new(\"Pool\", THREAD_MAX, pipe.clone());\n\n// let gate = sup.gate();\n\n// let thread = thread::spawn(move || sup.run());\n\n//\n\n// const MAX_TASK:usize = 100000;\n\n// let mut generated:usize = 0;\n\n// let mut processed:usize = 0;\n\n//\n\n// enum WorkState {\n\n// ReadyForTask,\n\n// WaitForDone,\n\n// Done\n\n// }\n\n// let mut state:WorkState = WorkState::ReadyForTask;\n\n// let mut source = WorkSource::new();\n\n// loop {\n", "file_path": "src/main.rs", "rank": 21, "score": 7.35156162755181 }, { "content": "//! Library description\n\n#![deny(missing_docs, unsafe_code)]\n\npub mod types;\n\npub mod worker;\n\n// pub mod supervisor;\n\n\n\n// pub use self::types::{Message};\n\n// pub use self::worker::Worker;\n\n// pub use self::supervisor::Supervisor;\n\n\n\n#[macro_use]\n\nextern crate log;\n\nextern crate env_logger;\n", "file_path": "src/lib/lib.rs", "rank": 22, "score": 7.026219017885289 }, { "content": "// message = msg;\n\n// }\n\n// }\n\n// WorkState::Done => {\n\n// break;\n\n// }\n\n// }\n\n// match message {\n\n// Message::Done(agent, task) => {\n\n// trace!(\"Message::Done({},{}) with the value {}\", agent, task.name, task.value);\n\n// processed += 1;\n\n// }\n\n// Message::Resend(task) => {\n\n// println!(\"Message::Resend({})\", task.name);\n\n// gate.send(Message::Invoke(task)).unwrap();\n\n// }\n\n// Message::Exited(name) => {\n\n// trace!(\"Message::Exited({})\", &name);\n\n// state = WorkState::Done;\n\n// }\n", "file_path": "src/main.rs", "rank": 23, "score": 6.366169817458685 }, { "content": "// Message::Nothing => {\n\n// }\n\n// _ => {\n\n// panic!(\"Has received unexpected command.\");\n\n// }\n\n// }\n\n// }\n\n//\n\n// thread.join().unwrap();\n\n// println!(\"Generated {} tasks.\", generated);\n\n// println!(\"Processed {} tasks.\", processed);\n\n// }\n\n\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 6.01700522831416 }, { "content": "// }\n\n// }\n\n//\n\n// struct WorkSource {\n\n// idx : usize\n\n// }\n\n// impl WorkSource {\n\n// pub fn new() -> Self {\n\n// WorkSource { idx : 0 }\n\n// }\n\n// }\n\n// impl Iterator for WorkSource {\n\n// type Item = Work;\n\n// fn next(&mut self) -> Option<Work> {\n\n// self.idx += 1;\n\n// sleep(Duration::new(0, rand::random::<u32>() % 100));\n\n// Some(Work::new(format!(\"Task_{}\", &self.idx)))\n\n// }\n\n// }\n\n//\n", "file_path": "src/main.rs", "rank": 25, "score": 5.759673100796865 }, { "content": "// let mut message = Message::Nothing;\n\n// match state {\n\n// WorkState::ReadyForTask => {\n\n// if generated < MAX_TASK {\n\n// if let Some(task) = source.next() 
{\n\n// trace!(\"gate.send(Message::Invoke(Task({})))\", task.name());\n\n// gate.send(Message::Invoke(task)).unwrap();\n\n// generated += 1;\n\n// }\n\n// } else {\n\n// trace!(\"gate.send(Message::Quit)\");\n\n// gate.send(Message::Quit).unwrap();\n\n// state = WorkState::WaitForDone;\n\n// }\n\n// if let Ok(msg) = results.try_recv() {\n\n// message = msg;\n\n// }\n\n// }\n\n// WorkState::WaitForDone => {\n\n// if let Ok(msg) = results.recv() {\n", "file_path": "src/main.rs", "rank": 26, "score": 5.686609754476649 }, { "content": "// pool.worker_thread.push(thread::spawn(move || worker.run()));\n\n// }\n\n// return pool;\n\n// }\n\n//\n\n// pub fn gate(&self) -> Sender<Message<Obj>> {\n\n// self.gate.clone()\n\n// }\n\n//\n\n// fn is_pool_empty(&self) -> bool {\n\n// return self.active == 0 && !self.wait_quit;\n\n// }\n\n//\n\n// fn is_pool_full(&self) -> bool {\n\n// return self.active >= self.worker_gate.len();\n\n// }\n\n//\n\n// fn get_ready_worker(&self) -> Option<String> {\n\n// for (k, v) in &self.worker_ready {\n\n// if *v {\n", "file_path": "src/lib/supervisor.rs", "rank": 27, "score": 5.471980655418495 }, { "content": "// use std::thread;\n\n// use std::thread::sleep;\n\n// use std::time::Duration;\n\n// use std::sync::mpsc;\n\n// use std::sync::mpsc::{Sender, Receiver};\n\n//\n\n// #[macro_use]\n\n// extern crate log;\n\n// extern crate env_logger;\n\n//\n\n// extern crate rand;\n\n// extern crate lib;\n\n// use lib::types::{Message};\n\n// use lib::supervisor::Supervisor;\n\n//\n\n// struct Work {\n\n// name : String,\n\n// value : u32\n\n// }\n\n// impl Drop for Work {\n", "file_path": "src/main.rs", "rank": 28, "score": 5.184996890558974 }, { "content": "// }\n\n// } else {\n\n// match self.input.try_recv(){\n\n// Ok(msg) => self.handle_input(msg),\n\n// Err(TryRecvError::Empty) => {},\n\n// Err(TryRecvError::Disconnected) => panic!(\"{} has found disconnected channel\", self.name),\n\n// }\n\n//\n\n// }\n\n// }\n\n//\n\n// fn process_results(&mut self) -> () {\n\n// if self.is_pool_full() {\n\n// match self.worker_result.recv() {\n\n// Ok(msg) => self.handle_results(msg),\n\n// Err(err) => panic!(\"{} has found {}\", self.name, err),\n\n// }\n\n// } else {\n\n// match self.worker_result.try_recv() {\n\n// Ok(msg) => self.handle_results(msg),\n", "file_path": "src/lib/supervisor.rs", "rank": 29, "score": 4.344927726495378 } ]
Rust
dnscat/src/encryption/standard.rs
avitex/rust-dnscat2
151f889780ac4ebf65ecabe3a738f84a22965bb1
use std::borrow::Borrow; use bytes::BufMut; use constant_time_eq::constant_time_eq; use generic_array::typenum::{U32, U65}; use generic_array::{sequence::Lengthen, GenericArray}; use ring::agreement::{self, agree_ephemeral}; use ring::rand; use salsa20::cipher::{NewStreamCipher, StreamCipher}; use salsa20::Salsa20; use secstr::SecStr; use sha3::{Digest, Sha3_256}; use super::{Authenticator, Encryption, EncryptionAcceptor, EncryptionError, PublicKey}; use crate::packet::SessionHeader; use crate::util::Encode; const PUBLIC_KEY_OCTET_TAG: u8 = 0x04; const STANDARD_ARGS_SIZE: usize = 6 + 2; type EncryptionKey = GenericArray<u8, <Salsa20 as NewStreamCipher>::KeySize>; type EncryptionNonce = GenericArray<u8, <Salsa20 as NewStreamCipher>::NonceSize>; type EncryptionMac = GenericArray<u8, U32>; type PublicKeyWithTag = GenericArray<u8, U65>; #[derive(Debug)] pub struct StandardEncryption { is_client: bool, nonce: u16, preshared_key: Option<SecStr>, self_pub_key: agreement::PublicKey, self_authenticator: Option<Authenticator>, peer_authenticator: Option<Authenticator>, self_priv_key: Option<agreement::EphemeralPrivateKey>, peer_pub_key: Option<agreement::UnparsedPublicKey<PublicKeyWithTag>>, stream_keys: Option<StreamKeys>, } impl StandardEncryption { pub fn new_with_ephemeral( is_client: bool, preshared_key: Option<SecStr>, ) -> Result<Self, EncryptionError> { let rand = rand::SystemRandom::new(); let (self_pub_key, self_priv_key) = agreement::EphemeralPrivateKey::generate(&agreement::ECDH_P256, &rand) .and_then(|priv_key| { priv_key .compute_public_key() .map(|pub_key| (pub_key, priv_key)) }) .or(Err(EncryptionError::Keygen))?; Ok(Self { nonce: 0, is_client, preshared_key, self_pub_key, peer_pub_key: None, self_authenticator: None, peer_authenticator: None, self_priv_key: Some(self_priv_key), stream_keys: None, }) } fn next_nouce(&mut self) -> Result<u16, EncryptionError> { if self.nonce == u16::max_value() { Err(EncryptionError::Renegotiate) } else { let current = self.nonce; self.nonce += 1; Ok(current) } } fn stream_keys(&self) -> &StreamKeys { self.stream_keys.as_ref().expect("stream keys not set") } fn raw_public_key(&self) -> &[u8] { &self.self_pub_key.as_ref()[1..] 
} } impl Encryption for StandardEncryption { fn args_size(&self) -> u8 { STANDARD_ARGS_SIZE as u8 } fn public_key(&self) -> PublicKey { GenericArray::clone_from_slice(self.raw_public_key()) } fn handshake(&mut self, peer: PublicKey) -> Result<(), EncryptionError> { let peer_with_tag = peer.prepend(PUBLIC_KEY_OCTET_TAG); let peer_pub_key = agreement::UnparsedPublicKey::new(&agreement::ECDH_P256, peer_with_tag); let (self_auth, peer_auth, stream_keys) = agree_ephemeral( self.self_priv_key.take().expect("no private key"), &peer_pub_key, EncryptionError::Handshake, |shared_key| { let self_auth = calc_authenticator( self.is_client, self.is_client, self.raw_public_key(), peer.as_ref(), shared_key, self.preshared_key.as_ref().map(Borrow::borrow), ); let peer_auth = calc_authenticator( self.is_client, !self.is_client, self.raw_public_key(), peer.as_ref(), shared_key, self.preshared_key.as_ref().map(Borrow::borrow), ); let stream_keys = StreamKeys::from_shared(shared_key); Ok((self_auth, peer_auth, stream_keys)) }, )?; self.self_authenticator = Some(self_auth); self.peer_authenticator = Some(peer_auth); self.peer_pub_key = Some(peer_pub_key); self.stream_keys = Some(stream_keys); Ok(()) } fn authenticator(&self) -> Authenticator { self.self_authenticator .expect("authenticator not initialised") } fn authenticate(&mut self, peer: Authenticator) -> Result<(), EncryptionError> { let valid = self .peer_authenticator .expect("peer authenticator consumed or not init"); if constant_time_eq(&valid[..], &peer[..]) { Ok(()) } else { Err(EncryptionError::Authentication) } } fn encrypt( &mut self, head: &SessionHeader, mut args: &mut [u8], data: &mut [u8], ) -> Result<(), EncryptionError> { let (cipher_key, mac_key) = self.stream_keys().get_write_keys(self.is_client); let nonce = self.next_nouce()?.to_be_bytes(); let mut cipher = Salsa20::new(&cipher_key, &calc_nonce(nonce)); cipher.encrypt(data); let sig = calc_signature(head, &nonce[..], &mac_key[..], data); args.put_slice(&sig[..]); args.put_slice(&nonce[..]); Ok(()) } fn decrypt( &mut self, head: &SessionHeader, args: &[u8], data: &mut [u8], ) -> Result<(), EncryptionError> { let (cipher_key, mac_key) = self.stream_keys().get_read_keys(self.is_client); let sig = [args[0], args[1], args[2], args[3], args[4], args[5]]; let nonce = [args[6], args[7]]; if calc_signature(head, &nonce[..], &mac_key[..], data) != sig { return Err(EncryptionError::Signature); } let mut cipher = Salsa20::new(&cipher_key, &calc_nonce(nonce)); cipher.decrypt(data); Ok(()) } } #[derive(Debug)] pub struct StandardEncryptionAcceptor { preshared_key: Option<SecStr>, } impl StandardEncryptionAcceptor { pub fn new(preshared_key: Option<SecStr>) -> Self { Self { preshared_key } } } impl EncryptionAcceptor for StandardEncryptionAcceptor { type Encryption = StandardEncryption; fn accept(&mut self, client: PublicKey) -> Result<Self::Encryption, EncryptionError> { let psk = self.preshared_key.clone(); StandardEncryption::new_with_ephemeral(false, psk).and_then(|mut encryption| { encryption.handshake(client)?; Ok(encryption) }) } } #[derive(Debug)] struct StreamKeys { client_mac: EncryptionMac, server_mac: EncryptionMac, client_write: EncryptionKey, server_write: EncryptionKey, } impl StreamKeys { fn get_write_keys(&self, is_client: bool) -> (EncryptionKey, EncryptionMac) { if is_client { (self.client_write, self.client_mac) } else { (self.server_write, self.server_mac) } } fn get_read_keys(&self, is_client: bool) -> (EncryptionKey, EncryptionMac) { self.get_write_keys(!is_client) } fn 
from_shared(key: &[u8]) -> Self { let mut hash = Sha3_256::new(); hash.update(key); hash.update("client_write_key"); let client_write = hash.finalize_reset(); hash.update(key); hash.update("client_mac_key"); let client_mac = hash.finalize_reset(); hash.update(key); hash.update("server_write_key"); let server_write = hash.finalize_reset(); hash.update(key); hash.update("server_mac_key"); let server_mac = hash.finalize(); Self { client_write, server_write, client_mac, server_mac, } } } fn calc_nonce(nonce: [u8; 2]) -> EncryptionNonce { let mut nonce_array = [0u8; 8]; nonce_array[6] = nonce[0]; nonce_array[7] = nonce[1]; nonce_array.into() } fn calc_authenticator( is_client: bool, for_client: bool, pubkey_self: &[u8], pubkey_peer: &[u8], shared_key: &[u8], preshared_key: Option<&[u8]>, ) -> Authenticator { let mut hash = Sha3_256::new(); if for_client { hash.update("client"); } else { hash.update("server"); } hash.update(shared_key); if is_client { hash.update(pubkey_self); hash.update(pubkey_peer); } else { hash.update(pubkey_peer); hash.update(pubkey_self); } if let Some(preshared_key) = preshared_key { hash.update(preshared_key); } hash.finalize() } fn calc_signature( head: &SessionHeader, nonce: &[u8], mac_key: &[u8], ciphertext: &[u8], ) -> [u8; 6] { let mut head_bytes = [0u8; SessionHeader::len()]; head.encode(&mut &mut head_bytes[..]); let mut hash = Sha3_256::new(); hash.update(mac_key); hash.update(&head_bytes[..]); hash.update(nonce); hash.update(ciphertext); let res = hash.finalize(); [res[0], res[1], res[2], res[3], res[4], res[5]] } #[cfg(test)] mod tests { use super::*; use crate::packet::PacketKind; #[test] fn test_basic() { let mut client = StandardEncryption::new_with_ephemeral(true, None).expect("client enc"); let mut server = StandardEncryption::new_with_ephemeral(false, None).expect("server enc"); server .handshake(client.public_key()) .expect("client to server handshake"); client .handshake(server.public_key()) .expect("server to client handshake"); server .authenticate(client.authenticator()) .expect("client to server auth"); client .authenticate(server.authenticator()) .expect("server to client auth"); let header = SessionHeader::new(1, PacketKind::SYN, 2); let mut args = [0u8; 8]; let mut data = [1, 2, 3, 5]; client .encrypt(&header, &mut args[..], &mut data[..]) .expect("encrypt"); assert_ne!(data, [1, 2, 3, 5]); server .decrypt(&header, &mut args[..], &mut data[..]) .expect("decrypt"); assert_eq!(data, [1, 2, 3, 5]); } }
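The listing above ties Salsa20 and the SHA3-256 MAC together through two small helpers: calc_nonce packs a 16-bit big-endian counter into the last two bytes of the 8-byte stream nonce, and calc_signature truncates a SHA3-256 digest over the MAC key, the encoded header, the nonce, and the ciphertext down to the 6 bytes carried in the packet args. Below is a minimal, self-contained sketch of that pattern; the function names are illustrative and not part of the crate, and only the sha3 crate already used in the listing is assumed.

use sha3::{Digest, Sha3_256};

// Pack a 16-bit big-endian counter into the trailing two bytes of an
// 8-byte Salsa20 nonce, mirroring calc_nonce in the listing.
fn pack_nonce(counter: u16) -> [u8; 8] {
    let mut nonce = [0u8; 8];
    nonce[6..].copy_from_slice(&counter.to_be_bytes());
    nonce
}

// Hash (mac_key || header || nonce || ciphertext) with SHA3-256 and keep the
// first six bytes, mirroring calc_signature in the listing. The header bytes
// here stand in for the encoded SessionHeader.
fn truncated_mac(mac_key: &[u8], header: &[u8], nonce: &[u8], ciphertext: &[u8]) -> [u8; 6] {
    let mut hash = Sha3_256::new();
    hash.update(mac_key);
    hash.update(header);
    hash.update(nonce);
    hash.update(ciphertext);
    let digest = hash.finalize();
    let mut sig = [0u8; 6];
    sig.copy_from_slice(&digest[..6]);
    sig
}

fn main() {
    let nonce = pack_nonce(1);
    let sig = truncated_mac(b"mac-key", b"encoded-header", &nonce, b"ciphertext");
    println!("nonce = {:?}, sig = {:?}", nonce, sig);
}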
use std::borrow::Borrow; use bytes::BufMut; use constant_time_eq::constant_time_eq; use generic_array::typenum::{U32, U65}; use generic_array::{sequence::Lengthen, GenericArray}; use ring::agreement::{self, agree_ephemeral}; use ring::rand; use salsa20::cipher::{NewStreamCipher, StreamCipher}; use salsa20::Salsa20; use secstr::SecStr; use sha3::{Digest, Sha3_256}; use super::{Authenticator, Encryption, EncryptionAcceptor, EncryptionError, PublicKey}; use crate::packet::SessionHeader; use crate::util::Encode; const PUBLIC_KEY_OCTET_TAG: u8 = 0x04; const STANDARD_ARGS_SIZE: usize = 6 + 2; type EncryptionKey = GenericArray<u8, <Salsa20 as NewStreamCipher>::KeySize>; type EncryptionNonce = GenericArray<u8, <Salsa20 as NewStreamCipher>::NonceSize>; type EncryptionMac = GenericArray<u8, U32>; type PublicKeyWithTag = GenericArray<u8, U65>; #[derive(Debug)] pub struct StandardEncryption { is_client: bool, nonce: u16, preshared_key: Option<SecStr>, self_pub_key: agreement::PublicKey, self_authenticator: Option<Authenticator>, peer_authenticator: Option<Authenticator>, self_priv_key: Option<agreement::EphemeralPrivateKey>, peer_pub_key: Option<agreement::UnparsedPublicKey<PublicKeyWithTag>>, stream_keys: Option<StreamKeys>, } impl StandardEncryption { pub fn new_with_ephemeral( is_client: bool, preshared_key: Option<SecStr>, ) -> Result<Self, EncryptionError> { let rand = rand::SystemRandom::new(); let (self_pub_key, self_priv_key) = agreement::EphemeralPrivateKey::generate(&agreement::ECDH_P256, &rand) .and_then(|priv_key| { priv_key .compute_public_key() .map(|pub_key| (pub_key, priv_key)) }) .or(Err(EncryptionError::Keygen))?; Ok(Self { nonce: 0, is_client, preshared_key, self_pub_key, peer_pub_key: None, self_authenticator: None, peer_authenticator: None, self_priv_key: Some(self_priv_key), stream_keys: None, }) } fn next_nouce(&mut self) -> Result<u16, EncryptionError> { if self.nonce == u16::max_value() { Err(EncryptionError::Renegotiate) } else { let current = self.nonce; self.nonce += 1; Ok(current) } } fn stream_keys(&self) -> &StreamKeys { self.stream_keys.as_ref().expect("stream keys not set") } fn raw_public_key(&self) -> &[u8] { &self.self_pub_key.as_ref()[1..] 
} } impl Encryption for StandardEncryption { fn args_size(&self) -> u8 { STANDARD_ARGS_SIZE as u8 } fn public_key(&self) -> PublicKey { GenericArray::clone_from_slice(self.raw_public_key()) } fn handshake(&mut self, peer: PublicKey) -> Result<(), EncryptionError> { let peer_with_tag = peer.prepend(PUBLIC_KEY_OCTET_TAG); let peer_pub_key = agreement::UnparsedPublicKey::new(&agreement::ECDH_P256, peer_with_tag); let (self_auth, peer_auth, stream_keys) = agree_ephemeral( self.self_priv_key.take().expect("no private key"), &peer_pub_key, EncryptionError::Handshake, |shared_key| { let self_auth = calc_authenticator( self.is_client, self.is_client, self.raw_public_key(), peer.as_ref(), shared_key, self.preshared_key.as_ref().map(Borrow::borrow), ); let peer_auth = calc_authenticator( self.is_client, !self.is_client, self.raw_public_key(), peer.as_ref(), shared_key, self.preshared_key.as_ref().map(Borrow::borrow), ); let stream_keys = StreamKeys::from_shared(shared_key); Ok((self_auth, peer_auth, stream_keys)) }, )?; self.self_authenticator = Some(self_auth); self.peer_authenticator = Some(peer_auth); self.peer_pub_key = Some(peer_pub_key); self.stream_keys = Some(stream_keys); Ok(()) } fn authenticator(&self) -> Authenticator { self.self_authenticator .expect("authenticator not initialised") } fn authenticate(&mut self, peer: Authenticator) -> Result<(), EncryptionError> { let valid
lt<(), EncryptionError> { let (cipher_key, mac_key) = self.stream_keys().get_write_keys(self.is_client); let nonce = self.next_nouce()?.to_be_bytes(); let mut cipher = Salsa20::new(&cipher_key, &calc_nonce(nonce)); cipher.encrypt(data); let sig = calc_signature(head, &nonce[..], &mac_key[..], data); args.put_slice(&sig[..]); args.put_slice(&nonce[..]); Ok(()) } fn decrypt( &mut self, head: &SessionHeader, args: &[u8], data: &mut [u8], ) -> Result<(), EncryptionError> { let (cipher_key, mac_key) = self.stream_keys().get_read_keys(self.is_client); let sig = [args[0], args[1], args[2], args[3], args[4], args[5]]; let nonce = [args[6], args[7]]; if calc_signature(head, &nonce[..], &mac_key[..], data) != sig { return Err(EncryptionError::Signature); } let mut cipher = Salsa20::new(&cipher_key, &calc_nonce(nonce)); cipher.decrypt(data); Ok(()) } } #[derive(Debug)] pub struct StandardEncryptionAcceptor { preshared_key: Option<SecStr>, } impl StandardEncryptionAcceptor { pub fn new(preshared_key: Option<SecStr>) -> Self { Self { preshared_key } } } impl EncryptionAcceptor for StandardEncryptionAcceptor { type Encryption = StandardEncryption; fn accept(&mut self, client: PublicKey) -> Result<Self::Encryption, EncryptionError> { let psk = self.preshared_key.clone(); StandardEncryption::new_with_ephemeral(false, psk).and_then(|mut encryption| { encryption.handshake(client)?; Ok(encryption) }) } } #[derive(Debug)] struct StreamKeys { client_mac: EncryptionMac, server_mac: EncryptionMac, client_write: EncryptionKey, server_write: EncryptionKey, } impl StreamKeys { fn get_write_keys(&self, is_client: bool) -> (EncryptionKey, EncryptionMac) { if is_client { (self.client_write, self.client_mac) } else { (self.server_write, self.server_mac) } } fn get_read_keys(&self, is_client: bool) -> (EncryptionKey, EncryptionMac) { self.get_write_keys(!is_client) } fn from_shared(key: &[u8]) -> Self { let mut hash = Sha3_256::new(); hash.update(key); hash.update("client_write_key"); let client_write = hash.finalize_reset(); hash.update(key); hash.update("client_mac_key"); let client_mac = hash.finalize_reset(); hash.update(key); hash.update("server_write_key"); let server_write = hash.finalize_reset(); hash.update(key); hash.update("server_mac_key"); let server_mac = hash.finalize(); Self { client_write, server_write, client_mac, server_mac, } } } fn calc_nonce(nonce: [u8; 2]) -> EncryptionNonce { let mut nonce_array = [0u8; 8]; nonce_array[6] = nonce[0]; nonce_array[7] = nonce[1]; nonce_array.into() } fn calc_authenticator( is_client: bool, for_client: bool, pubkey_self: &[u8], pubkey_peer: &[u8], shared_key: &[u8], preshared_key: Option<&[u8]>, ) -> Authenticator { let mut hash = Sha3_256::new(); if for_client { hash.update("client"); } else { hash.update("server"); } hash.update(shared_key); if is_client { hash.update(pubkey_self); hash.update(pubkey_peer); } else { hash.update(pubkey_peer); hash.update(pubkey_self); } if let Some(preshared_key) = preshared_key { hash.update(preshared_key); } hash.finalize() } fn calc_signature( head: &SessionHeader, nonce: &[u8], mac_key: &[u8], ciphertext: &[u8], ) -> [u8; 6] { let mut head_bytes = [0u8; SessionHeader::len()]; head.encode(&mut &mut head_bytes[..]); let mut hash = Sha3_256::new(); hash.update(mac_key); hash.update(&head_bytes[..]); hash.update(nonce); hash.update(ciphertext); let res = hash.finalize(); [res[0], res[1], res[2], res[3], res[4], res[5]] } #[cfg(test)] mod tests { use super::*; use crate::packet::PacketKind; #[test] fn test_basic() { let mut client = 
StandardEncryption::new_with_ephemeral(true, None).expect("client enc"); let mut server = StandardEncryption::new_with_ephemeral(false, None).expect("server enc"); server .handshake(client.public_key()) .expect("client to server handshake"); client .handshake(server.public_key()) .expect("server to client handshake"); server .authenticate(client.authenticator()) .expect("client to server auth"); client .authenticate(server.authenticator()) .expect("server to client auth"); let header = SessionHeader::new(1, PacketKind::SYN, 2); let mut args = [0u8; 8]; let mut data = [1, 2, 3, 5]; client .encrypt(&header, &mut args[..], &mut data[..]) .expect("encrypt"); assert_ne!(data, [1, 2, 3, 5]); server .decrypt(&header, &mut args[..], &mut data[..]) .expect("decrypt"); assert_eq!(data, [1, 2, 3, 5]); } }
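The StreamKeys::from_shared constructor shown above turns a single ECDH shared secret into four independent 32-byte values by hashing the secret together with a direction label. The following is a hedged sketch of one derivation step on its own, using the label strings from the listing; the helper name is illustrative and the shared secret is a placeholder.

use sha3::{Digest, Sha3_256};

// Derive one 32-byte key as SHA3-256(shared_secret || label), the same
// construction StreamKeys::from_shared applies once per label.
fn derive_key(shared_secret: &[u8], label: &str) -> [u8; 32] {
    let mut hash = Sha3_256::new();
    hash.update(shared_secret);
    hash.update(label);
    let digest = hash.finalize();
    let mut key = [0u8; 32];
    key.copy_from_slice(&digest);
    key
}

fn main() {
    // Placeholder secret; in the listing it comes from agree_ephemeral over ECDH P-256.
    let secret = b"example shared secret";
    for label in ["client_write_key", "client_mac_key", "server_write_key", "server_mac_key"] {
        println!("{}: {:02x?}", label, derive_key(secret, label));
    }
}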
= self .peer_authenticator .expect("peer authenticator consumed or not init"); if constant_time_eq(&valid[..], &peer[..]) { Ok(()) } else { Err(EncryptionError::Authentication) } } fn encrypt( &mut self, head: &SessionHeader, mut args: &mut [u8], data: &mut [u8], ) -> Resu
random
[ { "content": "pub fn decode_iter<I>(iter: I, skip_ignored: bool) -> impl Iterator<Item = Result<u8, DecodeError>>\n\nwhere\n\n I: Iterator<Item = u8>,\n\n{\n\n let mut iter = iter\n\n .fuse()\n\n .map(decode_nibble)\n\n .filter_map(move |res| match res {\n\n NibbleResult::Ignored(_) if skip_ignored => None,\n\n other => Some(other),\n\n })\n\n .map(|res| match res {\n\n NibbleResult::Value(v) => Ok(v),\n\n NibbleResult::Ignored(v) => Err(DecodeError::InvalidNibble(v)),\n\n NibbleResult::Invalid(v) => Err(DecodeError::InvalidNibble(v)),\n\n });\n\n\n\n iter::from_fn(move || {\n\n iter.next_tuple()\n\n .map(|(hr, lr)| Ok(join_halves(hr?, lr?)))\n\n .or_else(|| iter.next().map(|_| Err(DecodeError::IncompleteByte)))\n\n })\n\n}\n\n\n", "file_path": "dnscat/src/util/hex.rs", "rank": 4, "score": 188922.10552287084 }, { "content": "#[inline]\n\npub fn be_u16(bytes: &mut Bytes) -> Result<u16, Needed> {\n\n require_size_of::<u16>(bytes)?;\n\n Ok(bytes.get_u16())\n\n}\n\n\n", "file_path": "dnscat/src/util/parse.rs", "rank": 6, "score": 162091.83675072296 }, { "content": "#[inline]\n\npub fn be_u8(bytes: &mut Bytes) -> Result<u8, Needed> {\n\n require_size_of::<u8>(bytes)?;\n\n Ok(bytes.get_u8())\n\n}\n\n\n", "file_path": "dnscat/src/util/parse.rs", "rank": 7, "score": 159999.9359199278 }, { "content": "pub fn encode_iter<I>(iter: I) -> impl Iterator<Item = u8>\n\nwhere\n\n I: Iterator<Item = u8>,\n\n{\n\n iter.flat_map(|byte| {\n\n let (high, low) = split_halves(byte);\n\n GenericArray::from([encode_nibble(high), encode_nibble(low)])\n\n })\n\n}\n\n\n", "file_path": "dnscat/src/util/hex.rs", "rank": 8, "score": 148219.15973738654 }, { "content": "/// Used to validate any part of a packet is always less than the max\n\n/// size. We care that the length fits within a `u8` safetly.\n\nfn as_valid_len(len: usize) -> u8 {\n\n assert!((len <= u8::max_value() as usize));\n\n len as u8\n\n}\n\n\n\n/// Packet ID (`u16`).\n\npub type PacketId = u16;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Packet<T>\n\nwhere\n\n T: PacketBody,\n\n{\n\n pub head: T::Head,\n\n pub body: T,\n\n}\n\n\n\nimpl<T> Packet<T>\n\nwhere\n\n T: PacketBody,\n", "file_path": "dnscat/src/packet/mod.rs", "rank": 9, "score": 139924.0890233777 }, { "content": "fn block_data_iter<'a>(data: &'a [u8]) -> impl FnMut() -> u8 + 'a {\n\n let mut data = data.iter().copied();\n\n move || data.next().unwrap_or(0)\n\n}\n\n\n\n//////////////////////////////////////////\n\n\n", "file_path": "dnscat/src/transport/split.rs", "rank": 10, "score": 134636.79657451232 }, { "content": "#[inline]\n\npub fn split_to(bytes: &mut Bytes, len: usize) -> Result<Bytes, Needed> {\n\n require_len(bytes, len)?;\n\n Ok(bytes.split_to(len))\n\n}\n\n\n", "file_path": "dnscat/src/util/parse.rs", "rank": 11, "score": 127171.62601043904 }, { "content": "#[inline]\n\npub fn split_to_array<N: ArrayLength<u8>>(\n\n bytes: &mut Bytes,\n\n) -> Result<GenericArray<u8, N>, Needed> {\n\n let bytes = split_to(bytes, size_of::<N::ArrayType>())?;\n\n Ok(GenericArray::clone_from_slice(&bytes[..]))\n\n}\n", "file_path": "dnscat/src/util/parse.rs", "rank": 12, "score": 120871.96415094714 }, { "content": "#[inline]\n\nfn decode_nibble(nibble: u8) -> NibbleResult {\n\n if nibble > 127 {\n\n return NibbleResult::Invalid(nibble);\n\n }\n\n assert!(nibble <= 127);\n\n match HEX_TO_DEC_NIBBLE[nibble as usize] {\n\n SKIP => NibbleResult::Ignored(nibble),\n\n INVD => NibbleResult::Invalid(nibble),\n\n value => NibbleResult::Value(value),\n\n }\n\n}\n\n\n", "file_path": 
"dnscat/src/util/hex.rs", "rank": 14, "score": 110127.34648923707 }, { "content": "pub trait EncryptionAcceptor {\n\n type Encryption: Encryption;\n\n\n\n fn accept(&mut self, client: PublicKey) -> Result<Self::Encryption, EncryptionError>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum NoEncryption {}\n\n\n\nimpl Encryption for NoEncryption {\n\n fn args_size(&self) -> u8 {\n\n unreachable!()\n\n }\n\n\n\n fn public_key(&self) -> PublicKey {\n\n unreachable!()\n\n }\n\n\n\n fn authenticator(&self) -> Authenticator {\n\n unreachable!()\n", "file_path": "dnscat/src/encryption/mod.rs", "rank": 16, "score": 107984.00659546797 }, { "content": "pub fn get_system_dns_server() -> Result<Option<SocketAddr>, io::Error> {\n\n read_system_conf().map(|(config, _)| {\n\n config\n\n .name_servers()\n\n .iter()\n\n .filter(|server| server.protocol == Protocol::Udp)\n\n .map(|server| server.socket_addr)\n\n .next()\n\n })\n\n}\n", "file_path": "dnscat/src/transport/dns/resolver.rs", "rank": 17, "score": 106405.78914371718 }, { "content": "#[inline]\n\nfn split_halves(byte: u8) -> (u8, u8) {\n\n (byte >> 4, byte & 0x0F)\n\n}\n\n\n", "file_path": "dnscat/src/util/hex.rs", "rank": 18, "score": 105449.44227677594 }, { "content": "pub fn nt_string<E>(bytes: &mut Bytes) -> Result<StringBytes, E>\n\nwhere\n\n E: From<NoNullTermError>,\n\n E: From<Utf8Error>,\n\n{\n\n let slice_len = {\n\n let mut parts = bytes.split(|x| *x == 0);\n\n let slice = parts.next().unwrap();\n\n if parts.next().is_none() {\n\n return Err(NoNullTermError.into());\n\n }\n\n slice.len()\n\n };\n\n let string = StringBytes::from_utf8(bytes.split_to(slice_len))?;\n\n bytes.advance(1);\n\n Ok(string)\n\n}\n\n\n", "file_path": "dnscat/src/util/parse.rs", "rank": 19, "score": 104114.26381833428 }, { "content": "pub fn encode_into_buf<B: BufMut>(buf: &mut B, src: &[u8]) {\n\n for nibble in encode_iter(src.iter().copied()) {\n\n buf.put_u8(nibble);\n\n }\n\n}\n\n\n", "file_path": "dnscat/src/util/hex.rs", "rank": 20, "score": 103052.13503688812 }, { "content": "#[inline]\n\nfn join_halves(high: u8, low: u8) -> u8 {\n\n low | (high << 4)\n\n}\n\n\n", "file_path": "dnscat/src/util/hex.rs", "rank": 21, "score": 102302.54197224425 }, { "content": "#[inline]\n\nfn encode_nibble(nibble: u8) -> u8 {\n\n assert!(nibble <= 0x0F, \"valid nibble range\");\n\n DEC_TO_HEX_NIBBLE[nibble as usize]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use bytes::BytesMut;\n\n\n\n const TEST_BYTES_ENCODED: &[u8] = b\"deadbeef\";\n\n const TEST_BYTES_DECODED: &[u8] = &[0xDE, 0xAD, 0xBE, 0xEF];\n\n\n\n #[test]\n\n fn test_join_halves() {\n\n assert_eq!(join_halves(0x00, 0x00), 0x00);\n\n assert_eq!(join_halves(0x00, 0x0F), 0x0F);\n\n assert_eq!(join_halves(0x0F, 0x00), 0xF0);\n\n assert_eq!(join_halves(0x0F, 0x0F), 0xFF);\n\n }\n", "file_path": "dnscat/src/util/hex.rs", "rank": 22, "score": 101181.44657761589 }, { "content": "#[inline]\n\nfn require_len(bytes: &Bytes, len: usize) -> Result<(), Needed> {\n\n if bytes.len() < len {\n\n Err(Needed(len - bytes.len()))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "dnscat/src/util/parse.rs", "rank": 23, "score": 101083.45227879142 }, { "content": "pub trait Encryption {\n\n fn args_size(&self) -> u8;\n\n\n\n fn public_key(&self) -> PublicKey;\n\n\n\n fn authenticator(&self) -> Authenticator;\n\n\n\n fn handshake(&mut self, peer: PublicKey) -> Result<(), EncryptionError>;\n\n\n\n fn authenticate(&mut self, peer: Authenticator) -> Result<(), EncryptionError>;\n\n\n\n fn encrypt(\n\n &mut self,\n\n head: 
&SessionHeader,\n\n args: &mut [u8],\n\n data: &mut [u8],\n\n ) -> Result<(), EncryptionError>;\n\n\n\n fn decrypt(\n\n &mut self,\n\n head: &SessionHeader,\n\n args: &[u8],\n\n data: &mut [u8],\n\n ) -> Result<(), EncryptionError>;\n\n}\n\n\n", "file_path": "dnscat/src/encryption/mod.rs", "rank": 24, "score": 87644.42009019086 }, { "content": "pub fn decode_into_buf<B: BufMut>(\n\n buf: &mut B,\n\n src: &[u8],\n\n skip_ignored: bool,\n\n) -> Result<(), DecodeError> {\n\n for result in decode_iter(src.iter().copied(), skip_ignored) {\n\n buf.put_u8(result?)\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "dnscat/src/util/hex.rs", "rank": 25, "score": 83385.05333720343 }, { "content": "#[inline]\n\nfn require_size_of<T>(bytes: &Bytes) -> Result<(), Needed> {\n\n require_len(bytes, size_of::<T>())\n\n}\n\n\n", "file_path": "dnscat/src/util/parse.rs", "rank": 26, "score": 80000.38698384966 }, { "content": "#[derive(Debug)]\n\nstruct ClientOpts {\n\n min_delay: Duration,\n\n max_delay: Duration,\n\n random_delay: bool,\n\n retransmit_backoff: bool,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Client<T, E = (), R = ThreadRng>\n\nwhere\n\n T: Transport<LazyPacket>,\n\n{\n\n transport: T,\n\n session: Session<E, R>,\n\n options: ClientOpts,\n\n exchange: Option<Exchange>,\n\n poll_delay: Option<Delay>,\n\n send_buf: Bytes,\n\n recv_buf: Bytes,\n\n send_task: Option<Waker>,\n", "file_path": "dnscat/src/client/mod.rs", "rank": 28, "score": 54232.32587022885 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "dnscat-server/src/main.rs", "rank": 29, "score": 50261.04910398787 }, { "content": "/// Synchronously encode into a buffer.\n\npub trait Encode {\n\n /// Encode into a `BufMut` buffer.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if self does not have enough capacity to encode into.\n\n fn encode<B: BufMut + ?Sized>(&self, buf: &mut B);\n\n}\n\n\n", "file_path": "dnscat/src/util/encdec.rs", "rank": 30, "score": 48744.7389709286 }, { "content": "pub trait DnsEndpoint {\n\n fn supported_queries() -> &'static [RecordType];\n\n\n\n /// Returns the max size for request data.\n\n fn max_request_size(&self) -> usize;\n\n\n\n /// Build an endpoint request given data.\n\n fn build_request(&mut self, data: Bytes) -> Result<DnsEndpointRequest, DnsEndpointError>;\n\n\n\n /// Parse an endpoint request into data.\n\n fn parse_request(&mut self, req: DnsEndpointRequest) -> Result<Bytes, DnsEndpointError>;\n\n\n\n /// Build a MX response given data.\n\n fn build_mx_response(&mut self, data: Bytes) -> Result<Name, DnsEndpointError>;\n\n\n\n /// Parse a MX response into data.\n\n fn parse_mx_response(&mut self, name: Name) -> Result<Bytes, DnsEndpointError>;\n\n\n\n /// Build a CNAME response given data.\n\n fn build_cname_response(&mut self, data: Bytes) -> Result<Name, DnsEndpointError>;\n", "file_path": "dnscat/src/transport/dns/endpoint.rs", "rank": 31, "score": 46558.18479942616 }, { "content": "/// Trait implemented for split datagram block structures.\n\npub trait SplitDatagramBlock {\n\n fn new_head(seq: u8, len: u8, data: &[u8]) -> Self;\n\n\n\n fn new_tail(len: u8, data: &[u8]) -> Self;\n\n\n\n /// Get the total size of the block.\n\n fn len(&self) -> usize;\n\n\n\n /// Get the `SEQ` field from the block.\n\n fn seq_field(&self) -> u8;\n\n\n\n /// Get the `LEN` field, assuming it is a head block.\n\n fn len_field(&self) -> u8;\n\n\n\n // Get the `DATA` field.\n\n fn write_data_field_into<B: BufMut>(&self, buf: &mut B, head: bool);\n\n\n\n /// Get the header length of the 
block.\n\n #[inline]\n\n fn header_len(&self, head: bool) -> usize {\n", "file_path": "dnscat/src/transport/split.rs", "rank": 32, "score": 46558.18479942616 }, { "content": "/// Synchronously decode from a buffer.\n\npub trait Decode: Sized {\n\n /// Decode error type.\n\n type Error: Fail;\n\n\n\n /// Decode from a `Bytes` buffer.\n\n ///\n\n /// Returns the decoded type on success or a decode error on failure.\n\n fn decode(buf: &mut Bytes) -> Result<Self, Self::Error>;\n\n}\n", "file_path": "dnscat/src/util/encdec.rs", "rank": 33, "score": 46286.27129024758 }, { "content": "pub trait Transport<D>\n\nwhere\n\n D: Datagram,\n\n{\n\n type Error: Fail;\n\n\n\n /// Poll receiving a datagram.\n\n fn poll_recv(&mut self, cx: &mut Context<'_>) -> Poll<Result<D, Self::Error>>;\n\n\n\n /// Poll sending a datagram.\n\n fn poll_send(&mut self, cx: &mut Context<'_>, datagram: D) -> Poll<Result<(), Self::Error>>;\n\n\n\n /// Returns the max datagram size this transport supports.\n\n fn max_datagram_size(&self) -> usize;\n\n}\n", "file_path": "dnscat/src/transport/mod.rs", "rank": 34, "score": 46286.27129024758 }, { "content": "fn retransmit_delay<E, R>(\n\n opts: &ClientOpts,\n\n session: &mut Session<E, R>,\n\n attempt: usize,\n\n) -> Duration\n\nwhere\n\n R: Rng,\n\n E: Encryption,\n\n{\n\n if opts.random_delay {\n\n session.random().gen_range(opts.min_delay..opts.max_delay)\n\n } else if opts.retransmit_backoff {\n\n Duration::from_secs(2u64.pow(attempt as u32))\n\n } else {\n\n opts.min_delay\n\n }\n\n}\n\n\n", "file_path": "dnscat/src/client/exchange.rs", "rank": 35, "score": 44156.425409672716 }, { "content": "pub trait PacketBody: Sized + Encode {\n\n type Head: PacketHead;\n\n\n\n /// Decode a packet kind.\n\n fn decode_body(head: &Self::Head, b: &mut Bytes) -> Result<Self, PacketDecodeError>;\n\n}\n\n\n\n///////////////////////////////////////////////////////////////////////////////\n\n// Packet Header\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct PacketHeader {\n\n pub id: PacketId,\n\n pub kind: PacketKind,\n\n}\n\n\n\nimpl PacketHeader {\n\n pub const fn len() -> usize {\n\n mem::size_of::<PacketId>() + mem::size_of::<PacketKind>()\n\n }\n", "file_path": "dnscat/src/packet/mod.rs", "rank": 36, "score": 43106.73259971931 }, { "content": "pub trait Datagram: Encode + Decode + Send + 'static {}\n\n\n\nimpl<T> Datagram for T where T: Encode + Decode + Send + 'static {}\n\n\n\n#[derive(Debug, Fail)]\n\npub enum DatagramError<D: Fail> {\n\n #[fail(display = \"Decode error: {}\", _0)]\n\n Decode(D),\n\n #[fail(display = \"Datagram underflow\")]\n\n Underflow,\n\n #[fail(display = \"Hex decode error: {}\", _0)]\n\n Hex(hex::DecodeError),\n\n #[fail(display = \"Split datagram error: {}\", _0)]\n\n Split(SplitDatagramError),\n\n}\n\n\n\nimpl<D: Fail> From<hex::DecodeError> for DatagramError<D> {\n\n fn from(err: hex::DecodeError) -> Self {\n\n Self::Hex(err)\n\n }\n\n}\n\n\n\nimpl<D: Fail> From<SplitDatagramError> for DatagramError<D> {\n\n fn from(err: SplitDatagramError) -> Self {\n\n Self::Split(err)\n\n }\n\n}\n\n\n", "file_path": "dnscat/src/transport/mod.rs", "rank": 37, "score": 40293.73909241726 }, { "content": "pub trait PacketHead: Sized + Encode + AsRef<PacketHeader> {\n\n fn decode_head(head: PacketHeader, b: &mut Bytes) -> Result<Self, PacketDecodeError>;\n\n}\n\n\n\n///////////////////////////////////////////////////////////////////////////////\n\n// Packet Body\n\n\n", "file_path": "dnscat/src/packet/mod.rs", "rank": 38, "score": 38647.66364983669 }, { "content": " 
}\n\n\n\n fn handshake(&mut self, _peer: PublicKey) -> Result<(), EncryptionError> {\n\n unreachable!()\n\n }\n\n\n\n fn authenticate(&mut self, _peer: Authenticator) -> Result<(), EncryptionError> {\n\n unreachable!()\n\n }\n\n\n\n fn encrypt(\n\n &mut self,\n\n _head: &SessionHeader,\n\n _args: &mut [u8],\n\n _data: &mut [u8],\n\n ) -> Result<(), EncryptionError> {\n\n unreachable!()\n\n }\n\n\n\n fn decrypt(\n", "file_path": "dnscat/src/encryption/mod.rs", "rank": 45, "score": 32245.71492711364 }, { "content": "#[cfg(feature = \"encryption\")]\n\nmod standard;\n\n\n\nuse failure::Fail;\n\nuse generic_array::typenum::{U32, U64};\n\nuse generic_array::GenericArray;\n\n\n\nuse crate::packet::SessionHeader;\n\n\n\n#[cfg(feature = \"encryption\")]\n\npub use self::standard::{StandardEncryption, StandardEncryptionAcceptor};\n\n\n\npub type PublicKey = GenericArray<u8, U64>;\n\npub type Authenticator = GenericArray<u8, U32>;\n\n\n\n#[derive(Debug, Fail)]\n\npub enum EncryptionError {\n\n #[fail(display = \"Encryption needs to be renegotiated\")]\n\n Renegotiate,\n\n #[fail(display = \"Failed to agree on a shared secret\")]\n", "file_path": "dnscat/src/encryption/mod.rs", "rank": 46, "score": 32245.416495177815 }, { "content": " &mut self,\n\n _head: &SessionHeader,\n\n _args: &[u8],\n\n _data: &mut [u8],\n\n ) -> Result<(), EncryptionError> {\n\n unreachable!()\n\n }\n\n}\n\n\n\nimpl EncryptionAcceptor for NoEncryption {\n\n type Encryption = NoEncryption;\n\n\n\n fn accept(&mut self, _client: PublicKey) -> Result<Self::Encryption, EncryptionError> {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": "dnscat/src/encryption/mod.rs", "rank": 48, "score": 32243.940834016572 }, { "content": " Handshake,\n\n #[fail(display = \"Authentication failed\")]\n\n Authentication,\n\n #[fail(display = \"Keypair generation failed\")]\n\n Keygen,\n\n #[fail(display = \"Invalid signature\")]\n\n Signature,\n\n #[fail(display = \"{}\", _0)]\n\n Custom(&'static str),\n\n}\n\n\n", "file_path": "dnscat/src/encryption/mod.rs", "rank": 55, "score": 32228.118554417004 }, { "content": "fn transmit_delay<E, R>(opts: &ClientOpts, session: &mut Session<E, R>) -> Option<Delay>\n\nwhere\n\n R: Rng,\n\n E: Encryption,\n\n{\n\n let dur_since_last = session\n\n .last_exchange()\n\n .map(|last| Instant::now().duration_since(last))\n\n .unwrap_or(Duration::from_secs(0));\n\n\n\n let dur = if opts.random_delay {\n\n session\n\n .random()\n\n .gen_range(opts.min_delay..opts.max_delay)\n\n .checked_sub(dur_since_last)\n\n } else if dur_since_last < opts.min_delay {\n\n Some(opts.min_delay - dur_since_last)\n\n } else {\n\n None\n\n };\n\n\n\n dur.map(Delay::new)\n\n}\n", "file_path": "dnscat/src/client/exchange.rs", "rank": 56, "score": 30350.764721588326 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum NibbleResult {\n\n Value(u8),\n\n Invalid(u8),\n\n Ignored(u8),\n\n}\n\n\n", "file_path": "dnscat/src/util/hex.rs", "rank": 57, "score": 29979.714824997427 }, { "content": " /// `AUTH` encryption packet kind.\n\n AUTH = 0x01,\n\n}\n\n\n\nimpl EncBodyKind {\n\n /// Converts a encryption packet kind value to a supported variant.\n\n pub fn from_u16(kind: u16) -> Option<Self> {\n\n match kind {\n\n 0x00 => Some(Self::INIT),\n\n 0x01 => Some(Self::AUTH),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::encryption::{Authenticator, PublicKey};\n\n use generic_array::sequence::GenericSequence;\n", "file_path": "dnscat/src/packet/session.rs", "rank": 58, "score": 24.920978247591204 }, { 
"content": " pub fn from_code(code: u8) -> Result<Self, PacketDecodeError> {\n\n match code {\n\n 0x00 => Ok(Self::SYN),\n\n 0x01 => Ok(Self::MSG),\n\n 0x02 => Ok(Self::FIN),\n\n 0x03 => Ok(Self::ENC),\n\n 0xFF => Ok(Self::PING),\n\n code => Err(PacketDecodeError::UnexpectedKind(code)),\n\n }\n\n }\n\n\n\n pub fn is_session(self) -> bool {\n\n match self {\n\n Self::SYN | Self::MSG | Self::FIN | Self::ENC => true,\n\n Self::PING => false,\n\n }\n\n }\n\n}\n\n\n\nimpl From<PacketKind> for u8 {\n", "file_path": "dnscat/src/packet/mod.rs", "rank": 59, "score": 20.202907349705967 }, { "content": "impl EncBodyVariant {\n\n /// Retrives the encryption packet kind.\n\n pub fn kind(&self) -> EncBodyKind {\n\n match self {\n\n Self::Init { .. } => EncBodyKind::INIT,\n\n Self::Auth { .. } => EncBodyKind::AUTH,\n\n }\n\n }\n\n\n\n /// Decodes a encryption packet body given the encryption packet kind.\n\n ///\n\n /// Returns a tuple of the remaining buffer not used and the decoded encryption\n\n /// packet body on success or a packet decode error on failure.\n\n pub fn decode_kind(kind: EncBodyKind, b: &mut Bytes) -> Result<Self, PacketDecodeError> {\n\n match kind {\n\n EncBodyKind::INIT => Ok(Self::Init {\n\n public_key: parse::split_to_array(b)?,\n\n }),\n\n EncBodyKind::AUTH => Ok(Self::Auth {\n\n authenticator: parse::split_to_array(b)?,\n", "file_path": "dnscat/src/packet/session.rs", "rank": 60, "score": 20.009820784578704 }, { "content": "use std::{fmt, mem};\n\n\n\nuse bytes::{Buf, BufMut, Bytes};\n\n\n\nuse crate::encryption::{Authenticator, PublicKey};\n\nuse crate::util::{parse, Decode, Encode, StringBytes};\n\n\n\nuse super::*;\n\n\n\n/// Session ID (`u16`).\n\npub type SessionId = u16;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct SessionHeader {\n\n packet: PacketHeader,\n\n pub session_id: SessionId,\n\n}\n\n\n\nimpl SessionHeader {\n\n pub const fn new(packet_id: PacketId, packet_kind: PacketKind, session_id: SessionId) -> Self {\n", "file_path": "dnscat/src/packet/session.rs", "rank": 61, "score": 19.76420176209363 }, { "content": " }),\n\n }\n\n }\n\n}\n\n\n\nimpl Encode for EncBodyVariant {\n\n fn encode<B: BufMut + ?Sized>(&self, b: &mut B) {\n\n match self {\n\n Self::Init { ref public_key } => b.put_slice(&public_key[..]),\n\n Self::Auth { ref authenticator } => b.put_slice(&authenticator[..]),\n\n }\n\n }\n\n}\n\n\n\n/// Enum of all supported encryption packet kinds.\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\n#[repr(u8)]\n\npub enum EncBodyKind {\n\n /// `INIT` encryption packet kind.\n\n INIT = 0x00,\n", "file_path": "dnscat/src/packet/session.rs", "rank": 62, "score": 18.88796020775395 }, { "content": " match self.role {\n\n SessionRole::Client => self.assert_stage(SessionStage::Uninit),\n\n SessionRole::Server => self.assert_stage(SessionStage::EncryptInit),\n\n }\n\n let encryption = self.encryption.as_ref().expect(\"client has no encryption\");\n\n let public_key = encryption.public_key();\n\n let body = EncBody::new(0, EncBodyVariant::Init { public_key });\n\n match self.role {\n\n SessionRole::Client => self.set_stage(SessionStage::EncryptInit),\n\n SessionRole::Server => self.set_stage(SessionStage::EncryptAuth),\n\n }\n\n self.mark_exchange_start();\n\n Self::build_packet(body, self.id, &mut self.random, None, self.packet_trace)\n\n }\n\n\n\n pub fn build_enc_auth(&mut self) -> Result<Packet<SessionBodyBytes>, SessionError> {\n\n self.assert_stage(SessionStage::EncryptAuth);\n\n let encryption = self.encryption.as_mut().expect(\"client has no 
encryption\");\n\n let authenticator = encryption.authenticator();\n\n let body = EncBody::new(0, EncBodyVariant::Auth { authenticator });\n", "file_path": "dnscat/src/session.rs", "rank": 63, "score": 18.040258319068904 }, { "content": "use std::borrow::Cow;\n\nuse std::fmt;\n\nuse std::ops::Deref;\n\n\n\nuse bytes::Bytes;\n\n\n\npub use std::str::{self, Utf8Error};\n\n\n\n#[derive(Clone, PartialEq)]\n\npub struct StringBytes(Bytes);\n\n\n\nimpl StringBytes {\n\n pub fn new() -> Self {\n\n Self(Bytes::new())\n\n }\n\n\n\n pub fn from_utf8(bytes: Bytes) -> Result<Self, Utf8Error> {\n\n str::from_utf8(bytes.as_ref())?;\n\n Ok(Self(bytes))\n\n }\n", "file_path": "dnscat/src/util/sbytes.rs", "rank": 64, "score": 17.98819325101192 }, { "content": " pub(crate) prefer_peer_name: bool,\n\n /// Whether or not packet bodies should be traced.\n\n pub(crate) packet_trace: bool,\n\n /// The last instant we attempted an exchange.\n\n pub(crate) last_exchange: Option<Instant>,\n\n /// The current if any attempt of exchanges.\n\n pub(crate) exchange_attempt: Option<usize>,\n\n /// The max number of retransmissions before closing.\n\n pub(crate) max_exchange_attempts: Option<usize>,\n\n}\n\n\n\nimpl<T, R> Session<T, R>\n\nwhere\n\n T: Encryption,\n\n R: Rng,\n\n{\n\n /// Returns session ID.\n\n pub fn id(&self) -> SessionId {\n\n self.id\n\n }\n", "file_path": "dnscat/src/session.rs", "rank": 65, "score": 17.690532231851112 }, { "content": "}\n\n\n\nimpl Encode for PacketHeader {\n\n fn encode<B: BufMut + ?Sized>(&self, b: &mut B) {\n\n b.put_u16(self.id);\n\n b.put_u8(self.kind.into());\n\n }\n\n}\n\n\n\nimpl Decode for PacketHeader {\n\n type Error = PacketDecodeError;\n\n\n\n fn decode(b: &mut Bytes) -> Result<Self, Self::Error> {\n\n Ok(Self {\n\n id: parse::be_u16(b)?,\n\n kind: PacketKind::decode(b)?,\n\n })\n\n }\n\n}\n\n\n", "file_path": "dnscat/src/packet/mod.rs", "rank": 66, "score": 17.3822888530176 }, { "content": " Self::parse_packet(packet, self.encryption.as_mut(), self.packet_trace)?;\n\n self.validate_exchange(body.seq(), body.ack(), body.data_len())?;\n\n let data = body.into_data();\n\n if data.is_empty() {\n\n Ok(None)\n\n } else {\n\n Ok(Some(data))\n\n }\n\n }\n\n\n\n fn handle_fin(&mut self, packet: Packet<SessionBodyBytes>) -> Result<(), SessionError> {\n\n let body: FinBody =\n\n Self::parse_packet(packet, self.encryption.as_mut(), self.packet_trace)?;\n\n self.close_reason = Some(body.reason().to_owned().into());\n\n Ok(())\n\n }\n\n\n\n ///////////////////////////////////////////////////////////////////////////\n\n\n\n pub fn build_enc_init(&mut self) -> Result<Packet<SessionBodyBytes>, SessionError> {\n", "file_path": "dnscat/src/session.rs", "rank": 67, "score": 17.246891875929446 }, { "content": " EncBodyVariant::Auth { authenticator } => authenticator,\n\n };\n\n encryption.authenticate(peer_auth)?;\n\n Ok(())\n\n } else {\n\n Err(SessionError::EncryptionMismatch)\n\n }\n\n }\n\n\n\n fn handle_syn(&mut self, packet: Packet<SessionBodyBytes>) -> Result<(), SessionError> {\n\n let body: SynBody =\n\n Self::parse_packet(packet, self.encryption.as_mut(), self.packet_trace)?;\n\n self.init_from_peer_syn(body, self.prefer_peer_name)\n\n }\n\n\n\n fn handle_msg(\n\n &mut self,\n\n packet: Packet<SessionBodyBytes>,\n\n ) -> Result<Option<Bytes>, SessionError> {\n\n let body: MsgBody =\n", "file_path": "dnscat/src/session.rs", "rank": 68, "score": 17.18912404921871 }, { "content": " session_id: Option<u16>,\n\n session_name: Cow<'static, str>,\n\n initial_sequence: Option<u16>,\n\n 
is_command: bool,\n\n min_delay: Duration,\n\n max_delay: Duration,\n\n random_delay: bool,\n\n prefer_server_name: bool,\n\n recv_queue_size: usize,\n\n max_retransmits: Option<usize>,\n\n retransmit_backoff: bool,\n\n packet_trace: bool,\n\n}\n\n\n\nimpl<R> ClientBuilder<R>\n\nwhere\n\n R: Rng,\n\n{\n\n pub fn default_with_random(random: R) -> Self {\n\n Self {\n", "file_path": "dnscat/src/client/builder.rs", "rank": 69, "score": 17.18054426212041 }, { "content": " // Integer division, rounding up\n\n #[inline]\n\n fn u8_rounded_up_div(num: u8, den: u8) -> u8 {\n\n let val = (num as u16 + (den as u16 - 1)) / den as u16;\n\n val as u8\n\n }\n\n}\n\n\n\nimpl Default for Labeller {\n\n fn default() -> Self {\n\n Self::exact(LABEL_MAX_SIZE)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use itertools::Itertools;\n\n\n\n #[test]\n", "file_path": "dnscat/src/transport/dns/name.rs", "rank": 70, "score": 17.084859553539946 }, { "content": " if self > next {\n\n let steps_to_max = u16::max_value() - self.0;\n\n steps_to_max + next.0 + 1\n\n } else {\n\n next.0 - self.0\n\n }\n\n }\n\n\n\n pub fn add_data(self, len: u8) -> Self {\n\n Self(self.0.wrapping_add(len as u16))\n\n }\n\n}\n\n\n\nimpl From<u16> for Sequence {\n\n fn from(seq: u16) -> Self {\n\n Self(seq)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Sequence {\n", "file_path": "dnscat/src/packet/session.rs", "rank": 71, "score": 16.971380272258493 }, { "content": " encryption.handshake(peer_pub_key)?;\n\n Ok(())\n\n } else {\n\n Err(SessionError::EncryptionMismatch)\n\n }\n\n }\n\n\n\n fn handle_encrypt_auth(\n\n &mut self,\n\n packet: Packet<SessionBodyBytes>,\n\n ) -> Result<(), SessionError> {\n\n if let Some(ref mut encryption) = self.encryption {\n\n let body: EncBody = Self::parse_packet(packet, Some(encryption), self.packet_trace)?;\n\n let peer_auth = match body.into_body() {\n\n EncBodyVariant::Init { .. } => {\n\n return Err(SessionError::UnexpectedEncKind {\n\n expected: EncBodyKind::AUTH,\n\n actual: EncBodyKind::INIT,\n\n })\n\n }\n", "file_path": "dnscat/src/session.rs", "rank": 72, "score": 16.767978221623604 }, { "content": " /// If set, will re-transmit forever until a server sends a\n\n /// valid response.\n\n #[structopt(long, conflicts_with = \"max_retransmits\")]\n\n retransmit_forever: bool,\n\n\n\n /// If set, will exponentially backoff in delay from\n\n /// re-attempting a transmit.\n\n #[structopt(long, conflicts_with = \"retransmit_forever\")]\n\n retransmit_backoff: bool,\n\n\n\n /// Set the shared secret used for encryption.\n\n #[structopt(long)]\n\n secret: Option<String>,\n\n\n\n /// If set, will turn off encryption/authentication.\n\n #[structopt(long, conflicts_with = \"secret\")]\n\n insecure: bool,\n\n\n\n /// Set the session ID manually.\n\n #[structopt(long)]\n", "file_path": "dnscat/src/cli/client.rs", "rank": 73, "score": 16.74888844551315 }, { "content": " }\n\n Err(err) => Err(err),\n\n }\n\n }\n\n\n\n fn handle_encrypt_init(\n\n &mut self,\n\n packet: Packet<SessionBodyBytes>,\n\n ) -> Result<(), SessionError> {\n\n if let Some(ref mut encryption) = self.encryption {\n\n let body: EncBody = Self::parse_packet(packet, None, self.packet_trace)?;\n\n let peer_pub_key = match body.into_body() {\n\n EncBodyVariant::Init { public_key } => public_key,\n\n EncBodyVariant::Auth { .. 
} => {\n\n return Err(SessionError::UnexpectedEncKind {\n\n expected: EncBodyKind::INIT,\n\n actual: EncBodyKind::AUTH,\n\n })\n\n }\n\n };\n", "file_path": "dnscat/src/session.rs", "rank": 74, "score": 16.591695021856776 }, { "content": "\n\nimpl<R> BasicDnsEndpoint<R>\n\nwhere\n\n R: Rng,\n\n{\n\n pub fn new(\n\n query_types: Vec<RecordType>,\n\n name_encoder: NameEncoder,\n\n random: R,\n\n ) -> Result<Self, DnsEndpointError> {\n\n assert_ne!(query_types.len(), 0);\n\n let unsupported_query = query_types\n\n .iter()\n\n .find(|query| !Self::supported_queries().contains(query));\n\n if let Some(query) = unsupported_query {\n\n return Err(DnsEndpointError::UnsupportedQuery(*query));\n\n }\n\n let max_request_size = name_encoder.max_hex_data() as usize;\n\n Ok(Self {\n\n random,\n", "file_path": "dnscat/src/transport/dns/endpoint.rs", "rank": 75, "score": 16.46458652063832 }, { "content": " /// Session is uninitialized.\n\n Uninit,\n\n /// Session is initialising encryption.\n\n EncryptInit,\n\n /// Session is authenticating encryption.\n\n EncryptAuth,\n\n /// Session is initialising session.\n\n SessionInit,\n\n /// Session is sending data.\n\n Send,\n\n /// Session is receiving data.\n\n Recv,\n\n /// Session is closed.\n\n Closed,\n\n}\n\n\n\nimpl SessionStage {\n\n pub fn is_established(self) -> bool {\n\n use SessionStage::*;\n\n match self {\n", "file_path": "dnscat/src/session.rs", "rank": 76, "score": 16.34106688022525 }, { "content": " len: usize,\n\n}\n\n\n\nimpl LowerAsciiName {\n\n pub fn len(&self) -> usize {\n\n self.len\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.len == 0\n\n }\n\n\n\n fn from_name_unchecked(name: Name) -> Self {\n\n let len = name.len();\n\n let name = name.to_lowercase();\n\n Self { name, len }\n\n }\n\n}\n\n\n\nimpl AsRef<Name> for LowerAsciiName {\n", "file_path": "dnscat/src/transport/dns/name.rs", "rank": 77, "score": 16.266724109671735 }, { "content": " self.data = data.into();\n\n as_valid_len(self.data.len())\n\n }\n\n}\n\n\n\nimpl Encode for PingBody {\n\n fn encode<B: BufMut + ?Sized>(&self, b: &mut B) {\n\n b.put_slice(self.data.as_bytes());\n\n b.put_u8(0);\n\n }\n\n}\n\n\n\nimpl PacketBody for PingBody {\n\n type Head = PingHeader;\n\n\n\n fn decode_body(_head: &Self::Head, b: &mut Bytes) -> Result<Self, PacketDecodeError> {\n\n let data = parse::nt_string::<PacketDecodeError>(b)?;\n\n let mut ping = Self::new();\n\n ping.set_data(data);\n\n Ok(ping)\n\n }\n\n}\n", "file_path": "dnscat/src/packet/ping.rs", "rank": 78, "score": 16.193488356183742 }, { "content": "\n\n fn validate_exchange(\n\n &mut self,\n\n peer_seq: Sequence,\n\n peer_ack: Sequence,\n\n recv_len: u8,\n\n ) -> Result<(), SessionError> {\n\n // We first validate that the peer acknowledged\n\n // the data (if any) we sent.\n\n if peer_ack != self.self_seq_pending {\n\n return Err(SessionError::UnexpectedPeerAck {\n\n expected: self.self_seq_pending,\n\n actual: peer_ack,\n\n });\n\n }\n\n // We now validate we are current with the peer's\n\n // current sequence.\n\n if peer_seq != self.peer_seq {\n\n return Err(SessionError::UnexpectedPeerSeq {\n\n expected: self.peer_seq,\n", "file_path": "dnscat/src/session.rs", "rank": 79, "score": 15.702164199597771 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct BasicDnsEndpoint<R: Rng = OsRng> {\n\n random: R,\n\n name_encoder: NameEncoder,\n\n query_types: Vec<RecordType>,\n\n max_request_size: usize,\n\n}\n\n\n\nimpl BasicDnsEndpoint {\n\n pub fn new_with_defaults(\n\n query_types: Vec<RecordType>,\n\n 
constant: Name,\n\n ) -> Result<Self, DnsEndpointError> {\n\n let name_encoder = NameEncoder::new(constant, Labeller::random())?;\n\n Self::new(query_types, name_encoder, OsRng)\n\n }\n\n}\n", "file_path": "dnscat/src/transport/dns/endpoint.rs", "rank": 80, "score": 15.65992105402794 }, { "content": " (SessionHeader::len() + mem::size_of::<Sequence>() * 2) as u8\n\n }\n\n}\n\n\n\nimpl Encode for MsgBody {\n\n fn encode<B: BufMut + ?Sized>(&self, b: &mut B) {\n\n b.put_u16(self.seq.get());\n\n b.put_u16(self.ack.get());\n\n b.put_slice(&self.data[..]);\n\n }\n\n}\n\n\n\nimpl Decode for MsgBody {\n\n type Error = PacketDecodeError;\n\n\n\n fn decode(b: &mut Bytes) -> Result<Self, Self::Error> {\n\n let seq = Sequence(parse::be_u16(b)?);\n\n let ack = Sequence(parse::be_u16(b)?);\n\n let mut msg = Self::new(seq, ack);\n\n msg.set_data(b.copy_to_bytes(b.remaining()));\n", "file_path": "dnscat/src/packet/session.rs", "rank": 81, "score": 15.645710009918435 }, { "content": "}\n\n\n\nimpl EncBody {\n\n /// Constructs a new `ENC` packet.\n\n pub fn new(cryp_flags: CryptoFlags, body: EncBodyVariant) -> Self {\n\n Self { cryp_flags, body }\n\n }\n\n\n\n /// Retrives the crypto flags.\n\n ///\n\n /// # Notes\n\n ///\n\n /// This field is currently not used in the original specification.\n\n pub fn crypto_flags(&self) -> CryptoFlags {\n\n self.cryp_flags\n\n }\n\n\n\n /// Retrives the encryption packet kind.\n\n pub fn kind(&self) -> EncBodyKind {\n\n self.body.kind()\n", "file_path": "dnscat/src/packet/session.rs", "rank": 82, "score": 15.355334109059616 }, { "content": "impl Encode for FinBody {\n\n fn encode<B: BufMut + ?Sized>(&self, b: &mut B) {\n\n b.put_slice(self.reason.as_bytes());\n\n b.put_u8(0);\n\n }\n\n}\n\n\n\nimpl Decode for FinBody {\n\n type Error = PacketDecodeError;\n\n\n\n fn decode(b: &mut Bytes) -> Result<Self, Self::Error> {\n\n let reason = parse::nt_string::<PacketDecodeError>(b)?;\n\n let mut fin = Self::new();\n\n fin.set_reason(reason);\n\n Ok(fin)\n\n }\n\n}\n\n\n\nimpl PacketBody for FinBody {\n\n type Head = SessionHeader;\n", "file_path": "dnscat/src/packet/session.rs", "rank": 83, "score": 15.283352739413935 }, { "content": "impl Encode for PingHeader {\n\n fn encode<B: BufMut + ?Sized>(&self, b: &mut B) {\n\n self.packet.encode(b);\n\n b.put_u16(self.ping_id);\n\n }\n\n}\n\n\n\nimpl PacketHead for PingHeader {\n\n fn decode_head(head: PacketHeader, b: &mut Bytes) -> Result<Self, PacketDecodeError> {\n\n Ok(Self {\n\n packet: head,\n\n ping_id: parse::be_u16(b)?,\n\n })\n\n }\n\n}\n\n\n\n/// A `PING` packet body.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct PingBody {\n\n data: StringBytes,\n", "file_path": "dnscat/src/packet/ping.rs", "rank": 84, "score": 14.909805149613648 }, { "content": "\n\n /// Splits the bytes into a label iter, given a total data budget.\n\n ///\n\n /// Depending on how many labels the data will split into the data usage will\n\n /// grow, which is why have a budget to work against.\n\n ///\n\n /// Returns `None` if the labeller can not fit the data into the budget.\n\n pub fn label<'a>(\n\n &'a mut self,\n\n mut bytes: &'a [u8],\n\n budget: u8,\n\n ) -> Option<impl Iterator<Item = &'a [u8]>> {\n\n // No point continuing if it's not even in the valid range of a budget.\n\n if bytes.len() > u8::max_value() as usize {\n\n return None;\n\n }\n\n // We know it's in the valid range of a u8 now.\n\n let bytes_len = bytes.len() as u8;\n\n // Calculate the spare budget we have available.\n\n let spare_budget = 
budget.saturating_sub(bytes_len);\n", "file_path": "dnscat/src/transport/dns/name.rs", "rank": 85, "score": 14.840023306116809 }, { "content": " /// Random source.\n\n pub(crate) random: R,\n\n /// The peer sequence for receiving data.\n\n pub(crate) peer_seq: Sequence,\n\n /// This session's sequence for sending data.\n\n pub(crate) self_seq: Sequence,\n\n /// The peer sequence we expect in the next message.\n\n pub(crate) self_seq_pending: Sequence,\n\n /// Whether or not this is a command session.\n\n pub(crate) is_command: bool,\n\n /// Whether or not this is a client to server session.\n\n pub(crate) role: SessionRole,\n\n /// Session stage.\n\n pub(crate) stage: SessionStage,\n\n /// The reason the session was closing/closed.\n\n pub(crate) close_reason: Option<Cow<'static, str>>,\n\n /// The session encryption if set.\n\n pub(crate) encryption: Option<T>,\n\n /// Whether the session prefers the peer name or\n\n /// its own if it was set.\n", "file_path": "dnscat/src/session.rs", "rank": 86, "score": 14.606555351224747 }, { "content": "\n\nimpl Decode for EncBody {\n\n type Error = PacketDecodeError;\n\n\n\n fn decode(b: &mut Bytes) -> Result<Self, Self::Error> {\n\n let enc_kind = parse::be_u16(b)?;\n\n let enc_kind =\n\n EncBodyKind::from_u16(enc_kind).ok_or(PacketDecodeError::UnknownEncKind(enc_kind))?;\n\n let cryp_flags = parse::be_u16(b)?;\n\n let body = EncBodyVariant::decode_kind(enc_kind, b)?;\n\n Ok(Self::new(cryp_flags, body))\n\n }\n\n}\n\n\n\nimpl PacketBody for EncBody {\n\n type Head = SessionHeader;\n\n\n\n fn decode_body(head: &Self::Head, b: &mut Bytes) -> Result<Self, PacketDecodeError> {\n\n match head.packet.kind {\n\n PacketKind::ENC => Self::decode(b),\n", "file_path": "dnscat/src/packet/session.rs", "rank": 87, "score": 14.455642471455361 }, { "content": " if self.retransmit_forever {\n\n conn = conn.max_retransmits(None);\n\n } else {\n\n conn = conn.max_retransmits(Some(self.max_retransmits));\n\n }\n\n\n\n info!(\n\n \"connecting to `{}` using `{}`\",\n\n dns_server_addr, self.constant\n\n );\n\n\n\n let result = if self.insecure {\n\n match conn.connect_insecure(dns_client).await {\n\n Ok(client) => Ok(start_session(client, self).await),\n\n Err(err) => Err(err),\n\n }\n\n } else {\n\n let preshared_key = self.secret.clone().map(Into::into);\n\n if preshared_key.is_none() {\n\n warn!(\"no preshared secret! 
(use `--secret <secret>`)\");\n", "file_path": "dnscat/src/cli/client.rs", "rank": 88, "score": 14.34310298006872 }, { "content": " debug!(\"using peer session name\");\n\n self.name = syn.session_name().map(ToString::to_string).map(Into::into);\n\n }\n\n // Extract if the peer indicates this is a command session\n\n self.is_command = syn.is_command();\n\n // Extract the peer initial sequence\n\n self.peer_seq = syn.initial_sequence();\n\n // Woo!\n\n Ok(())\n\n }\n\n\n\n ///////////////////////////////////////////////////////////////////////////\n\n\n\n fn parse_packet<B>(\n\n packet: Packet<SessionBodyBytes>,\n\n encryption: Option<&mut T>,\n\n packet_trace: bool,\n\n ) -> Result<B, SessionError>\n\n where\n\n B: PacketBody<Head = SessionHeader>,\n", "file_path": "dnscat/src/session.rs", "rank": 89, "score": 14.234735635614342 }, { "content": "\n\n /// Returns the number of blocks in the datagram.\n\n pub fn block_count(&self) -> usize {\n\n self.blocks.len()\n\n }\n\n\n\n /// The max length of data that can be stored in a datagram.\n\n pub fn max_data_len() -> usize {\n\n u8::max_value() as usize\n\n }\n\n\n\n /// Returns a reference to the head block.\n\n ///\n\n /// Note this assumes you have sorted the blocks via `Self::sort_blocks`.\n\n ///\n\n /// Returns `SplitDatagramError::NotSorted` if not sorted or\n\n /// `SplitDatagramError::Empty` if no blocks are present.\n\n pub fn head(&self) -> Result<&T, SplitDatagramError> {\n\n if self.sorted {\n\n self.blocks.first().ok_or(SplitDatagramError::Empty)\n", "file_path": "dnscat/src/transport/split.rs", "rank": 90, "score": 14.195210563705066 }, { "content": "}\n\n\n\nimpl PingBody {\n\n /// Constructs a new `PING` packet.\n\n #[allow(clippy::new_without_default)]\n\n pub fn new() -> Self {\n\n Self {\n\n data: StringBytes::new(),\n\n }\n\n }\n\n\n\n /// Retrives the ping data.\n\n pub fn data(&self) -> &str {\n\n self.data.as_ref()\n\n }\n\n\n\n pub fn set_data<S>(&mut self, data: S) -> u8\n\n where\n\n S: Into<StringBytes>,\n\n {\n", "file_path": "dnscat/src/packet/ping.rs", "rank": 91, "score": 14.189174922796628 }, { "content": " pub fn command(mut self, value: bool) -> Self {\n\n self.is_command = value;\n\n self\n\n }\n\n\n\n pub fn prefer_server_name(mut self, value: bool) -> Self {\n\n self.prefer_server_name = value;\n\n self\n\n }\n\n\n\n pub fn recv_queue_size(mut self, size: usize) -> Self {\n\n self.recv_queue_size = size;\n\n self\n\n }\n\n\n\n pub fn packet_trace(mut self, value: bool) -> Self {\n\n self.packet_trace = value;\n\n self\n\n }\n\n\n", "file_path": "dnscat/src/client/builder.rs", "rank": 92, "score": 14.086776881082189 }, { "content": " self.packet.encode(b);\n\n b.put_u16(self.session_id);\n\n }\n\n}\n\n\n\nimpl AsRef<PacketHeader> for SessionHeader {\n\n fn as_ref(&self) -> &PacketHeader {\n\n &self.packet\n\n }\n\n}\n\n\n\nimpl PacketHead for SessionHeader {\n\n fn decode_head(head: PacketHeader, b: &mut Bytes) -> Result<Self, PacketDecodeError> {\n\n assert!(head.kind.is_session());\n\n Ok(Self {\n\n packet: head,\n\n session_id: parse::be_u16(b)?,\n\n })\n\n }\n\n}\n", "file_path": "dnscat/src/packet/session.rs", "rank": 93, "score": 14.078059963241635 }, { "content": "\n\n fn write_data_field_into<B: BufMut>(&self, buf: &mut B, head: bool) {\n\n buf.put_slice(&self.octets()[self.header_len(head)..]);\n\n }\n\n}\n\n\n\nimpl SplitDatagramBlock for Ipv6Addr {\n\n fn new_head(seq: u8, len: u8, data: &[u8]) -> Self {\n\n let mut next = block_data_iter(data);\n\n Self::new(\n\n u16::from_be_bytes([seq, 
len]),\n\n u16::from_be_bytes([next(), next()]),\n\n u16::from_be_bytes([next(), next()]),\n\n u16::from_be_bytes([next(), next()]),\n\n u16::from_be_bytes([next(), next()]),\n\n u16::from_be_bytes([next(), next()]),\n\n u16::from_be_bytes([next(), next()]),\n\n u16::from_be_bytes([next(), next()]),\n\n )\n\n }\n", "file_path": "dnscat/src/transport/split.rs", "rank": 94, "score": 13.8490713179786 }, { "content": "\n\n /// Returns session name.\n\n pub fn name(&self) -> Option<&str> {\n\n self.name.as_ref().map(AsRef::as_ref)\n\n }\n\n\n\n /// Returns `true` if this is a command session.\n\n pub fn is_command(&mut self) -> bool {\n\n self.is_command\n\n }\n\n\n\n /// Returns `true` if the session is encrypted.\n\n pub fn is_encrypted(&self) -> bool {\n\n self.encryption.is_some()\n\n }\n\n\n\n /// Returns the current session stage.\n\n pub fn stage(&self) -> SessionStage {\n\n self.stage\n\n }\n", "file_path": "dnscat/src/session.rs", "rank": 95, "score": 13.845468711426683 }, { "content": "use std::net::SocketAddr;\n\nuse std::process::Stdio;\n\nuse std::time::Duration;\n\n\n\nuse futures::{future, pin_mut};\n\nuse log::{error, info, warn};\n\nuse structopt::StructOpt;\n\nuse tokio::{io, process};\n\n\n\nuse crate::client::{Client, ClientBuilder};\n\nuse crate::encryption::{Encryption, StandardEncryption};\n\nuse crate::packet::LazyPacket;\n\nuse crate::transport::dns::{self, BasicDnsEndpoint, DnsClient, Name, RecordType};\n\nuse crate::transport::Transport;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(version = \"0.1\", author = \"avitex <avitex@wfxlabs.com>\")]\n\npub struct App {\n\n /// DNS name constant.\n\n constant: Name,\n", "file_path": "dnscat/src/cli/client.rs", "rank": 96, "score": 13.724583614807603 }, { "content": "\n\n///////////////////////////////////////////////////////////////////////////////\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct SessionBodyBytes(pub Bytes);\n\n\n\nimpl Encode for SessionBodyBytes {\n\n fn encode<B: BufMut + ?Sized>(&self, b: &mut B) {\n\n b.put_slice(self.0.as_ref())\n\n }\n\n}\n\n\n\nimpl PacketBody for SessionBodyBytes {\n\n type Head = SessionHeader;\n\n\n\n fn decode_body(_head: &Self::Head, b: &mut Bytes) -> Result<Self, PacketDecodeError> {\n\n Ok(Self(b.copy_to_bytes(b.remaining())))\n\n }\n\n}\n\n\n", "file_path": "dnscat/src/packet/session.rs", "rank": 97, "score": 13.575768381271743 }, { "content": " .copied()\n\n .eq(encoded_const_iter)\n\n {\n\n data.extend(encoded_name.iter().skip(const_label_num).flatten().copied());\n\n return Ok(data.freeze());\n\n }\n\n }\n\n Err(NameEncoderError::ConstantNotFound)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Labeller<R: Rng = OsRng> {\n\n random: Option<R>,\n\n max_size: u8,\n\n}\n\n\n\nimpl Labeller {\n\n /// Constructs a labeller that splits data into labels of the max size possible.\n\n pub fn new() -> Self {\n", "file_path": "dnscat/src/transport/dns/name.rs", "rank": 98, "score": 13.573644192217166 }, { "content": " fn from(kind: PacketKind) -> u8 {\n\n kind as u8\n\n }\n\n}\n\n\n\nimpl Encode for PacketKind {\n\n fn encode<B: BufMut + ?Sized>(&self, b: &mut B) {\n\n b.put_u8((*self).into())\n\n }\n\n}\n\n\n\nimpl Decode for PacketKind {\n\n type Error = PacketDecodeError;\n\n\n\n fn decode(b: &mut Bytes) -> Result<Self, Self::Error> {\n\n Self::from_code(parse::be_u8(b)?)\n\n }\n\n}\n\n\n\n///////////////////////////////////////////////////////////////////////////////\n", "file_path": "dnscat/src/packet/mod.rs", "rank": 99, "score": 13.561350793038674 } ]
Rust
src/dnsimple/domains_signer_records.rs
dnsimple/dnsimple-rust
4f972d5e89b1cf76baea5b4df604359a39acfe5f
use crate::dnsimple::domains::Domains; use crate::dnsimple::{DNSimpleEmptyResponse, DNSimpleResponse, Endpoint, RequestOptions}; use serde::{Deserialize, Serialize}; #[derive(Debug, Deserialize, Serialize)] pub struct DelegationSignerRecord { pub id: u64, pub domain_id: u64, pub algorithm: String, pub digest: String, pub digest_type: String, pub keytag: String, pub public_key: Option<String>, pub created_at: String, pub updated_at: String, } struct ListSignerRecordsEndpoint; impl Endpoint for ListSignerRecordsEndpoint { type Output = Vec<DelegationSignerRecord>; } #[derive(Debug, Serialize)] pub struct DelegationSignerRecordPayload { pub algorithm: String, pub digest: String, pub digest_type: String, pub keytag: String, pub public_key: Option<String>, } struct SignerRecordEndpoint; impl Endpoint for SignerRecordEndpoint { type Output = DelegationSignerRecord; } impl Domains<'_> { pub fn list_delegation_signer_records( &self, account_id: u64, domain: &str, options: Option<RequestOptions>, ) -> Result<DNSimpleResponse<Vec<DelegationSignerRecord>>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client .get::<ListSignerRecordsEndpoint>(&*path, options) } pub fn create_delegation_signer_record( &self, account_id: u64, domain: &str, payload: DelegationSignerRecordPayload, ) -> Result<DNSimpleResponse<DelegationSignerRecord>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client .post::<SignerRecordEndpoint>(&*path, serde_json::to_value(payload).unwrap()) } pub fn get_delegation_signer_record( &self, account_id: u64, domain: &str, ) -> Result<DNSimpleResponse<DelegationSignerRecord>, String> { let path = format!("/{}/domains/{}/ds_records", account_id, domain); self.client.get::<SignerRecordEndpoint>(&*path, None) } pub fn delete_delegation_signer_record( &self, account_id: u64, domain: &str, delegation_signer_record_id: i32, ) -> DNSimpleEmptyResponse { let path = format!( "/{}/domains/{}/ds_records/{}", account_id, domain, delegation_signer_record_id ); self.client.delete(&*path) } }
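The file above exposes the delegation signer record operations through the Domains service. Below is a hedged usage sketch: the client.domains() accessor and the data field on the response wrapper are assumed from the crate's usual pattern rather than shown in this file, and the token and account id are placeholders.

use dnsimple::dnsimple::new_client;

fn main() {
    // Placeholder credentials; the sandbox flag and token mirror the crate's
    // new_client helper.
    let client = new_client(true, String::from("AUTH_TOKEN"));

    // Assumption: the client exposes the Domains service via domains().
    let response = client
        .domains()
        .list_delegation_signer_records(1010, "example.com", None)
        .unwrap();

    // Assumption: DNSimpleResponse carries the parsed records in its data field.
    if let Some(records) = response.data {
        for record in records {
            println!("DS record {}: keytag {}", record.id, record.keytag);
        }
    }
}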
use crate::dnsimple::domains::Domains;
use crate::dnsimple::{DNSimpleEmptyResponse, DNSimpleResponse, Endpoint, RequestOptions};
use serde::{Deserialize, Serialize};

#[derive(Debug, Deserialize, Serialize)]
pub struct DelegationSignerRecord {
    pub id: u64,
    pub domain_id: u64,
    pub algorithm: String,
    pub digest: String,
    pub digest_type: String,
    pub keytag: String,
    pub public_key: Option<String>,
    pub created_at: String,
    pub updated_at: String,
}

struct ListSignerRecordsEndpoint;

impl Endpoint for ListSignerRecordsEndpoint {
    type Output = Vec<DelegationSignerRecord>;
}

#[derive(Debug, Serialize)]
pub struct DelegationSignerRecordPayload {
    pub algorithm: String,
    pub digest: String,
    pub digest_type: String,
    pub keytag: String,
    pub public_key: Option<String>,
}

struct SignerRecordEndpoint;

impl Endpoint for SignerRecordEndpoint {
    type Output = DelegationSignerRecord;
}

impl Domains<'_> {
    pub fn list_delegation_signer_records(
        &self,
        account_id: u64,
        domain: &str,
        options: Option<RequestOptions>,
    ) -> Result<DNSimpleResponse<Vec<DelegationSignerRecord>>, String> {
        let path = format!("/{}/domains/{}/ds_records", account_id, domain);

        self.client
            .get::<ListSignerRecordsEndpoint>(&*path, options)
    }

    pub fn create_delegation_signer_record(
        &sel
    pub fn get_delegation_signer_record(
        &self,
        account_id: u64,
        domain: &str,
    ) -> Result<DNSimpleResponse<DelegationSignerRecord>, String> {
        let path = format!("/{}/domains/{}/ds_records", account_id, domain);

        self.client.get::<SignerRecordEndpoint>(&*path, None)
    }

    pub fn delete_delegation_signer_record(
        &self,
        account_id: u64,
        domain: &str,
        delegation_signer_record_id: i32,
    ) -> DNSimpleEmptyResponse {
        let path = format!(
            "/{}/domains/{}/ds_records/{}",
            account_id, domain, delegation_signer_record_id
        );

        self.client.delete(&*path)
    }
}
f,
        account_id: u64,
        domain: &str,
        payload: DelegationSignerRecordPayload,
    ) -> Result<DNSimpleResponse<DelegationSignerRecord>, String> {
        let path = format!("/{}/domains/{}/ds_records", account_id, domain);

        self.client
            .post::<SignerRecordEndpoint>(&*path, serde_json::to_value(payload).unwrap())
    }
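The middle span above completes the create call; below is a hedged sketch of invoking it end to end. The module path for the payload struct is assumed from the file location (src/dnsimple/domains_signer_records.rs), and the DS record values reuse the sample data from the repository's createDelegationSignerRecord fixture.

// Module path assumed from the file layout; adjust to the crate's actual exports.
use dnsimple::dnsimple::domains_signer_records::DelegationSignerRecordPayload;
use dnsimple::dnsimple::new_client;

fn main() {
    // Placeholder token, sandbox mode; same setup as the listing sketch above.
    let client = new_client(true, String::from("AUTH_TOKEN"));

    // Sample DS record values taken from the createDelegationSignerRecord test fixture.
    let payload = DelegationSignerRecordPayload {
        algorithm: String::from("13"),
        digest: String::from("684a1f049d7d082b7f98691657da5a65764913df7f065f6f8c36edf62d66ca03"),
        digest_type: String::from("2"),
        keytag: String::from("2371"),
        public_key: None,
    };

    let record = client
        .domains()
        .create_delegation_signer_record(1385, "example.com", payload)
        .unwrap()
        .data
        .unwrap();

    println!("created DS record {} with keytag {}", record.id, record.keytag);
}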
function_block-function_prefixed
[ { "content": "/// Creates a mockserver and a client (changing the url of the client\n\n/// to that of the mockserver to capture the requests).\n\n///\n\n/// It builds a response struct for the mock server using the fixture.\n\n///\n\n/// # Arguments\n\n///\n\n/// `fixture`: the path to the fixture inside the `api` directory\n\n/// `path`: the path in the server (i.e. `/whoami`)\n\n/// `method`: the HTTP method we are going to use (GET, POST, DELETE, ...)\n\n///\n\npub fn setup_mock_for(path: &str, fixture: &str, method: &str) -> (Client, Mock) {\n\n let path = format!(\"/v2{}\", path);\n\n let fixture = format!(\"./tests/fixtures/v2/api/{}.http\", fixture);\n\n\n\n let content =\n\n fs::read_to_string(fixture.as_str()).expect(\"Something went wrong: Couldn't read the file\");\n\n\n\n let lines = content.lines();\n\n let status = &content[9..12];\n\n let body = lines.last();\n\n\n\n let mock = mock(method, path.as_str())\n\n .with_header(\"X-RateLimit-Limit\", \"2\")\n\n .with_header(\"X-RateLimit-Remaining\", \"2\")\n\n .with_header(\"X-RateLimit-Reset\", \"never\")\n\n .with_status(status.parse().unwrap())\n\n .with_body(body.unwrap())\n\n .create();\n\n\n\n let mut client = new_client(true, String::from(\"some-token\"));\n\n client.set_base_url(&mockito::server_url());\n\n (client, mock)\n\n}\n", "file_path": "tests/common/mod.rs", "rank": 0, "score": 172761.0502869934 }, { "content": "struct DomainsEndpoint;\n\n\n\nimpl Endpoint for DomainsEndpoint {\n\n type Output = Vec<Domain>;\n\n}\n\n\n", "file_path": "src/dnsimple/domains.rs", "rank": 3, "score": 132725.8665870953 }, { "content": "struct DomainEndpoint;\n\n\n\nimpl Endpoint for DomainEndpoint {\n\n type Output = Domain;\n\n}\n\n\n\n/// The Domains Service handles the domains endpoint of the DNSimple API.\n\n///\n\n/// See [API Documentation: domains](https://developer.dnsimple.com/v2/domains/)\n\npub struct Domains<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Domains<'_> {\n\n /// Lists the domains in the account\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use std::collections::HashMap;\n", "file_path": "src/dnsimple/domains.rs", "rank": 4, "score": 132725.8665870953 }, { "content": "struct DomainPushEndpoint;\n\n\n\nimpl Endpoint for DomainPushEndpoint {\n\n type Output = DomainPush;\n\n}\n\n\n\n/// Represents a domain push\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct DomainPush {\n\n /// The domain push ID in DNSimple.\n\n pub id: u64,\n\n /// The associated domain ID.\n\n pub domain_id: u64,\n\n /// The associated contact ID.\n\n pub contact_id: Option<u64>,\n\n /// The associated account ID.\n\n pub account_id: u64,\n\n /// When the domain push was created in DNSimple.\n\n pub created_at: String,\n\n /// When the domain push was last updated in DNSimple.\n", "file_path": "src/dnsimple/domains_push.rs", "rank": 5, "score": 126161.2233685349 }, { "content": "/// Helper function to create a new client\n\n///\n\n/// Make sure you use this to create your client.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use dnsimple::dnsimple::{Client, new_client};\n\n///\n\n/// let client = new_client(true, String::from(\"AUTH_TOKEN\"));\n\n/// ```\n\n///\n\n/// # Arguments\n\n///\n\n/// `sandbox`: `true` if you want to run in the sandbox environment, otherwise `false`\n\n/// `token`: the bearer authentication token\n\npub fn new_client(sandbox: bool, token: String) -> Client {\n\n let mut url = DEFAULT_BASE_URL;\n\n if sandbox {\n\n url = DEFAULT_SANDBOX_URL;\n\n }\n\n\n\n Client {\n\n base_url: 
String::from(url),\n\n user_agent: DEFAULT_USER_AGENT.to_owned() + VERSION,\n\n auth_token: token,\n\n _agent: ureq::Agent::new(),\n\n }\n\n}\n\n\n\nimpl Client {\n\n ///Returns the `accounts` service attached to this client\n\n pub fn accounts(&self) -> Accounts {\n\n Accounts { client: self }\n\n }\n\n\n", "file_path": "src/dnsimple.rs", "rank": 6, "score": 124611.53493196785 }, { "content": "struct DomainPushesListEndpoint;\n\n\n\nimpl Endpoint for DomainPushesListEndpoint {\n\n type Output = Vec<DomainPush>;\n\n}\n\n\n", "file_path": "src/dnsimple/domains_push.rs", "rank": 7, "score": 123183.08527996336 }, { "content": "struct IdsEndpoint;\n\n\n\nimpl Endpoint for IdsEndpoint {\n\n type Output = Vec<Id>;\n\n}\n\n\n", "file_path": "tests/query_parameters_in_request.rs", "rank": 8, "score": 122175.90926384853 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Id {\n\n #[allow(dead_code)] // Unread but required for test fixture\n\n pub id: u64,\n\n}\n\n\n", "file_path": "tests/query_parameters_in_request.rs", "rank": 9, "score": 120708.37819024696 }, { "content": "struct DomainCheckEndpoint;\n\n\n\nimpl Endpoint for DomainCheckEndpoint {\n\n type Output = DomainCheck;\n\n}\n\n\n", "file_path": "src/dnsimple/registrar.rs", "rank": 10, "score": 120276.20766311348 }, { "content": "struct CollaboratorEndpoint;\n\n\n\nimpl Endpoint for CollaboratorEndpoint {\n\n type Output = Collaborator;\n\n}\n\n\n\n/// The domains collaborators set of endpoints\n\n///\n\n/// See [API Documentation: domains/collaborators](https://developer.dnsimple.com/v2/domains/collaborators)\n\nimpl Domains<'_> {\n\n /// List collaborators for the domain in the account.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use dnsimple::dnsimple::{Filters, new_client, Paginate, Sort};\n\n /// use std::collections::HashMap;\n\n ///\n\n /// let client = new_client(true, String::from(\"AUTH_TOKEN\"));\n\n /// let collaborators = client.domains().list_collaborators(1234, 1, None).unwrap().data.unwrap();\n", "file_path": "src/dnsimple/domains_collaborators.rs", "rank": 11, "score": 120276.20766311348 }, { "content": "struct DomainTransferEndpoint;\n\n\n\nimpl Endpoint for DomainTransferEndpoint {\n\n type Output = DomainTransfer;\n\n}\n\n\n", "file_path": "src/dnsimple/registrar.rs", "rank": 12, "score": 120276.20766311348 }, { "content": "struct DomainRegistrationEndpoint;\n\n\n\nimpl Endpoint for DomainRegistrationEndpoint {\n\n type Output = DomainRegistration;\n\n}\n\n\n", "file_path": "src/dnsimple/registrar.rs", "rank": 13, "score": 120276.20766311348 }, { "content": "struct DomainRenewalEndpoint;\n\n\n\nimpl Endpoint for DomainRenewalEndpoint {\n\n type Output = DomainRenewal;\n\n}\n\n\n\n/// The Registrar Service handles the domains registrations of the DNSimple API.\n\n///\n\n/// See [API Documentation: registrar](https://developer.dnsimple.com/v2/registrar/)\n\npub struct Registrar<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Registrar<'_> {\n\n /// Checks a domain name for availability.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use dnsimple::dnsimple::new_client;\n", "file_path": "src/dnsimple/registrar.rs", "rank": 14, "score": 120276.20766311348 }, { "content": "struct DomainPricesEndpoint;\n\n\n\nimpl Endpoint for DomainPricesEndpoint {\n\n type Output = DomainPrice;\n\n}\n\n\n", "file_path": "src/dnsimple/registrar.rs", "rank": 15, "score": 120276.20766311348 }, { "content": "struct LetsEncryptPurchaseEndpoint;\n\n\n\nimpl Endpoint for LetsEncryptPurchaseEndpoint {\n\n type 
Output = LetsEncryptPurchase;\n\n}\n\n\n", "file_path": "src/dnsimple/certificates.rs", "rank": 16, "score": 118772.29059954378 }, { "content": "/// Defines the Endpoint trait for the different API endpoints\n\npub trait Endpoint {\n\n type Output: DeserializeOwned;\n\n}\n\n\n\n/// Represents the response from an API call\n\n#[derive(Debug)]\n\npub struct DNSimpleResponse<T> {\n\n /// The maximum number of requests you can perform per hour.\n\n pub rate_limit: String,\n\n /// The number of requests remaining in the current rate limit window.\n\n pub rate_limit_remaining: String,\n\n /// The time at which the current rate limit window in [Unix time](https://en.wikipedia.org/wiki/Unix_time) format.\n\n pub rate_limit_reset: String,\n\n /// The HTTP Status Code\n\n pub status: u16,\n\n /// The object or a Vec<T> of objects (the type `T` will depend on the endpoint).\n\n pub data: Option<T>,\n\n /// The error response if any\n\n pub errors: Option<APIErrorMessage>,\n\n /// Any API endpoint that returns a list of items requires pagination.\n", "file_path": "src/dnsimple.rs", "rank": 17, "score": 117913.85382742377 }, { "content": "struct DomainPremiumPriceEndpoint;\n\n\n\nimpl Endpoint for DomainPremiumPriceEndpoint {\n\n type Output = DomainPremiumPrice;\n\n}\n\n\n", "file_path": "src/dnsimple/registrar.rs", "rank": 18, "score": 116939.85335306353 }, { "content": "struct ListCollaboratorsEndpoint;\n\n\n\nimpl Endpoint for ListCollaboratorsEndpoint {\n\n type Output = Vec<Collaborator>;\n\n}\n\n\n", "file_path": "src/dnsimple/domains_collaborators.rs", "rank": 19, "score": 116939.85335306355 }, { "content": "struct DnssecStatusEndpoint;\n\n\n\nimpl Endpoint for DnssecStatusEndpoint {\n\n type Output = Dnssec;\n\n}\n\n\n\n/// The domains dnssec set of endpoints\n\n///\n\n/// See [API Documentation: domains/dnssec](https://developer.dnsimple.com/v2/domains/dnssec)\n\nimpl Domains<'_> {\n\n /// Enable DNSSEC for the domain in the account. This will sign the zone. 
If the domain is\n\n /// registered it will also add the DS record to the corresponding registry.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use dnsimple::dnsimple::new_client;\n\n ///\n\n /// let client = new_client(true, String::from(\"AUTH_TOKEN\"));\n\n /// let dnssec = client.domains().enable_dnssec(1234, \"example.com\").unwrap().data.unwrap();\n", "file_path": "src/dnsimple/domains_dnssec.rs", "rank": 20, "score": 116939.85335306355 }, { "content": "struct LetsEncryptPurchaseRenewalEndpoint;\n\n\n\nimpl Endpoint for LetsEncryptPurchaseRenewalEndpoint {\n\n type Output = LetsEncryptPurchaseRenewal;\n\n}\n\n\n\n/// The Certificates Service handles the certificates endpoint of the DNSimple API.\n\n///\n\n/// See [API Documentation: certificates](https://developer.dnsimple.com/v2/certificates/)\n\npub struct Certificates<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Certificates<'_> {\n\n /// List the certificates for a domain in the account.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use dnsimple::dnsimple::{Client, new_client};\n", "file_path": "src/dnsimple/certificates.rs", "rank": 21, "score": 115601.45909762307 }, { "content": "struct EmailForwardEndpoint;\n\n\n\nimpl Endpoint for EmailForwardEndpoint {\n\n type Output = EmailForward;\n\n}\n\n\n\n/// The domains email forwards set of endpoints\n\n///\n\n/// See [API Documentation: domains/email-forwards](https://developer.dnsimple.com/v2/domains/email-forwards)\n\nimpl Domains<'_> {\n\n /// List email forwards for the domain in the account.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use dnsimple::dnsimple::new_client;\n\n ///\n\n /// let client = new_client(true, String::from(\"AUTH_TOKEN\"));\n\n /// let email_forwards_list = client.domains().list_email_forwards(1234, \"example.com\", None).unwrap().data.unwrap();\n\n /// ```\n", "file_path": "src/dnsimple/domains_email_forwards.rs", "rank": 22, "score": 113831.6857258196 }, { "content": "struct DomainDelegationEndpoint;\n\n\n\nimpl Endpoint for DomainDelegationEndpoint {\n\n type Output = Vec<String>;\n\n}\n\n\n\n/// Represents a vanity name server\n\n#[derive(Debug, Deserialize)]\n\npub struct VanityNameServer {\n\n /// The vanity name server ID in DNSimple.\n\n pub id: u64,\n\n /// The vanity name server name.\n\n pub name: String,\n\n /// The vanity name server IPv4.\n\n pub ipv4: String,\n\n /// The vanity name server IPv6.\n\n pub ipv6: String,\n\n /// When the vanity name server was created in DNSimple.\n\n pub created_at: String,\n\n /// When the vanity name server was last updated in DNSimple.\n\n pub updated_at: String,\n\n}\n\n\n", "file_path": "src/dnsimple/registrar_name_servers.rs", "rank": 23, "score": 113831.6857258196 }, { "content": "struct EmailForwardsListEndpoint;\n\n\n\nimpl Endpoint for EmailForwardsListEndpoint {\n\n type Output = Vec<EmailForwardsInList>;\n\n}\n\n\n\n/// Represents an email forwards\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct EmailForward {\n\n /// The email forward ID in DNSimple.\n\n pub id: u64,\n\n /// The associated domain ID.\n\n pub domain_id: u64,\n\n /// The email alias\n\n pub alias_email: String,\n\n /// The destination email\n\n pub destination_email: String,\n\n /// The \"local part\" of the originating email address. 
Anything to the left of the @ symbol.\n\n pub from: String,\n\n /// The full email address to forward to.\n", "file_path": "src/dnsimple/domains_email_forwards.rs", "rank": 24, "score": 110929.06895229379 }, { "content": "struct DomainDelegationVanityEndpoint;\n\n\n\nimpl Endpoint for DomainDelegationVanityEndpoint {\n\n type Output = Vec<VanityNameServer>;\n\n}\n\n\n\nimpl Registrar<'_> {\n\n /// List name servers for the domain in the account.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// `account_id`: The account ID\n\n /// `domain`: The domain name or id\n\n pub fn get_domain_delegation(\n\n &self,\n\n account_id: u64,\n\n domain: String,\n\n ) -> Result<DNSimpleResponse<Vec<String>>, String> {\n\n let path = format!(\"/{}/registrar/domains/{}/delegation\", account_id, domain);\n\n\n", "file_path": "src/dnsimple/registrar_name_servers.rs", "rank": 25, "score": 110929.06895229378 }, { "content": "#[test]\n\nfn create_domain_test() {\n\n let setup = setup_mock_for(\"/1385/domains\", \"createDomain/created\", \"POST\");\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain_name = String::from(\"example-beta.com\");\n\n\n\n let domain = client\n\n .domains()\n\n .create_domain(account_id, domain_name)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(domain.id, 181985);\n\n assert_eq!(domain.account_id, account_id);\n\n assert_eq!(domain.registrant_id, None);\n\n assert_eq!(domain.name, \"example-beta.com\");\n\n assert_eq!(domain.unicode_name, \"example-beta.com\");\n\n assert_eq!(domain.state, \"hosted\");\n\n assert_eq!(domain.auto_renew, false);\n\n assert_eq!(domain.private_whois, false);\n\n assert_eq!(domain.expires_on, None);\n\n assert_eq!(domain.expires_at, None);\n\n assert_eq!(domain.created_at, \"2020-06-04T19:47:05Z\");\n\n assert_eq!(domain.updated_at, \"2020-06-04T19:47:05Z\");\n\n}\n\n\n", "file_path": "tests/domains_test.rs", "rank": 26, "score": 92494.91090509907 }, { "content": "#[test]\n\nfn test_delete_domain() {\n\n let setup = setup_mock_for(\"/1385/domains/181984\", \"deleteDomain/success\", \"DELETE\");\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain_id = 181984 as u64;\n\n\n\n let response = client.domains().delete_domain(account_id, domain_id);\n\n\n\n assert_eq!(response.status, 204);\n\n}\n", "file_path": "tests/domains_test.rs", "rank": 27, "score": 92494.91090509907 }, { "content": "#[test]\n\nfn test_get_domain() {\n\n let setup = setup_mock_for(\"/1385/domains/181984\", \"getDomain/success\", \"GET\");\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain_id = 181984 as u64;\n\n\n\n let domain = client\n\n .domains()\n\n .get_domain(account_id, domain_id)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(domain_id, domain.id);\n\n assert_eq!(account_id, domain.account_id);\n\n assert_eq!(2715, domain.registrant_id.unwrap());\n\n assert_eq!(\"example-alpha.com\", domain.name);\n\n assert_eq!(\"example-alpha.com\", domain.unicode_name);\n\n assert_eq!(\"registered\", domain.state);\n\n assert_eq!(false, domain.auto_renew);\n\n assert_eq!(false, domain.private_whois);\n\n assert_eq!(\"2021-06-05\", domain.expires_on.unwrap());\n\n assert_eq!(\"2021-06-05T02:15:00Z\", domain.expires_at.unwrap());\n\n assert_eq!(\"2020-06-04T19:15:14Z\", domain.created_at);\n\n assert_eq!(\"2020-06-04T19:15:21Z\", domain.updated_at);\n\n}\n\n\n", "file_path": "tests/domains_test.rs", "rank": 28, "score": 92494.91090509907 }, { "content": "#[test]\n\nfn list_domains_test() {\n\n let setup = 
setup_mock_for(\"/1385/domains\", \"listDomains/success\", \"GET\");\n\n let client = setup.0;\n\n let account_id = 1385;\n\n\n\n let domains_data = client.domains().list_domains(account_id, None).unwrap();\n\n let domains = domains_data.data.unwrap();\n\n\n\n assert_eq!(2, domains.len());\n\n\n\n let first_domain = domains.first().unwrap();\n\n assert_eq!(181984, first_domain.id);\n\n assert_eq!(account_id, first_domain.account_id);\n\n assert_eq!(2715, first_domain.registrant_id.unwrap());\n\n assert_eq!(\"example-alpha.com\", first_domain.name);\n\n assert_eq!(\"example-alpha.com\", first_domain.unicode_name);\n\n assert_eq!(\"registered\", first_domain.state);\n\n assert_eq!(false, first_domain.auto_renew);\n\n assert_eq!(false, first_domain.private_whois);\n\n assert_eq!(\"2021-06-05\", first_domain.expires_on.as_ref().unwrap());\n\n assert_eq!(\n\n \"2021-06-05T02:15:00Z\",\n\n first_domain.expires_at.as_ref().unwrap()\n\n );\n\n assert_eq!(\"2020-06-04T19:15:14Z\", first_domain.created_at);\n\n assert_eq!(\"2020-06-04T19:15:21Z\", first_domain.updated_at);\n\n}\n\n\n", "file_path": "tests/domains_test.rs", "rank": 29, "score": 92494.91090509907 }, { "content": "struct ServiceEndpoint;\n\n\n\nimpl Endpoint for ServiceEndpoint {\n\n type Output = Service;\n\n}\n\n\n\n/// The Services Service handles the domains services of the DNSimple API.\n\n///\n\n/// See [API Documentation: services](https://developer.dnsimple.com/v2/services/)\n\npub struct Services<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Services<'_> {\n\n /// List services\n\n pub fn list_services(\n\n &self,\n\n options: Option<RequestOptions>,\n\n ) -> Result<DNSimpleResponse<Vec<Service>>, String> {\n\n let path = \"/services\";\n", "file_path": "src/dnsimple/services.rs", "rank": 30, "score": 92084.1393331959 }, { "content": "struct AccountsEndpoint;\n\n\n\nimpl Endpoint for AccountsEndpoint {\n\n type Output = Vec<Account>;\n\n}\n\n\n\n/// The Accounts Service handles the accounts endpoint of the DNSimple API.\n\n///\n\n/// See [API Documentation: accounts](https://developer.dnsimple.com/v2/accounts/)\n\npub struct Accounts<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Accounts<'_> {\n\n /// Lists the accounts the current authenticated entity has access to.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use dnsimple::dnsimple::new_client;\n", "file_path": "src/dnsimple/accounts.rs", "rank": 31, "score": 92084.1393331959 }, { "content": "struct ZoneEndpoint;\n\n\n\nimpl Endpoint for ZoneEndpoint {\n\n type Output = Zone;\n\n}\n\n\n", "file_path": "src/dnsimple/zones.rs", "rank": 32, "score": 92084.1393331959 }, { "content": "struct TemplatesEndpoint;\n\n\n\nimpl Endpoint for TemplatesEndpoint {\n\n type Output = Vec<Template>;\n\n}\n\n\n", "file_path": "src/dnsimple/templates.rs", "rank": 33, "score": 92084.1393331959 }, { "content": "struct ContactEndpoint;\n\n\n\nimpl Endpoint for ContactEndpoint {\n\n type Output = Contact;\n\n}\n\n\n\n/// The Contacts Service handles the contacts endpoint of the DNSimple API.\n\n///\n\n/// See [API Documentation: contacts](https://developer.dnsimple.com/v2/contacts/)\n\npub struct Contacts<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Contacts<'_> {\n\n /// Lists the contacts in the account.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// `account_id`: The account ID\n\n /// `options`: The `RequestOptions`\n", "file_path": "src/dnsimple/contacts.rs", "rank": 34, "score": 92084.1393331959 }, { "content": "struct ContactsEndpoint;\n\n\n\nimpl Endpoint for 
ContactsEndpoint {\n\n type Output = Vec<Contact>;\n\n}\n\n\n", "file_path": "src/dnsimple/contacts.rs", "rank": 35, "score": 92084.1393331959 }, { "content": "struct ServicesEndpoint;\n\n\n\nimpl Endpoint for ServicesEndpoint {\n\n type Output = Vec<Service>;\n\n}\n\n\n", "file_path": "src/dnsimple/services.rs", "rank": 36, "score": 92084.1393331959 }, { "content": "struct TldEndpoint;\n\n\n\nimpl Endpoint for TldEndpoint {\n\n type Output = Tld;\n\n}\n\n\n", "file_path": "src/dnsimple/tlds.rs", "rank": 37, "score": 92084.1393331959 }, { "content": "struct WebhookEndpoint;\n\n\n\nimpl Endpoint for WebhookEndpoint {\n\n type Output = Webhook;\n\n}\n\n\n\n/// The Webhooks Service handles the webhooks of the DNSimple API.\n\n///\n\n/// See [API Documentation: webhooks](https://developer.dnsimple.com/v2/webhooks/)\n\npub struct Webhooks<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Webhooks<'_> {\n\n /// List webhooks in the account.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// `account_id`: The account id\n\n pub fn list_webhooks(\n", "file_path": "src/dnsimple/webhooks.rs", "rank": 38, "score": 92084.1393331959 }, { "content": "struct WebhooksEndpoint;\n\n\n\nimpl Endpoint for WebhooksEndpoint {\n\n type Output = Vec<Webhook>;\n\n}\n\n\n", "file_path": "src/dnsimple/webhooks.rs", "rank": 39, "score": 92084.1393331959 }, { "content": "struct CertificateEndpoint;\n\n\n\nimpl Endpoint for CertificateEndpoint {\n\n type Output = Certificate;\n\n}\n\n\n", "file_path": "src/dnsimple/certificates.rs", "rank": 40, "score": 92084.1393331959 }, { "content": "struct TemplateEndpoint;\n\n\n\nimpl Endpoint for TemplateEndpoint {\n\n type Output = Template;\n\n}\n\n\n", "file_path": "src/dnsimple/templates.rs", "rank": 41, "score": 92084.1393331959 }, { "content": "struct IdentityEndpoint;\n\n\n\nimpl Endpoint for IdentityEndpoint {\n\n type Output = WhoamiData;\n\n}\n\n\n\n/// The Identity Service handles the identity (whoami) endpoint of the DNSimple API.\n\n///\n\n/// See [API Documentation: identity](https://developer.dnsimple.com/v2/identity/)\n\npub struct Identity<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Identity<'_> {\n\n /// Retrieves the details about the current authenticated entity used to access the API.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// use dnsimple::dnsimple::{Client, new_client};\n", "file_path": "src/dnsimple/identity.rs", "rank": 42, "score": 92084.1393331959 }, { "content": "struct ListCertificatesEndpoint;\n\n\n\nimpl Endpoint for ListCertificatesEndpoint {\n\n type Output = Vec<Certificate>;\n\n}\n\n\n", "file_path": "src/dnsimple/certificates.rs", "rank": 43, "score": 89660.17837047605 }, { "content": "struct TemplateRecordEndpoint;\n\n\n\nimpl Endpoint for TemplateRecordEndpoint {\n\n type Output = TemplateRecord;\n\n}\n\n\n\n/// The Templates Service handles the domains templates of the DNSimple API.\n\n///\n\n/// See [API Documentation: templates](https://developer.dnsimple.com/v2/templates/)\n\npub struct Templates<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Templates<'_> {\n\n /// List templates in the account.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// `account_id`: The account id\n\n /// `options`: The `RequestOptions` for sorting, etc.\n", "file_path": "src/dnsimple/templates.rs", "rank": 44, "score": 89660.17837047605 }, { "content": "struct ListZonesEndpoint;\n\n\n\nimpl Endpoint for ListZonesEndpoint {\n\n type Output = Vec<Zone>;\n\n}\n\n\n", "file_path": "src/dnsimple/zones.rs", "rank": 45, "score": 89660.17837047605 }, { 
"content": "struct ZoneFileEndpoint;\n\n\n\nimpl Endpoint for ZoneFileEndpoint {\n\n type Output = ZoneFile;\n\n}\n\n\n\npub(crate) struct DistributionEndpoint;\n\n\n\nimpl Endpoint for DistributionEndpoint {\n\n type Output = ZoneDistribution;\n\n}\n\n\n\n/// The Zones Service handles the zone distribution of the DNSimple API.\n\n///\n\n/// See [API Documentation: zones](https://developer.dnsimple.com/v2/zones/)\n\npub struct Zones<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Zones<'_> {\n", "file_path": "src/dnsimple/zones.rs", "rank": 46, "score": 89660.17837047605 }, { "content": "struct TemplateRecordsEndpoint;\n\n\n\nimpl Endpoint for TemplateRecordsEndpoint {\n\n type Output = Vec<TemplateRecord>;\n\n}\n\n\n", "file_path": "src/dnsimple/templates.rs", "rank": 47, "score": 89660.17837047605 }, { "content": "struct CertificateDownloadEndpoint;\n\n\n\nimpl Endpoint for CertificateDownloadEndpoint {\n\n type Output = CertificateBundle;\n\n}\n\n\n", "file_path": "src/dnsimple/certificates.rs", "rank": 48, "score": 89660.17837047605 }, { "content": "struct ListTldsEndpoint;\n\n\n\nimpl Endpoint for ListTldsEndpoint {\n\n type Output = Vec<Tld>;\n\n}\n\n\n", "file_path": "src/dnsimple/tlds.rs", "rank": 49, "score": 89660.17837047605 }, { "content": "struct ZoneRecordsEndpoint;\n\n\n\nimpl Endpoint for ZoneRecordsEndpoint {\n\n type Output = Vec<ZoneRecord>;\n\n}\n\n\n", "file_path": "src/dnsimple/zones_records.rs", "rank": 50, "score": 87407.87191268805 }, { "content": "struct CertificatePrivateKeyEndpoint;\n\n\n\nimpl Endpoint for CertificatePrivateKeyEndpoint {\n\n type Output = CertificatePrivateKey;\n\n}\n\n\n", "file_path": "src/dnsimple/certificates.rs", "rank": 51, "score": 87407.87191268805 }, { "content": "struct ZoneRecordEndpoint;\n\n\n\nimpl Endpoint for ZoneRecordEndpoint {\n\n type Output = ZoneRecord;\n\n}\n\n\n\nimpl Zones<'_> {\n\n /// List zone records\n\n ///\n\n /// # Arguments\n\n ///\n\n /// `account_id`: The account ID\n\n /// `zone`: The zone name\n\n pub fn list_zone_records(\n\n &self,\n\n account_id: u64,\n\n zone: &str,\n\n options: Option<RequestOptions>,\n\n ) -> Result<DNSimpleResponse<Vec<ZoneRecord>>, String> {\n\n let path = format!(\"/{}/zones/{}/records\", account_id, zone);\n", "file_path": "src/dnsimple/zones_records.rs", "rank": 52, "score": 87407.87191268805 }, { "content": "struct WhoisPrivacyEndpoint;\n\n\n\nimpl Endpoint for WhoisPrivacyEndpoint {\n\n type Output = WhoisPrivacy;\n\n}\n\n\n", "file_path": "src/dnsimple/registrar_whois_privacy.rs", "rank": 53, "score": 85309.60974741331 }, { "content": "struct ListTldsExtendedAttributesEndpoint;\n\n\n\nimpl Endpoint for ListTldsExtendedAttributesEndpoint {\n\n type Output = Vec<TldExtendedAttribute>;\n\n}\n\n\n\n/// The Tlds Service handles the tlds of the DNSimple API.\n\n///\n\n/// See [API Documentation: tlds](https://developer.dnsimple.com/v2/tlds/)\n\npub struct Tlds<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl Tlds<'_> {\n\n /// Returns the list of TLDs supported for registration or transfer.\n\n pub fn list_tlds(\n\n &self,\n\n options: Option<RequestOptions>,\n\n ) -> Result<DNSimpleResponse<Vec<Tld>>, String> {\n\n let path = \"/tlds\";\n", "file_path": "src/dnsimple/tlds.rs", "rank": 54, "score": 85309.60974741331 }, { "content": "#[test]\n\nfn test_check_domain_premium_price_not_a_premium_domain() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/cocotero.love/premium_price?action=registration\",\n\n \"checkDomainPremiumPrice/error_400_not_a_premium_domain\",\n\n 
\"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"cocotero.love\";\n\n\n\n let response = client\n\n .registrar()\n\n .check_domain_premium_price(account_id, domain, None)\n\n .unwrap();\n\n let error = response.errors.unwrap();\n\n\n\n assert_eq!(\n\n \"`cocotero.love` is not a premium domain for registration\",\n\n error.message.unwrap()\n\n );\n\n}\n", "file_path": "tests/registrar_test.rs", "rank": 55, "score": 85118.52639809287 }, { "content": "#[test]\n\nfn test_transfer_domain() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/example.com/transfers\",\n\n \"transferDomain/success\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"example.com\";\n\n let payload = DomainTransferPayload {\n\n registrant_id: 2,\n\n auth_code: String::from(\"THE_AUTH_CODE\"),\n\n whois_privacy: None,\n\n auto_renew: None,\n\n extended_attributes: None,\n\n premium_price: None,\n\n };\n\n\n\n let response = client\n\n .registrar()\n", "file_path": "tests/registrar_test.rs", "rank": 56, "score": 83435.85792214725 }, { "content": "#[test]\n\nfn test_register_domain() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/example.com/registrations\",\n\n \"registerDomain/success\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"example.com\";\n\n let payload = DomainRegistrationPayload {\n\n registrant_id: 2,\n\n whois_privacy: None,\n\n auto_renew: None,\n\n extended_attributes: None,\n\n premium_price: None,\n\n };\n\n\n\n let response = client\n\n .registrar()\n\n .register_domain(account_id, domain, payload)\n", "file_path": "tests/registrar_test.rs", "rank": 57, "score": 83435.85792214725 }, { "content": "#[test]\n\nfn test_check_domain() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/ruby.codes/check\",\n\n \"checkDomain/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"ruby.codes\";\n\n\n\n let response = client.registrar().check_domain(account_id, domain).unwrap();\n\n let domain_check = response.data.unwrap();\n\n\n\n assert_eq!(\"ruby.codes\", domain_check.domain);\n\n assert_eq!(true, domain_check.available);\n\n assert_eq!(true, domain_check.premium);\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 58, "score": 83435.85792214725 }, { "content": "#[test]\n\nfn test_renew_a_domain() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/example.com/renewals\",\n\n \"renewDomain/success\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = String::from(\"example.com\");\n\n let payload = DomainRenewalPayload {\n\n period: 1,\n\n premium_price: None,\n\n };\n\n\n\n let response = client\n\n .registrar()\n\n .renew_domain(account_id, domain, payload)\n\n .unwrap();\n\n let domain_renewal = response.data.unwrap();\n\n\n\n assert_eq!(1, domain_renewal.id);\n\n assert_eq!(999, domain_renewal.domain_id);\n\n assert_eq!(1, domain_renewal.period);\n\n assert_eq!(\"new\", domain_renewal.state);\n\n assert_eq!(\"2016-12-09T19:46:45Z\", domain_renewal.created_at);\n\n assert_eq!(\"2016-12-09T19:46:45Z\", domain_renewal.updated_at);\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 59, "score": 83435.85792214725 }, { "content": "struct VanityNameServersEndpoint;\n\n\n\nimpl Endpoint for VanityNameServersEndpoint {\n\n type Output = Vec<VanityNameServer>;\n\n}\n\n\n\n/// The Vanity Name Servers Service handles the vanity 
name servers of the DNSimple API.\n\n///\n\n/// See [API Documentation: vanity](https://developer.dnsimple.com/v2/vanity/)\n\npub struct VanityNameServers<'a> {\n\n pub client: &'a Client,\n\n}\n\n\n\nimpl VanityNameServers<'_> {\n\n /// Enable vanity name servers\n\n ///\n\n /// # Arguments\n\n /// `account_id`: The account id\n\n /// `domain`: The domain name or id\n\n pub fn enable_vanity_name_servers(\n", "file_path": "src/dnsimple/vanity_name_servers.rs", "rank": 60, "score": 83350.1108765041 }, { "content": "struct WhoisPrivacyRenewalEndpoint;\n\n\n\nimpl Endpoint for WhoisPrivacyRenewalEndpoint {\n\n type Output = WhoisPrivacyRenewal;\n\n}\n\n\n\nimpl Registrar<'_> {\n\n /// Retrieve the domain WHOIS privacy\n\n ///\n\n /// # Arguments\n\n ///\n\n /// `account_id`: The account ID\n\n /// `domain`: The domain name or id\n\n pub fn get_whois_privacy(\n\n &self,\n\n account_id: u64,\n\n domain: String,\n\n ) -> Result<DNSimpleResponse<WhoisPrivacy>, String> {\n\n let path = format!(\"/{}/registrar/domains/{}/whois_privacy\", account_id, domain);\n\n\n", "file_path": "src/dnsimple/registrar_whois_privacy.rs", "rank": 61, "score": 83350.1108765041 }, { "content": "#[test]\n\nfn test_remove_collaborator() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/1/collaborators/100\",\n\n \"removeCollaborator/success\",\n\n \"DELETE\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain_id = 1 as u64;\n\n\n\n let response = client\n\n .domains()\n\n .remove_collaborator(account_id, domain_id, 100);\n\n\n\n assert_eq!(response.status, 204);\n\n}\n", "file_path": "tests/domains_collaborators_test.rs", "rank": 62, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_reject_push() {\n\n let setup = setup_mock_for(\"/1385/domains/pushes/42\", \"rejectPush/success\", \"DELETE\");\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let push_id = 42;\n\n\n\n let response = client.domains().reject_push(account_id, push_id);\n\n\n\n assert_eq!(response.status, 204);\n\n}\n", "file_path": "tests/domains_push_test.rs", "rank": 63, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_list_pushes() {\n\n let setup = setup_mock_for(\"/1385/domains/pushes\", \"listPushes/success\", \"GET\");\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n\n\n let response = client.domains().list_pushes(account_id, None).unwrap();\n\n let domain_pushes_list = response.data.unwrap();\n\n\n\n assert_eq!(2, domain_pushes_list.len());\n\n}\n\n\n", "file_path": "tests/domains_push_test.rs", "rank": 64, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_dnssec_status() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/dnssec\",\n\n \"getDnssec/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"example.com\";\n\n\n\n let response = client.domains().get_dnssec(account_id, domain).unwrap();\n\n let dnssec = response.data.unwrap();\n\n\n\n assert_eq!(response.status, 200);\n\n\n\n assert_eq!(true, dnssec.enabled);\n\n assert_eq!(\"2017-02-03T17:43:22Z\", dnssec.created_at);\n\n assert_eq!(\"2017-02-03T17:43:22Z\", dnssec.updated_at);\n\n}\n", "file_path": "tests/domains_dnssec_test.rs", "rank": 65, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_retrieve_domain_transfer() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/google.com/transfers/361\",\n\n \"getDomainTransfer/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let 
account_id = 1010;\n\n let domain = String::from(\"google.com\");\n\n let domain_transfer = 361;\n\n\n\n let response = client\n\n .registrar()\n\n .get_domain_transfer(account_id, domain, domain_transfer)\n\n .unwrap();\n\n let transfer = response.data.unwrap();\n\n\n\n assert_eq!(361, transfer.id);\n\n assert_eq!(182245, transfer.domain_id);\n\n assert_eq!(2715, transfer.registrant_id);\n\n assert_eq!(\"cancelled\", transfer.state);\n\n assert_eq!(false, transfer.auto_renew);\n\n assert_eq!(false, transfer.whois_privacy);\n\n assert_eq!(\"Canceled by customer\", transfer.status_description.unwrap());\n\n assert_eq!(\"2020-06-05T18:08:00Z\", transfer.created_at);\n\n assert_eq!(\"2020-06-05T18:10:01Z\", transfer.updated_at);\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 66, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_accept_push() {\n\n let setup = setup_mock_for(\"/1385/domains/pushes/42\", \"acceptPush/success\", \"POST\");\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let push_id = 42;\n\n\n\n let response = client.domains().accept_push(account_id, push_id);\n\n\n\n assert_eq!(response.status, 204);\n\n}\n", "file_path": "tests/domains_push_test.rs", "rank": 67, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_renew_a_domain_to_early() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/example.com/renewals\",\n\n \"renewDomain/error-tooearly\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = String::from(\"example.com\");\n\n let payload = DomainRenewalPayload {\n\n period: 1,\n\n premium_price: None,\n\n };\n\n\n\n let response = client\n\n .registrar()\n\n .renew_domain(account_id, domain, payload)\n\n .unwrap();\n\n let errors = response.errors.unwrap();\n\n\n\n assert_eq!(\n\n \"example.com may not be renewed at this time\",\n\n errors.message.unwrap()\n\n );\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 68, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_enable_dnssec() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/dnssec\",\n\n \"enableDnssec/success\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"example.com\";\n\n\n\n let response = client.domains().enable_dnssec(account_id, domain).unwrap();\n\n let dnssec = response.data.unwrap();\n\n\n\n assert_eq!(response.status, 201);\n\n\n\n assert_eq!(true, dnssec.enabled);\n\n assert_eq!(\"2017-03-03T13:49:58Z\", dnssec.created_at);\n\n assert_eq!(\"2017-03-03T13:49:58Z\", dnssec.updated_at);\n\n}\n\n\n", "file_path": "tests/domains_dnssec_test.rs", "rank": 69, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_disable_dnssec() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/dnssec\",\n\n \"disableDnssec/success\",\n\n \"DELETE\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"example.com\";\n\n\n\n let response = client.domains().disable_dnssec(account_id, domain);\n\n\n\n assert_eq!(response.status, 204);\n\n}\n\n\n", "file_path": "tests/domains_dnssec_test.rs", "rank": 70, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_authorize_domain_transfer_out() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/example.com/authorize_transfer_out\",\n\n \"authorizeDomainTransferOut/success\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = String::from(\"example.com\");\n\n\n\n let 
response = client.registrar().transfer_domain_out(account_id, domain);\n\n\n\n assert_eq!(204, response.status);\n\n}\n", "file_path": "tests/registrar_test.rs", "rank": 71, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_get_domain_prices() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/bingo.pizza/prices\",\n\n \"getDomainPrices/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"bingo.pizza\";\n\n\n\n let response = client\n\n .registrar()\n\n .get_domain_prices(account_id, domain)\n\n .unwrap();\n\n let domain_prices = response.data.unwrap();\n\n\n\n assert_eq!(\"bingo.pizza\", domain_prices.domain);\n\n assert_eq!(true, domain_prices.premium);\n\n assert_eq!(20.0, domain_prices.registration_price);\n\n assert_eq!(20.0, domain_prices.renewal_price);\n\n assert_eq!(20.0, domain_prices.transfer_price);\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 72, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_cancel_domain_transfer() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/google.com/transfers/361\",\n\n \"cancelDomainTransfer/success\",\n\n \"DELETE\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = String::from(\"google.com\");\n\n let domain_transfer = 361;\n\n\n\n let response = client\n\n .registrar()\n\n .cancel_domain_transfer(account_id, domain, domain_transfer)\n\n .unwrap();\n\n\n\n assert_eq!(202, response.status);\n\n\n\n let transfer = response.data.unwrap();\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 73, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_list_collaborators() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/1/collaborators\",\n\n \"listCollaborators/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain_id = 1 as u64;\n\n\n\n let response = client\n\n .domains()\n\n .list_collaborators(account_id, domain_id, None);\n\n let collaborators = response.unwrap().data.unwrap();\n\n\n\n assert_eq!(2, collaborators.len());\n\n let first_collaborator = collaborators.first().unwrap();\n\n let second_collaborator = collaborators.last().unwrap();\n\n\n\n assert_eq!(100, first_collaborator.id);\n", "file_path": "tests/domains_collaborators_test.rs", "rank": 74, "score": 81265.03325907332 }, { "content": "#[test]\n\nfn test_check_domain_premium_price() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/ruby.codes/premium_price?action=registration\",\n\n \"checkDomainPremiumPrice/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"ruby.codes\";\n\n\n\n let response = client\n\n .registrar()\n\n .check_domain_premium_price(account_id, domain, None)\n\n .unwrap();\n\n let domain_premium_price = response.data.unwrap();\n\n\n\n assert_eq!(\"2640.00\", domain_premium_price.premium_price);\n\n assert_eq!(\"registration\", domain_premium_price.action);\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 75, "score": 79242.68002103004 }, { "content": "#[test]\n\nfn test_get_domain_prices_failure() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/bingo.pineapple/prices\",\n\n \"getDomainPrices/failure\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"bingo.pineapple\";\n\n\n\n let response = client\n\n .registrar()\n\n .get_domain_prices(account_id, domain)\n\n .unwrap();\n\n let error = response.errors.unwrap();\n\n\n\n 
assert_eq!(\"TLD .PINEAPPLE is not supported\", error.message.unwrap());\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 76, "score": 79242.68002103004 }, { "content": "#[test]\n\nfn test_add_collaborator_success() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/1/collaborators\",\n\n \"addCollaborator/success\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain_id = 1 as u64;\n\n let collaborator_email = \"existing-user@example.com\";\n\n let collaborator = client\n\n .domains()\n\n .add_collaborator(account_id, domain_id, collaborator_email)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(100, collaborator.id);\n\n assert_eq!(domain_id, collaborator.domain_id);\n\n assert_eq!(\"example.com\", collaborator.domain_name);\n", "file_path": "tests/domains_collaborators_test.rs", "rank": 77, "score": 79242.68002103004 }, { "content": "#[test]\n\nfn test_transfer_domain_error_in_dnsimple() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/google.com/transfers\",\n\n \"transferDomain/error-indnsimple\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"google.com\";\n\n let payload = DomainTransferPayload {\n\n registrant_id: 2,\n\n auth_code: String::from(\"THE_AUTH_CODE\"),\n\n whois_privacy: None,\n\n auto_renew: None,\n\n extended_attributes: None,\n\n premium_price: None,\n\n };\n\n\n\n let response = client\n\n .registrar()\n\n .transfer_domain(account_id, domain, payload)\n\n .unwrap();\n\n let error = response.errors.unwrap();\n\n\n\n assert_eq!(\n\n \"The domain google.com is already in DNSimple and cannot be added\",\n\n error.message.unwrap()\n\n );\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 78, "score": 79242.68002103004 }, { "content": "#[test]\n\nfn test_initiate_push_test() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/target-account.test/pushes\",\n\n \"initiatePush/success\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"target-account.test\";\n\n let payload = InitiatePushPayload {\n\n new_account_email: String::from(\"admin@target-account.test\"),\n\n };\n\n\n\n let push = client\n\n .domains()\n\n .initiate_push(account_id, domain, payload)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(1, push.id);\n\n assert_eq!(100, push.domain_id);\n\n assert_eq!(None, push.contact_id);\n\n assert_eq!(2020, push.account_id);\n\n assert_eq!(\"2016-08-11T10:16:03Z\", push.created_at);\n\n assert_eq!(\"2016-08-11T10:16:03Z\", push.updated_at);\n\n assert_eq!(None, push.accepted_at);\n\n}\n\n\n", "file_path": "tests/domains_push_test.rs", "rank": 79, "score": 79242.68002103004 }, { "content": "#[test]\n\nfn get_domain_delegation_test() {\n\n let setup = setup_mock_for(\n\n \"/1385/registrar/domains/example.com/delegation\",\n\n \"getDomainDelegation/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n\n\n let delegation = client\n\n .registrar()\n\n .get_domain_delegation(account_id, String::from(domain))\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(4, delegation.len());\n\n\n\n for (position, value) in delegation.iter().enumerate() {\n\n let number = position + 1;\n\n let should_eq = format!(\"ns{}.dnsimple.com\", number);\n\n assert_eq!(&should_eq, value);\n\n }\n\n}\n\n\n", "file_path": "tests/registrar_name_servers_test.rs", "rank": 80, "score": 77354.07003124521 }, { "content": 
"#[test]\n\nfn change_domain_delegation_test() {\n\n let setup = setup_mock_for(\n\n \"/1385/registrar/domains/example.com/delegation\",\n\n \"changeDomainDelegation/success\",\n\n \"PUT\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n let server_names = vec![\n\n \"ns1.dnsimple.com\",\n\n \"ns2.dnsimple.com\",\n\n \"ns3.dnsimple.com\",\n\n \"ns4.dnsimple.com\",\n\n ];\n\n\n\n let delegation_change = client\n\n .registrar()\n\n .change_domain_delegation(account_id, String::from(domain), server_names)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n for (position, value) in delegation_change.iter().enumerate() {\n\n let number = position + 1;\n\n let should_eq = format!(\"ns{}.dnsimple.com\", number);\n\n assert_eq!(&should_eq, value);\n\n }\n\n}\n\n\n", "file_path": "tests/registrar_name_servers_test.rs", "rank": 81, "score": 77354.07003124521 }, { "content": "#[test]\n\nfn change_domain_delegation_to_vanity() {\n\n let setup = setup_mock_for(\n\n \"/1385/registrar/domains/example.com/delegation/vanity\",\n\n \"changeDomainDelegationToVanity/success\",\n\n \"PUT\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n let server_names = vec![\"ns1.example.com\", \"ns2.example.com\"];\n\n\n\n let vanity_servers = client\n\n .registrar()\n\n .change_domain_delegation_to_vanity(account_id, String::from(domain), server_names)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(2, vanity_servers.len());\n\n\n\n let vanity_server = vanity_servers.first().unwrap();\n\n\n\n assert_eq!(1, vanity_server.id);\n\n assert_eq!(\"ns1.example.com\", vanity_server.name);\n\n assert_eq!(\"127.0.0.1\", vanity_server.ipv4);\n\n assert_eq!(\"::1\", vanity_server.ipv6);\n\n assert_eq!(\"2016-07-11T09:40:19Z\", vanity_server.created_at);\n\n assert_eq!(\"2016-07-11T09:40:19Z\", vanity_server.updated_at);\n\n}\n\n\n", "file_path": "tests/registrar_name_servers_test.rs", "rank": 82, "score": 77354.07003124521 }, { "content": "#[test]\n\nfn test_delete_email_forward() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/email_forwards/41872\",\n\n \"deleteEmailForward/success\",\n\n \"DELETE\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"example.com\";\n\n let email_forward = 41872;\n\n\n\n let response = client\n\n .domains()\n\n .delete_email_forward(account_id, domain, email_forward);\n\n\n\n assert_eq!(response.status, 204);\n\n}\n", "file_path": "tests/domains_email_forwards_test.rs", "rank": 83, "score": 77354.07003124521 }, { "content": "#[test]\n\nfn change_domain_delegation_from_vanity() {\n\n let setup = setup_mock_for(\n\n \"/1385/registrar/domains/example.com/delegation/vanity\",\n\n \"changeDomainDelegationFromVanity/success\",\n\n \"DELETE\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n\n\n let response = client\n\n .registrar()\n\n .change_domain_delegation_from_vanity(account_id, String::from(domain));\n\n\n\n assert_eq!(204, response.status);\n\n}\n", "file_path": "tests/registrar_name_servers_test.rs", "rank": 84, "score": 77354.07003124521 }, { "content": "#[test]\n\nfn test_get_email_forward() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/email_forwards/41872\",\n\n \"getEmailForward/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"example.com\";\n\n let email_forward = 41872;\n\n\n\n let record = client\n\n 
.domains()\n\n .get_email_forward(account_id, domain, email_forward)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(41872, record.id);\n\n assert_eq!(235146, record.domain_id);\n\n assert_eq!(\"example@dnsimple.xyz\", record.alias_email);\n\n assert_eq!(\"example@example.com\", record.destination_email);\n\n assert_eq!(\"2021-01-25T13:54:40Z\", record.created_at);\n\n assert_eq!(\"2021-01-25T13:54:40Z\", record.updated_at);\n\n assert_eq!(\"example@dnsimple.xyz\", record.from);\n\n assert_eq!(\"example@example.com\", record.to);\n\n}\n\n\n", "file_path": "tests/domains_email_forwards_test.rs", "rank": 85, "score": 77354.07003124521 }, { "content": "#[test]\n\nfn test_list_email_forwards() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/email_forwards\",\n\n \"listEmailForwards/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"example.com\";\n\n\n\n let response = client\n\n .domains()\n\n .list_email_forwards(account_id, domain, None)\n\n .unwrap();\n\n let email_forwards_list = response.data.unwrap();\n\n\n\n assert_eq!(2, email_forwards_list.len());\n\n\n\n let email_forwards = email_forwards_list.first().unwrap();\n\n\n\n assert_eq!(17702, email_forwards.id);\n\n assert_eq!(228963, email_forwards.domain_id);\n\n assert_eq!(\".*@a-domain.com\", email_forwards.from);\n\n assert_eq!(\"jane.smith@example.com\", email_forwards.to);\n\n assert_eq!(\"2016-02-04T13:59:29Z\", email_forwards.created_at);\n\n assert_eq!(\"2016-02-04T13:59:29Z\", email_forwards.updated_at);\n\n}\n\n\n", "file_path": "tests/domains_email_forwards_test.rs", "rank": 86, "score": 77354.07003124521 }, { "content": "#[test]\n\nfn test_add_collaborator_invite_success() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/1/collaborators\",\n\n \"addCollaborator/invite-success\",\n\n \"post\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain_id = 1 as u64;\n\n let collaborator_email = \"invited-user@example.com\";\n\n\n\n let collaborator = client\n\n .domains()\n\n .add_collaborator(account_id, domain_id, collaborator_email)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(101, collaborator.id);\n\n assert_eq!(domain_id, collaborator.domain_id);\n\n assert_eq!(\"example.com\", collaborator.domain_name);\n\n assert_eq!(None, collaborator.user_id);\n\n assert_eq!(\"invited-user@example.com\", collaborator.user_email);\n\n assert_eq!(true, collaborator.invitation);\n\n assert_eq!(\"2016-10-07T08:51:12Z\", collaborator.created_at);\n\n assert_eq!(\"2016-10-07T08:51:12Z\", collaborator.updated_at);\n\n assert_eq!(None, collaborator.accepted_at.as_ref());\n\n}\n\n\n", "file_path": "tests/domains_collaborators_test.rs", "rank": 87, "score": 77354.07003124521 }, { "content": "#[test]\n\nfn test_create_email_forward() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/email_forwards\",\n\n \"createEmailForward/created\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"example.com\";\n\n let payload = EmailForwardPayload {\n\n alias_name: String::from(\"example@dnsimple.xyz\"),\n\n destination_email: String::from(\"example@example.com\"),\n\n };\n\n\n\n let record = client\n\n .domains()\n\n .create_email_forward(account_id, domain, payload)\n\n .unwrap()\n\n .data\n\n .unwrap();\n", "file_path": "tests/domains_email_forwards_test.rs", "rank": 88, "score": 77354.07003124521 }, { "content": "#[test]\n\nfn 
test_list_delegation_signer_records() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/ds_records\",\n\n \"listDelegationSignerRecords/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385 as u64;\n\n let domain = \"example.com\";\n\n\n\n let response = client\n\n .domains()\n\n .list_delegation_signer_records(account_id, domain, None)\n\n .unwrap();\n\n let signer_records = response.data.unwrap();\n\n\n\n assert_eq!(1, signer_records.len());\n\n\n\n let record = signer_records.first().unwrap();\n\n\n", "file_path": "tests/domains_signer_records_test.rs", "rank": 89, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn test_create_delegation_signer_record() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/ds_records\",\n\n \"createDelegationSignerRecord/created\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n\n\n let payload = DelegationSignerRecordPayload {\n\n algorithm: String::from(\"13\"),\n\n digest: String::from(\"684a1f049d7d082b7f98691657da5a65764913df7f065f6f8c36edf62d66ca03\"),\n\n digest_type: String::from(\"2\"),\n\n keytag: String::from(\"2371\"),\n\n public_key: None,\n\n };\n\n\n\n let record = client\n\n .domains()\n", "file_path": "tests/domains_signer_records_test.rs", "rank": 90, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn test_check_domain_premium_price_tld_not_supported() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/.love/premium_price?action=registration\",\n\n \"checkDomainPremiumPrice/error_400_tld_not_supported\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \".love\";\n\n\n\n let response = client\n\n .registrar()\n\n .check_domain_premium_price(account_id, domain, None)\n\n .unwrap();\n\n let error = response.errors.unwrap();\n\n\n\n assert_eq!(\"TLD .LOVE is not supported\", error.message.unwrap());\n\n}\n\n\n", "file_path": "tests/registrar_test.rs", "rank": 91, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn test_transfer_domain_error_missing_auth_code() {\n\n let setup = setup_mock_for(\n\n \"/1010/registrar/domains/google.com/transfers\",\n\n \"transferDomain/error-missing-authcode\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1010;\n\n let domain = \"google.com\";\n\n let payload = DomainTransferPayload {\n\n registrant_id: 2,\n\n auth_code: String::from(\"\"),\n\n whois_privacy: None,\n\n auto_renew: None,\n\n extended_attributes: None,\n\n premium_price: None,\n\n };\n\n\n\n let response = client\n\n .registrar()\n", "file_path": "tests/registrar_test.rs", "rank": 92, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn disable_domain_auto_renewal_test() {\n\n let setup = setup_mock_for(\n\n \"/1385/registrar/domains/example.com/auto_renewal\",\n\n \"disableDomainAutoRenewal/success\",\n\n \"DELETE\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n\n\n let response = client\n\n .registrar()\n\n .disable_domain_auto_renewal(account_id, String::from(domain));\n\n\n\n assert_eq!(204, response.status);\n\n}\n", "file_path": "tests/registrar_auto_renewal_test.rs", "rank": 93, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn enable_domain_auto_renewal_test() {\n\n let setup = setup_mock_for(\n\n \"/1385/registrar/domains/example.com/auto_renewal\",\n\n \"enableDomainAutoRenewal/success\",\n\n \"PUT\",\n\n );\n\n let client = setup.0;\n\n let account_id = 
1385;\n\n let domain = \"example.com\";\n\n\n\n let response = client\n\n .registrar()\n\n .enable_domain_auto_renewal(account_id, String::from(domain));\n\n\n\n assert_eq!(204, response.status);\n\n}\n\n\n", "file_path": "tests/registrar_auto_renewal_test.rs", "rank": 94, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn test_delete_delegation_signer_record() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/ds_records/24\",\n\n \"deleteDelegationSignerRecord/success\",\n\n \"DELETE\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n let delegation_signer_record_id = 24;\n\n\n\n let response = client.domains().delete_delegation_signer_record(\n\n account_id,\n\n domain,\n\n delegation_signer_record_id,\n\n );\n\n\n\n assert_eq!(response.status, 204);\n\n}\n", "file_path": "tests/domains_signer_records_test.rs", "rank": 95, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn get_empty_domain_delegation_test() {\n\n let setup = setup_mock_for(\n\n \"/1385/registrar/domains/example.com/delegation\",\n\n \"getDomainDelegation/success-empty\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n\n\n let delegation = client\n\n .registrar()\n\n .get_domain_delegation(account_id, String::from(domain))\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert!(delegation.is_empty());\n\n}\n\n\n", "file_path": "tests/registrar_name_servers_test.rs", "rank": 96, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn test_get_delegation_signer_record() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/ds_records\",\n\n \"getDelegationSignerRecord/success\",\n\n \"GET\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n\n\n let record = client\n\n .domains()\n\n .get_delegation_signer_record(account_id, domain)\n\n .unwrap()\n\n .data\n\n .unwrap();\n\n\n\n assert_eq!(24, record.id);\n\n assert_eq!(1010, record.domain_id);\n\n assert_eq!(\"8\", record.algorithm);\n", "file_path": "tests/domains_signer_records_test.rs", "rank": 97, "score": 75586.36078283133 }, { "content": "#[test]\n\nfn test_create_delegation_signer_record_validation_error() {\n\n let setup = setup_mock_for(\n\n \"/1385/domains/example.com/ds_records\",\n\n \"createDelegationSignerRecord/validation-error\",\n\n \"POST\",\n\n );\n\n let client = setup.0;\n\n let account_id = 1385;\n\n let domain = \"example.com\";\n\n\n\n let payload = DelegationSignerRecordPayload {\n\n algorithm: String::from(\"\"),\n\n digest: String::from(\"\"),\n\n digest_type: String::from(\"\"),\n\n keytag: String::from(\"\"),\n\n public_key: None,\n\n };\n\n\n\n let response = client\n\n .domains()\n", "file_path": "tests/domains_signer_records_test.rs", "rank": 98, "score": 72370.00108277619 }, { "content": "#[derive(Debug, Deserialize, Serialize)]\n\nstruct OAuthTokenParams {\n\n grant_type: String,\n\n client_id: String,\n\n client_secret: String,\n\n code: String,\n\n redirect_uri: String,\n\n state: String,\n\n}\n\n\n\n/// Represents an access token containing the token to access the API\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct AccessToken {\n\n /// The token you can use to authenticate.\n\n pub access_token: String,\n\n /// The account ID in DNSimple this token belongs to.\n\n pub account_id: u64,\n\n /// The token scope (not used for now).\n\n pub scope: Option<String>,\n\n /// The token type.\n\n pub token_type: String,\n", "file_path": 
"src/dnsimple/oauth.rs", "rank": 99, "score": 56729.61936404431 } ]
Rust
src/lib.rs
dnaeon/rust-libzmq
6925d29555743e714864378c08eaeffaeed5e8f1
extern crate libc; const ZMQ_HAUSNUMERO: usize = 156384712; pub const EFSM: usize = ZMQ_HAUSNUMERO + 51; pub const ENOCOMPATPROTO: usize = ZMQ_HAUSNUMERO + 52; pub const ETERM: usize = ZMQ_HAUSNUMERO + 53; pub const EMTHREAD: usize = ZMQ_HAUSNUMERO + 54; pub const ZMQ_IO_THREADS: usize = 1; pub const ZMQ_MAX_SOCKETS: usize = 2; pub const ZMQ_SOCKET_LIMIT: usize = 3; pub const ZMQ_THREAD_PRIORITY: usize = 3; pub const ZMQ_THREAD_SCHED_POLICY: usize = 4; pub const ZMQ_IO_THREADS_DFLT: usize = 1; pub const ZMQ_MAX_SOCKETS_DFLT: usize = 1023; pub const ZMQ_THREAD_PRIORITY_DFLT: isize = -1; pub const ZMQ_THREAD_SCHED_POLICY_DFLT: isize = -1; #[repr(C)] #[derive(Copy)] pub struct Struct_zmq_msg_t { pub _m: [::libc::c_uchar; 64usize], } impl Clone for Struct_zmq_msg_t { fn clone(&self) -> Self { *self } } #[allow(non_camel_case_types)] pub type zmq_msg_t = Struct_zmq_msg_t; #[allow(non_camel_case_types)] pub type zmq_free_fn = extern "C" fn(data: *mut ::libc::c_void, hint: *mut ::libc::c_void); pub const ZMQ_PAIR: usize = 0; pub const ZMQ_PUB: usize = 1; pub const ZMQ_SUB: usize = 2; pub const ZMQ_REQ: usize = 3; pub const ZMQ_REP: usize = 4; pub const ZMQ_DEALER: usize = 5; pub const ZMQ_ROUTER: usize = 6; pub const ZMQ_PULL: usize = 7; pub const ZMQ_PUSH: usize = 8; pub const ZMQ_XPUB: usize = 9; pub const ZMQ_XSUB: usize = 10; pub const ZMQ_STREAM: usize = 11; pub const ZMQ_SERVER: usize = 12; pub const ZMQ_CLIENT: usize = 13; pub const ZMQ_XREQ: usize = ZMQ_DEALER; pub const ZMQ_XREP: usize = ZMQ_ROUTER; pub const ZMQ_AFFINITY: usize = 4; pub const ZMQ_IDENTITY: usize = 5; pub const ZMQ_SUBSCRIBE: usize = 6; pub const ZMQ_UNSUBSCRIBE: usize = 7; pub const ZMQ_RATE: usize = 8; pub const ZMQ_RECOVERY_IVL: usize = 9; pub const ZMQ_SNDBUF: usize = 11; pub const ZMQ_RCVBUF: usize = 12; pub const ZMQ_RCVMORE: usize = 13; pub const ZMQ_FD: usize = 14; pub const ZMQ_EVENTS: usize = 15; pub const ZMQ_TYPE: usize = 16; pub const ZMQ_LINGER: usize = 17; pub const ZMQ_RECONNECT_IVL: usize = 18; pub const ZMQ_BACKLOG: usize = 19; pub const ZMQ_RECONNECT_IVL_MAX: usize = 21; pub const ZMQ_MAXMSGSIZE: usize = 22; pub const ZMQ_SNDHWM: usize = 23; pub const ZMQ_RCVHWM: usize = 24; pub const ZMQ_MULTICAST_HOPS: usize = 25; pub const ZMQ_RCVTIMEO: usize = 27; pub const ZMQ_SNDTIMEO: usize = 28; pub const ZMQ_LAST_ENDPOINT: usize = 32; pub const ZMQ_ROUTER_MANDATORY: usize = 33; pub const ZMQ_TCP_KEEPALIVE: usize = 34; pub const ZMQ_TCP_KEEPALIVE_CNT: usize = 35; pub const ZMQ_TCP_KEEPALIVE_IDLE: usize = 36; pub const ZMQ_TCP_KEEPALIVE_INTVL: usize = 37; pub const ZMQ_IMMEDIATE: usize = 39; pub const ZMQ_XPUB_VERBOSE: usize = 40; pub const ZMQ_ROUTER_RAW: usize = 41; pub const ZMQ_IPV6: usize = 42; pub const ZMQ_MECHANISM: usize = 43; pub const ZMQ_PLAIN_SERVER: usize = 44; pub const ZMQ_PLAIN_USERNAME: usize = 45; pub const ZMQ_PLAIN_PASSWORD: usize = 46; pub const ZMQ_CURVE_SERVER: usize = 47; pub const ZMQ_CURVE_PUBLICKEY: usize = 48; pub const ZMQ_CURVE_SECRETKEY: usize = 49; pub const ZMQ_CURVE_SERVERKEY: usize = 50; pub const ZMQ_PROBE_ROUTER: usize = 51; pub const ZMQ_REQ_CORRELATE: usize = 52; pub const ZMQ_REQ_RELAXED: usize = 53; pub const ZMQ_CONFLATE: usize = 54; pub const ZMQ_ZAP_DOMAIN: usize = 55; pub const ZMQ_ROUTER_HANDOVER: usize = 56; pub const ZMQ_TOS: usize = 57; pub const ZMQ_CONNECT_RID: usize = 61; pub const ZMQ_GSSAPI_SERVER: usize = 62; pub const ZMQ_GSSAPI_PRINCIPAL: usize = 63; pub const ZMQ_GSSAPI_SERVICE_PRINCIPAL: usize = 64; pub const ZMQ_GSSAPI_PLAINTEXT: usize = 65; pub const 
ZMQ_HANDSHAKE_IVL: usize = 66; pub const ZMQ_SOCKS_PROXY: usize = 68; pub const ZMQ_XPUB_NODROP: usize = 69; pub const ZMQ_BLOCKY: usize = 70; pub const ZMQ_XPUB_MANUAL: usize = 71; pub const ZMQ_XPUB_WELCOME_MSG: usize = 72; pub const ZMQ_STREAM_NOTIFY: usize = 73; pub const ZMQ_INVERT_MATCHING: usize = 74; pub const ZMQ_HEARTBEAT_IVL: usize = 75; pub const ZMQ_HEARTBEAT_TTL: usize = 76; pub const ZMQ_HEARTBEAT_TIMEOUT: usize = 77; pub const ZMQ_MORE: usize = 1; pub const ZMQ_SRCFD: usize = 2; pub const ZMQ_SHARED: usize = 3; pub const ZMQ_DONTWAIT: usize = 1; pub const ZMQ_SNDMORE: usize = 2; pub const ZMQ_NULL: usize = 0; pub const ZMQ_PLAIN: usize = 1; pub const ZMQ_CURVE: usize = 2; pub const ZMQ_GSSAPI: usize = 3; pub const ZMQ_TCP_ACCEPT_FILTER: usize = 38; pub const ZMQ_IPC_FILTER_PID: usize = 58; pub const ZMQ_IPC_FILTER_UID: usize = 59; pub const ZMQ_IPC_FILTER_GID: usize = 60; pub const ZMQ_IPV4ONLY: usize = 31; pub const ZMQ_DELAY_ATTACH_ON_CONNECT: usize = ZMQ_IMMEDIATE; pub const ZMQ_NOBLOCK: usize = ZMQ_DONTWAIT; pub const ZMQ_FAIL_UNROUTABLE: usize = ZMQ_ROUTER_MANDATORY; pub const ZMQ_ROUTER_BEHAVIOR: usize = ZMQ_ROUTER_MANDATORY; pub const ZMQ_EVENT_CONNECTED: usize = 1; pub const ZMQ_EVENT_CONNECT_DELAYED: usize = 2; pub const ZMQ_EVENT_CONNECT_RETRIED: usize = 4; pub const ZMQ_EVENT_LISTENING: usize = 8; pub const ZMQ_EVENT_BIND_FAILED: usize = 16; pub const ZMQ_EVENT_ACCEPTED: usize = 32; pub const ZMQ_EVENT_ACCEPT_FAILED: usize = 64; pub const ZMQ_EVENT_CLOSED: usize = 128; pub const ZMQ_EVENT_CLOSE_FAILED: usize = 256; pub const ZMQ_EVENT_DISCONNECTED: usize = 512; pub const ZMQ_EVENT_MONITOR_STOPPED: usize = 1024; pub const ZMQ_EVENT_ALL: usize = 65536; pub const ZMQ_POLLIN: usize = 1; pub const ZMQ_POLLOUT: usize = 2; pub const ZMQ_POLLERR: usize = 4; pub const ZMQ_POLLPRI: usize = 8; #[repr(C)] #[derive(Copy)] pub struct Struct_zmq_pollitem_t { socket: *mut libc::c_void, fd: libc::c_int, events: libc::c_short, revents: libc::c_short, } impl Clone for Struct_zmq_pollitem_t { fn clone(&self) -> Self { *self } } impl Default for Struct_zmq_pollitem_t { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } #[allow(non_camel_case_types)] pub type zmq_pollitem_t = Struct_zmq_pollitem_t; pub const ZMQ_POLLITEMS_DFLT: usize = 16; pub const ZMQ_HAS_CAPABILITIES: usize = 1; pub const ZMQ_STREAMER: usize = 1; pub const ZMQ_FORWARDER: usize = 2; pub const ZMQ_QUEUE: usize = 3; #[repr(C)] #[allow(non_camel_case_types)] pub struct iovec; #[allow(non_camel_case_types)] pub type zmq_thread_fn = extern "C" fn(arg: *mut libc::c_void); #[link(name = "zmq")] extern { pub fn zmq_errno(); pub fn zmq_strerror(errnum: &i32) -> *const libc::c_char; pub fn zmq_version(major: &mut i32, minor: &mut i32, patch: &mut i32); pub fn zmq_ctx_new() -> *mut libc::c_void; pub fn zmq_ctx_term(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_shutdown(ctx_: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_set(context: *mut libc::c_void, option: libc::c_int, optval: libc::c_int) -> libc::c_int; pub fn zmq_ctx_get(context: *mut libc::c_void, option: libc::c_int) -> libc::c_int; pub fn zmq_init(io_threads: libc::c_int) -> *mut libc::c_void; pub fn zmq_term(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_destroy(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_msg_init(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_init_size(msg: *mut zmq_msg_t, size: libc::size_t) -> libc::c_int; pub fn zmq_msg_init_data(msg: *mut zmq_msg_t, data: *mut libc::c_void, size: libc::size_t, 
ffn: *mut zmq_free_fn, hint: *mut libc::c_void) -> libc::c_int; pub fn zmq_msg_send(msg: *mut zmq_msg_t, s: *mut libc::c_void, flags: libc::c_int) -> libc::c_int; pub fn zmq_msg_recv(msg: *mut zmq_msg_t, s: *mut libc::c_void, flags: libc::c_int) -> libc::c_int; pub fn zmq_msg_close(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_move(dest: *mut zmq_msg_t, src: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_copy(dest: *mut zmq_msg_t, src: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_data(msg: *mut zmq_msg_t) -> *mut libc::c_void; pub fn zmq_msg_size(msg: *mut zmq_msg_t) -> libc::size_t; pub fn zmq_msg_more(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_get(msg: *mut zmq_msg_t, property: libc::c_int) -> libc::c_int; pub fn zmq_msg_set(msg: *mut zmq_msg_t, property: libc::c_int, optval: libc::c_int) -> libc::c_int; pub fn zmq_msg_gets(msg: *mut zmq_msg_t, property: *const libc::c_char) -> *const libc::c_char; pub fn zmq_msg_set_routing_id(msg: *mut zmq_msg_t, routing_id: libc::uint32_t) -> libc::c_int; pub fn zmq_msg_get_routing_id(msg: *mut zmq_msg_t) -> libc::uint32_t; pub fn zmq_socket(s: *mut libc::c_void, t: libc::c_int) -> *mut libc::c_void; pub fn zmq_close(s: *mut libc::c_void) -> libc::c_int; pub fn zmq_setsockopt(s: *mut libc::c_void, option: libc::c_int, optval: *const libc::c_void, optvallen: libc::size_t) -> libc::c_int; pub fn zmq_getsockopt(s: *mut libc::c_void, option: libc::c_int, optval: *mut libc::c_void, optvallen: *mut libc::size_t) -> libc::c_int; pub fn zmq_bind(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_connect(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_unbind(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_disconnect(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_send(s: *mut libc::c_void, buf: *const libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_send_const(s: *mut libc::c_void, buf: *const libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recv(s: *mut libc::c_void, buf: *mut libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_socket_monitor(s: *mut libc::c_void, addr: *const libc::c_char, events: libc::c_int) -> libc::c_int; pub fn zmq_poll(items: *mut zmq_pollitem_t, nitems: libc::c_int, timeout: libc::c_long) -> libc::c_int; pub fn zmq_proxy(frontend: *mut libc::c_void, backend: *mut libc::c_void, capture: *mut libc::c_void) -> libc::c_int; pub fn zmq_proxy_steerable(frontend: *mut libc::c_void, backend: *mut libc::c_void, capture: *mut libc::c_void, control: *mut libc::c_void) -> libc::c_int; pub fn zmq_has(capabitility: *const libc::c_char) -> libc::c_int; pub fn zmq_device(t: libc::c_int, frontend: *mut libc::c_void, backend: *mut libc::c_void) -> libc::c_int; pub fn zmq_sendmsg(s: *mut libc::c_void, msg: *mut zmq_msg_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recvmsg(s: *mut libc::c_void, msg: *mut zmq_msg_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_z85_encode(dest: *mut libc::c_char, data: *const libc::uint8_t, size: libc::size_t) -> *mut libc::c_char; pub fn zmq_z85_decode(dest: *mut libc::uint8_t, string: *const libc::c_char) -> libc::uint8_t; pub fn zmq_curve_keypair(z85_public_key: *mut libc::c_char, z85_secret_key: *mut libc::c_char) -> libc::c_int; pub fn zmq_atomic_counter_new() -> *mut libc::c_void; pub fn zmq_atomic_counter_set(counter: *mut libc::c_void, value: libc::c_int); pub fn zmq_atomic_counter_inc(counter: 
*mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_dec(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_value(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_destroy(counter: *mut *mut libc::c_void); pub fn zmq_stopwatch_start() -> *mut libc::c_void; pub fn zmq_stopwatch_stop(watch_: *mut libc::c_void) -> libc::c_ulong; pub fn zmq_sleep(seconds_: libc::c_int); pub fn zmq_threadstart(func: *mut zmq_thread_fn, arg: *mut libc::c_void); pub fn zmq_threadclose(thread: *mut libc::c_void); pub fn zmq_sendiov(s: *mut libc::c_void, iov: *mut iovec, count: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recviov(s: *mut libc::c_void, iov: *mut iovec, count: *mut libc::size_t, flags: libc::c_int) -> libc::c_int; }
extern crate libc; const ZMQ_HAUSNUMERO: usize = 156384712; pub const EFSM: usize = ZMQ_HAUSNUMERO + 51; pub const ENOCOMPATPROTO: usize = ZMQ_HAUSNUMERO + 52; pub const ETERM: usize = ZMQ_HAUSNUMERO + 53; pub const EMTHREAD: usize = ZMQ_HAUSNUMERO + 54; pub const ZMQ_IO_THREADS: usize = 1; pub const ZMQ_MAX_SOCKETS: usize = 2; pub const ZMQ_SOCKET_LIMIT: usize = 3; pub const ZMQ_THREAD_PRIORITY: usize = 3; pub const ZMQ_THREAD_SCHED_POLICY: usize = 4; pub const ZMQ_IO_THREADS_DFLT: usize = 1; pub const ZMQ_MAX_SOCKETS_DFLT: usize = 1023; pub const ZMQ_THREAD_PRIORITY_DFLT: isize = -1; pub const ZMQ_THREAD_SCHED_POLICY_DFLT: isize = -1; #[repr(C)] #[derive(Copy)] pub struct Struct_zmq_msg_t { pub _m: [::libc::c_uchar; 64usize], } impl Clone for Struct_zmq_msg_t { fn clone(&self) -> Self { *self } } #[allow(non_camel_case_types)] pub type zmq_msg_t = Struct_zmq_msg_t; #[allow(non_camel_case_types)] pub type zmq_free_fn = extern "C" fn(data: *mut ::libc::c_void, hint: *mut ::libc::c_void); pub const ZMQ_PAIR: usize = 0; pub const ZMQ_PUB: usize = 1; pub const ZMQ_SUB: usize = 2; pub const ZMQ_REQ: usize = 3; pub const ZMQ_REP: usize = 4; pub const ZMQ_DEALER: usize = 5; pub const ZMQ_ROUTER: usize = 6; pub const ZMQ_PULL: usize = 7; pub const ZMQ_PUSH: usize = 8; pub const ZMQ_XPUB: usize = 9; pub const ZMQ_XSUB: usize = 10; pub const ZMQ_STREAM: usize = 11; pub const ZMQ_SERVER: usize = 12; pub const ZMQ_CLIENT: usize = 13; pub const ZMQ_XREQ: usize = ZMQ_DEALER; pub const ZMQ_XREP: usize = ZMQ_ROUTER; pub const ZMQ_AFFINITY: usize = 4; pub const ZMQ_IDENTITY: usize = 5; pub const ZMQ_SUBSCRIBE: usize = 6; pub const ZMQ_UNSUBSCRIBE: usize = 7; pub const ZMQ_RATE: usize = 8; pub const ZMQ_RECOVERY_IVL: usize = 9; pub const ZMQ_SNDBUF: usize = 11; pub const ZMQ_RCVBUF: usize = 12; pub const ZMQ_RCVMORE: usize = 13; pub const ZMQ_FD: usize = 14; pub const ZMQ_EVENTS: usize = 15; pub const ZMQ_TYPE: usize = 16; pub const ZMQ_LINGER: usize = 17; pub const ZMQ_RECONNECT_IVL: usize = 18; pub const ZMQ_BACKLOG: usize = 19; pub const ZMQ_RECONNECT_IVL_MAX: usize = 21; pub const ZMQ_MAXMSGSIZE: usize = 22; pub const ZMQ_SNDHWM: usize = 23; pub const ZMQ_RCVHWM: usize = 24; pub const ZMQ_MULTICAST_HOPS: usize = 25; pub const ZMQ_RCVTIMEO: usize = 27; pub const ZMQ_SNDTIMEO: usize = 28; pub const ZMQ_LAST_ENDPOINT: usize = 32; pub const ZMQ_ROUTER_MANDATORY: usize = 33; pub const ZMQ_TCP_KEEPALIVE: usize = 34; pub const ZMQ_TCP_KEEPALIVE_CNT: usize = 35; pub const ZMQ_TCP_KEEPALIVE_IDLE: usize = 36; pub const ZMQ_TCP_KEEPALIVE_INTVL: usize = 37; pub const ZMQ_IMMEDIATE: usize = 39; pub const ZMQ_XPUB_VERBOSE: usize = 40; pub const ZMQ_ROUTER_RAW: usize = 41; pub const ZMQ_IPV6: usize = 42; pub const ZMQ_MECHANISM: usize = 43; pub const ZMQ_PLAIN_SERVER: usize = 44; pub const ZMQ_PLAIN_USERNAME: usize = 45; pub const ZMQ_PLAIN_PASSWORD: usize = 46; pub const ZMQ_CURVE_SERVER: usize = 47; pub const ZMQ_CURVE_PUBLICKEY: usize = 48; pub const ZMQ_CURVE_SECRETKEY: usize = 49; pub const ZMQ_CURVE_SERVERKEY: usize = 50; pub const ZMQ_PROBE_ROUTER: usize = 51; pub const ZMQ_REQ_CORRELATE: usize = 52; pub const ZMQ_REQ_RELAXED: usize = 53; pub const ZMQ_CONFLATE: usize = 54; pub const ZMQ_ZAP_DOMAIN: usize = 55; pub const ZMQ_ROUTER_HANDOVER: usize = 56; pub const ZMQ_TOS: usize = 57; pub const ZMQ_CONNECT_RID: usize = 61; pub const ZMQ_GSSAPI_SERVER: usize = 62; pub const ZMQ_GSSAPI_PRINCIPAL: usize = 63; pub const ZMQ_GSSAPI_SERVICE_PRINCIPAL: usize = 64; pub const ZMQ_GSSAPI_PLAINTEXT: usize = 65; pub const 
ZMQ_HANDSHAKE_IVL: usize = 66; pub const ZMQ_SOCKS_PROXY: usize = 68; pub const ZMQ_XPUB_NODROP: usize = 69; pub const ZMQ_BLOCKY: usize = 70; pub const ZMQ_XPUB_MANUAL: usize = 71; pub const ZMQ_XPUB_WELCOME_MSG: usize = 72; pub const ZMQ_STREAM_NOTIFY: usize = 73; pub const ZMQ_INVERT_MATCHING: usize = 74; pub const ZMQ_HEARTBEAT_IVL: usize = 75; pub const ZMQ_HEARTBEAT_TTL: usize = 76; pub const ZMQ_HEARTBEAT_TIMEOUT: usize = 77; pub const ZMQ_MORE: usize = 1; pub const ZMQ_SRCFD: usize = 2; pub const ZMQ_SHARED: usize = 3; pub const ZMQ_DONTWAIT: usize = 1; pub const ZMQ_SNDMORE: usize = 2; pub const ZMQ_NULL: usize = 0; pub const ZMQ_PLAIN: usize = 1; pub const ZMQ_CURVE: usize = 2; pub const ZMQ_GSSAPI: usize = 3; pub const ZMQ_TCP_ACCEPT_FILTER: usize = 38; pub const ZMQ_IPC_FILTER_PID: usize = 58; pub const ZMQ_IPC_FILTER_UID: usize = 59; pub const ZMQ_IPC_FILTER_GID: usize = 60; pub const ZMQ_IPV4ONLY: usize = 31; pub const ZMQ_DELAY_ATTACH_ON_CONNECT: usize = ZMQ_IMMEDIATE; pub const ZMQ_NOBLOCK: usize = ZMQ_DONTWAIT; pub const ZMQ_FAIL_UNROUTABLE: usize = ZMQ_ROUTER_MANDATORY; pub const ZMQ_ROUTER_BEHAVIOR: usize = ZMQ_ROUTER_MANDATORY; pub const ZMQ_EVENT_CONNECTED: usize = 1; pub const ZMQ_EVENT_CONNECT_DELAYED: usize = 2; pub const ZMQ_EVENT_CONNECT_RETRIED: usize = 4; pub const ZMQ_EVENT_LISTENING: usize = 8; pub const ZMQ_EVENT_BIND_FAILED: usize = 16; pub const ZMQ_EVENT_ACCEPTED: usize = 32; pub const ZMQ_EVENT_ACCEPT_FAILED: usize = 64; pub const ZMQ_EVENT_CLOSED: usize = 128; pub const ZMQ_EVENT_CLOSE_FAILED: usize = 256; pub const ZMQ_EVENT_DISCONNECTED: usize = 512; pub const ZMQ_EVENT_MONITOR_STOPPED: usize = 1024; pub const ZMQ_EVENT_ALL: usize = 65536; pub const ZMQ_POLLIN: usize = 1; pub const ZMQ_POLLOUT: usize = 2; pub const ZMQ_POLLERR: usize = 4; pub const ZMQ_POLLPRI: usize = 8; #[repr(C)] #[derive(Copy)] pub struct Struct_zmq_pollitem_t { socket: *mut libc::c_void, fd: libc::c_int, events: libc::c_short, revents: libc::c_short, } impl Clone for Struct_zmq_pollitem_t { fn clone(&self) -> Self { *self } } impl Default for Struct_zmq_pollitem_t { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } #[allow(non_camel_case_types)] pub type zmq_pollitem_t = Struct_zmq_pollitem_t; pub const ZMQ_POLLITEMS_DFLT: usize = 16; pub const ZMQ_HAS_CAPABILITIES: usize = 1; pub const ZMQ_STREAMER: usize = 1; pub const ZMQ_FORWARDER: usize = 2; pub const ZMQ_QUEUE: usize = 3; #[repr(C)] #[allow(non_camel_case_types)] pub struct iovec; #[allow(non_camel_case_types)] pub type zmq_thread_fn = extern "C" fn(arg: *mut libc::c_void); #[link(name = "zmq")] extern {
ibc::c_void, t: libc::c_int) -> *mut libc::c_void; pub fn zmq_close(s: *mut libc::c_void) -> libc::c_int; pub fn zmq_setsockopt(s: *mut libc::c_void, option: libc::c_int, optval: *const libc::c_void, optvallen: libc::size_t) -> libc::c_int; pub fn zmq_getsockopt(s: *mut libc::c_void, option: libc::c_int, optval: *mut libc::c_void, optvallen: *mut libc::size_t) -> libc::c_int; pub fn zmq_bind(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_connect(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_unbind(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_disconnect(s: *mut libc::c_void, addr: *const libc::c_char) -> libc::c_int; pub fn zmq_send(s: *mut libc::c_void, buf: *const libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_send_const(s: *mut libc::c_void, buf: *const libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recv(s: *mut libc::c_void, buf: *mut libc::c_void, len: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_socket_monitor(s: *mut libc::c_void, addr: *const libc::c_char, events: libc::c_int) -> libc::c_int; pub fn zmq_poll(items: *mut zmq_pollitem_t, nitems: libc::c_int, timeout: libc::c_long) -> libc::c_int; pub fn zmq_proxy(frontend: *mut libc::c_void, backend: *mut libc::c_void, capture: *mut libc::c_void) -> libc::c_int; pub fn zmq_proxy_steerable(frontend: *mut libc::c_void, backend: *mut libc::c_void, capture: *mut libc::c_void, control: *mut libc::c_void) -> libc::c_int; pub fn zmq_has(capabitility: *const libc::c_char) -> libc::c_int; pub fn zmq_device(t: libc::c_int, frontend: *mut libc::c_void, backend: *mut libc::c_void) -> libc::c_int; pub fn zmq_sendmsg(s: *mut libc::c_void, msg: *mut zmq_msg_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recvmsg(s: *mut libc::c_void, msg: *mut zmq_msg_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_z85_encode(dest: *mut libc::c_char, data: *const libc::uint8_t, size: libc::size_t) -> *mut libc::c_char; pub fn zmq_z85_decode(dest: *mut libc::uint8_t, string: *const libc::c_char) -> libc::uint8_t; pub fn zmq_curve_keypair(z85_public_key: *mut libc::c_char, z85_secret_key: *mut libc::c_char) -> libc::c_int; pub fn zmq_atomic_counter_new() -> *mut libc::c_void; pub fn zmq_atomic_counter_set(counter: *mut libc::c_void, value: libc::c_int); pub fn zmq_atomic_counter_inc(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_dec(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_value(counter: *mut libc::c_void) -> libc::c_int; pub fn zmq_atomic_counter_destroy(counter: *mut *mut libc::c_void); pub fn zmq_stopwatch_start() -> *mut libc::c_void; pub fn zmq_stopwatch_stop(watch_: *mut libc::c_void) -> libc::c_ulong; pub fn zmq_sleep(seconds_: libc::c_int); pub fn zmq_threadstart(func: *mut zmq_thread_fn, arg: *mut libc::c_void); pub fn zmq_threadclose(thread: *mut libc::c_void); pub fn zmq_sendiov(s: *mut libc::c_void, iov: *mut iovec, count: libc::size_t, flags: libc::c_int) -> libc::c_int; pub fn zmq_recviov(s: *mut libc::c_void, iov: *mut iovec, count: *mut libc::size_t, flags: libc::c_int) -> libc::c_int; }
pub fn zmq_errno(); pub fn zmq_strerror(errnum: &i32) -> *const libc::c_char; pub fn zmq_version(major: &mut i32, minor: &mut i32, patch: &mut i32); pub fn zmq_ctx_new() -> *mut libc::c_void; pub fn zmq_ctx_term(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_shutdown(ctx_: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_set(context: *mut libc::c_void, option: libc::c_int, optval: libc::c_int) -> libc::c_int; pub fn zmq_ctx_get(context: *mut libc::c_void, option: libc::c_int) -> libc::c_int; pub fn zmq_init(io_threads: libc::c_int) -> *mut libc::c_void; pub fn zmq_term(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_ctx_destroy(context: *mut libc::c_void) -> libc::c_int; pub fn zmq_msg_init(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_init_size(msg: *mut zmq_msg_t, size: libc::size_t) -> libc::c_int; pub fn zmq_msg_init_data(msg: *mut zmq_msg_t, data: *mut libc::c_void, size: libc::size_t, ffn: *mut zmq_free_fn, hint: *mut libc::c_void) -> libc::c_int; pub fn zmq_msg_send(msg: *mut zmq_msg_t, s: *mut libc::c_void, flags: libc::c_int) -> libc::c_int; pub fn zmq_msg_recv(msg: *mut zmq_msg_t, s: *mut libc::c_void, flags: libc::c_int) -> libc::c_int; pub fn zmq_msg_close(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_move(dest: *mut zmq_msg_t, src: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_copy(dest: *mut zmq_msg_t, src: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_data(msg: *mut zmq_msg_t) -> *mut libc::c_void; pub fn zmq_msg_size(msg: *mut zmq_msg_t) -> libc::size_t; pub fn zmq_msg_more(msg: *mut zmq_msg_t) -> libc::c_int; pub fn zmq_msg_get(msg: *mut zmq_msg_t, property: libc::c_int) -> libc::c_int; pub fn zmq_msg_set(msg: *mut zmq_msg_t, property: libc::c_int, optval: libc::c_int) -> libc::c_int; pub fn zmq_msg_gets(msg: *mut zmq_msg_t, property: *const libc::c_char) -> *const libc::c_char; pub fn zmq_msg_set_routing_id(msg: *mut zmq_msg_t, routing_id: libc::uint32_t) -> libc::c_int; pub fn zmq_msg_get_routing_id(msg: *mut zmq_msg_t) -> libc::uint32_t; pub fn zmq_socket(s: *mut l
random
[ { "content": "## rust-libzmq\n\n\n\nRust low-level bindings to [libzmq](https://github.com/zeromq/libzmq).\n\n\n\nCurrent version of `rust-libzmq` is built against libzmq version 4.2.0.\n\n\n\nIf you are looking for the high-level Rust ZeroMQ bindings, please\n\ncheck [rust-zmq](https://github.com/erickt/rust-zmq).\n\n\n\n## Using the library\n\n\n\nIn order to make use of `rust-libzmq` in your Rust project, first\n\ncreate a new Rust project and add `rust-libzmq` as a dependency to\n\nyour project.\n\n\n\nHere is an example project using `rust-libzmq`, which will get the\n\nversion of the libzmq library that you have installed on your\n\nsystem.\n\n\n\n```bash\n\n$ cargo new --bin libzmq-version\n\n```\n\n\n\nThis is how my `Cargo.toml` file looks like.\n\n\n\n```toml\n\n[package]\n\nname = \"libzmq-version\"\n\nversion = \"0.1.0\"\n\nauthors = [\"Marin Atanasov Nikolov <dnaeon@gmail.com>\"]\n\n\n\n[dependencies.libc]\n\nversion = \"*\"\n\n\n\n[dependencies.libzmq]\n\ngit = \"https://github.com/dnaeon/rust-libzmq.git\"\n\n```\n\n\n\nAnd here is the code for our little project.\n\n\n\n```rust\n\nextern crate libc;\n\nextern crate libzmq;\n\n\n\nunsafe fn print_version() {\n\n let mut major = 0;\n\n let mut minor = 0;\n\n let mut patch = 0;\n\n\n\n libzmq::zmq_version(&mut major, &mut minor, &mut patch);\n\n println!(\"Installed ZeroMQ version is {}.{}.{}\", major, minor, patch);\n\n}\n\n\n\nfn main() {\n\n unsafe { print_version(); }\n\n}\n\n```\n\n\n\nOnce ready, simply build and run the project.\n\n\n\n```bash\n\n$ cargo run\n\n```\n\n\n\n`rust-libzmq` is Open Source and licensed under the\n\n[BSD License](http://opensource.org/licenses/BSD-2-Clause).\n\n\n\nContributions\n\n=============\n\n\n\n`rust-libzmq` is hosted on\n\n[Github](https://github.com/dnaeon/rust-libzmq).\n\nPlease contribute by reporting issues, suggesting features or by\n\nsending patches using pull requests.\n\n\n\nBugs\n\n====\n\n\n\nProbably. If you experience a bug issue, please report it to the\n\n[rust-libzmq issue tracker on Github](https://github.com/dnaeon/rust-libzmq/issues>).\n", "file_path": "README.md", "rank": 8, "score": 8.420742347590881 } ]
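The extern declarations in the rust-libzmq record above are raw one-to-one FFI bindings, so using them means calling into libzmq from an unsafe block. The following sketch is not part of the dnaeon/rust-libzmq sources; it only strings together calls that are declared above (zmq_ctx_new, zmq_socket, zmq_bind, zmq_close, zmq_ctx_term) and assumes the crate is imported as `libzmq` with a local libzmq installation available at link time, as in the README's libzmq-version example.

extern crate libc;
extern crate libzmq;

use std::ffi::CString;

fn main() {
    unsafe {
        // Create a context and a REP socket through the raw FFI declarations.
        let ctx = libzmq::zmq_ctx_new();
        assert!(!ctx.is_null());
        let socket = libzmq::zmq_socket(ctx, libzmq::ZMQ_REP as libc::c_int);
        assert!(!socket.is_null());

        // zmq_bind expects a C string endpoint and returns 0 on success.
        let endpoint = CString::new("tcp://127.0.0.1:5555").unwrap();
        assert_eq!(libzmq::zmq_bind(socket, endpoint.as_ptr()), 0);

        // Tear down in reverse order of creation.
        libzmq::zmq_close(socket);
        libzmq::zmq_ctx_term(ctx);
    }
}

Higher-level wrappers such as rust-zmq (mentioned in the README above) hide this unsafe surface; these bindings deliberately stay one-to-one with the C API.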
Rust
research/query_service/ir/runtime/src/process/operator/sort/sort.rs
lnfjpt/GraphScope
917146f86d8387302a2e1de6963115e7568bf3ee
use std::cmp::Ordering; use std::convert::{TryFrom, TryInto}; use ir_common::error::ParsePbError; use ir_common::generated::algebra as algebra_pb; use ir_common::generated::algebra::order_by::ordering_pair::Order; use crate::error::FnGenResult; use crate::process::functions::CompareFunction; use crate::process::operator::sort::CompareFunctionGen; use crate::process::operator::TagKey; use crate::process::record::Record; #[derive(Debug)] struct RecordCompare { tag_key_order: Vec<(TagKey, Order)>, } impl CompareFunction<Record> for RecordCompare { fn compare(&self, left: &Record, right: &Record) -> Ordering { let mut result = Ordering::Equal; for (tag_key, order) in self.tag_key_order.iter() { let left_obj = tag_key.get_arc_entry(left).ok(); let right_obj = tag_key.get_arc_entry(right).ok(); let ordering = left_obj.partial_cmp(&right_obj); if let Some(ordering) = ordering { if Ordering::Equal != ordering { result = { match order { Order::Desc => ordering.reverse(), _ => ordering, } }; break; } } } result } } impl CompareFunctionGen for algebra_pb::OrderBy { fn gen_cmp(self) -> FnGenResult<Box<dyn CompareFunction<Record>>> { let record_compare = RecordCompare::try_from(self)?; debug!("Runtime order operator cmp: {:?}", record_compare); Ok(Box::new(record_compare)) } } impl TryFrom<algebra_pb::OrderBy> for RecordCompare { type Error = ParsePbError; fn try_from(order_pb: algebra_pb::OrderBy) -> Result<Self, Self::Error> { let mut tag_key_order = Vec::with_capacity(order_pb.pairs.len()); for order_pair in order_pb.pairs { let key = order_pair .key .ok_or(ParsePbError::EmptyFieldError("key is empty in order".to_string()))? .try_into()?; let order: Order = unsafe { ::std::mem::transmute(order_pair.order) }; tag_key_order.push((key, order)); } Ok(RecordCompare { tag_key_order }) } } #[cfg(test)] mod tests { use std::collections::HashMap; use dyn_type::Object; use ir_common::generated::algebra as pb; use ir_common::generated::common as common_pb; use ir_common::NameOrId; use pegasus::api::{Sink, SortBy}; use pegasus::result::ResultStream; use pegasus::JobConf; use crate::graph::element::{Element, GraphElement, Vertex}; use crate::graph::property::{DefaultDetails, Details, DynDetails}; use crate::process::operator::sort::CompareFunctionGen; use crate::process::operator::tests::{init_source, init_source_with_tag, to_var_pb, TAG_A}; use crate::process::record::Record; fn sort_test(source: Vec<Record>, sort_opr: pb::OrderBy) -> ResultStream<Record> { let conf = JobConf::new("sort_test"); let result = pegasus::run(conf, || { let source = source.clone(); let sort_opr = sort_opr.clone(); |input, output| { let mut stream = input.input_from(source.into_iter())?; let sort_func = sort_opr.gen_cmp().unwrap(); stream = stream.sort_by(move |a, b| sort_func.compare(a, b))?; stream.sink_into(output) } }) .expect("build job failure"); result } #[test] fn sort_simple_ascending_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable { tag: None, property: None }), order: 1, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![1, 2]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_simple_descending_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable { tag: None, property: None }), 
order: 2, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![2, 1]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_by_property_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.name".to_string())), order: 2, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_name = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_name.push( element .details() .unwrap() .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), ); } } let expected_names = vec![object!("vadas"), object!("marko")]; assert_eq!(result_name, expected_names); } #[test] fn sort_by_multi_property_test() { let map3: HashMap<NameOrId, Object> = vec![("id".into(), object!(3)), ("age".into(), object!(20)), ("name".into(), object!("marko"))] .into_iter() .collect(); let v3 = Vertex::new(1, Some("person".into()), DynDetails::new(DefaultDetails::new(map3))); let mut source = init_source(); source.push(Record::new(v3, None)); let sort_opr = pb::OrderBy { pairs: vec![ pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.name".to_string())), order: 1, }, pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.age".to_string())), order: 2, }, ], limit: None, }; let mut result = sort_test(source, sort_opr); let mut result_name_ages = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { let details = element.details().unwrap(); result_name_ages.push(( details .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), details .get_property(&"age".into()) .unwrap() .try_to_owned() .unwrap(), )); } } let expected_name_ages = vec![ (object!("marko"), object!(29)), (object!("marko"), object!(20)), (object!("vadas"), object!(27)), ]; assert_eq!(result_name_ages, expected_name_ages); } #[test] fn sort_by_tag_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(to_var_pb(Some(TAG_A.into()), None)), order: 2, }], limit: None, }; let mut result = sort_test(init_source_with_tag(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&TAG_A.into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![2, 1]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_by_tag_property_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(to_var_pb(Some(TAG_A.into()), Some("age".into()))), order: 2, }], limit: None, }; let mut result = sort_test(init_source_with_tag(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&TAG_A.into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![1, 2]; assert_eq!(result_ids, expected_ids); } }
use std::cmp::Ordering; use std::convert::{TryFrom, TryInto}; use ir_common::error::ParsePbError; use ir_common::generated::algebra as algebra_pb; use ir_common::generated::algebra::order_by::ordering_pair::Order; use crate::error::FnGenResult; use crate::process::functions::CompareFunction; use crate::process::operator::sort::CompareFunctionGen; use crate::process::operator::TagKey; use crate::process::record::Record; #[derive(Debug)] struct RecordCompare { tag_key_order: Vec<(TagKey, Order)>, } impl CompareFunction<Record> for RecordCompare { fn compare(&self, left: &Record, right: &Record) -> Ordering { let mut result = Ordering::Equal; for (tag_key, order) in self.tag_key_order.iter() { let left_obj = tag_key.get_arc_entry(left).ok(); let right_obj = tag_key.get_arc_entry(right).ok(); let ordering = left_obj.partial_cmp(&right_obj); if let Some(ordering) = ordering { if Ordering::Equal != ordering { result = { match order { Order::Desc => ordering.reverse(), _ => ordering, } }; break; } } } result } } impl CompareFunctionGen for algebra_pb::OrderBy { fn gen_cmp(self) -> FnGenResult<Box<dyn CompareFunction<Record>>> { let record_compare = RecordCompare::try_from(self)?; debug!("Runtime order operator cmp: {:?}", record_compare); Ok(Box::new(record_compare)) } } impl TryFrom<algebra_pb::OrderBy> for RecordCompare { type Error = ParsePbError; fn try_from(order_pb: algebra_pb::OrderBy) -> Result<Self, Self::Error> { let mut tag_key_order = Vec::with_capacity(order_pb.pairs.len()); for order_pair in order_pb.pairs { let key = order_pair .key .ok_or(ParsePbError::EmptyFieldError("key is empty in order".to_string()))? .try_into()?; let order: Order = unsafe { ::std::mem::transmute(order_pair.order) }; tag_key_order.push((key, order)); } Ok(RecordCompare { tag_key_order }) } } #[cfg(test)] mod tests { use std::collections::HashMap; use dyn_type::Object; use ir_common::generated::algebra as pb; use ir_common::generated::common as common_pb; use ir_common::NameOrId; use pegasus::api::{Sink, SortBy}; use pegasus::result::ResultStream; use pegasus::JobConf; use crate::graph::element::{Element, GraphElement, Vertex}; use crate::graph::property::{DefaultDetails, Details, DynDetails}; use crate::process::operator::sort::CompareFunctionGen; use crate::process::operator::tests::{init_source, init_source_with_tag, to_var_pb, TAG_A}; use crate::process::record::Record; fn sort_test(source: Vec<Record>, sort_opr: pb::OrderBy) -> ResultStream<Record> { let conf = JobConf::new("sort_test"); let result = pegasus::run(conf, || { let source = source.clone(); let sort_opr = sort_opr.clone(); |input, output| { let mut stream = input.input_from(source.into_iter())?; let sort_func = sort_opr.gen_cmp().unwrap(); stream = stream.sort_by(move |a, b| sort_func.compare(a, b))?; stream.sink_into(output) } }) .expect("build job failure"); result } #[test] fn sort_simple_ascending_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable { tag: None, property: None }), order: 1, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![1, 2]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_simple_descending_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable { tag: None, property: None }), 
order: 2, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() {
#[test] fn sort_by_property_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.name".to_string())), order: 2, }], limit: None, }; let mut result = sort_test(init_source(), sort_opr); let mut result_name = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_name.push( element .details() .unwrap() .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), ); } } let expected_names = vec![object!("vadas"), object!("marko")]; assert_eq!(result_name, expected_names); } #[test] fn sort_by_multi_property_test() { let map3: HashMap<NameOrId, Object> = vec![("id".into(), object!(3)), ("age".into(), object!(20)), ("name".into(), object!("marko"))] .into_iter() .collect(); let v3 = Vertex::new(1, Some("person".into()), DynDetails::new(DefaultDetails::new(map3))); let mut source = init_source(); source.push(Record::new(v3, None)); let sort_opr = pb::OrderBy { pairs: vec![ pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.name".to_string())), order: 1, }, pb::order_by::OrderingPair { key: Some(common_pb::Variable::from("@.age".to_string())), order: 2, }, ], limit: None, }; let mut result = sort_test(source, sort_opr); let mut result_name_ages = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record.get(None).unwrap().as_graph_vertex() { let details = element.details().unwrap(); result_name_ages.push(( details .get_property(&"name".into()) .unwrap() .try_to_owned() .unwrap(), details .get_property(&"age".into()) .unwrap() .try_to_owned() .unwrap(), )); } } let expected_name_ages = vec![ (object!("marko"), object!(29)), (object!("marko"), object!(20)), (object!("vadas"), object!(27)), ]; assert_eq!(result_name_ages, expected_name_ages); } #[test] fn sort_by_tag_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(to_var_pb(Some(TAG_A.into()), None)), order: 2, }], limit: None, }; let mut result = sort_test(init_source_with_tag(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&TAG_A.into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![2, 1]; assert_eq!(result_ids, expected_ids); } #[test] fn sort_by_tag_property_test() { let sort_opr = pb::OrderBy { pairs: vec![pb::order_by::OrderingPair { key: Some(to_var_pb(Some(TAG_A.into()), Some("age".into()))), order: 2, }], limit: None, }; let mut result = sort_test(init_source_with_tag(), sort_opr); let mut result_ids = vec![]; while let Some(Ok(record)) = result.next() { if let Some(element) = record .get(Some(&TAG_A.into())) .unwrap() .as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![1, 2]; assert_eq!(result_ids, expected_ids); } }
if let Some(element) = record.get(None).unwrap().as_graph_vertex() { result_ids.push(element.id()); } } let expected_ids = vec![2, 1]; assert_eq!(result_ids, expected_ids); }
function_block-function_prefix_line
[ { "content": "fn create_src(id: u32, source: &mut Source<i32>) -> Result<(Stream<i32>, Stream<i32>), BuildJobError> {\n\n let src1 = if id == 0 { source.input_from(1..5)? } else { source.input_from(8..10)? };\n\n let (src1, src2) = src1.copied()?;\n\n let src2 = src2.map(|x| Ok(x + 1))?;\n\n Ok((src1, src2))\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/tests/join_test.rs", "rank": 0, "score": 535074.7584016144 }, { "content": "fn has_any<T: Data>(mut stream: Stream<T>) -> Result<SingleItem<bool>, BuildJobError> {\n\n stream\n\n .set_upstream_batch_capacity(1)\n\n .set_upstream_batch_size(1);\n\n let x = stream.unary(\"any_global\", |info| {\n\n let mut any_map = TidyTagMap::<()>::new(info.scope_level);\n\n move |input, output| {\n\n input.for_each_batch(|batch| {\n\n if !batch.is_empty() {\n\n if !any_map.contains_key(batch.tag()) {\n\n any_map.insert(batch.tag().clone(), ());\n\n output\n\n .new_session(batch.tag())?\n\n .give(Single(true))?;\n\n }\n\n batch.clear();\n\n\n\n if batch.is_last() {\n\n any_map.remove(batch.tag());\n\n }\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/concise/any.rs", "rank": 1, "score": 471983.7262875746 }, { "content": "fn do_reduce<D, B, F>(src: Stream<D>, builder: B) -> Result<SingleItem<D>, BuildJobError>\n\nwhere\n\n D: Data,\n\n F: FnMut(D, D) -> FnResult<D> + Send + 'static,\n\n B: Fn() -> F + Send + 'static,\n\n{\n\n let single = src.unary(\"reduce\", |info| {\n\n let mut table = TidyTagMap::<(D, F)>::new(info.scope_level);\n\n move |input, output| {\n\n input.for_each_batch(|dataset| {\n\n let r = if let Some((mut pre, mut f)) = table.remove(&dataset.tag) {\n\n for item in dataset.drain() {\n\n pre = f(pre, item)?;\n\n }\n\n Some((pre, f))\n\n } else {\n\n let mut f = (builder)();\n\n let mut iter = dataset.drain();\n\n if let Some(mut pre) = iter.next() {\n\n for item in iter {\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/concise/reduce.rs", "rank": 2, "score": 444653.738900672 }, { "content": "fn set_last_node_children(match_plan: &mut pb::LogicalPlan, children: Vec<KeyId>) -> IrPatternResult<()> {\n\n if match_plan.nodes.len() == 0 {\n\n Err(IrPatternError::InvalidExtendPattern(format!(\"Fail to get plan node at index {}\", 0)))\n\n } else {\n\n let last_index = match_plan.nodes.len() - 1;\n\n set_node_children_at_index(match_plan, children, last_index)\n\n }\n\n}\n\n\n\n/// Generate pattern matching order in a heuristic way:\n\n/// 1. vertex has predicates will be extended first; Specifically, predicates of eq compare is in high priority.\n\n/// 2. vertex adjacent to more edges with predicates should be extended first; Specifically, predicates of eq compare is in high priority.\n\n/// 3. vertex adjacent to more path_expand should be extended later;\n\n/// 4. 
vertex with larger degree will be extended later\n\nimpl PatternOrderTrait<PatternId> for Pattern {\n\n fn compare(&self, v1: &PatternId, v2: &PatternId) -> IrPatternResult<Ordering> {\n\n let v1_weight = self.get_vertex_weight(*v1)?;\n\n let v2_weight = self.get_vertex_weight(*v2)?;\n\n let vertex_order = v1_weight\n\n .partial_cmp(&v2_weight)\n", "file_path": "interactive_engine/executor/ir/core/src/glogue/pattern.rs", "rank": 3, "score": 438622.36255158304 }, { "content": "pub fn run<DI, DO, F, FN>(conf: JobConf, func: F) -> Result<ResultStream<DO>, JobSubmitError>\n\nwhere\n\n DI: Data,\n\n DO: Debug + Send + 'static,\n\n F: Fn() -> FN,\n\n FN: FnOnce(&mut Source<DI>, ResultSink<DO>) -> Result<(), BuildJobError> + 'static,\n\n{\n\n let (tx, rx) = crossbeam_channel::unbounded();\n\n let sink = ResultSink::new(tx);\n\n let cancel_hook = sink.get_cancel_hook().clone();\n\n let results = ResultStream::new(conf.job_id, cancel_hook, rx);\n\n run_opt(conf, sink, |worker| worker.dataflow(func()))?;\n\n Ok(results)\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 4, "score": 435909.95487494196 }, { "content": "pub fn run_opt<DI, DO, F>(conf: JobConf, sink: ResultSink<DO>, mut logic: F) -> Result<(), JobSubmitError>\n\nwhere\n\n DI: Data,\n\n DO: Debug + Send + 'static,\n\n F: FnMut(&mut Worker<DI, DO>) -> Result<(), BuildJobError>,\n\n{\n\n init_env();\n\n let cancel_hook = sink.get_cancel_hook().clone();\n\n let mut lock = JOB_CANCEL_MAP.write().expect(\"lock poisoned\");\n\n lock.insert(conf.job_id, cancel_hook);\n\n let peer_guard = Arc::new(AtomicUsize::new(0));\n\n let conf = Arc::new(conf);\n\n let workers = allocate_local_worker(&conf)?;\n\n if workers.is_none() {\n\n return Ok(());\n\n }\n\n let worker_ids = workers.unwrap();\n\n let mut workers = Vec::new();\n\n for id in worker_ids {\n\n let mut worker = Worker::new(&conf, id, &peer_guard, sink.clone());\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 5, "score": 411586.9210905839 }, { "content": "fn idents_to_vars(idents: Vec<String>) -> ExprResult<pb::VariableKeys> {\n\n let mut vars = Vec::with_capacity(idents.len());\n\n for ident in idents {\n\n if !ident.starts_with(VAR_PREFIX) {\n\n return Err(format!(\"invalid variable token: {:?}, a variable must start with \\\"@\\\"\", ident)\n\n .as_str()\n\n .into());\n\n } else {\n\n let var: pb::Variable = ident.into();\n\n vars.push(var)\n\n }\n\n }\n\n\n\n Ok(pb::VariableKeys { keys: vars })\n\n}\n\n\n\nimpl TryFrom<Token> for pb::ExprOpr {\n\n type Error = ExprError;\n\n\n\n fn try_from(token: Token) -> ExprResult<Self> {\n", "file_path": "interactive_engine/executor/ir/common/src/expr_parse/mod.rs", "rank": 6, "score": 404048.3764344813 }, { "content": "fn parse_conf_req(mut req: pb::JobConfig) -> JobConf {\n\n let mut conf = JobConf::new(req.job_name);\n\n if req.job_id != 0 {\n\n conf.job_id = req.job_id;\n\n }\n\n\n\n if req.workers != 0 {\n\n conf.workers = req.workers;\n\n }\n\n\n\n if req.time_limit != 0 {\n\n conf.time_limit = req.time_limit;\n\n }\n\n\n\n if req.batch_size != 0 {\n\n conf.batch_size = req.batch_size;\n\n }\n\n\n\n if req.batch_capacity != 0 {\n\n conf.batch_capacity = req.batch_capacity;\n", "file_path": "interactive_engine/executor/engine/pegasus/server/src/rpc.rs", "rank": 7, "score": 391267.03526296734 }, { "content": "type Cmp<D> = Arc<dyn Fn(&D, &D) -> Ordering + Send + 'static>;\n\n\n", "file_path": 
"interactive_engine/executor/engine/pegasus/pegasus/src/operator/concise/limit.rs", "rank": 8, "score": 385243.0948898227 }, { "content": "fn get_or_set_tag_id(tag_pb: &mut common_pb::NameOrId, plan_meta: &mut PlanMeta) -> IrResult<TagId> {\n\n use common_pb::name_or_id::Item;\n\n if let Some(tag_item) = tag_pb.item.as_mut() {\n\n let (_, tag_id) = match tag_item {\n\n Item::Name(tag) => plan_meta.get_or_set_tag_id(tag),\n\n Item::Id(id) => {\n\n plan_meta.set_max_tag_id(*id as TagId + 1);\n\n (true, *id as TagId)\n\n }\n\n };\n\n *tag_pb = (tag_id as i32).into();\n\n\n\n Ok(tag_id)\n\n } else {\n\n Err(IrError::MissingData(\"NameOrId::Item\".to_string()))\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/core/src/plan/logical.rs", "rank": 9, "score": 375821.36143862124 }, { "content": "struct ResultTOfContextOrderPair<\n\n Context<CTX_HEAD_T, cur_alias, base_tag, CTX_PREV...>, ORDER_PAIR> {\n\n static constexpr int tag_id = ORDER_PAIR::tag_id;\n\n static constexpr size_t col_id = ORDER_PAIR::col_id;\n\n using context_t = Context<CTX_HEAD_T, cur_alias, base_tag, CTX_PREV...>;\n\n using context_iter_t = typename context_t::iterator;\n\n // using ctx_node_t =\n\n // std::remove_reference_t<decltype(std::declval<context_t>().template\n\n // GetNode<tag_id>())>;\n\n using data_tuple_t = decltype(std::declval<context_iter_t>().GetAllData());\n\n\n\n using tag_data_tuple_t =\n\n typename gs::tuple_element<tag_id, data_tuple_t>::type;\n\n using result_t = typename gs::tuple_element<col_id, tag_data_tuple_t>::type;\n\n};\n\n\n\ntemplate <typename GRAPH_INTERFACE>\n", "file_path": "flex/engines/hqps_db/core/operator/sort.h", "rank": 10, "score": 364556.4840068029 }, { "content": "struct ResultTOfContextOrderPair;\n\n\n\n// Result of the data type after apply order pair\n\ntemplate <typename CTX_HEAD_T, int cur_alias, int base_tag,\n\n typename... 
CTX_PREV, typename ORDER_PAIR>\n", "file_path": "flex/engines/hqps_db/core/operator/sort.h", "rank": 11, "score": 364556.4840068029 }, { "content": "#[inline]\n\nfn allocate_local_worker(conf: &Arc<JobConf>) -> Result<Option<WorkerIdIter>, BuildJobError> {\n\n let server_conf = conf.servers();\n\n let servers = match server_conf {\n\n ServerConf::Local => {\n\n return Ok(Some(WorkerIdIter::new(conf.job_id, conf.workers, 0, 0, 1)));\n\n }\n\n ServerConf::Partial(ids) => ids.clone(),\n\n ServerConf::All => get_servers(),\n\n };\n\n\n\n if servers.is_empty() || (servers.len() == 1) {\n\n Ok(Some(WorkerIdIter::new(conf.job_id, conf.workers, 0, 0, 1)))\n\n } else {\n\n if let Some(my_id) = server_id() {\n\n let mut my_index = -1;\n\n for (index, id) in servers.iter().enumerate() {\n\n if *id == my_id {\n\n my_index = index as i64;\n\n }\n\n }\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 12, "score": 361623.69360840204 }, { "content": "#[bench]\n\nfn tag_three_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = HashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Three(3, 6, 9), ());\n\n map.remove(&Tag::Three(3, 6, 9));\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 13, "score": 357128.4873237123 }, { "content": "#[bench]\n\nfn tag_one_a_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = AHashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::One(77), ());\n\n map.remove(&Tag::One(77));\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 14, "score": 357128.4873237123 }, { "content": "#[bench]\n\nfn tag_one_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = HashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::One(77), ());\n\n map.remove(&Tag::One(77));\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 15, "score": 357128.4873237123 }, { "content": "#[bench]\n\nfn tag_two_a_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = AHashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Two(3, 6), ());\n\n map.remove(&Tag::Two(3, 6));\n\n })\n\n}\n\n\n\n// #[bench]\n\n// fn tag_two_hash_w_r_hit_1000(b: &mut test::Bencher) {\n\n// let mut map = HashMap::new();\n\n// for i in 0..1000 {\n\n// map.insert(Tag::Two(3, i), ());\n\n// }\n\n// b.iter(|| {\n\n// map.insert(Tag::Two(3, 2000), ());\n\n// map.remove(&Tag::Two(3, 2000));\n\n// })\n\n// }\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 16, "score": 357128.4873237123 }, { "content": "#[bench]\n\nfn tag_two_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = HashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Two(3, 6), ());\n\n map.remove(&Tag::Two(3, 6));\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 17, "score": 357128.4873237123 }, { "content": "#[bench]\n\nfn tag_root_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = HashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Root, ());\n\n map.remove(&Tag::Root);\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 18, "score": 357128.4873237123 }, { "content": "#[bench]\n\nfn tag_three_a_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = 
AHashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Three(3, 6, 9), ());\n\n map.remove(&Tag::Three(3, 6, 9));\n\n })\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 19, "score": 357128.4873237123 }, { "content": "// TODO:\n\n// 1. Currently, the behavior of filtering none-entry is identical to e.g., `g.V().values('name')`,\n\n// but differs to `g.V().valueMap('name')`, which will output the none-entry.\n\n// To support both cases, we may further need a flag to identify whether to filter or not.\n\n// BTW, if it is necessary to output none-entry,\n\n// we may need to further distinguish the cases of none-exist tags (filtering case) and none-exist properties (output none-entry).\n\n// 2. When projecting multiple columns, even all projected columns are none-entry, the record won't be filtered for now.\n\n// This seems ambiguous. But multi-column project always appears in the end of the query. Can modify this logic if necessary.\n\nfn exec_projector(input: &Record, projector: &Projector) -> FnExecResult<DynEntry> {\n\n let entry = match projector {\n\n Projector::ExprProjector(evaluator) => {\n\n let projected_result = evaluator.eval::<DynEntry, Record>(Some(&input))?;\n\n DynEntry::new(projected_result)\n\n }\n\n Projector::GraphElementProjector(tag_key) => tag_key.get_arc_entry(input)?,\n\n Projector::MultiGraphElementProjector(key_vals) => {\n\n let mut collection = Vec::with_capacity(key_vals.len());\n\n for (key, tag_key) in key_vals.iter() {\n\n let entry = tag_key.get_arc_entry(input)?;\n\n if let Some(key) = key {\n\n collection.push(PairEntry::new(key.clone().into(), entry).into());\n\n } else {\n\n collection.push(entry);\n\n }\n\n }\n\n DynEntry::new(CollectionEntry { inner: collection })\n\n }\n\n };\n", "file_path": "interactive_engine/executor/ir/runtime/src/process/operator/map/project.rs", "rank": 20, "score": 356645.9149584103 }, { "content": "#[bench]\n\nfn tag_one_int_map_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = IntMap::default();\n\n b.iter(|| {\n\n map.insert(77, ());\n\n map.remove(&77);\n\n })\n\n}\n\n\n\n// #[bench]\n\n// fn tag_one_hash_w_r_hit_1000(b: &mut test::Bencher) {\n\n// let mut map = HashMap::new();\n\n// for i in 0..1024u32 {\n\n// map.insert(Tag::One(i), ());\n\n// }\n\n// b.iter(|| {\n\n// map.insert(Tag::One(2000), ());\n\n// map.remove(&Tag::One(2000));\n\n// })\n\n// }\n\n//\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 21, "score": 353909.88224100607 }, { "content": "/// Each serving consists of a tuple, where tuple.0 indicates the player id,\n\n/// and tuple.1 indicates the ball that it is serving. 
The game continues\n\n/// until any player hits a LOSS ball, or it exceeds a random `max_iters`.\n\nfn single_play(serving: Stream<(u32, u32)>) -> Result<Stream<(u32, u32)>, BuildJobError> {\n\n let max_iters = 30;\n\n let mut until = IterCondition::<(u32, u32)>::max_iters(max_iters);\n\n until.until(move |(_player, ball)| Ok(*ball == LOSS));\n\n\n\n serving.iterate_until(until, |start| {\n\n start\n\n // Hit the ball to the opponent side, aka, 0 -> 1, 1 -> 0\n\n .repartition(|(player, _ball)| Ok((*player ^ 1) as u64))\n\n .map(|(player, ball)| {\n\n // The larger ball is, the easier it is to hit the ball back, which means\n\n // the less possible for the other player to loss (hit a zero number)\n\n let new_ball = thread_rng().gen_range(LOSS..ball);\n\n println!(\"Player {:?} hits a new ball {:?}\", player ^ 1, new_ball);\n\n Ok((player ^ 1, new_ball))\n\n })\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/examples/ping_pong.rs", "rank": 22, "score": 351615.9052288394 }, { "content": "fn get_intersect_id(expand_oprs_vec: &Vec<Vec<pb::logical_plan::Operator>>, child_offset: usize) -> KeyId {\n\n (expand_oprs_vec\n\n .iter()\n\n .map(|expand_oprs| expand_oprs.len())\n\n .sum::<usize>()\n\n + child_offset) as KeyId\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/core/src/glogue/pattern.rs", "rank": 23, "score": 349852.3331392668 }, { "content": "fn double_to_data_type(x: f64, data_type: &DataType) -> GraphTraceResult<Vec<u8>> {\n\n match *data_type {\n\n DataType::Bool => Ok(Property::Bool(x != 0.0).to_vec()),\n\n DataType::Char => {\n\n if x > u8::max_value() as f64 || x < u8::min_value() as f64 {\n\n let msg = format!(\"{} cannot be transformed to char\", x);\n\n let err = graph_err!(GraphErrorCode::DataError, msg, double_to_data_type, x, data_type);\n\n Err(err)\n\n } else {\n\n Ok(Property::Char(x as u8).to_vec())\n\n }\n\n }\n\n DataType::Short => {\n\n if x > i16::max_value() as f64 || x < i16::min_value() as f64 {\n\n let msg = format!(\"{} cannot be transformed to short\", x);\n\n let err = graph_err!(GraphErrorCode::DataError, msg, double_to_data_type, x, data_type);\n\n Err(err)\n\n } else {\n\n Ok(Property::Short(x as i16).to_vec())\n\n }\n", "file_path": "interactive_engine/executor/store/groot/src/api/property.rs", "rank": 24, "score": 339155.0674981692 }, { "content": "fn long_to_data_type(x: i64, data_type: &DataType) -> GraphTraceResult<Vec<u8>> {\n\n match *data_type {\n\n DataType::Bool => Ok(Property::Bool(x != 0).to_vec()),\n\n DataType::Char => {\n\n if x > u8::max_value() as i64 || x < 0 {\n\n let msg = format!(\"{} cannot be transformed to char\", x);\n\n let err = graph_err!(GraphErrorCode::DataError, msg, long_to_data_type, x, data_type);\n\n Err(err)\n\n } else {\n\n Ok(Property::Char(x as u8).to_vec())\n\n }\n\n }\n\n DataType::Short => {\n\n if x > i16::max_value() as i64 || x < i16::min_value() as i64 {\n\n let msg = format!(\"{} cannot be transformed to short\", x);\n\n let err = graph_err!(GraphErrorCode::DataError, msg, long_to_data_type, x, data_type);\n\n Err(err)\n\n } else {\n\n Ok(Property::Short(x as i16).to_vec())\n\n }\n", "file_path": "interactive_engine/executor/store/groot/src/api/property.rs", "rank": 25, "score": 339155.0674981692 }, { "content": "type RecordLeftJoin = Box<dyn ApplyGen<Record, Vec<Record>, Option<Record>>>;\n", "file_path": "interactive_engine/executor/ir/runtime/src/assembly.rs", "rank": 26, "score": 332497.674834969 }, { "content": "pub fn get_vertex_id_by_primary_keys<'a, T: 
Deref<Target = Vec<u8>>>(\n\n label_id: LabelId, pks: impl Iterator<Item = T>,\n\n) -> VertexId {\n\n BUFFER.with(|bytes| {\n\n let mut bytes = bytes.borrow_mut();\n\n bytes.clear();\n\n bytes\n\n .write_i32::<BigEndian>(label_id as i32)\n\n .unwrap();\n\n for pk in pks {\n\n let pk = pk.as_slice();\n\n bytes\n\n .write_i32::<BigEndian>(pk.len() as i32)\n\n .unwrap();\n\n bytes.write(pk).unwrap();\n\n }\n\n hash64(bytes.as_slice(), bytes.len())\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/mod.rs", "rank": 27, "score": 318054.12774864235 }, { "content": "#[test]\n\nfn join_test_empty_stream() {\n\n let mut conf = JobConf::new(\"inner_join\");\n\n conf.set_workers(2);\n\n let mut result = pegasus::run(conf, || {\n\n let id = pegasus::get_current_worker().index;\n\n move |input, output| {\n\n let (src1, src2) = create_src(id, input)?;\n\n let src2 = src2.filter_map(|_| Ok(None))?;\n\n src1.key_by(|x| Ok((x, x)))?\n\n .inner_join(src2.key_by(|x| Ok((x, x)))?)?\n\n .map(|(d1, d2)| Ok(((d1.key, d1.value), (d2.key, d2.value))))?\n\n .collect::<Vec<((i32, i32), (i32, i32))>>()?\n\n .sink_into(output)\n\n }\n\n })\n\n .expect(\"run job failure;\");\n\n\n\n let mut result = result.next().unwrap().unwrap();\n\n result.sort_by_key(|x| x.0 .0);\n\n assert_eq!(result, []);\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/tests/join_test.rs", "rank": 28, "score": 314940.1504743673 }, { "content": "class KeyedRowVertexSetImpl<LabelT, KEY_T, VID_T, grape::EmptyType> {\n\n public:\n\n using key_t = KEY_T;\n\n using self_type_t =\n\n KeyedRowVertexSetImpl<LabelT, KEY_T, VID_T, grape::EmptyType>;\n\n using lid_t = VID_T;\n\n using data_tuple_t = std::tuple<grape::EmptyType>;\n\n using flat_t = self_type_t;\n\n\n\n using iterator =\n\n KeyedRowVertexSetIter<LabelT, KEY_T, VID_T, grape::EmptyType>;\n\n using filtered_vertex_set = self_type_t;\n\n using ground_vertex_set_t = RowVertexSet<LabelT, VID_T, grape::EmptyType>;\n\n using index_ele_tuple_t = std::tuple<size_t, VID_T>;\n\n // from this tuple, we can reconstruct the partial set.\n\n using flat_ele_tuple_t = std::tuple<size_t, VID_T>;\n\n using EntityValueType = VID_T;\n\n\n\n template <typename... Ts>\n\n using with_data_t = KeyedRowVertexSetImpl<LabelT, KEY_T, VID_T, Ts...>;\n", "file_path": "flex/engines/hqps_db/structures/multi_vertex_set/keyed_row_vertex_set.h", "rank": 29, "score": 311595.01912271307 }, { "content": "pub fn startup(conf: Configuration) -> Result<(), StartupError> {\n\n if let Some(pool_size) = conf.max_pool_size {\n\n pegasus_executor::set_core_pool_size(pool_size as usize);\n\n }\n\n pegasus_executor::try_start_executor_async();\n\n\n\n let mut servers = HashSet::new();\n\n let server_id = conf.server_id();\n\n servers.insert(server_id);\n\n if let Some(id) = set_server_id(server_id) {\n\n return Err(StartupError::AlreadyStarted(id));\n\n }\n\n if let Some(net_conf) = conf.network_config() {\n\n if let Some(peers) = net_conf.get_servers()? 
{\n\n let addr = net_conf.local_addr()?;\n\n let conn_conf = net_conf.get_connection_param();\n\n for p in peers.iter() {\n\n servers.insert(p.id);\n\n }\n\n let addr = pegasus_network::start_up(server_id, conn_conf, addr, peers)?;\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 30, "score": 310882.74937536253 }, { "content": "struct CommonBuilderT<CTX_T, GroupKey<col_id, grape::EmptyType>> {\n\n using set_t = std::remove_const_t<std::remove_reference_t<decltype(\n\n std::declval<CTX_T>().template GetNode<col_id>())>>;\n\n using builder_t = typename set_t::builder_t;\n\n using result_t = typename builder_t::result_t;\n\n using result_ele_t = typename result_t::element_type;\n\n};\n\n\n\ntemplate <typename CTX_T, int col_id, typename T>\n", "file_path": "flex/engines/hqps_db/core/operator/group_by.h", "rank": 31, "score": 309769.4528778525 }, { "content": "class KeyedRowVertexSetBuilderImpl<LabelT, KEY_T, VID_T, grape::EmptyType> {\n\n public:\n\n using key_t = KEY_T;\n\n using lid_t = VID_T;\n\n using build_res_t =\n\n KeyedRowVertexSetImpl<LabelT, KEY_T, VID_T, grape::EmptyType>;\n\n\n\n KeyedRowVertexSetBuilderImpl(LabelT label) : label_(label), ind_(0) {}\n\n\n\n KeyedRowVertexSetBuilderImpl(\n\n const RowVertexSet<LabelT, VID_T, grape::EmptyType>& old_set)\n\n : label_(old_set.GetLabel()), ind_(0) {}\n\n\n\n size_t insert(std::tuple<size_t, VID_T> ele_tuple) {\n\n auto key = std::get<1>(ele_tuple);\n\n if (prop2ind_.find(key) != prop2ind_.end()) {\n\n return prop2ind_[key];\n\n } else {\n\n prop2ind_[key] = ind_;\n\n keys_.emplace_back(key);\n", "file_path": "flex/engines/hqps_db/structures/multi_vertex_set/keyed_row_vertex_set.h", "rank": 32, "score": 308617.6599932054 }, { "content": "#[test]\n\nfn apply_count_empty_stream() {\n\n let mut conf = JobConf::new(\"apply_x_flatmap_any_x_test\");\n\n conf.set_workers(2);\n\n let mut result = pegasus::run(conf, || {\n\n |input, output| {\n\n input\n\n .input_from(0..1000u32)?\n\n .apply(|sub| {\n\n sub.repartition(|x| Ok(*x as u64))\n\n .flat_map(|i| Ok(vec![i, i + 2].into_iter()))?\n\n .filter(|i| Ok(*i % 2 == 0))?\n\n .map(|x| Ok(x))?\n\n .count()\n\n })?\n\n .filter_map(|(x, cnt)| if cnt == 0 { Ok(None) } else { Ok(Some((x, cnt))) })?\n\n .sink_into(output)\n\n }\n\n })\n\n .expect(\"build job failure\");\n\n\n\n let mut count = 0;\n\n while let Some(Ok(d)) = result.next() {\n\n assert_eq!(d.0 % 2, 0);\n\n assert_eq!(d.1, 2);\n\n count += 1;\n\n }\n\n\n\n assert_eq!(count, 1000);\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/tests/subtask_test.rs", "rank": 33, "score": 307856.3179944338 }, { "content": "#[test]\n\nfn apply_collect_empty_stream() {\n\n let mut conf = JobConf::new(\"apply_x_flatmap_any_x_test\");\n\n conf.set_workers(2);\n\n let mut result = pegasus::run(conf, || {\n\n |input, output| {\n\n input\n\n .input_from(0..1000u32)?\n\n .apply(|sub| {\n\n sub.repartition(|x| Ok(*x as u64))\n\n .flat_map(|i| Ok(vec![i, i + 2].into_iter()))?\n\n .filter(|i| Ok(*i % 2 == 0))?\n\n .map(|x| Ok(x))?\n\n .collect::<Vec<_>>()\n\n })?\n\n .filter_map(\n\n |(x, vec)| if vec.is_empty() { Ok(None) } else { Ok(Some((x, vec.len() as u64))) },\n\n )?\n\n .sink_into(output)\n\n }\n\n })\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/tests/subtask_test.rs", "rank": 34, "score": 307856.3179944338 }, { "content": "#[test]\n\nfn test_condition_cmp_operation() {\n\n let entites = prepare_entites().collect::<Vec<LocalEntity>>();\n\n let predicate = 
PredCondition::new_predicate(\n\n Operand::PropId(1),\n\n CmpOperator::Equal,\n\n Operand::Const(Property::Int(10)),\n\n );\n\n assert_eq!(\n\n 1,\n\n entites\n\n .clone()\n\n .into_iter()\n\n .filter(|v| predicate.filter_vertex(v).unwrap_or(false))\n\n .collect::<Vec<LocalEntity>>()\n\n .len()\n\n );\n\n assert_eq!(\n\n 1,\n\n entites\n\n .clone()\n", "file_path": "interactive_engine/executor/store/groot/src/api/condition/test.rs", "rank": 35, "score": 307642.39812907577 }, { "content": "struct ResultOfContextKeyAlias;\n\n\n\n// project one single property\n\ntemplate <typename CTX_HEAD_T, int cur_alias, int base_tag,\n\n typename... CTX_PREV, int in_col_id, typename T>\n", "file_path": "flex/engines/hqps_db/core/operator/project.h", "rank": 36, "score": 304920.7760186025 }, { "content": "struct ResultOfContextKeyAlias<\n\n Context<CTX_HEAD_T, cur_alias, base_tag, CTX_PREV...>,\n\n IdentityMapper<in_col_id, PropertySelector<grape::EmptyType>>> {\n\n using context_t = Context<CTX_HEAD_T, cur_alias, base_tag, CTX_PREV...>;\n\n using ctx_node_t = std::remove_reference_t<decltype(\n\n std::declval<context_t>().template GetNode<in_col_id>())>;\n\n using result_t = ctx_node_t;\n\n};\n\n\n\ntemplate <int new_head_alias, typename new_head_t, int cur_alias,\n\n typename old_head_t, int base_tag, typename tuple>\n", "file_path": "flex/engines/hqps_db/core/operator/project.h", "rank": 37, "score": 304920.7760186025 }, { "content": "/// Preprocess a plan such that it does not contain two root nodes. More speciically,\n\n/// we will add a common `As(None)` operator as the parent of the two original roots\n\n/// in the plan, which becomes the new and only root node of the plan\n\nfn preprocess_plan(plan: &mut pb::LogicalPlan) -> IrResult<()> {\n\n if plan.roots.len() == 1 {\n\n Ok(())\n\n } else if plan.roots.len() == 2 {\n\n let new_root = pb::logical_plan::Node {\n\n opr: Some(pb::As { alias: None }.into()),\n\n children: plan.roots.iter().map(|id| *id + 1).collect(),\n\n };\n\n let mut i = plan.nodes.len();\n\n plan.nodes.push(plan.nodes[i - 1].clone());\n\n\n\n while i > 0 {\n\n for child in plan.nodes[i - 1].children.iter_mut() {\n\n *child += 1;\n\n }\n\n plan.nodes.swap(i - 1, i);\n\n i -= 1;\n\n }\n\n plan.nodes[0] = new_root;\n\n plan.roots = vec![0];\n", "file_path": "interactive_engine/executor/ir/core/src/plan/patmat.rs", "rank": 38, "score": 304067.9552154045 }, { "content": "struct AppendProperty {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n LOG(FATAL) << \"Unimplemented...\";\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 39, "score": 301752.67938263546 }, { "content": "struct Vertex {\n\n std::string label; // This field is used to set metadata of arrow table\n\n std::string vid; // when vid is single digit, it means column index of vertex\n\n // id. 
Otherwise, it represents column name\n\n std::string protocol; // file/oss/numpy/pandas/vineyard\n\n std::string values; // from location, vineyard or pandas\n\n std::string vformat; // defines vertex format,\n\n\n\n std::string SerializeToString() const {\n\n std::stringstream ss;\n\n ss << \"V \";\n\n ss << label << \" \" << vid << \" \";\n\n ss << protocol << \" \" << values << \" \";\n\n ss << vformat << \"\\n\";\n\n return ss.str();\n\n }\n\n};\n\n\n\n/**\n\n * @brief This is the model class to represent how to load edge data from the\n\n * data source.\n\n */\n", "file_path": "analytical_engine/core/io/property_parser.h", "rank": 40, "score": 300236.2433458438 }, { "content": "#[rustversion::since(1.72.0)]\n\nfn type_id_from_bytes<R: ReadExt>(reader: &mut R) -> io::Result<TypeId> {\n\n let number = <u128>::read_from(reader)?;\n\n Ok(unsafe { std::mem::transmute(number) })\n\n}\n\n\n\nimpl Encode for DateTimeFormats {\n\n fn write_to<W: WriteExt>(&self, writer: &mut W) -> io::Result<()> {\n\n match self {\n\n DateTimeFormats::Date(d) => {\n\n writer.write_u8(0)?;\n\n writer.write_i16(d.year() as i16)?;\n\n writer.write_u8(d.month() as u8)?;\n\n writer.write_u8(d.day() as u8)?;\n\n }\n\n DateTimeFormats::Time(t) => {\n\n writer.write_u8(1)?;\n\n writer.write_u8(t.hour() as u8)?;\n\n writer.write_u8(t.minute() as u8)?;\n\n writer.write_u8(t.second() as u8)?;\n\n writer.write_u32(t.nanosecond() as u32)?;\n", "file_path": "interactive_engine/executor/common/dyn_type/src/serde.rs", "rank": 41, "score": 299874.2884710644 }, { "content": "// Try to apply the optimize rule: ExpandE + GetV = ExpandV, it it satisfies:\n\n// 1. the previous op is ExpandE, and with no alias (which means that the edges won't be accessed later).\n\n// 2. `GetV` is GetV(Adj) (i.e., opt=Start/End/Other) without any filters or further query semantics.\n\n// 3. 
the direction should be: outE + inV = out; inE + outV = in; and bothE + otherV = both\n\n// In addition, if PathExpand + GetV, make opt of GetV to be `End`.\n\nfn build_and_try_fuse_get_v(builder: &mut PlanBuilder, mut get_v: pb::GetV) -> IrResult<()> {\n\n if get_v.opt == 4 {\n\n return Err(IrError::Unsupported(\"Try to fuse GetV with Opt=Self into ExpandE\".to_string()));\n\n }\n\n if let Some(params) = get_v.params.as_mut() {\n\n if params.has_predicates() || params.has_columns() {\n\n return Err(IrError::Unsupported(\"Try to fuse GetV with predicates into ExpandE\".to_string()));\n\n } else if params.has_labels() {\n\n // although this doesn't need query, it cannot be fused into ExpandExpand since we cannot specify vertex labels in ExpandV\n\n builder.get_v(get_v);\n\n return Ok(());\n\n }\n\n }\n\n // Try to fuse: ExpandE + GetV(Adj) = ExpandV\n\n if let Some(last_op) = builder.get_last_op_mut() {\n\n let op_kind = last_op\n\n .opr\n\n .as_mut()\n\n .ok_or_else(|| IrError::MissingData(format!(\"PhysicalOpr\")))?\n\n .op_kind\n", "file_path": "interactive_engine/executor/ir/core/src/plan/physical.rs", "rank": 42, "score": 299136.8617514099 }, { "content": "fn get_max_tag_id(pattern: &pb::Pattern) -> TagId {\n\n use pb::pattern::binder::Item as BinderItem;\n\n let mut tag_id_set = BTreeSet::new();\n\n for sentence in pattern.sentences.iter() {\n\n if let Some(start_tag) = sentence.start.as_ref() {\n\n let start_tag_id = pb_name_or_id_to_id(start_tag).unwrap();\n\n tag_id_set.insert(start_tag_id);\n\n }\n\n if let Some(end_tag) = sentence.end.as_ref() {\n\n let end_tag_id = pb_name_or_id_to_id(end_tag).unwrap();\n\n tag_id_set.insert(end_tag_id);\n\n }\n\n for binder in sentence.binders.iter() {\n\n let alias = match binder.item.as_ref() {\n\n Some(BinderItem::Edge(edge_expand)) => edge_expand.alias.as_ref(),\n\n Some(BinderItem::Path(path_expand)) => path_expand.alias.as_ref(),\n\n Some(BinderItem::Vertex(get_v)) => get_v.alias.as_ref(),\n\n _ => None,\n\n };\n\n if let Some(tag) = alias {\n\n let tag_id = pb_name_or_id_to_id(tag).unwrap();\n\n tag_id_set.insert(tag_id);\n\n }\n\n }\n\n }\n\n tag_id_set.len() as TagId\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/core/tests/common/pattern_cases.rs", "rank": 43, "score": 297728.13198628067 }, { "content": "#[allow(dead_code)]\n\nstruct MultiOutputsMerge {\n\n output_size: usize,\n\n scope_level: u32,\n\n cancel_merge: Vec<TidyTagMap<IntSet<u64>>>,\n\n}\n\n\n\nimpl MultiOutputsMerge {\n\n fn new(output_size: usize, scope_level: u32) -> MultiOutputsMerge {\n\n let mut cancel_merge = Vec::with_capacity(scope_level as usize + 1);\n\n for i in 0..scope_level + 1 {\n\n cancel_merge.push(TidyTagMap::new(i));\n\n }\n\n MultiOutputsMerge { output_size, scope_level, cancel_merge }\n\n }\n\n\n\n // TODO: enable merge cancel from parent into children;\n\n fn merge_cancel(&mut self, n: Cancel) -> Option<Tag> {\n\n let level = n.tag().len();\n\n assert!(level < self.cancel_merge.len());\n\n if let Some(mut in_merge) = self.cancel_merge[level].remove(n.tag()) {\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/mod.rs", "rank": 44, "score": 296972.1516976311 }, { "content": "struct MultiInputsMerge {\n\n input_size: usize,\n\n end_merge: Vec<TidyTagMap<(EndOfScope, IntSet<u64>)>>,\n\n}\n\n\n\nimpl MultiInputsMerge {\n\n pub fn new(input_size: usize, scope_level: u32) -> Self {\n\n let mut end_merge = Vec::with_capacity(scope_level as usize + 1);\n\n for i in 0..scope_level + 1 {\n\n 
end_merge.push(TidyTagMap::new(i));\n\n }\n\n MultiInputsMerge { input_size, end_merge }\n\n }\n\n\n\n fn merge_end(&mut self, n: End) -> Vec<EndOfScope> {\n\n let idx = n.tag().len();\n\n assert!(idx < self.end_merge.len());\n\n let mut ends = vec![];\n\n let guard = self.input_size;\n\n if idx + 1 < self.end_merge.len() {\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/mod.rs", "rank": 45, "score": 296953.21236153017 }, { "content": "/// return (vertex_id, ts)\n\npub fn parse_vertex_key(key: &[u8]) -> GraphResult<(VertexId, SnapshotId)> {\n\n if key.len() != 24 {\n\n let msg = format!(\"invalid key, key len is {}\", key.len());\n\n let err = gen_graph_err!(GraphErrorCode::InvalidData, msg, parse_vertex_key);\n\n error!(\"parse_vertex_key failed. error: {:?}\", err);\n\n return Err(err);\n\n }\n\n let reader = UnsafeBytesReader::new(key);\n\n let vertex_id = reader.read_i64(8).to_be();\n\n let ts = !reader.read_i64(16).to_be();\n\n Ok((vertex_id, ts))\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/bin.rs", "rank": 46, "score": 296663.26006637147 }, { "content": "pub fn cancel_job(job_id: u64) -> Result<(), CancelError> {\n\n if let Ok(mut hook) = JOB_CANCEL_MAP.write() {\n\n if let Some(cancel_hook) = hook.get_mut(&job_id) {\n\n cancel_hook.store(true, Ordering::SeqCst);\n\n } else {\n\n return Err(CancelError::JobNotFoundError(job_id));\n\n }\n\n } else {\n\n return Err(CancelError::CancelMapPoisonedError);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 47, "score": 296530.1754272443 }, { "content": "#[rustversion::since(1.72.0)]\n\nfn type_id_to_bytes<W: WriteExt>(typeid: TypeId, out: &mut W) -> io::Result<()> {\n\n let number: u128 = unsafe { std::mem::transmute(typeid) };\n\n number.write_to(out)\n\n}\n\n\n\nimpl<T: Any + Send + Sync + Clone + Debug + Encode> DynType for T {\n\n fn to_bytes(&self) -> io::Result<Vec<u8>> {\n\n let mut bytes = vec![];\n\n type_id_to_bytes(TypeId::of::<T>(), &mut bytes)?;\n\n self.write_to(&mut bytes)?;\n\n Ok(bytes)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use pegasus_common::codec::{ReadExt, WriteExt};\n\n\n\n use super::*;\n\n use crate::{Object, OwnedOrRef};\n", "file_path": "interactive_engine/executor/common/dyn_type/src/serde_dyn.rs", "rank": 48, "score": 293242.58642297407 }, { "content": "struct VertexTypeManager {\n\n map: HashMap<LabelId, TypeInfoList>,\n\n}\n\n\n\nimpl VertexTypeManager {\n\n fn new() -> Self {\n\n VertexTypeManager { map: HashMap::new() }\n\n }\n\n\n\n fn create(&mut self, si: SnapshotId, label: LabelId, type_def: TypeDef) {\n\n self.map\n\n .entry(label)\n\n .or_insert_with(|| TypeInfoList::new())\n\n .add(si, type_def);\n\n }\n\n\n\n fn drop(&mut self, si: SnapshotId, label: LabelId) {\n\n self.map\n\n .entry(label)\n\n .or_insert_with(|| TypeInfoList::new())\n\n .drop(si);\n\n }\n\n\n\n fn get_type_def(&self, si: SnapshotId, label: LabelId) -> Option<&TypeDef> {\n\n self.map.get(&label)?.get(si)\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/helper.rs", "rank": 49, "score": 291925.4050258495 }, { "content": "fn mock_process_2(servers: Vec<Server>, conf: ConnectionParams) -> std::thread::JoinHandle<()> {\n\n std::thread::Builder::new()\n\n .name(\"process-2\".to_owned())\n\n .spawn(move || {\n\n let detector = MockServerDetect { servers };\n\n let addr = pegasus_network::start_up(2, conf, \"127.0.0.1:1236\", detector).unwrap();\n\n 
info!(\"server 2 start at {:?}\", addr);\n\n let remotes = vec![0, 1];\n\n while !pegasus_network::check_connect(2, &remotes) {\n\n std::thread::sleep(Duration::from_secs(1));\n\n }\n\n let ipc_ch = pegasus_network::ipc_channel::<Entry>(1, 2, &remotes).unwrap();\n\n let (mut sends, recv) = ipc_ch.take();\n\n let entry = Entry::new(2);\n\n sends[0].send(&entry).unwrap();\n\n sends[1].send(&entry).unwrap();\n\n sends[0].close().unwrap();\n\n sends[1].close().unwrap();\n\n let mut receives = vec![];\n\n loop {\n", "file_path": "interactive_engine/executor/engine/pegasus/network/tests/ipc_test.rs", "rank": 50, "score": 290990.4556745143 }, { "content": "fn mock_process_1(servers: Vec<Server>, conf: ConnectionParams) -> std::thread::JoinHandle<()> {\n\n std::thread::Builder::new()\n\n .name(\"process-1\".to_owned())\n\n .spawn(move || {\n\n let detector = MockServerDetect { servers };\n\n let addr = pegasus_network::start_up(1, conf, \"127.0.0.1:1235\", detector).unwrap();\n\n info!(\"server 1 start at {:?}\", addr);\n\n let remotes = vec![0, 2];\n\n\n\n while !pegasus_network::check_connect(1, &remotes) {\n\n std::thread::sleep(Duration::from_secs(1));\n\n }\n\n\n\n let ipc_ch = pegasus_network::ipc_channel::<Entry>(1, 1, &remotes).unwrap();\n\n let (mut sends, recv) = ipc_ch.take();\n\n let entry = Entry::new(1);\n\n sends[0].send(&entry).unwrap();\n\n sends[1].send(&entry).unwrap();\n\n sends[0].close().unwrap();\n\n sends[1].close().unwrap();\n", "file_path": "interactive_engine/executor/engine/pegasus/network/tests/ipc_test.rs", "rank": 51, "score": 290990.4556745143 }, { "content": "fn mock_process_0(servers: Vec<Server>, conf: ConnectionParams) -> std::thread::JoinHandle<()> {\n\n std::thread::Builder::new()\n\n .name(\"process-0\".to_owned())\n\n .spawn(move || {\n\n let detector = MockServerDetect { servers };\n\n let addr = pegasus_network::start_up(0, conf, \"127.0.0.1:1234\", detector).unwrap();\n\n info!(\"server 0 start at {:?}\", addr);\n\n let remotes = vec![1, 2];\n\n while !pegasus_network::check_connect(0, &remotes) {\n\n std::thread::sleep(Duration::from_secs(1));\n\n }\n\n\n\n let ipc_ch = pegasus_network::ipc_channel::<Entry>(1, 0, &remotes).unwrap();\n\n let (mut sends, recv) = ipc_ch.take();\n\n let entry = Entry::new(0);\n\n sends[0].send(&entry).unwrap();\n\n sends[1].send(&entry).unwrap();\n\n sends[0].close().unwrap();\n\n sends[1].close().unwrap();\n\n let mut receives = vec![];\n", "file_path": "interactive_engine/executor/engine/pegasus/network/tests/ipc_test.rs", "rank": 52, "score": 290990.4556745143 }, { "content": "struct AppendProperty<double> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n vineyard::htap::htap_types::PodProperties pp;\n\n pp.long_value = prop->len;\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<arrow::DoubleBuilder*>(builder)->Append(pp.double_value));\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 53, "score": 290875.1946804563 }, { "content": "struct AppendProperty<char> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n vineyard::htap::htap_types::PodProperties pp;\n\n pp.long_value = prop->len;\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<arrow::Int8Builder*>(builder)->Append(pp.char_value));\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 54, "score": 290875.1946804563 
}, { "content": "struct AppendProperty<bool> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n vineyard::htap::htap_types::PodProperties pp;\n\n pp.long_value = prop->len;\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<arrow::BooleanBuilder*>(builder)->Append(pp.bool_value));\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 55, "score": 290875.1946804563 }, { "content": "struct AppendProperty<float> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n vineyard::htap::htap_types::PodProperties pp;\n\n pp.long_value = prop->len;\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<arrow::FloatBuilder*>(builder)->Append(pp.float_value));\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 56, "score": 290875.1946804563 }, { "content": "struct AppendProperty<int64_t> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<arrow::Int64Builder*>(builder)->Append(prop->len));\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 57, "score": 290875.1946804563 }, { "content": "struct AppendProperty<void> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<arrow::NullBuilder*>(builder)->Append(nullptr));\n\n }\n\n};\n\n\n\ntemplate <typename T>\n\nvoid generic_appender(arrow::ArrayBuilder* builder, T const& value) {\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<typename ConvertToArrowType<T>::BuilderType*>(builder)\n\n ->Append(value));\n\n}\n\n\n\nusing property_appender_func = void (*)(arrow::ArrayBuilder*,\n\n Property const* prop);\n\n\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 58, "score": 290875.1946804563 }, { "content": "struct AppendProperty<int16_t> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n vineyard::htap::htap_types::PodProperties pp;\n\n pp.long_value = prop->len;\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<arrow::Int16Builder*>(builder)->Append(pp.int16_value));\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 59, "score": 290875.1946804563 }, { "content": "struct AppendProperty<int32_t> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n vineyard::htap::htap_types::PodProperties pp;\n\n pp.long_value = prop->len;\n\n CHECK_ARROW_ERROR(\n\n dynamic_cast<arrow::Int32Builder*>(builder)->Append(pp.int_value));\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 60, "score": 290875.1946804563 }, { "content": "#[bench]\n\nfn bench_and(b: &mut test::Bencher) {\n\n let p = 3u32;\n\n let mut x = 0;\n\n b.iter(|| {\n\n for i in 1..1001 {\n\n x += i & p;\n\n }\n\n });\n\n println!(\"{}\", x);\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_div.rs", "rank": 61, "score": 289962.4278152024 }, { "content": "// g.V(5).in().out().hasId(5).in()\n\nfn modern_graph_filter_flatmap_test() -> ResultStream<u32> {\n\n let mut conf = 
JobConf::default();\n\n let num_workers = 2;\n\n conf.set_workers(num_workers);\n\n let result_stream = pegasus::run(conf, || {\n\n let src = if pegasus::get_current_worker().index == 0 { vec![] } else { vec![5] };\n\n move |input, output| {\n\n input\n\n .input_from(src)?\n\n .repartition(|x| Ok(*x as u64))\n\n .flat_map(move |x| Ok(MAP.get(&x).unwrap().1.iter().cloned()))?\n\n .repartition(|x| Ok(*x as u64))\n\n .flat_map(move |x| Ok(MAP.get(&x).unwrap().0.iter().cloned()))?\n\n .repartition(|x| Ok(*x as u64))\n\n .filter(|x| Ok(*x == 5))?\n\n .repartition(|x| Ok(*x as u64))\n\n .flat_map(|x| Ok(MAP.get(&x).unwrap().1.iter().cloned()))?\n\n .sink_into(output)\n\n }\n\n })\n\n .expect(\"submit job failure\");\n\n result_stream\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/tests/filter_test.rs", "rank": 62, "score": 289007.8098488199 }, { "content": "/// Resolves all partial tokens by converting them to complex tokens.\n\nfn partial_tokens_to_tokens(mut tokens: &[PartialToken]) -> ExprResult<Vec<Token>> {\n\n let mut result = Vec::new();\n\n let mut recent_token: Option<Token> = None;\n\n while !tokens.is_empty() {\n\n let first = tokens[0].clone();\n\n let second = tokens.get(1).cloned();\n\n let third = tokens.get(2).cloned();\n\n let mut cutoff = 2;\n\n\n\n let curr_token = match first {\n\n PartialToken::Token(token) => {\n\n cutoff = 1;\n\n Some(token)\n\n }\n\n PartialToken::Literal(literal) => {\n\n cutoff = 1;\n\n if let Ok(number) = literal.parse::<i64>() {\n\n Some(Token::Int(number))\n\n } else if let Ok(number) = literal.parse::<f64>() {\n\n Some(Token::Float(number))\n", "file_path": "interactive_engine/executor/ir/common/src/expr_parse/token.rs", "rank": 63, "score": 288041.61747108144 }, { "content": "#[bench]\n\nfn bench_add(b: &mut test::Bencher) {\n\n let mut rng = rand::thread_rng();\n\n let mut d: u32 = rng.gen();\n\n let mut x = 0u32;\n\n b.iter(|| {\n\n for i in 1..1001 {\n\n if d > 1000 {\n\n x += i;\n\n }\n\n }\n\n //d = d.wrapping_add(1);\n\n });\n\n println!(\"{}\", x);\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_div.rs", "rank": 64, "score": 287202.02870031586 }, { "content": "#[bench]\n\nfn bench_hash(b: &mut test::Bencher) {\n\n let mut x = 0;\n\n b.iter(|| {\n\n for i in 1..1001 {\n\n let mut h = DefaultHasher::new();\n\n h.write_u32(i);\n\n x += h.finish();\n\n }\n\n });\n\n println!(\"{}\", x);\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_div.rs", "rank": 65, "score": 287202.02870031586 }, { "content": "#[bench]\n\nfn bench_mix_and(b: &mut test::Bencher) {\n\n let p = Mix::And(3);\n\n let mut x = 0;\n\n b.iter(|| {\n\n for i in 1..1001 {\n\n match p {\n\n Mix::Div(d) => x += i % d,\n\n Mix::And(a) => x += i & a,\n\n }\n\n }\n\n });\n\n println!(\"{}\", x);\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_div.rs", "rank": 66, "score": 287202.02870031586 }, { "content": "#[bench]\n\nfn bench_div(b: &mut test::Bencher) {\n\n let p = 4u32;\n\n let mut x = 0;\n\n b.iter(|| {\n\n for i in 1..1001 {\n\n x += i % p;\n\n }\n\n });\n\n println!(\"{}\", x);\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_div.rs", "rank": 67, "score": 287202.02870031586 }, { "content": "#[inline]\n\nfn to_runtime_vertex<V>(v: V, prop_keys: Option<Vec<NameOrId>>) -> Vertex\n\nwhere\n\n V: 'static + StoreVertex,\n\n{\n\n let id = v.get_id() as ID;\n\n let label = 
encode_runtime_v_label(&v);\n\n let details = LazyVertexDetails::new(v, prop_keys);\n\n Vertex::new(id, Some(label), DynDetails::lazy(details))\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/graph_proxy/src/adapters/gs_store/read_graph.rs", "rank": 68, "score": 286073.9312683529 }, { "content": "struct ShadeCmp<C> {\n\n cmp: Arc<C>,\n\n}\n\n\n\nunsafe impl<C: Send> Send for ShadeCmp<C> {}\n\n\n\nimpl<C> Clone for ShadeCmp<C> {\n\n fn clone(&self) -> Self {\n\n ShadeCmp { cmp: self.cmp.clone() }\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/concise/limit.rs", "rank": 69, "score": 285710.22113508376 }, { "content": "#[bench]\n\nfn bench_mix_div(b: &mut test::Bencher) {\n\n let p = Mix::Div(4);\n\n let mut x = 0;\n\n b.iter(|| {\n\n for i in 1..1001 {\n\n match p {\n\n Mix::Div(d) => x += i % d,\n\n Mix::And(a) => x += i & a,\n\n }\n\n }\n\n });\n\n println!(\"{}\", x);\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_div.rs", "rank": 70, "score": 284523.2539082677 }, { "content": "pub fn str_to_expr_pb(expr_str: String) -> ExprResult<pb::Expression> {\n\n let mut operators = vec![];\n\n for token in tokenize(&expr_str)? {\n\n operators.push(token.try_into()?);\n\n }\n\n\n\n Ok(pb::Expression { operators })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::expr_parse::token::tokenize;\n\n\n\n #[test]\n\n fn test_to_suffix_expr() {\n\n // 1 + 2\n\n let case1 = tokenize(\"1 + 2\").unwrap();\n\n let expected_case1 = vec![Token::Int(1), Token::Int(2), Token::Plus];\n\n assert_eq!(to_suffix_expr(case1).unwrap(), expected_case1);\n", "file_path": "interactive_engine/executor/ir/common/src/expr_parse/mod.rs", "rank": 71, "score": 283029.70348579955 }, { "content": "#[inline]\n\nfn to_runtime_vertex(v: LocalVertex<'static, DefaultId>, prop_keys: Option<Vec<NameOrId>>) -> Vertex {\n\n // For vertices, we query properties via vid\n\n let id = v.get_id() as ID;\n\n let label = encode_runtime_v_label(&v);\n\n let details = LazyVertexDetails::new(v, prop_keys);\n\n Vertex::new(id, Some(label), DynDetails::lazy(details))\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/graph_proxy/src/adapters/exp_store/read_graph.rs", "rank": 72, "score": 282941.85235179245 }, { "content": "type RecordKeySelector = Box<dyn KeyFunction<Record, RecordKey, Record>>;\n", "file_path": "interactive_engine/executor/ir/runtime/src/assembly.rs", "rank": 73, "score": 282534.8256311258 }, { "content": "struct AppendProperty<std::string> {\n\n static void append(arrow::ArrayBuilder* builder, Property const* prop) {\n\n CHECK_ARROW_ERROR(dynamic_cast<arrow::LargeStringBuilder*>(builder)->Append(\n\n static_cast<uint8_t*>(prop->data), prop->len));\n\n }\n\n};\n\n\n\ntemplate <>\n", "file_path": "interactive_engine/executor/store/global_query/src/store_impl/v6d/native/property_graph_stream.h", "rank": 74, "score": 282187.121771478 }, { "content": "pub fn gen_vertex_properties(\n\n si: SnapshotId, label: LabelId, id: VertexId, type_def: &TypeDef,\n\n) -> HashMap<PropertyId, Value> {\n\n let mut map = HashMap::new();\n\n for prop_def in type_def.get_prop_defs() {\n\n let p = vertex_prop(si, label, id, prop_def.r#type);\n\n map.insert(prop_def.id, p);\n\n }\n\n map\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/data.rs", "rank": 75, "score": 280595.64725604746 }, { "content": "fn delete_vertex<G: MultiVersionGraph>(graph: &G, snapshot_id: i64, op: &OperationPb) -> 
GraphResult<()> {\n\n trace!(\"delete_vertex\");\n\n let data_operation_pb = parse_pb::<DataOperationPb>(op.get_dataBytes())?;\n\n\n\n let vertex_id_pb = parse_pb::<VertexIdPb>(data_operation_pb.get_keyBlob())?;\n\n let vertex_id = vertex_id_pb.get_id();\n\n\n\n let label_id_pb = parse_pb::<LabelIdPb>(data_operation_pb.get_locationBlob())?;\n\n let label_id = label_id_pb.get_id();\n\n\n\n graph.delete_vertex(snapshot_id, vertex_id, label_id)\n\n}\n\n\n", "file_path": "interactive_engine/executor/assembly/groot/src/store/graph.rs", "rank": 76, "score": 280573.2201124912 }, { "content": "fn update_vertex<G: MultiVersionGraph>(graph: &G, snapshot_id: i64, op: &OperationPb) -> GraphResult<()> {\n\n trace!(\"update_vertex\");\n\n let data_operation_pb = parse_pb::<DataOperationPb>(op.get_dataBytes())?;\n\n\n\n let vertex_id_pb = parse_pb::<VertexIdPb>(data_operation_pb.get_keyBlob())?;\n\n let vertex_id = vertex_id_pb.get_id();\n\n\n\n let label_id_pb = parse_pb::<LabelIdPb>(data_operation_pb.get_locationBlob())?;\n\n let label_id = label_id_pb.get_id();\n\n\n\n let property_map = <dyn PropertyMap>::from_proto(data_operation_pb.get_props());\n\n graph.insert_update_vertex(snapshot_id, vertex_id, label_id, &property_map)\n\n}\n\n\n", "file_path": "interactive_engine/executor/assembly/groot/src/store/graph.rs", "rank": 77, "score": 280573.2201124912 }, { "content": "pub fn create_vertex_type_def(\n\n label: LabelId, name: &str, version: u32, properties: Vec<PropDef>, is_dimension: bool, comment: &str,\n\n) -> TypeDef {\n\n let proto = create_vertex_type_def_proto(label, name, version, properties, is_dimension, comment);\n\n TypeDef::from(&proto)\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/schema/test_util.rs", "rank": 78, "score": 280529.22561120696 }, { "content": "fn ipc_with_conf(conf: ConnectionParams) {\n\n pegasus_common::logs::init_log();\n\n let mut servers = vec![];\n\n servers.push(Server { id: 0, addr: \"127.0.0.1:1234\".parse().unwrap() });\n\n servers.push(Server { id: 1, addr: \"127.0.0.1:1235\".parse().unwrap() });\n\n servers.push(Server { id: 2, addr: \"127.0.0.1:1236\".parse().unwrap() });\n\n let g1 = mock_process_0(servers.clone(), conf);\n\n let g2 = mock_process_1(servers.clone(), conf);\n\n let g3 = mock_process_2(servers, conf);\n\n g1.join().unwrap();\n\n g2.join().unwrap();\n\n g3.join().unwrap();\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/network/tests/ipc_test.rs", "rank": 79, "score": 280464.67348933313 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n if cfg!(target_os = \"linux\") {\n\n println!(\"cargo:rustc-link-arg=-Wl,--unresolved-symbols=ignore-all\");\n\n } else if cfg!(target_os = \"macos\") {\n\n println!(\"cargo:rustc-link-arg=-Wl,-undefined\");\n\n println!(\"cargo:rustc-link-arg=-Wl,dynamic_lookup\");\n\n } else {\n\n unimplemented!()\n\n }\n\n Ok(())\n\n}\n", "file_path": "interactive_engine/executor/ir/integrated/build.rs", "rank": 80, "score": 279773.6296618112 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n codegen_inplace()\n\n}\n\n\n\n#[cfg(feature = \"proto_inplace\")]\n\nuse std::path::PathBuf;\n\n#[cfg(feature = \"proto_inplace\")]\n\nconst GEN_DIR: &'static str = \"src/generated\";\n\n\n", "file_path": "interactive_engine/executor/ir/common/build.rs", "rank": 81, "score": 279773.6296618112 }, { "content": "fn get_string_value(json: &Value, key: &str) -> Result<String, ConfigParseError> {\n\n if let Some(v) = json[key].as_str() {\n\n 
Ok(v.to_owned())\n\n } else {\n\n Err(ConfigParseError::from(format!(r#\"\"{}\" not found in config: {:?}\"#, key, json)))\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ConfigParseError {\n\n err_msg: String,\n\n}\n\n\n\nimpl From<String> for ConfigParseError {\n\n fn from(err_msg: String) -> Self {\n\n ConfigParseError { err_msg }\n\n }\n\n}\n\n\n\nimpl ToString for ConfigParseError {\n", "file_path": "interactive_engine/executor/store/groot/src/config/csv_config.rs", "rank": 82, "score": 279584.57286333904 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n println!(\"cargo:rerun-if-changed=proto/clickhouse_grpc.proto\");\n\n codegen_inplace()\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/benchmark/build.rs", "rank": 83, "score": 277192.53969111625 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n println!(\"cargo:rerun-if-changed=proto/job_service.proto\");\n\n println!(\"cargo:rerun-if-changed=proto/job_plan.proto\");\n\n codegen_inplace()\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/server/build.rs", "rank": 84, "score": 277192.53969111625 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n codegen_inplace()\n\n}\n\n\n\n#[cfg(feature = \"with_v6d\")]\n\nconst NATIVE_DIR: &'static str = \"src/store_impl/v6d/native\";\n\n\n", "file_path": "interactive_engine/executor/store/global_query/build.rs", "rank": 85, "score": 277192.53969111625 }, { "content": "#[cfg(not(feature = \"proto_inplace\"))]\n\nfn codegen_inplace() -> Result<(), Box<dyn std::error::Error>> {\n\n println!(\"cargo:rerun-if-changed=../proto/common.proto\");\n\n println!(\"cargo:rerun-if-changed=../proto/expr.proto\");\n\n println!(\"cargo:rerun-if-changed=../proto/algebra.proto\");\n\n println!(\"cargo:rerun-if-changed=../proto/schema.proto\");\n\n println!(\"cargo:rerun-if-changed=../proto/results.proto\");\n\n println!(\"cargo:rerun-if-changed=../proto/physical.proto\");\n\n println!(\"cargo:rerun-if-changed=../proto/type.proto\");\n\n prost_build::Config::new()\n\n .type_attribute(\".\", \"#[derive(Serialize,Deserialize)]\")\n\n .compile_protos(\n\n &[\n\n \"../proto/common.proto\",\n\n \"../proto/expr.proto\",\n\n \"../proto/algebra.proto\",\n\n \"../proto/schema.proto\",\n\n \"../proto/results.proto\",\n\n \"../proto/physical.proto\",\n\n \"../proto/type.proto\",\n\n ],\n\n &[\"../proto\"],\n\n )?;\n\n\n\n Ok(())\n\n}\n", "file_path": "interactive_engine/executor/ir/common/build.rs", "rank": 86, "score": 277192.53969111625 }, { "content": "pub fn gen_vertex_update_properties(\n\n si: SnapshotId, label: LabelId, id: VertexId, type_def: &TypeDef,\n\n) -> HashMap<PropertyId, Value> {\n\n let mut map = HashMap::new();\n\n let x = si as i64 + label as i64 + id as i64;\n\n let count = ValueType::count() as i64;\n\n for i in x..x + count / 2 {\n\n let prop_id = (i % count) as PropertyId + 1;\n\n let prop_def = type_def.get_prop_def(prop_id).unwrap();\n\n let v = vertex_prop(si, label, id, prop_def.r#type);\n\n map.insert(prop_id, v);\n\n }\n\n map\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/data.rs", "rank": 87, "score": 277027.1537444669 }, { "content": "pub fn create_vertex_type_def_proto(\n\n label: LabelId, name: &str, version: u32, properties: Vec<PropDef>, is_dimension: bool, comment: &str,\n\n) -> TypeDefProto {\n\n let mut proto = TypeDefProto::new();\n\n proto.set_id(label as i32);\n\n proto.set_version(version as i32);\n\n 
proto.set_isDimensionType(is_dimension);\n\n proto.set_label(name.to_owned());\n\n proto.set_field_type(TypeIdProto_Type::VERTEX);\n\n for p in properties {\n\n proto.property.push(p.to_proto());\n\n proto\n\n .gidToPid\n\n .insert(p.get_prop_id() as i32, p.get_prop_id() as i32);\n\n }\n\n proto.set_comment(comment.to_owned());\n\n proto\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/schema/test_util.rs", "rank": 88, "score": 276961.71413547633 }, { "content": "pub fn remove_cancel_hook(job_id: u64) -> Result<(), CancelError> {\n\n if let Ok(mut hook) = JOB_CANCEL_MAP.write() {\n\n hook.remove(&job_id);\n\n } else {\n\n return Err(CancelError::CancelMapPoisonedError);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 89, "score": 276811.9642066994 }, { "content": "/// Get the edge expand's label\n\n/// - in current realization, edge_expand only allows to have one label\n\n/// - if it has no label or more than one label, give Error\n\nfn get_edge_expand_labels(edge_expand: &pb::EdgeExpand) -> IrPatternResult<Vec<PatternLabelId>> {\n\n if let Some(params) = edge_expand.params.as_ref() {\n\n params\n\n .tables\n\n .iter()\n\n .map(|label| pb_name_or_id_to_id(label).map(|l| l as PatternLabelId))\n\n .collect::<Result<_, _>>()\n\n } else {\n\n Ok(vec![])\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/core/src/glogue/pattern.rs", "rank": 90, "score": 276431.7402847827 }, { "content": "/// Finally, if the results contain any pattern vertices with system-given aliases,\n\n/// we additional project the user-given aliases, i.e., those may be referred later.\n\n///\n\n/// Here, origin_pattern.vertices.len() indicates total number of pattern vertices;\n\n/// and origin_pattern.tag_vertex_map.len() indicates the number of pattern vertices with user-given aliases\n\nfn generate_project_operator(pattern: &Pattern) -> IrPatternResult<Option<pb::logical_plan::Operator>> {\n\n let max_tag_id = pattern.get_max_tag_id() as KeyId;\n\n let max_vertex_id = pattern\n\n .get_max_vertex_id()\n\n .ok_or_else(|| (IrPatternError::InvalidExtendPattern(format!(\"Empty pattern {:?}\", pattern))))?\n\n as KeyId;\n\n if max_vertex_id >= max_tag_id {\n\n let mut mappings = Vec::with_capacity(max_tag_id as usize);\n\n for tag_id in 0..max_tag_id {\n\n let expr = str_to_expr_pb(format!(\"@{}\", tag_id)).ok();\n\n let mapping = pb::project::ExprAlias { expr, alias: Some(tag_id.into()) };\n\n mappings.push(mapping);\n\n }\n\n // TODO: the meta_data of project is identical with the meta_data of \"Pattern\"\n\n Ok(Some(pb::Project { mappings, is_append: false, meta_data: vec![] }.into()))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/core/src/glogue/pattern.rs", "rank": 91, "score": 276310.5726840421 }, { "content": "pub fn test_drop_vertex_type<G: MultiVersionGraph>(graph: G) {\n\n let tester = tester::DropVertexTypeTester::new(graph);\n\n tester.execute();\n\n}\n\n\n\nmod tester {\n\n use super::common::*;\n\n use super::*;\n\n use crate::db::api::multi_version_graph::MultiVersionGraph;\n\n\n\n pub struct GetVertexTester<G: MultiVersionGraph> {\n\n graph: G,\n\n }\n\n\n\n impl<G: MultiVersionGraph> GetVertexTester<G> {\n\n pub fn new(graph: G) -> Self {\n\n GetVertexTester { graph }\n\n }\n\n\n\n pub fn execute(&self) {\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/vertex.rs", "rank": 92, "score": 275722.3193804973 }, { "content": "fn _gen_key(key: 
&str) -> Vec<u8> {\n\n let mut buf = Vec::new();\n\n buf.extend(transform::i64_to_vec(META_TABLE_ID.to_be()));\n\n buf.extend(key.as_bytes());\n\n buf\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/meta.rs", "rank": 93, "score": 274941.08364974527 }, { "content": "fn meta_key(key: &str) -> Vec<u8> {\n\n let bytes = key.as_bytes();\n\n let mut ret = Vec::with_capacity(8 + key.len());\n\n let prefix = transform::i64_to_arr(META_TABLE_ID.to_be());\n\n ret.extend_from_slice(&prefix);\n\n ret.extend_from_slice(bytes);\n\n ret\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/meta.rs", "rank": 94, "score": 274941.08364974527 }, { "content": "#[cfg(feature = \"gcip\")]\n\nfn codegen_inplace() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = \"src/generated\";\n\n if std::path::Path::new(&dir).exists() {\n\n std::fs::remove_dir_all(&dir).unwrap();\n\n }\n\n std::fs::create_dir(&dir).unwrap();\n\n tonic_build::configure()\n\n .build_server(true)\n\n .build_client(true)\n\n .out_dir(\"src/generated\")\n\n .compile(&[\"proto/job_service.proto\", \"proto/job_plan.proto\"], &[\"proto\"])?;\n\n Ok(())\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/server/build.rs", "rank": 95, "score": 274679.8539550125 }, { "content": " let sort_func = sort_opr.gen_cmp().unwrap();\n\n stream = stream.sort_by(move |a, b| sort_func.compare(a, b))?;\n\n stream.sink_into(output)\n\n }\n\n })\n\n .expect(\"build job failure\");\n\n result\n\n }\n\n\n\n // g.V().order()\n\n #[test]\n\n fn sort_simple_ascending_test() {\n\n let sort_opr = pb::OrderBy {\n\n pairs: vec![pb::order_by::OrderingPair {\n\n key: Some(common_pb::Variable { tag: None, property: None, node_type: None }),\n\n order: 1, // ascending\n\n }],\n\n limit: None,\n\n };\n\n let mut result = sort_test(init_source(), sort_opr);\n", "file_path": "interactive_engine/executor/ir/runtime/src/process/operator/sort/sort.rs", "rank": 96, "score": 104.16001542928555 }, { "content": " fn try_from(order_pb: algebra_pb::OrderBy) -> Result<Self, Self::Error> {\n\n let mut tag_key_order = Vec::with_capacity(order_pb.pairs.len());\n\n for order_pair in order_pb.pairs {\n\n let key = order_pair\n\n .key\n\n .ok_or_else(|| ParsePbError::EmptyFieldError(\"key is empty in order\".to_string()))?\n\n .try_into()?;\n\n let order: Order = unsafe { ::std::mem::transmute(order_pair.order) };\n\n tag_key_order.push((key, order));\n\n }\n\n Ok(RecordCompare { tag_key_order })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use ahash::HashMap;\n\n use dyn_type::Object;\n\n use graph_proxy::apis::{DynDetails, GraphElement, Vertex};\n\n use ir_common::generated::algebra as pb;\n", "file_path": "interactive_engine/executor/ir/runtime/src/process/operator/sort/sort.rs", "rank": 97, "score": 98.88600448902162 }, { "content": " use ir_common::generated::common as common_pb;\n\n use ir_common::NameOrId;\n\n use pegasus::api::{Sink, SortBy};\n\n use pegasus::result::ResultStream;\n\n use pegasus::JobConf;\n\n\n\n use crate::process::entry::Entry;\n\n use crate::process::operator::sort::CompareFunctionGen;\n\n use crate::process::operator::tests::{\n\n init_source, init_source_with_tag, to_var_pb, PERSON_LABEL, TAG_A,\n\n };\n\n use crate::process::record::Record;\n\n\n\n fn sort_test(source: Vec<Record>, sort_opr: pb::OrderBy) -> ResultStream<Record> {\n\n let conf = JobConf::new(\"sort_test\");\n\n let result = pegasus::run(conf, || {\n\n let source = source.clone();\n\n let sort_opr = 
sort_opr.clone();\n\n |input, output| {\n\n let mut stream = input.input_from(source.into_iter())?;\n", "file_path": "interactive_engine/executor/ir/runtime/src/process/operator/sort/sort.rs", "rank": 98, "score": 93.52817618339492 }, { "content": " let conf = JobConf::new(\"join_test\");\n\n let mut result = pegasus::run(conf, || {\n\n move |input, output| {\n\n let s1 = input.input_from(source_s1_gen())?;\n\n let s2 = input.input_from(source_s2_gen())?;\n\n let join_opr_pb = pb::Join {\n\n left_keys: vec![common_pb::Variable::from(\"@.~id\".to_string())],\n\n right_keys: vec![common_pb::Variable::from(\"@.~id\".to_string())],\n\n join_kind,\n\n left_plan: None,\n\n right_plan: None,\n\n };\n\n let left_key_selector = join_opr_pb.gen_left_kv_fn()?;\n\n let right_key_selector = join_opr_pb.gen_right_kv_fn()?;\n\n let join_kind = join_opr_pb.get_join_kind();\n\n let left_stream = s1\n\n .key_by(move |record| left_key_selector.get_kv(record))?\n\n // TODO(bingqing): remove this when new keyed-join in gaia-x is ready;\n\n .partition_by_key();\n\n let right_stream = s2\n", "file_path": "interactive_engine/executor/ir/runtime/src/process/operator/join/join.rs", "rank": 99, "score": 93.23681369865034 } ]
Rust
src/parser/lexer/mod.rs
mitsuhiko/lol-html
2772fd0e9feb4a820bfc0cb460e9441b0d0c205d
#[macro_use] mod actions; mod conditions; mod lexeme; use crate::base::{Align, Range}; use crate::html::{LocalNameHash, Namespace, TextType}; use crate::parser::state_machine::{ ActionError, ActionResult, FeedbackDirective, StateMachine, StateResult, }; use crate::parser::{ ParserDirective, ParsingAmbiguityError, TreeBuilderFeedback, TreeBuilderSimulator, }; use crate::rewriter::RewritingError; use std::cell::RefCell; use std::rc::Rc; pub use self::lexeme::*; const DEFAULT_ATTR_BUFFER_CAPACITY: usize = 256; pub trait LexemeSink { fn handle_tag(&mut self, lexeme: &TagLexeme) -> Result<ParserDirective, RewritingError>; fn handle_non_tag_content( &mut self, lexeme: &NonTagContentLexeme, ) -> Result<(), RewritingError>; } pub type State<S> = fn(&mut Lexer<S>, &[u8]) -> StateResult; pub type SharedAttributeBuffer = Rc<RefCell<Vec<AttributeOutline>>>; pub struct Lexer<S: LexemeSink> { next_pos: usize, is_last_input: bool, lexeme_start: usize, token_part_start: usize, is_state_enter: bool, cdata_allowed: bool, lexeme_sink: S, state: State<S>, current_tag_token: Option<TagTokenOutline>, current_non_tag_content_token: Option<NonTagContentTokenOutline>, current_attr: Option<AttributeOutline>, last_start_tag_name_hash: LocalNameHash, closing_quote: u8, attr_buffer: SharedAttributeBuffer, tree_builder_simulator: Rc<RefCell<TreeBuilderSimulator>>, last_text_type: TextType, feedback_directive: FeedbackDirective, } impl<S: LexemeSink> Lexer<S> { pub fn new(lexeme_sink: S, tree_builder_simulator: Rc<RefCell<TreeBuilderSimulator>>) -> Self { Lexer { next_pos: 0, is_last_input: false, lexeme_start: 0, token_part_start: 0, is_state_enter: true, cdata_allowed: false, lexeme_sink, state: Lexer::data_state, current_tag_token: None, current_non_tag_content_token: None, current_attr: None, last_start_tag_name_hash: LocalNameHash::default(), closing_quote: b'"', attr_buffer: Rc::new(RefCell::new(Vec::with_capacity( DEFAULT_ATTR_BUFFER_CAPACITY, ))), tree_builder_simulator, last_text_type: TextType::Data, feedback_directive: FeedbackDirective::None, } } fn try_get_tree_builder_feedback( &mut self, token: &TagTokenOutline, ) -> Result<Option<TreeBuilderFeedback>, ParsingAmbiguityError> { Ok(match self.feedback_directive.take() { FeedbackDirective::ApplyUnhandledFeedback(feedback) => Some(feedback), FeedbackDirective::Skip => None, FeedbackDirective::None => Some({ let mut simulator = self.tree_builder_simulator.borrow_mut(); match *token { TagTokenOutline::StartTag { name_hash, .. } => { simulator.get_feedback_for_start_tag(name_hash)? } TagTokenOutline::EndTag { name_hash, .. 
} => { simulator.get_feedback_for_end_tag(name_hash) } } }), }) } fn handle_tree_builder_feedback(&mut self, feedback: TreeBuilderFeedback, lexeme: &TagLexeme) { match feedback { TreeBuilderFeedback::SwitchTextType(text_type) => self.set_last_text_type(text_type), TreeBuilderFeedback::SetAllowCdata(cdata_allowed) => self.cdata_allowed = cdata_allowed, TreeBuilderFeedback::RequestLexeme(mut callback) => { let feedback = callback(&mut self.tree_builder_simulator.borrow_mut(), lexeme); self.handle_tree_builder_feedback(feedback, lexeme); } TreeBuilderFeedback::None => (), } } #[inline] fn emit_lexeme(&mut self, lexeme: &NonTagContentLexeme) -> ActionResult { trace!(@output lexeme); self.lexeme_start = lexeme.raw_range().end; self.lexeme_sink .handle_non_tag_content(lexeme) .map_err(ActionError::RewritingError) } #[inline] fn emit_tag_lexeme(&mut self, lexeme: &TagLexeme) -> Result<ParserDirective, RewritingError> { trace!(@output lexeme); self.lexeme_start = lexeme.raw_range().end; self.lexeme_sink.handle_tag(lexeme) } #[inline] fn create_lexeme_with_raw<'i, T>( &mut self, input: &'i [u8], token: T, raw_end: usize, ) -> Lexeme<'i, T> { Lexeme::new( input.into(), token, Range { start: self.lexeme_start, end: raw_end, }, ) } #[inline] fn create_lexeme_with_raw_inclusive<'i, T>( &mut self, input: &'i [u8], token: T, ) -> Lexeme<'i, T> { let raw_end = self.pos() + 1; self.create_lexeme_with_raw(input, token, raw_end) } #[inline] fn create_lexeme_with_raw_exclusive<'i, T>( &mut self, input: &'i [u8], token: T, ) -> Lexeme<'i, T> { let raw_end = self.pos(); self.create_lexeme_with_raw(input, token, raw_end) } } impl<S: LexemeSink> StateMachine for Lexer<S> { impl_common_sm_accessors!(); impl_common_input_cursor_methods!(); #[inline] fn set_state(&mut self, state: State<S>) { self.state = state; } #[inline] fn state(&self) -> State<S> { self.state } #[inline] fn get_consumed_byte_count(&self, _input: &[u8]) -> usize { self.lexeme_start } fn adjust_for_next_input(&mut self) { self.token_part_start.align(self.lexeme_start); self.current_tag_token.align(self.lexeme_start); self.current_non_tag_content_token.align(self.lexeme_start); self.current_attr.align(self.lexeme_start); self.lexeme_start = 0; } #[inline] fn adjust_to_bookmark(&mut self, pos: usize, feedback_directive: FeedbackDirective) { self.lexeme_start = pos; self.feedback_directive = feedback_directive; } #[inline] fn enter_ch_sequence_matching(&mut self) { trace!(@noop); } #[inline] fn leave_ch_sequence_matching(&mut self) { trace!(@noop); } }
#[macro_use] mod actions; mod conditions; mod lexeme; use crate::base::{Align, Range}; use crate::html::{LocalNameHash, Namespace, TextType}; use crate::parser::state_machine::{ ActionError, ActionResult, FeedbackDirective, StateMachine, StateResult, }; use crate::parser::{ ParserDirective, ParsingAmbiguityError, TreeBuilderFeedback, TreeBuilderSimulator, }; use crate::rewriter::RewritingError; use std::cell::RefCell; use std::rc::Rc; pub use self::lexeme::*; const DEFAULT_ATTR_BUFFER_CAPACITY: usize = 256; pub trait LexemeSink { fn handle_tag(&mut self, lexeme: &TagLexeme) -> Result<ParserDirective, RewritingError>; fn handle_non_tag_content( &mut self, lexeme: &NonTagContentLexeme, ) -> Result<(), RewritingError>; } pub type State<S> = fn(&mut Lexer<S>, &[u8]) -> StateResult; pub type SharedAttributeBuffer = Rc<RefCell<Vec<AttributeOutline>>>; pub struct Lexer<S: LexemeSink> { next_pos: usize, is_last_input: bool, lexeme_start: usize, token_part_start: usize, is_state_enter: bool, cdata_allowed: bool, lexeme_sink: S, state: State<S>, current_tag_token: Option<TagTokenOutline>, current_non_tag_content_token: Option<NonTagContentTokenOutline>, current_attr: Option<AttributeOutline>, last_start_tag_name_hash: LocalNameHash, closing_quote: u8, attr_buffer: SharedAttributeBuffer, tree_builder_simulator: Rc<RefCell<TreeBuilderSimulator>>, last_text_type: TextType, feedback_directive: FeedbackDirective, } impl<S: LexemeSink> Lexer<S> { pub fn new(lexeme_sink: S, tree_builder_simulator: Rc<RefCell<TreeBuilderSimulator>>) -> Self { Lexer { next_pos: 0, is_last_input: false, lexeme_start: 0, token_part_start: 0, is_state_enter: true, cdata_allowed: false, lexeme_sink, state: Lexer::data_state, current_tag_token: None, current_non_tag_content_token: None, current_attr: None, last_start_tag_name_hash: LocalNameHash::default(), closing_quote: b'"', attr_buffer: Rc::new(RefCell::new(Vec::with_capacity( DEFAULT_ATTR_BUFFER_CAPACITY, ))), tree_builder_simulator, last_text_type: TextType::Data, feedback_directive: FeedbackDirective::None, } } fn try_get_tree_builder_feedback( &mut self, token: &TagTokenOutline, ) -> Result<Option<TreeBuilderFeedback>, ParsingAmbiguityError> { Ok(match self.feedback_directive.take() { FeedbackDirective::ApplyUnhandledFeedback(feedback) => Some(feedback), FeedbackDirective::Skip => None, FeedbackDirective::None => Some({ let mut simulator = self.tree_builder_simulator.borrow_mut(); match *token { TagTokenOutline::StartTag { name_hash, .. } => { simulator.get_feedback_for_start_tag(name_hash)? } TagTokenOutline::EndTag { name_hash, .. } => { simulator.get_feedback_for_end_tag(name_hash) } } }), }) } fn handle_tree_builder_feedback(&mut self, feedback: TreeBuilderFeedback, lexeme: &TagLexeme) { match feedback { TreeBuilderFeedback::SwitchTextType(text_type) => self.set_last_text_type(text_type), TreeBuilderFeedback::SetAllowCdata(cdata_allowed) => self.cdata_allowed = cdata_allowed, TreeBuilderFeedback::RequestLexeme(mut callback) => { let feedback = callback(&mut self.tree_builder_simulator.borrow_mut(), lexeme); self.handle_tree_builder_feedback(feedback, lexeme); } TreeBuilderFeedback::None => (), } } #[inline] fn emit_lexeme(&mut self, lexeme: &NonTagContentLexeme) -> ActionResult { trace!(@output lexeme); self.lexeme_start = lexeme.raw_range().end; self.lexeme_sink .handle_non_tag_content(lexeme) .map_err(ActionError::RewritingError) } #[inline] fn emit_tag_lexeme(&mut self, lexeme: &TagLexem
    #[inline]
    fn create_lexeme_with_raw<'i, T>(
        &mut self,
        input: &'i [u8],
        token: T,
        raw_end: usize,
    ) -> Lexeme<'i, T> {
        Lexeme::new(
            input.into(),
            token,
            Range {
                start: self.lexeme_start,
                end: raw_end,
            },
        )
    }

    #[inline]
    fn create_lexeme_with_raw_inclusive<'i, T>(
        &mut self,
        input: &'i [u8],
        token: T,
    ) -> Lexeme<'i, T> {
        let raw_end = self.pos() + 1;
        self.create_lexeme_with_raw(input, token, raw_end)
    }

    #[inline]
    fn create_lexeme_with_raw_exclusive<'i, T>(
        &mut self,
        input: &'i [u8],
        token: T,
    ) -> Lexeme<'i, T> {
        let raw_end = self.pos();
        self.create_lexeme_with_raw(input, token, raw_end)
    }
}

impl<S: LexemeSink> StateMachine for Lexer<S> {
    impl_common_sm_accessors!();
    impl_common_input_cursor_methods!();

    #[inline]
    fn set_state(&mut self, state: State<S>) {
        self.state = state;
    }

    #[inline]
    fn state(&self) -> State<S> {
        self.state
    }

    #[inline]
    fn get_consumed_byte_count(&self, _input: &[u8]) -> usize {
        self.lexeme_start
    }

    fn adjust_for_next_input(&mut self) {
        self.token_part_start.align(self.lexeme_start);
        self.current_tag_token.align(self.lexeme_start);
        self.current_non_tag_content_token.align(self.lexeme_start);
        self.current_attr.align(self.lexeme_start);
        self.lexeme_start = 0;
    }

    #[inline]
    fn adjust_to_bookmark(&mut self, pos: usize, feedback_directive: FeedbackDirective) {
        self.lexeme_start = pos;
        self.feedback_directive = feedback_directive;
    }

    #[inline]
    fn enter_ch_sequence_matching(&mut self) {
        trace!(@noop);
    }

    #[inline]
    fn leave_ch_sequence_matching(&mut self) {
        trace!(@noop);
    }
}
e) -> Result<ParserDirective, RewritingError> {
        trace!(@output lexeme);
        self.lexeme_start = lexeme.raw_range().end;
        self.lexeme_sink.handle_tag(lexeme)
    }
function_block-function_prefixed
[ { "content": "type CapturerEventHandler<'h> = &'h mut dyn FnMut(TokenCapturerEvent) -> Result<(), RewritingError>;\n\n\n\npub struct TokenCapturer {\n\n encoding: &'static Encoding,\n\n text_decoder: TextDecoder,\n\n capture_flags: TokenCaptureFlags,\n\n}\n\n\n\nimpl TokenCapturer {\n\n pub fn new(capture_flags: TokenCaptureFlags, encoding: &'static Encoding) -> Self {\n\n TokenCapturer {\n\n encoding,\n\n text_decoder: TextDecoder::new(encoding),\n\n capture_flags,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn has_captures(&self) -> bool {\n\n !self.capture_flags.is_empty()\n", "file_path": "src/rewritable_units/tokens/capturer/mod.rs", "rank": 1, "score": 273112.4692690578 }, { "content": "pub trait StateMachine: StateMachineActions + StateMachineConditions {\n\n define_states!();\n\n\n\n fn state(&self) -> fn(&mut Self, &[u8]) -> StateResult;\n\n fn set_state(&mut self, state: fn(&mut Self, &[u8]) -> StateResult);\n\n\n\n fn is_state_enter(&self) -> bool;\n\n fn set_is_state_enter(&mut self, val: bool);\n\n\n\n fn last_start_tag_name_hash(&self) -> LocalNameHash;\n\n fn set_last_start_tag_name_hash(&mut self, name_hash: LocalNameHash);\n\n\n\n fn set_last_text_type(&mut self, text_type: TextType);\n\n fn last_text_type(&self) -> TextType;\n\n\n\n fn set_cdata_allowed(&mut self, cdata_allowed: bool);\n\n\n\n fn closing_quote(&self) -> u8;\n\n\n\n fn adjust_for_next_input(&mut self);\n", "file_path": "src/parser/state_machine/mod.rs", "rank": 2, "score": 266805.40133743663 }, { "content": "pub trait StateMachineConditions {\n\n fn is_appropriate_end_tag(&self) -> bool;\n\n fn cdata_allowed(&self) -> bool;\n\n}\n\n\n", "file_path": "src/parser/state_machine/mod.rs", "rank": 3, "score": 254234.40249344838 }, { "content": "pub trait StateMachineActions {\n\n fn emit_eof(&mut self, input: &[u8]) -> ActionResult;\n\n fn emit_text(&mut self, input: &[u8]) -> ActionResult;\n\n fn emit_current_token(&mut self, input: &[u8]) -> ActionResult;\n\n fn emit_tag(&mut self, input: &[u8]) -> ActionResult;\n\n fn emit_current_token_and_eof(&mut self, input: &[u8]) -> ActionResult;\n\n fn emit_raw_without_token(&mut self, input: &[u8]) -> ActionResult;\n\n fn emit_raw_without_token_and_eof(&mut self, input: &[u8]) -> ActionResult;\n\n\n\n fn create_start_tag(&mut self, input: &[u8]);\n\n fn create_end_tag(&mut self, input: &[u8]);\n\n fn create_doctype(&mut self, input: &[u8]);\n\n fn create_comment(&mut self, input: &[u8]);\n\n\n\n fn start_token_part(&mut self, input: &[u8]);\n\n\n\n fn mark_comment_text_end(&mut self, input: &[u8]);\n\n fn shift_comment_text_end_by(&mut self, input: &[u8], offset: usize);\n\n\n\n fn set_force_quirks(&mut self, input: &[u8]);\n", "file_path": "src/parser/state_machine/mod.rs", "rank": 4, "score": 254120.8703605946 }, { "content": "type TokenHandler<'h> = Box<dyn FnMut(&mut Token) + 'h>;\n\n\n\npub struct TestTransformController<'h> {\n\n token_handler: TokenHandler<'h>,\n\n capture_flags: TokenCaptureFlags,\n\n}\n\n\n\nimpl<'h> TestTransformController<'h> {\n\n pub fn new(token_handler: TokenHandler<'h>, capture_flags: TokenCaptureFlags) -> Self {\n\n TestTransformController {\n\n token_handler,\n\n capture_flags,\n\n }\n\n }\n\n}\n\n\n\nimpl TransformController for TestTransformController<'_> {\n\n fn initial_capture_flags(&self) -> TokenCaptureFlags {\n\n self.capture_flags\n\n }\n", "file_path": "tests/fixtures/token_capturing.rs", "rank": 5, "score": 227372.80949548946 }, { "content": "#[inline]\n\nfn eq_case_insensitive(actual: &Bytes, expected: &[u8]) -> bool {\n\n if 
actual.len() != expected.len() {\n\n return false;\n\n }\n\n\n\n for i in 0..actual.len() {\n\n if actual[i].to_ascii_lowercase() != expected[i] {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "src/parser/tree_builder_simulator/mod.rs", "rank": 6, "score": 220066.6789727721 }, { "content": "#[inline]\n\nfn get_text_type_adjustment(tag_name: LocalNameHash) -> TreeBuilderFeedback {\n\n use TextType::*;\n\n\n\n if tag_is_one_of!(tag_name, [Textarea, Title]) {\n\n RCData.into()\n\n } else if tag_name == Tag::Plaintext {\n\n PlainText.into()\n\n } else if tag_name == Tag::Script {\n\n ScriptData.into()\n\n } else if tag_is_one_of!(tag_name, [Style, Iframe, Xmp, Noembed, Noframes, Noscript]) {\n\n RawText.into()\n\n } else {\n\n TreeBuilderFeedback::None\n\n }\n\n}\n\n\n", "file_path": "src/parser/tree_builder_simulator/mod.rs", "rank": 7, "score": 216479.44872080124 }, { "content": "pub trait Serialize {\n\n fn to_bytes(&self, output_handler: &mut dyn FnMut(&[u8]));\n\n}\n\n\n\nmacro_rules! impl_serialize {\n\n ($Token:ident) => {\n\n impl crate::rewritable_units::Serialize for $Token<'_> {\n\n #[inline]\n\n fn to_bytes(&self, output_handler: &mut dyn FnMut(&[u8])) {\n\n let Mutations {\n\n content_before,\n\n replacement,\n\n content_after,\n\n removed,\n\n ..\n\n } = &self.mutations;\n\n\n\n if !content_before.is_empty() {\n\n output_handler(content_before);\n\n }\n", "file_path": "src/rewritable_units/tokens/mod.rs", "rank": 8, "score": 211826.12360691157 }, { "content": "pub trait ParserOutputSink: LexemeSink + TagHintSink {}\n\n\n\npub struct Parser<S: ParserOutputSink> {\n\n lexer: Lexer<Rc<RefCell<S>>>,\n\n tag_scanner: TagScanner<Rc<RefCell<S>>>,\n\n current_directive: ParserDirective,\n\n}\n\n\n\n// NOTE: dynamic dispatch can't be used for the StateMachine trait\n\n// because it's not object-safe due to the usage of `Self` in function\n\n// signatures, so we use this macro instead.\n\nmacro_rules! 
with_current_sm {\n\n ($self:tt, sm.$fn:ident($($args:tt)*) ) => {\n\n match $self.current_directive {\n\n ParserDirective::WherePossibleScanForTagsOnly => $self.tag_scanner.$fn($($args)*),\n\n ParserDirective::Lex => $self.lexer.$fn($($args)*),\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 9, "score": 202363.3400489745 }, { "content": "#[inline]\n\nfn is_attr_whitespace(b: u8) -> bool {\n\n b == b' ' || b == b'\\n' || b == b'\\r' || b == b'\\t' || b == b'\\x0c'\n\n}\n\n\n", "file_path": "src/selectors_vm/attribute_matcher.rs", "rank": 10, "score": 194350.68097151446 }, { "content": "#[inline]\n\nfn is_html_integration_point_in_svg(tag_name: LocalNameHash) -> bool {\n\n tag_is_one_of!(tag_name, [Desc, Title, ForeignObject])\n\n}\n\n\n\n// TODO limit ns stack\n\npub struct TreeBuilderSimulator {\n\n ns_stack: Vec<Namespace>,\n\n current_ns: Namespace,\n\n ambiguity_guard: AmbiguityGuard,\n\n strict: bool,\n\n}\n\n\n\nimpl TreeBuilderSimulator {\n\n pub fn new(strict: bool) -> Self {\n\n let mut simulator = TreeBuilderSimulator {\n\n ns_stack: Vec::with_capacity(DEFAULT_NS_STACK_CAPACITY),\n\n current_ns: Namespace::Html,\n\n ambiguity_guard: AmbiguityGuard::default(),\n\n strict,\n\n };\n", "file_path": "src/parser/tree_builder_simulator/mod.rs", "rank": 11, "score": 189711.54099414195 }, { "content": "#[inline]\n\nfn causes_foreign_content_exit(tag_name: LocalNameHash) -> bool {\n\n tag_is_one_of!(\n\n tag_name,\n\n [\n\n B, Big, Blockquote, Body, Br, Center, Code, Dd, Div, Dl, Dt, Em, Embed, H1, H2, H3, H4,\n\n H5, H6, Head, Hr, I, Img, Li, Listing, Menu, Meta, Nobr, Ol, P, Pre, Ruby, S, Small,\n\n Span, Strong, Strike, Sub, Sup, Table, Tt, U, Ul, Var\n\n ]\n\n )\n\n}\n\n\n", "file_path": "src/parser/tree_builder_simulator/mod.rs", "rank": 12, "score": 189711.54099414195 }, { "content": "#[inline]\n\nfn is_text_integration_point_in_math_ml(tag_name: LocalNameHash) -> bool {\n\n tag_is_one_of!(tag_name, [Mi, Mo, Mn, Ms, Mtext])\n\n}\n\n\n", "file_path": "src/parser/tree_builder_simulator/mod.rs", "rank": 13, "score": 186984.0420986316 }, { "content": "#[inline]\n\nfn request_lexeme(\n\n callback: impl FnMut(&mut TreeBuilderSimulator, &TagLexeme) -> TreeBuilderFeedback + 'static,\n\n) -> TreeBuilderFeedback {\n\n TreeBuilderFeedback::RequestLexeme(Box::new(callback))\n\n}\n\n\n\nmacro_rules! 
expect_tag {\n\n ($lexeme:expr, $tag_pat:pat => $action:expr) => {\n\n match *$lexeme.token_outline() {\n\n $tag_pat => $action,\n\n _ => unreachable!(\"Got unexpected tag type\"),\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/parser/tree_builder_simulator/mod.rs", "rank": 14, "score": 186827.50428506025 }, { "content": "pub trait ToToken {\n\n fn to_token(\n\n &self,\n\n capture_flags: &mut TokenCaptureFlags,\n\n encoding: &'static Encoding,\n\n ) -> ToTokenResult;\n\n}\n\n\n\nimpl ToToken for TagLexeme<'_> {\n\n fn to_token(\n\n &self,\n\n capture_flags: &mut TokenCaptureFlags,\n\n encoding: &'static Encoding,\n\n ) -> ToTokenResult {\n\n match *self.token_outline() {\n\n TagTokenOutline::StartTag {\n\n name,\n\n ref attributes,\n\n ns,\n\n self_closing,\n", "file_path": "src/rewritable_units/tokens/capturer/to_token.rs", "rank": 15, "score": 184637.54557821495 }, { "content": "pub fn default_initial_states() -> Vec<String> {\n\n vec![String::from(\"Data state\")]\n\n}\n\n\n\n#[derive(Deserialize, Default, Clone, PartialEq, Eq, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Bailout {\n\n pub reason: String,\n\n pub parsed_chunk: String,\n\n}\n\n\n\n#[derive(Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct TestCase {\n\n pub description: String,\n\n pub input: Input,\n\n\n\n #[serde(rename = \"output\")]\n\n pub expected_tokens: Vec<TestToken>,\n\n\n", "file_path": "tests/harness/suites/html5lib_tests/mod.rs", "rank": 16, "score": 181823.69173678994 }, { "content": "pub fn get(input: &str) -> Vec<TestToken> {\n\n let mut tokens = Vec::default();\n\n let mut b = BufferQueue::new();\n\n\n\n b.push_back(StrTendril::from(input));\n\n\n\n {\n\n let mut t = Tokenizer::new(\n\n TokenSinkProxy {\n\n inner: TreeBuilder::new(RcDom::default(), TreeBuilderOpts::default()),\n\n tokens: &mut tokens,\n\n },\n\n TokenizerOpts::default(),\n\n );\n\n\n\n while let TokenizerResult::Script(_) = t.feed(&mut b) {\n\n // ignore script markers\n\n }\n\n\n\n t.end();\n\n }\n\n\n\n tokens\n\n}\n", "file_path": "tests/harness/suites/html5lib_tests/feedback_tests/expected_tokens.rs", "rank": 17, "score": 175497.18536272898 }, { "content": "pub fn parse(\n\n input: &Input,\n\n capture_flags: TokenCaptureFlags,\n\n initial_text_type: TextType,\n\n last_start_tag_name_hash: LocalNameHash,\n\n token_handler: TokenHandler,\n\n) -> Result<String, RewritingError> {\n\n let encoding = input\n\n .encoding()\n\n .expect(\"Input should be initialized before parsing\");\n\n\n\n let mut output = Output::new(encoding.into());\n\n let transform_controller = TestTransformController::new(token_handler, capture_flags);\n\n let memory_limiter = MemoryLimiter::new_shared(2048);\n\n\n\n let mut transform_stream = TransformStream::new(\n\n TransformStreamSettings {\n\n transform_controller,\n\n output_sink: |chunk: &[u8]| output.push(chunk),\n\n preallocated_parsing_buffer_size: 0,\n", "file_path": "tests/fixtures/token_capturing.rs", "rank": 18, "score": 174414.63138924126 }, { "content": "pub fn get_test_cases() -> Vec<TestCase> {\n\n let mut tests = Vec::default();\n\n let expected_bailouts = load_expected_bailouts();\n\n\n\n let mut add_tests = |file| {\n\n tests.extend(parse_inputs(file).into_iter().map(|input| {\n\n let expected_bailout = expected_bailouts.0.get(&input).cloned();\n\n\n\n TestCase {\n\n description: input\n\n .chars()\n\n .flat_map(char::escape_default)\n\n .collect::<String>()\n\n + \" (with feedback)\",\n\n expected_tokens: expected_tokens::get(&input),\n\n input: 
input.into(),\n\n initial_states: default_initial_states(),\n\n double_escaped: false,\n\n last_start_tag: String::new(),\n\n expected_bailout,\n\n }\n\n }));\n\n };\n\n\n\n for_each_test_file(\"html5lib-tests/tree-construction/*.dat\", &mut add_tests);\n\n for_each_test_file(\"regression/*.dat\", &mut add_tests);\n\n\n\n tests\n\n}\n", "file_path": "tests/harness/suites/html5lib_tests/feedback_tests/mod.rs", "rank": 19, "score": 172834.03612373688 }, { "content": "pub fn run_rewriter(data: &[u8]) -> () {\n\n // fuzzing with randomly picked selector and encoding\n\n // works much faster (50 times) that iterating over all\n\n // selectors/encoding per single run. It's recommended\n\n // to make iterations as fast as possible per fuzzing docs.\n\n run_rewriter_iter(data, get_random_selector(), get_random_encoding());\n\n}\n\n\n", "file_path": "fuzz/test_case/src/lib.rs", "rank": 20, "score": 171668.82758948999 }, { "content": "/// Data that can be attached to a rewritable unit by a user and shared between content handler\n\n/// invocations.\n\n///\n\n/// Same rewritable units can be passed to different content handlers if all of them capture the\n\n/// unit. `UserData` trait provides capability to attach arbitrary data to a rewritable unit, so\n\n/// handlers can make decision on how to process the unit based on the information provided by\n\n/// previous handlers.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use lol_html::{rewrite_str, element, RewriteStrSettings};\n\n/// use lol_html::html_content::UserData;\n\n///\n\n/// rewrite_str(\n\n/// r#\"<div id=\"foo\"></div>\"#,\n\n/// RewriteStrSettings {\n\n/// element_content_handlers: vec![\n\n/// element!(\"*\", |el| {\n\n/// el.set_user_data(\"Captured by `*`\");\n\n///\n\n/// Ok(())\n\n/// }),\n\n/// element!(\"#foo\", |el| {\n\n/// let user_data = el.user_data_mut().downcast_mut::<&'static str>().unwrap();\n\n///\n\n/// assert_eq!(*user_data, \"Captured by `*`\");\n\n///\n\n/// *user_data = \"Captured by `#foo`\";\n\n///\n\n/// Ok(())\n\n/// }),\n\n/// element!(\"div\", |el| {\n\n/// let user_data = el.user_data().downcast_ref::<&'static str>().unwrap();\n\n///\n\n/// assert_eq!(*user_data, \"Captured by `#foo`\");\n\n///\n\n/// Ok(())\n\n/// })\n\n/// ],\n\n/// ..RewriteStrSettings::default()\n\n/// }\n\n/// ).unwrap();\n\n/// ```\n\npub trait UserData {\n\n /// Returns a reference to the attached user data.\n\n fn user_data(&self) -> &dyn Any;\n\n /// Returns a mutable reference to the attached user data.\n\n fn user_data_mut(&mut self) -> &mut dyn Any;\n\n /// Attaches user data to a rewritable unit.\n\n fn set_user_data(&mut self, data: impl Any);\n\n}\n\n\n\nmacro_rules! 
impl_user_data {\n\n ($Unit:ident<$($lt:lifetime),+>) => {\n\n impl crate::rewritable_units::UserData for $Unit<$($lt),+> {\n\n #[inline]\n\n fn user_data(&self) -> &dyn Any {\n\n &*self.user_data\n\n }\n\n\n\n #[inline]\n\n fn user_data_mut(&mut self) -> &mut dyn Any {\n\n &mut *self.user_data\n", "file_path": "src/rewritable_units/mod.rs", "rank": 21, "score": 171253.12079339928 }, { "content": "pub fn run_c_api_rewriter(data: &[u8]) -> () {\n\n run_c_api_rewriter_iter(data, get_random_encoding().name());\n\n}\n\n\n", "file_path": "fuzz/test_case/src/lib.rs", "rank": 22, "score": 168973.6884870382 }, { "content": "pub trait TagHintSink {\n\n fn handle_start_tag_hint(\n\n &mut self,\n\n name: LocalName,\n\n ns: Namespace,\n\n ) -> Result<ParserDirective, RewritingError>;\n\n fn handle_end_tag_hint(&mut self, name: LocalName) -> Result<ParserDirective, RewritingError>;\n\n}\n\n\n\npub type State<S> = fn(&mut TagScanner<S>, &[u8]) -> StateResult;\n\n\n\n/// Tag scanner skips the majority of lexer operations and, thus,\n\n/// is faster. It also has much less requirements for buffering which makes it more\n\n/// prone to bailouts caused by buffer exhaustion (actually it buffers only tag names).\n\n///\n\n/// Tag scanner produces tag previews as an output which serve as a hint for\n\n/// the matcher which can then switch to the lexer if required.\n\n///\n\n/// It's not guaranteed that tag preview will actually produce the token in the end\n\n/// of the input (e.g. `<div` will produce a tag preview, but not tag token). However,\n", "file_path": "src/parser/tag_scanner/mod.rs", "rank": 23, "score": 165206.381561669 }, { "content": "pub trait TestFixture<T> {\n\n fn test_cases() -> Vec<T>;\n\n fn run(test: &T);\n\n}\n\n\n\nmacro_rules! create_test {\n\n ($name:expr, $should_panic:expr, $body:tt) => {{\n\n use rustc_test::{TestDesc, TestDescAndFn, TestFn, TestName};\n\n\n\n TestDescAndFn {\n\n desc: TestDesc {\n\n name: TestName::DynTestName($name),\n\n ignore: false,\n\n should_panic: $should_panic,\n\n allow_fail: false,\n\n },\n\n testfn: TestFn::DynTestFn(Box::new(move || $body)),\n\n }\n\n }};\n\n}\n", "file_path": "tests/harness/mod.rs", "rank": 24, "score": 165184.17657246906 }, { "content": "/// Rewrites given `html` string with the provided `settings`.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use lol_html::{rewrite_str, element, RewriteStrSettings};\n\n///\n\n/// let element_content_handlers = vec![\n\n/// // Rewrite insecure hyperlinks\n\n/// element!(\"a[href]\", |el| {\n\n/// let href = el\n\n/// .get_attribute(\"href\")\n\n/// .unwrap()\n\n/// .replace(\"http:\", \"https:\");\n\n///\n\n/// el.set_attribute(\"href\", &href).unwrap();\n\n///\n\n/// Ok(())\n\n/// })\n\n/// ];\n\n/// let output = rewrite_str(\n\n/// r#\"<div><a href=\"http://example.com\"></a></div>\"#,\n\n/// RewriteStrSettings {\n\n/// element_content_handlers,\n\n/// ..RewriteStrSettings::default()\n\n/// }\n\n/// ).unwrap();\n\n///\n\n/// assert_eq!(output, r#\"<div><a href=\"https://example.com\"></a></div>\"#);\n\n/// ```\n\npub fn rewrite_str<'h, 's>(\n\n html: &str,\n\n settings: impl Into<Settings<'h, 's>>,\n\n) -> Result<String, RewritingError> {\n\n let mut output = vec![];\n\n\n\n let mut rewriter = HtmlRewriter::new(settings.into(), |c: &[u8]| {\n\n output.extend_from_slice(c);\n\n });\n\n\n\n rewriter.write(html.as_bytes())?;\n\n rewriter.end()?;\n\n\n\n // NOTE: it's ok to unwrap here as we guarantee encoding validity of the output\n\n 
Ok(String::from_utf8(output).unwrap())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/rewriter/mod.rs", "rank": 25, "score": 162935.70894034242 }, { "content": "fn for_each_test_file(path: &str, handler: &mut dyn FnMut(BufReader<File>)) {\n\n glob::glob(&data_dir_path(path)).unwrap().for_each(|path| {\n\n handler(BufReader::new(File::open(path.unwrap()).unwrap()));\n\n });\n\n}\n\n\n", "file_path": "tests/harness/suites/mod.rs", "rank": 26, "score": 159504.82417692454 }, { "content": "type DoctypeHandler = unsafe extern \"C\" fn(*mut Doctype, *mut c_void) -> RewriterDirective;\n", "file_path": "c-api/src/rewriter_builder.rs", "rank": 27, "score": 154050.72325001375 }, { "content": "type ElementHandler = unsafe extern \"C\" fn(*mut Element, *mut c_void) -> RewriterDirective;\n", "file_path": "c-api/src/rewriter_builder.rs", "rank": 28, "score": 154050.72325001375 }, { "content": "type CommentsHandler = unsafe extern \"C\" fn(*mut Comment, *mut c_void) -> RewriterDirective;\n", "file_path": "c-api/src/rewriter_builder.rs", "rank": 29, "score": 154050.72325001375 }, { "content": "type TextHandler = unsafe extern \"C\" fn(*mut TextChunk, *mut c_void) -> RewriterDirective;\n", "file_path": "c-api/src/rewriter_builder.rs", "rank": 30, "score": 152040.75560039148 }, { "content": "type DocumentEndHandler = unsafe extern \"C\" fn(*mut DocumentEnd, *mut c_void) -> RewriterDirective;\n\n\n", "file_path": "c-api/src/rewriter_builder.rs", "rank": 31, "score": 150103.9722001813 }, { "content": "type RecoveryPointHandler<T, E, P> = fn(\n\n &mut SelectorMatchingVm<E>,\n\n &mut ExecutionCtx<'static, E>,\n\n &AttributeMatcher,\n\n T,\n\n &mut dyn FnMut(MatchInfo<P>),\n\n);\n\n\n", "file_path": "src/selectors_vm/mod.rs", "rank": 32, "score": 144710.94170831284 }, { "content": "pub fn get_test_cases() -> Vec<TestCase> {\n\n let mut tests = Vec::default();\n\n let mut non_unescapable_count = 0;\n\n let mut with_unmappable_chars_count = 0;\n\n\n\n #[derive(Deserialize)]\n\n struct Suite {\n\n #[serde(default)]\n\n pub tests: Vec<TestCase>,\n\n }\n\n\n\n for_each_test_file(\"html5lib-tests/tokenizer/*.test\", &mut |file| {\n\n tests.extend(from_reader::<_, Suite>(file).unwrap().tests);\n\n });\n\n\n\n tests.append(&mut self::feedback_tests::get_test_cases());\n\n\n\n let tests = tests\n\n .iter_mut()\n\n .filter_map(|t| {\n", "file_path": "tests/harness/suites/html5lib_tests/mod.rs", "rank": 33, "score": 143231.53329850076 }, { "content": "pub fn decode_text(text: &str, text_type: TextType) -> String {\n\n let mut decoder = Decoder::new(text);\n\n\n\n if text_type.should_replace_unsafe_null_in_text() {\n\n decoder = decoder.unsafe_null();\n\n }\n\n\n\n if text_type.allows_html_entities() {\n\n decoder = decoder.text_entities();\n\n }\n\n\n\n decoder.run()\n\n}\n\n\n", "file_path": "tests/harness/suites/html5lib_tests/decoder.rs", "rank": 34, "score": 140199.29655424572 }, { "content": "pub trait Align {\n\n fn align(&mut self, offset: usize);\n\n}\n\n\n\nimpl<T: Align> Align for Vec<T> {\n\n #[inline]\n\n fn align(&mut self, offset: usize) {\n\n for item in self.iter_mut() {\n\n item.align(offset);\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Align> Align for Option<T> {\n\n #[inline]\n\n fn align(&mut self, offset: usize) {\n\n if let Some(val) = self {\n\n val.align(offset);\n\n }\n\n }\n", "file_path": "src/base/align.rs", "rank": 35, "score": 136008.9643749526 }, { "content": "/// Defines an interface for the [`HtmlRewriter`]'s output.\n\n///\n\n/// Implemented for [`Fn`] and 
[`FnMut`].\n\n///\n\n/// [`HtmlRewriter`]: struct.HtmlRewriter.html\n\n/// [`Fn`]: https://doc.rust-lang.org/std/ops/trait.Fn.html\n\n/// [`FnMut`]: https://doc.rust-lang.org/std/ops/trait.FnMut.html\n\npub trait OutputSink {\n\n /// Handles rewriter's output chunk.\n\n ///\n\n /// # Note\n\n /// The last chunk of the output has zero length.\n\n fn handle_chunk(&mut self, chunk: &[u8]);\n\n}\n\n\n\nimpl<F: FnMut(&[u8])> OutputSink for F {\n\n fn handle_chunk(&mut self, chunk: &[u8]) {\n\n self(chunk);\n\n }\n\n}\n\n\n\npub struct Dispatcher<C, O>\n\nwhere\n\n C: TransformController,\n\n O: OutputSink,\n\n{\n\n transform_controller: C,\n", "file_path": "src/transform_stream/dispatcher.rs", "rank": 36, "score": 131512.97117403767 }, { "content": "#[inline]\n\nfn add_expr_to_list<E>(list: &mut Vec<Expr<E>>, expr: E, negation: bool)\n\nwhere\n\n E: PartialEq + Eq + Debug,\n\n{\n\n list.push(Expr::new(expr, negation))\n\n}\n\n\n\nimpl Predicate {\n\n #[inline]\n\n fn add_component(&mut self, component: &Component<SelectorImplDescriptor>, negation: bool) {\n\n match Condition::from(component) {\n\n Condition::OnTagName(e) =>\n\n add_expr_to_list(&mut self.on_tag_name_exprs, e, negation),\n\n Condition::OnAttributes(e) =>\n\n add_expr_to_list(&mut self.on_attr_exprs, e, negation),\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Debug)]\n", "file_path": "src/selectors_vm/ast.rs", "rank": 37, "score": 130474.59049002582 }, { "content": "fn load_expected_bailouts() -> ExpectedBailouts {\n\n from_reader::<_, ExpectedBailouts>(get_test_file_reader(\"/expected_bailouts.json\")).unwrap()\n\n}\n\n\n", "file_path": "tests/harness/suites/html5lib_tests/feedback_tests/mod.rs", "rank": 38, "score": 129442.8601146742 }, { "content": "pub trait Unescape {\n\n fn unescape(&mut self) -> Result<(), Error>;\n\n}\n\n\n\nimpl Unescape for String {\n\n // dummy but does the job\n\n fn unescape(&mut self) -> Result<(), Error> {\n\n *self = parse_json(&format!(r#\"\"{}\"\"#, self))?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T: Unescape> Unescape for Option<T> {\n\n fn unescape(&mut self) -> Result<(), Error> {\n\n if let Some(ref mut inner) = *self {\n\n inner.unescape()?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "tests/harness/suites/html5lib_tests/unescape.rs", "rank": 39, "score": 129406.73373949392 }, { "content": "mod token_outline;\n\n\n\nuse crate::base::{Bytes, Range};\n\nuse std::fmt::{self, Debug, Write};\n\n\n\npub use self::token_outline::*;\n\n\n\npub struct Lexeme<'i, T> {\n\n input: Bytes<'i>,\n\n raw_range: Range,\n\n pub(super) token_outline: T,\n\n}\n\n\n\npub type TagLexeme<'i> = Lexeme<'i, TagTokenOutline>;\n\npub type NonTagContentLexeme<'i> = Lexeme<'i, Option<NonTagContentTokenOutline>>;\n\n\n\nimpl<'i, T> Lexeme<'i, T> {\n\n pub fn new(input: Bytes<'i>, token_outline: T, raw_range: Range) -> Self {\n\n Lexeme {\n\n input,\n", "file_path": "src/parser/lexer/lexeme/mod.rs", "rank": 40, "score": 128817.94356508559 }, { "content": " #[inline]\n\n pub fn part(&self, range: Range) -> Bytes {\n\n self.input.slice(range)\n\n }\n\n\n\n #[inline]\n\n pub fn opt_part(&self, range: Option<Range>) -> Option<Bytes> {\n\n self.input.opt_slice(range)\n\n }\n\n\n\n #[inline]\n\n pub fn raw(&self) -> Bytes {\n\n self.input.slice(self.raw_range())\n\n }\n\n}\n\n\n\nimpl<T: Debug> Debug for Lexeme<'_, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let mut builder = f.debug_struct(\"Lexeme\");\n\n let mut pretty_raw = self.input.as_debug_string();\n", "file_path": "src/parser/lexer/lexeme/mod.rs", 
"rank": 41, "score": 128816.71797194303 }, { "content": " raw_range,\n\n token_outline,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn input(&self) -> &Bytes<'i> {\n\n &self.input\n\n }\n\n\n\n #[inline]\n\n pub fn token_outline(&self) -> &T {\n\n &self.token_outline\n\n }\n\n\n\n #[inline]\n\n pub fn raw_range(&self) -> Range {\n\n self.raw_range\n\n }\n\n\n", "file_path": "src/parser/lexer/lexeme/mod.rs", "rank": 42, "score": 128803.22927399077 }, { "content": " let mut start = String::new();\n\n let mut end = String::new();\n\n\n\n write!(start, \"|{}|\", self.raw_range.start)?;\n\n write!(end, \"|{}|\", self.raw_range.end)?;\n\n\n\n pretty_raw.insert_str(self.raw_range.end, &end);\n\n pretty_raw.insert_str(self.raw_range.start, &start);\n\n\n\n builder\n\n .field(\"raw\", &pretty_raw)\n\n .field(\"token_outline\", self.token_outline())\n\n .finish()\n\n }\n\n}\n", "file_path": "src/parser/lexer/lexeme/mod.rs", "rank": 43, "score": 128795.81931024021 }, { "content": "use crate::base::{Align, Range};\n\nuse crate::html::{LocalNameHash, Namespace, TextType};\n\nuse crate::parser::SharedAttributeBuffer;\n\n\n\n#[derive(Debug, Default, Copy, Clone)]\n\npub struct AttributeOutline {\n\n pub name: Range,\n\n pub value: Range,\n\n pub raw_range: Range,\n\n}\n\n\n\nimpl Align for AttributeOutline {\n\n #[inline]\n\n fn align(&mut self, offset: usize) {\n\n self.name.align(offset);\n\n self.value.align(offset);\n\n self.raw_range.align(offset);\n\n }\n\n}\n\n\n", "file_path": "src/parser/lexer/lexeme/token_outline.rs", "rank": 44, "score": 126240.95350572601 }, { "content": "\n\n Doctype {\n\n name: Option<Range>,\n\n public_id: Option<Range>,\n\n system_id: Option<Range>,\n\n force_quirks: bool,\n\n },\n\n\n\n Eof,\n\n}\n\n\n\nimpl Align for TagTokenOutline {\n\n #[inline]\n\n fn align(&mut self, offset: usize) {\n\n match self {\n\n TagTokenOutline::StartTag {\n\n name, attributes, ..\n\n } => {\n\n name.align(offset);\n\n attributes.borrow_mut().align(offset);\n", "file_path": "src/parser/lexer/lexeme/token_outline.rs", "rank": 45, "score": 126238.17094878918 }, { "content": "#[derive(Debug)]\n\npub enum TagTokenOutline {\n\n StartTag {\n\n name: Range,\n\n name_hash: LocalNameHash,\n\n ns: Namespace,\n\n attributes: SharedAttributeBuffer,\n\n self_closing: bool,\n\n },\n\n\n\n EndTag {\n\n name: Range,\n\n name_hash: LocalNameHash,\n\n },\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum NonTagContentTokenOutline {\n\n Text(TextType),\n\n Comment(Range),\n", "file_path": "src/parser/lexer/lexeme/token_outline.rs", "rank": 46, "score": 126230.35213913493 }, { "content": " }\n\n TagTokenOutline::EndTag { name, .. 
} => name.align(offset),\n\n }\n\n }\n\n}\n\n\n\nimpl Align for NonTagContentTokenOutline {\n\n #[inline]\n\n fn align(&mut self, offset: usize) {\n\n match self {\n\n NonTagContentTokenOutline::Comment(text) => text.align(offset),\n\n NonTagContentTokenOutline::Doctype {\n\n name,\n\n public_id,\n\n system_id,\n\n ..\n\n } => {\n\n name.align(offset);\n\n public_id.align(offset);\n\n system_id.align(offset);\n\n }\n\n _ => (),\n\n }\n\n }\n\n}\n", "file_path": "src/parser/lexer/lexeme/token_outline.rs", "rank": 47, "score": 126227.40820404342 }, { "content": "pub trait TransformController: Sized {\n\n fn initial_capture_flags(&self) -> TokenCaptureFlags;\n\n fn handle_start_tag(&mut self, name: LocalName, ns: Namespace) -> StartTagHandlingResult<Self>;\n\n fn handle_end_tag(&mut self, name: LocalName) -> TokenCaptureFlags;\n\n fn handle_token(&mut self, token: &mut Token) -> Result<(), RewritingError>;\n\n fn handle_end(&mut self, document_end: &mut DocumentEnd) -> Result<(), RewritingError>;\n\n fn should_emit_content(&self) -> bool;\n\n}\n\n\n", "file_path": "src/transform_stream/dispatcher.rs", "rank": 48, "score": 123350.6326996072 }, { "content": "type JsResult<T> = Result<T, JsValue>;\n\n\n", "file_path": "js-api/src/lib.rs", "rank": 49, "score": 120989.48232696425 }, { "content": "#[inline]\n\nfn is_void_element(local_name: &LocalName, enable_esi_tags: bool) -> bool {\n\n // NOTE: fast path for the most commonly used elements\n\n if tag_is_one_of!(*local_name, [Div, A, Span, Li]) {\n\n return false;\n\n }\n\n\n\n if tag_is_one_of!(\n\n *local_name,\n\n [\n\n Area, Base, Basefont, Bgsound, Br, Col, Embed, Hr, Img, Input, Keygen, Link, Meta,\n\n Param, Source, Track, Wbr\n\n ]\n\n ) {\n\n return true;\n\n }\n\n\n\n if enable_esi_tags {\n\n if let LocalName::Bytes(bytes) = local_name {\n\n // https://www.w3.org/TR/esi-lang/\n\n if &**bytes == b\"esi:include\" || &**bytes == b\"esi:comment\" {\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "src/selectors_vm/stack.rs", "rank": 50, "score": 119915.42370317678 }, { "content": "#[inline]\n\nfn to_ptr_mut<T>(val: T) -> *mut T {\n\n Box::into_raw(Box::new(val))\n\n}\n\n\n\n// NOTE: abort the thread if we receive NULL where unexpected\n\nmacro_rules! assert_not_null {\n\n ($var:ident) => {\n\n assert!(!$var.is_null(), \"{} is NULL\", stringify!($var));\n\n };\n\n}\n\n\n\n// NOTE: all these utilities are macros so we can propagate the variable\n\n// name to the null pointer assertion.\n\nmacro_rules! 
to_ref {\n\n ($ptr:ident) => {{\n\n assert_not_null!($ptr);\n\n unsafe { &*$ptr }\n\n }};\n\n}\n\n\n", "file_path": "c-api/src/lib.rs", "rank": 51, "score": 118662.76750971987 }, { "content": "pub trait ElementData: Default + 'static {\n\n type MatchPayload: PartialEq + Eq + Copy + Debug + Hash + 'static;\n\n\n\n fn matched_payload_mut(&mut self) -> &mut HashSet<Self::MatchPayload>;\n\n}\n\n\n\npub enum StackDirective {\n\n Push,\n\n PushIfNotSelfClosing,\n\n PopImmediately,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct ChildCounter {\n\n cumulative: i32,\n\n}\n\n\n\nimpl ChildCounter {\n\n #[inline]\n\n pub fn new_and_inc() -> Self {\n", "file_path": "src/selectors_vm/stack.rs", "rank": 52, "score": 116640.04741972819 }, { "content": "fn parse_inputs(file: BufReader<File>) -> Vec<String> {\n\n let mut inputs = Vec::default();\n\n let mut in_data = 0;\n\n\n\n for line in file.lines().map(Result::unwrap) {\n\n if line == \"#data\" {\n\n in_data = 1;\n\n } else if line.starts_with('#') {\n\n in_data = 0;\n\n } else if in_data > 0 {\n\n if in_data > 1 {\n\n let s: &mut String = inputs.last_mut().unwrap();\n\n s.push('\\n');\n\n s.push_str(&line);\n\n } else {\n\n inputs.push(line);\n\n }\n\n in_data += 1;\n\n }\n\n }\n\n\n\n inputs\n\n}\n\n\n\n#[derive(Deserialize, Default)]\n\npub struct ExpectedBailouts(HashMap<String, Bailout>);\n\n\n", "file_path": "tests/harness/suites/html5lib_tests/feedback_tests/mod.rs", "rank": 53, "score": 114161.30168268122 }, { "content": "#[derive(Default)]\n\nstruct JumpPtr {\n\n instr_set_idx: usize,\n\n offset: usize,\n\n}\n\n\n", "file_path": "src/selectors_vm/mod.rs", "rank": 54, "score": 111789.9917859056 }, { "content": "#[derive(Default)]\n\nstruct HereditaryJumpPtr {\n\n stack_offset: usize,\n\n instr_set_idx: usize,\n\n offset: usize,\n\n}\n\n\n", "file_path": "src/selectors_vm/mod.rs", "rank": 55, "score": 109629.67550917607 }, { "content": "#[inline]\n\nfn to_ptr<T>(val: T) -> *const T {\n\n Box::into_raw(Box::new(val))\n\n}\n\n\n", "file_path": "c-api/src/lib.rs", "rank": 56, "score": 109496.71501715129 }, { "content": "#[derive(Copy, Clone)]\n\nenum State {\n\n Default,\n\n InSelect,\n\n InTemplateInSelect(u64),\n\n InOrAfterFrameset,\n\n}\n\n\n\npub struct AmbiguityGuard {\n\n state: State,\n\n}\n\n\n\nimpl Default for AmbiguityGuard {\n\n fn default() -> Self {\n\n AmbiguityGuard {\n\n state: State::Default,\n\n }\n\n }\n\n}\n\n\n\nimpl AmbiguityGuard {\n", "file_path": "src/parser/tree_builder_simulator/ambiguity_guard.rs", "rank": 57, "score": 108962.94562164218 }, { "content": "pub fn to_null_decoded(s: &str) -> String {\n\n Decoder::new(s).unsafe_null().run()\n\n}\n\n\n", "file_path": "tests/harness/suites/html5lib_tests/decoder.rs", "rank": 58, "score": 108455.66718483175 }, { "content": "struct Bailout<T> {\n\n at_addr: usize,\n\n recovery_point: T,\n\n}\n\n\n\n/// A container for tracking state from various places on the stack.\n\npub struct SelectorState<'i> {\n\n pub cumulative: &'i ChildCounter,\n\n pub typed: Option<&'i ChildCounter>,\n\n}\n\n\n", "file_path": "src/selectors_vm/mod.rs", "rank": 59, "score": 107758.58893456784 }, { "content": "fn fold_text_tokens(tokens: Vec<TestToken>) -> Vec<TestToken> {\n\n tokens.into_iter().fold(Vec::default(), |mut res, t| {\n\n if let TestToken::Text(ref text) = t {\n\n if let Some(TestToken::Text(last)) = res.last_mut() {\n\n *last += text;\n\n\n\n return res;\n\n }\n\n }\n\n\n\n res.push(t);\n\n\n\n res\n\n })\n\n}\n\n\n\npub struct TokenCapturingTests;\n\n\n\nimpl TokenCapturingTests {\n\n fn 
run_test_case(\n", "file_path": "tests/fixtures/token_capturing.rs", "rank": 60, "score": 107330.4395040117 }, { "content": "trait IntoJsResult<T> {\n\n fn into_js_result(self) -> JsResult<T>;\n\n}\n\n\n\nimpl<T, E: ToString> IntoJsResult<T> for Result<T, E> {\n\n #[inline]\n\n fn into_js_result(self) -> JsResult<T> {\n\n self.map_err(|e| JsValue::from(e.to_string()))\n\n }\n\n}\n\n\n", "file_path": "js-api/src/lib.rs", "rank": 61, "score": 107112.6045583545 }, { "content": "pub fn decode_attr_value(s: &str) -> String {\n\n Decoder::new(s).unsafe_null().attr_entities().run()\n\n}\n\n\n", "file_path": "tests/harness/suites/html5lib_tests/decoder.rs", "rank": 62, "score": 106744.35566701175 }, { "content": "fn run_c_api_rewriter_iter(data: &[u8], encoding: &str) -> () {\n\n let c_encoding = CString::new(encoding).expect(\"CString::new failed.\");\n\n\n\n unsafe {\n\n let builder = lol_html_rewriter_builder_new();\n\n let mut output_data = {};\n\n let output_data_ptr: *mut c_void = &mut output_data as *mut _ as *mut c_void;\n\n\n\n let rewriter = lol_html_rewriter_build(\n\n builder,\n\n c_encoding.as_ptr(),\n\n encoding.len(),\n\n lol_html_memory_settings_t {\n\n preallocated_parsing_buffer_size: 0,\n\n max_allowed_memory_usage: std::usize::MAX,\n\n },\n\n Some(empty_handler),\n\n output_data_ptr,\n\n false,\n\n );\n\n\n\n let cstr = CStr::from_bytes_with_nul_unchecked(data);\n\n\n\n lol_html_rewriter_write(rewriter, cstr.as_ptr(), data.len());\n\n lol_html_rewriter_builder_free(builder);\n\n lol_html_rewriter_free(rewriter);\n\n }\n\n}\n", "file_path": "fuzz/test_case/src/lib.rs", "rank": 63, "score": 105465.6742721928 }, { "content": "fn filter_tokens(tokens: &[TestToken], capture_flags: TokenCaptureFlags) -> Vec<TestToken> {\n\n tokens\n\n .iter()\n\n .cloned()\n\n .filter(|t| match t {\n\n TestToken::Doctype { .. } if capture_flags.contains(TokenCaptureFlags::DOCTYPES) => {\n\n true\n\n }\n\n TestToken::StartTag { .. }\n\n if capture_flags.contains(TokenCaptureFlags::NEXT_START_TAG) =>\n\n {\n\n true\n\n }\n\n TestToken::EndTag { .. 
} if capture_flags.contains(TokenCaptureFlags::NEXT_END_TAG) => {\n\n true\n\n }\n\n TestToken::Comment(_) if capture_flags.contains(TokenCaptureFlags::COMMENTS) => true,\n\n TestToken::Text(_) if capture_flags.contains(TokenCaptureFlags::TEXT) => true,\n\n _ => false,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "tests/fixtures/token_capturing.rs", "rank": 64, "score": 104472.82377059644 }, { "content": "fn rewrite_url_in_attr(el: &mut Element, attr_name: &str) {\n\n let attr = el\n\n .get_attribute(attr_name)\n\n .unwrap()\n\n .replace(\"http://\", \"https://\");\n\n\n\n el.set_attribute(attr_name, &attr).unwrap();\n\n}\n\n\n", "file_path": "examples/mixed_content_rewriter/main.rs", "rank": 65, "score": 100736.01454581098 }, { "content": "fn parse_options() -> Option<Matches> {\n\n let mut opts = Options::new();\n\n\n\n opts.optopt(\n\n \"t\",\n\n \"text_type\",\n\n \"Initial text type\",\n\n \"-t (Data state|PLAINTEXT state|RCDATA state|RAWTEXT state|Script data state|CDATA section state)\",\n\n );\n\n\n\n opts.optopt(\"l\", \"last_start_tag\", \"Last start tag name\", \"-l\");\n\n opts.optopt(\"c\", \"chunk_size\", \"Chunk size\", \"-c\");\n\n opts.optflag(\"H\", \"tag_hint_mode\", \"Trace in tag hint mode\");\n\n opts.optflag(\"h\", \"help\", \"Show this help\");\n\n\n\n let matches = match opts.parse(args().skip(1)) {\n\n Ok(matches) => {\n\n if matches.free.is_empty() {\n\n eprintln!(\"Missing HTML input\");\n\n None\n", "file_path": "tools/parser_trace/src/main.rs", "rank": 66, "score": 99144.24492631195 }, { "content": "pub fn get_test_cases(suite: &'static str) -> Vec<TestCase> {\n\n let mut test_cases = Vec::new();\n\n let mut ignored_count = 0;\n\n\n\n for_each_test_file(&format!(\"{}/*-info.json\", suite), &mut |file| {\n\n let test_data = from_reader::<_, TestData>(file).unwrap();\n\n let src_data = read_test_file(suite, &test_data.src);\n\n let input = Input::from(src_data);\n\n\n\n for (selector, expected_file) in test_data.selectors {\n\n for encoding in ASCII_COMPATIBLE_ENCODINGS.iter() {\n\n let mut input = input.to_owned();\n\n let chunk_size = input.init(encoding, false).unwrap();\n\n\n\n let description = format!(\n\n \"{} ({}) - Encoding: {} - Chunk size: {}\",\n\n test_data.description,\n\n selector,\n\n encoding.name(),\n\n chunk_size\n", "file_path": "tests/harness/suites/selectors_tests.rs", "rank": 67, "score": 96428.60594521022 }, { "content": "struct ExecutionCtx<'i, E: ElementData> {\n\n stack_item: StackItem<'i, E>,\n\n with_content: bool,\n\n ns: Namespace,\n\n enable_esi_tags: bool,\n\n}\n\n\n\nimpl<'i, E: ElementData> ExecutionCtx<'i, E> {\n\n #[inline]\n\n pub fn new(local_name: LocalName<'i>, ns: Namespace, enable_esi_tags: bool) -> Self {\n\n ExecutionCtx {\n\n stack_item: StackItem::new(local_name),\n\n with_content: true,\n\n ns,\n\n enable_esi_tags,\n\n }\n\n }\n\n\n\n pub fn add_execution_branch(\n\n &mut self,\n", "file_path": "src/selectors_vm/mod.rs", "rank": 68, "score": 96188.64798202302 }, { "content": "fn run_rewriter_iter(data: &[u8], selector: &str, encoding: &'static Encoding) -> () {\n\n use std::convert::TryInto;\n\n\n\n let mut rewriter = HtmlRewriter::new(\n\n Settings {\n\n element_content_handlers: vec![\n\n element!(selector, |el| {\n\n el.before(\n\n &format!(\"<!--[ELEMENT('{}')]-->\", selector),\n\n ContentType::Html,\n\n );\n\n el.after(\n\n &format!(\"<!--[/ELEMENT('{}')]-->\", selector),\n\n ContentType::Html,\n\n );\n\n el.set_inner_content(\n\n &format!(\"<!--Replaced ({}) -->\", selector),\n\n ContentType::Html,\n\n 
);\n\n\n", "file_path": "fuzz/test_case/src/lib.rs", "rank": 69, "score": 94552.53147948332 }, { "content": "fn data_dir_path(path: &str) -> String {\n\n format!(\"{}/tests/data/{}\", env!(\"CARGO_MANIFEST_DIR\"), path)\n\n}\n\n\n", "file_path": "tests/harness/suites/mod.rs", "rank": 70, "score": 93567.4898836051 }, { "content": "#[inline]\n\nfn compile_literal(encoding: &'static Encoding, lit: &str) -> Result<Bytes<'static>, HasReplacementsError> {\n\n Bytes::from_str_without_replacements(lit, encoding).map(Bytes::into_owned)\n\n}\n\n\n", "file_path": "src/selectors_vm/compiler.rs", "rank": 71, "score": 89628.50527653722 }, { "content": "use super::*;\n\nuse crate::parser::state_machine::StateMachineConditions;\n\n\n\nimpl<S: LexemeSink> StateMachineConditions for Lexer<S> {\n\n #[inline]\n\n fn is_appropriate_end_tag(&self) -> bool {\n\n match self.current_tag_token {\n\n Some(TagTokenOutline::EndTag { name_hash, .. }) => {\n\n self.last_start_tag_name_hash == name_hash\n\n }\n\n _ => unreachable!(\"End tag should exist at this point\"),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn cdata_allowed(&self) -> bool {\n\n self.cdata_allowed\n\n }\n\n}\n", "file_path": "src/parser/lexer/conditions.rs", "rank": 72, "score": 89379.7339543911 }, { "content": " let lexeme = self.create_lexeme_with_raw_exclusive(input, token);\n\n\n\n self.emit_lexeme(&lexeme)?;\n\n self.emit_eof(input)\n\n }\n\n\n\n #[inline]\n\n fn emit_raw_without_token(&mut self, input: &[u8]) -> ActionResult {\n\n let lexeme = self.create_lexeme_with_raw_inclusive(input, None);\n\n\n\n self.emit_lexeme(&lexeme)\n\n }\n\n\n\n #[inline]\n\n fn emit_raw_without_token_and_eof(&mut self, input: &[u8]) -> ActionResult {\n\n // NOTE: since we are at EOF we use exclusive range for token's raw.\n\n let lexeme = self.create_lexeme_with_raw_exclusive(input, None);\n\n\n\n self.emit_lexeme(&lexeme)?;\n\n self.emit_eof(input)\n", "file_path": "src/parser/lexer/actions.rs", "rank": 73, "score": 89252.22308682906 }, { "content": "use super::*;\n\nuse crate::parser::state_machine::StateMachineActions;\n\n\n\nuse NonTagContentTokenOutline::*;\n\nuse TagTokenOutline::*;\n\n\n\n// NOTE: use macro instead of the function to make borrow\n\n// checker happy with range construction inside match arm\n\n// with a mutable borrow of lexer.\n\nmacro_rules! get_token_part_range {\n\n ($self:tt) => {\n\n Range {\n\n start: $self.token_part_start,\n\n end: $self.next_pos - 1,\n\n }\n\n };\n\n}\n\n\n\nimpl<S: LexemeSink> StateMachineActions for Lexer<S> {\n\n impl_common_sm_actions!();\n", "file_path": "src/parser/lexer/actions.rs", "rank": 74, "score": 89249.43298068248 }, { "content": "\n\n #[inline]\n\n fn emit_eof(&mut self, input: &[u8]) -> ActionResult {\n\n let lexeme = self.create_lexeme_with_raw_exclusive(input, Some(Eof));\n\n\n\n self.emit_lexeme(&lexeme)\n\n }\n\n\n\n #[inline]\n\n fn emit_text(&mut self, input: &[u8]) -> ActionResult {\n\n if self.pos() > self.lexeme_start {\n\n // NOTE: unlike any other tokens (except EOF), text tokens don't have\n\n // any lexical symbols that determine their bounds. 
Therefore,\n\n // representation of text token content is the raw slice.\n\n // Also, we always emit text if we encounter some other bounded\n\n // lexical structure and, thus, we use exclusive range for the raw slice.\n\n let lexeme =\n\n self.create_lexeme_with_raw_exclusive(input, Some(Text(self.last_text_type)));\n\n\n\n self.emit_lexeme(&lexeme)?;\n", "file_path": "src/parser/lexer/actions.rs", "rank": 75, "score": 89249.18079474548 }, { "content": " self.last_start_tag_name_hash = name_hash;\n\n *ns = self.tree_builder_simulator.borrow().current_ns();\n\n }\n\n\n\n match self\n\n .emit_tag_lexeme(&lexeme)\n\n .map_err(ActionError::RewritingError)?\n\n {\n\n ParserDirective::Lex => Ok(()),\n\n ParserDirective::WherePossibleScanForTagsOnly => self.change_parser_directive(\n\n self.lexeme_start,\n\n ParserDirective::WherePossibleScanForTagsOnly,\n\n FeedbackDirective::None,\n\n ),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn emit_current_token_and_eof(&mut self, input: &[u8]) -> ActionResult {\n\n let token = self.current_non_tag_content_token.take();\n", "file_path": "src/parser/lexer/actions.rs", "rank": 76, "score": 89248.64484867295 }, { "content": " {\n\n *public_id = Some(get_token_part_range!(self));\n\n }\n\n }\n\n\n\n #[inline]\n\n fn finish_doctype_system_id(&mut self, _input: &[u8]) {\n\n if let Some(Doctype {\n\n ref mut system_id, ..\n\n }) = self.current_non_tag_content_token\n\n {\n\n *system_id = Some(get_token_part_range!(self));\n\n }\n\n }\n\n\n\n #[inline]\n\n fn finish_tag_name(&mut self, _input: &[u8]) -> ActionResult {\n\n match self.current_tag_token {\n\n Some(StartTag { ref mut name, .. }) | Some(EndTag { ref mut name, .. }) => {\n\n *name = get_token_part_range!(self)\n", "file_path": "src/parser/lexer/actions.rs", "rank": 77, "score": 89243.3424704635 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn emit_current_token(&mut self, input: &[u8]) -> ActionResult {\n\n let token = self.current_non_tag_content_token.take();\n\n let lexeme = self.create_lexeme_with_raw_inclusive(input, token);\n\n\n\n self.emit_lexeme(&lexeme)\n\n }\n\n\n\n #[inline]\n\n fn emit_tag(&mut self, input: &[u8]) -> ActionResult {\n\n let token = self\n\n .current_tag_token\n\n .take()\n\n .expect(\"Tag token should exist at this point\");\n\n\n", "file_path": "src/parser/lexer/actions.rs", "rank": 78, "score": 89243.16689282889 }, { "content": " });\n\n }\n\n\n\n #[inline]\n\n fn create_doctype(&mut self, _input: &[u8]) {\n\n self.current_non_tag_content_token = Some(Doctype {\n\n name: None,\n\n public_id: None,\n\n system_id: None,\n\n force_quirks: false,\n\n });\n\n }\n\n\n\n #[inline]\n\n fn create_comment(&mut self, _input: &[u8]) {\n\n self.current_non_tag_content_token = Some(Comment(Range::default()));\n\n }\n\n\n\n #[inline]\n\n fn start_token_part(&mut self, _input: &[u8]) {\n", "file_path": "src/parser/lexer/actions.rs", "rank": 79, "score": 89238.00028415873 }, { "content": " let feedback = self\n\n .try_get_tree_builder_feedback(&token)\n\n .map_err(ActionError::from)?;\n\n\n\n let mut lexeme = self.create_lexeme_with_raw_inclusive(input, token);\n\n\n\n // NOTE: exit from any non-initial text parsing mode always happens on tag emission\n\n // (except for CDATA, but there is a special action to take care of it).\n\n self.set_last_text_type(TextType::Data);\n\n\n\n if let Some(feedback) = feedback {\n\n self.handle_tree_builder_feedback(feedback, &lexeme);\n\n }\n\n\n\n if let StartTag {\n\n ref mut ns,\n\n name_hash,\n\n ..\n\n } = lexeme.token_outline\n\n {\n", 
"file_path": "src/parser/lexer/actions.rs", "rank": 80, "score": 89237.60947268498 }, { "content": " self.token_part_start = self.pos();\n\n }\n\n\n\n #[inline]\n\n fn mark_comment_text_end(&mut self, _input: &[u8]) {\n\n if let Some(Comment(ref mut text)) = self.current_non_tag_content_token {\n\n *text = get_token_part_range!(self);\n\n }\n\n }\n\n\n\n #[inline]\n\n fn shift_comment_text_end_by(&mut self, _input: &[u8], offset: usize) {\n\n if let Some(Comment(ref mut text)) = self.current_non_tag_content_token {\n\n text.end += offset;\n\n }\n\n }\n\n\n\n #[inline]\n\n fn set_force_quirks(&mut self, _input: &[u8]) {\n\n if let Some(Doctype {\n", "file_path": "src/parser/lexer/actions.rs", "rank": 81, "score": 89236.00233403085 }, { "content": " ref mut force_quirks,\n\n ..\n\n }) = self.current_non_tag_content_token\n\n {\n\n *force_quirks = true;\n\n }\n\n }\n\n\n\n #[inline]\n\n fn finish_doctype_name(&mut self, _input: &[u8]) {\n\n if let Some(Doctype { ref mut name, .. }) = self.current_non_tag_content_token {\n\n *name = Some(get_token_part_range!(self));\n\n }\n\n }\n\n\n\n #[inline]\n\n fn finish_doctype_public_id(&mut self, _input: &[u8]) {\n\n if let Some(Doctype {\n\n ref mut public_id, ..\n\n }) = self.current_non_tag_content_token\n", "file_path": "src/parser/lexer/actions.rs", "rank": 82, "score": 89235.40332855772 }, { "content": " }\n\n\n\n #[inline]\n\n fn mark_as_self_closing(&mut self, _input: &[u8]) {\n\n if let Some(StartTag {\n\n ref mut self_closing,\n\n ..\n\n }) = self.current_tag_token\n\n {\n\n *self_closing = true;\n\n }\n\n }\n\n\n\n #[inline]\n\n fn start_attr(&mut self, input: &[u8]) {\n\n // NOTE: create attribute only if we are parsing a start tag\n\n if let Some(StartTag { .. }) = self.current_tag_token {\n\n self.current_attr = Some(AttributeOutline::default());\n\n\n\n self.start_token_part(input);\n", "file_path": "src/parser/lexer/actions.rs", "rank": 83, "score": 89235.39052121172 }, { "content": " ref mut raw_range,\n\n ..\n\n }) = self.current_attr\n\n {\n\n *value = get_token_part_range!(self);\n\n\n\n // NOTE: include closing quote into the raw value if it's present\n\n raw_range.end = match input.get(self.next_pos - 1).copied() {\n\n Some(ch) if ch == self.closing_quote => value.end + 1,\n\n _ => value.end,\n\n };\n\n }\n\n }\n\n\n\n #[inline]\n\n fn finish_attr(&mut self, _input: &[u8]) {\n\n if let Some(attr) = self.current_attr.take() {\n\n self.attr_buffer.borrow_mut().push(attr);\n\n }\n\n }\n\n\n\n noop_action!(mark_tag_start, unmark_tag_start);\n\n}\n", "file_path": "src/parser/lexer/actions.rs", "rank": 84, "score": 89235.18837439896 }, { "content": " }\n\n\n\n #[inline]\n\n fn create_start_tag(&mut self, _input: &[u8]) {\n\n self.attr_buffer.borrow_mut().clear();\n\n\n\n self.current_tag_token = Some(StartTag {\n\n name: Range::default(),\n\n name_hash: LocalNameHash::new(),\n\n ns: Namespace::default(),\n\n attributes: Rc::clone(&self.attr_buffer),\n\n self_closing: false,\n\n });\n\n }\n\n\n\n #[inline]\n\n fn create_end_tag(&mut self, _input: &[u8]) {\n\n self.current_tag_token = Some(EndTag {\n\n name: Range::default(),\n\n name_hash: LocalNameHash::new(),\n", "file_path": "src/parser/lexer/actions.rs", "rank": 85, "score": 89234.44383103527 }, { "content": " }\n\n }\n\n\n\n #[inline]\n\n fn finish_attr_name(&mut self, _input: &[u8]) {\n\n if let Some(AttributeOutline {\n\n ref mut name,\n\n ref mut raw_range,\n\n ..\n\n }) = self.current_attr\n\n {\n\n *name = get_token_part_range!(self);\n\n *raw_range = *name;\n\n }\n\n 
}\n\n\n\n #[inline]\n\n fn finish_attr_value(&mut self, input: &[u8]) {\n\n if let Some(AttributeOutline {\n\n ref mut value,\n", "file_path": "src/parser/lexer/actions.rs", "rank": 86, "score": 89232.63116020344 }, { "content": " }\n\n _ => unreachable!(\"Tag should exist at this point\"),\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn update_tag_name_hash(&mut self, input: &[u8]) {\n\n if let Some(ch) = input.get(self.pos()).copied() {\n\n match self.current_tag_token {\n\n Some(StartTag {\n\n ref mut name_hash, ..\n\n })\n\n | Some(EndTag {\n\n ref mut name_hash, ..\n\n }) => name_hash.update(ch),\n\n _ => unreachable!(\"Tag should exist at this point\"),\n\n }\n\n }\n", "file_path": "src/parser/lexer/actions.rs", "rank": 87, "score": 89227.99739278873 }, { "content": "#[inline]\n\nfn compile_literal_lowercase(encoding: &'static Encoding, lit: &str) -> Result<Bytes<'static>, HasReplacementsError> {\n\n compile_literal(encoding, &lit.to_ascii_lowercase())\n\n}\n\n\n", "file_path": "src/selectors_vm/compiler.rs", "rank": 88, "score": 88253.48844478457 }, { "content": "fn get_test_file_reader(path: &str) -> BufReader<File> {\n\n BufReader::new(File::open(data_dir_path(path)).unwrap())\n\n}\n\n\n\npub mod html5lib_tests;\n\npub mod selectors_tests;\n", "file_path": "tests/harness/suites/mod.rs", "rank": 99, "score": 86668.52466285715 } ]
Rust
src/main.rs
matthias-t/workspace
2ff2562d55d2d92b007379fe5a283e3bb0d81582
#[macro_use]
mod macros;

mod app;
mod exit;
mod shell;
mod tilde;
mod workspace;

use clap::ArgMatches;
use colored::Colorize;
use failure::Fail;
use std::env;
use std::fs;
use std::io::Write;
use std::path;
use std::process;

use crate::exit::Exit;
use crate::tilde::Tilde;
use crate::workspace::Workspace;

pub static mut VERBOSE: bool = false;

fn main() {
    let matches = app::cli().get_matches();

    unsafe {
        VERBOSE = matches.is_present("verbose");
    }

    if !matches.is_present("shell-wrapper") && matches.subcommand_matches("shell").is_none() {
        warn!("You are using the workspace binary, which is the backend for the `ws` function.");
        indent_warn!(
            "To set `ws` up in your shell, see the README.md or run `workspace shell --help`"
        )
    }

    match matches.subcommand() {
        ("open", Some(matches)) => {
            let name: &str = matches.value_of("NAME").unwrap();

            let ws = Workspace::get(name)
                .unwrap_or_exit(&format!("A workspace called '{}' does not exist", name))
                .unwrap_or_else(|error| {
                    let path = Workspace::file_path(name);
                    error!("{} from {}", error, path.tilde_format());
                    if let Some(cause) = error.cause() {
                        indent_error!("{}", cause);
                    }
                    if let Some(backtrace) = error.backtrace() {
                        log!("{}", backtrace);
                    }
                    process::exit(1)
                });

            if !ws.path.exists() {
                error!("The location of this workspace does not exist anymore");
                indent_error!("the path '{}' was moved or deleted", ws.path.tilde_format());
                process::exit(1);
            }

            let dir_only = matches.is_present("directory");
            ws.open(dir_only);
        }
        ("add", Some(matches)) => {
            let name = matches.value_of("NAME").unwrap().to_string();

            if Workspace::exists(&name) {
                error!("A workspace called '{}' already exists", name);
                process::exit(1);
            }

            let path = env::current_dir().unwrap_or_exit("Could not read current directory");

            let sames: Vec<_> = Workspace::all()
                .into_iter()
                .filter_map(|(name, result)| {
                    if let (Some(name), Ok(workspace)) = (name, result) {
                        if workspace.path == path {
                            return Some(name);
                        }
                    }
                    None
                })
                .collect();

            if !sames.is_empty() {
                warn!(
                    "Found {} pointing to this directory: {}",
                    if sames.len() == 1 {
                        "another workspace"
                    } else {
                        "other workspaces"
                    },
                    sames.join(", ")
                );
                confirm!("Create a new workspace here anyway");
            }

            let ws = Workspace {
                path,
                commands: workspace::Commands::default(),
                tabs: Vec::default(),
            };

            ws.write(&name);
            Workspace::edit(&name);
            println!("Created workspace '{}' in {}", name, ws.path.tilde_format());
        }
        ("edit", Some(matches)) => {
            let name = matches.value_of("NAME").unwrap();
            if !Workspace::exists(&name) {
                error!("A workspace called '{}' does not exist", name);
                process::exit(1);
            }
            Workspace::edit(name);
        }
        ("rename", Some(matches)) => {
            let old_name = matches.value_of("OLD_NAME").unwrap();
            let new_name = matches.value_of("NEW_NAME").unwrap();

            if !Workspace::exists(&old_name) {
                error!("A workspace called '{}' does not exist", old_name);
                process::exit(1);
            }

            if Workspace::exists(&new_name) {
                error!(
                    "Cannot rename to '{}' because a workspace with that name already exists",
                    new_name
                );
                process::exit(1)
            }

            std::fs::rename(
                Workspace::file_path(old_name),
                Workspace::file_path(new_name),
            )
            .unwrap_or_exit("Could not rename config file");
        }
        ("delete", Some(matches)) => {
            let name: &str = matches.value_of("NAME").unwrap();

            if !Workspace::file_path(name).exists() {
                error!("A workspace called '{}' does not exist", name);
                process::exit(1);
            }

            if !matches.is_present("yes") {
                confirm!("Delete the workspace '{}'", name);
            }

            Workspace::delete(name);
            println!("Deleted workspace '{}'", name);
        }
        ("list", Some(_)) => {
            let all = Workspace::all();

            if all.is_empty() {
                eprintln!("No workspaces found.\nRun `ws add <NAME>` to create one.");
                return;
            }

            use term_grid::{Direction, Filling, Grid, GridOptions};
            let mut grid = Grid::new(GridOptions {
                filling: Filling::Spaces(2),
                direction: Direction::LeftToRight,
            });

            for (name, result) in all {
                let path: String;
                let mut moved = String::new();

                match result {
                    Ok(ws) => {
                        path = ws.path.tilde_format().bright_black().to_string();
                        if !ws.path.exists() {
                            moved = format!("{} path has moved", "warning:".bold().yellow());
                        }
                    }
                    Err(error) => {
                        path = format!("{} {}", "warning:".bold().yellow(), error);
                    }
                }

                let name =
                    name.unwrap_or_else(|| format!("{} invalid UTF-8", "warning:".bold().yellow()));

                grid.add(name.into());
                grid.add(path.into());
                grid.add(moved.into());
            }

            print!("{}", grid.fit_into_columns(3));
        }
        ("shell", Some(matches)) => {
            if matches.subcommand_matches("bash").is_some() {
                println!("{}", shell::BASH);
            } else if matches.subcommand_matches("fish").is_some() {
                println!("{}", shell::FISH);
            } else if matches.subcommand_matches("powershell").is_some() {
                println!("{}", shell::POWERSHELL)
            } else if let Some(matches) = matches.subcommand_matches("cmd") {
                let path: path::PathBuf = path_to_binary_or_arg(&matches);

                let mut file: fs::File = fs::OpenOptions::new()
                    .read(false)
                    .write(true)
                    .create(true)
                    .append(false)
                    .truncate(true)
                    .open(&path)
                    .unwrap_or_exit(&format!(
                        "Could not create batch file at {}",
                        path.tilde_format()
                    ));

                file.write_fmt(format_args!("{}", shell::CMD))
                    .unwrap_or_exit("Could not write to batch file");

                println!("Wrote {}", path.tilde_format());
            }
        }
        _ => {}
    }
}

fn path_to_binary_or_arg(matches: &ArgMatches) -> path::PathBuf {
    if let Some(path) = matches.value_of("PATH") {
        return path::Path::new(path)
            .with_file_name("ws")
            .with_extension("bat")
            .to_path_buf();
    } else {
        let mut path = env::current_exe().unwrap_or_exit("Could not determine path to binary");
        path.set_file_name("ws");
        path.set_extension("bat");
        return path;
    }
}
#[macro_use] mod macros; mod app; mod exit; mod shell; mod tilde; mod workspace; use clap::ArgMatches; use colored::Colorize; use failure::Fail; use std::env; use std::fs; use std::io::Write; use std::path; use std::process; use crate::exit::Exit; use crate::tilde::Tilde; use crate::workspace::Workspace; pub static mut VERBOSE: bool = false;
fn path_to_binary_or_arg(matches: &ArgMatches) -> path::PathBuf { if let Some(path) = matches.value_of("PATH") { return path::Path::new(path) .with_file_name("ws") .with_extension("bat") .to_path_buf(); } else { let mut path = env::current_exe().unwrap_or_exit("Could not determine path to binary"); path.set_file_name("ws"); path.set_extension("bat"); return path; } }
fn main() { let matches = app::cli().get_matches(); unsafe { VERBOSE = matches.is_present("verbose"); } if !matches.is_present("shell-wrapper") && matches.subcommand_matches("shell").is_none() { warn!("You are using the workspace binary, which is the backend for the `ws` function."); indent_warn!( "To set `ws` up in your shell, see the README.md or run `workspace shell --help`" ) } match matches.subcommand() { ("open", Some(matches)) => { let name: &str = matches.value_of("NAME").unwrap(); let ws = Workspace::get(name) .unwrap_or_exit(&format!("A workspace called '{}' does not exist", name)) .unwrap_or_else(|error| { let path = Workspace::file_path(name); error!("{} from {}", error, path.tilde_format()); if let Some(cause) = error.cause() { indent_error!("{}", cause); } if let Some(backtrace) = error.backtrace() { log!("{}", backtrace); } process::exit(1) }); if !ws.path.exists() { error!("The location of this workspace does not exist anymore"); indent_error!("the path '{}' was moved or deleted", ws.path.tilde_format()); process::exit(1); } let dir_only = matches.is_present("directory"); ws.open(dir_only); } ("add", Some(matches)) => { let name = matches.value_of("NAME").unwrap().to_string(); if Workspace::exists(&name) { error!("A workspace called '{}' already exists", name); process::exit(1); } let path = env::current_dir().unwrap_or_exit("Could not read current directory"); let sames: Vec<_> = Workspace::all() .into_iter() .filter_map(|(name, result)| { if let (Some(name), Ok(workspace)) = (name, result) { if workspace.path == path { return Some(name); } } None }) .collect(); if !sames.is_empty() { warn!( "Found {} pointing to this directory: {}", if sames.len() == 1 { "another workspace" } else { "other workspaces" }, sames.join(", ") ); confirm!("Create a new workspace here anyway"); } let ws = Workspace { path, commands: workspace::Commands::default(), tabs: Vec::default(), }; ws.write(&name); Workspace::edit(&name); println!("Created workspace '{}' in {}", name, ws.path.tilde_format()); } ("edit", Some(matches)) => { let name = matches.value_of("NAME").unwrap(); if !Workspace::exists(&name) { error!("A workspace called '{}' does not exist", name); process::exit(1); } Workspace::edit(name); } ("rename", Some(matches)) => { let old_name = matches.value_of("OLD_NAME").unwrap(); let new_name = matches.value_of("NEW_NAME").unwrap(); if !Workspace::exists(&old_name) { error!("A workspace called '{}' does not exist", old_name); process::exit(1); } if Workspace::exists(&new_name) { error!( "Cannot rename to '{}' because a workspace with that name already exists", new_name ); process::exit(1) } std::fs::rename( Workspace::file_path(old_name), Workspace::file_path(new_name), ) .unwrap_or_exit("Could not rename config file"); } ("delete", Some(matches)) => { let name: &str = matches.value_of("NAME").unwrap(); if !Workspace::file_path(name).exists() { error!("A workspace called '{}' does not exist", name); process::exit(1); } if !matches.is_present("yes") { confirm!("Delete the workspace '{}'", name); } Workspace::delete(name); println!("Deleted workspace '{}'", name); } ("list", Some(_)) => { let all = Workspace::all(); if all.is_empty() { eprintln!("No workspaces found.\nRun `ws add <NAME>` to create one."); return; } use term_grid::{Direction, Filling, Grid, GridOptions}; let mut grid = Grid::new(GridOptions { filling: Filling::Spaces(2), direction: Direction::LeftToRight, }); for (name, result) in all { let path: String; let mut moved = String::new(); match result { Ok(ws) => { path = 
ws.path.tilde_format().bright_black().to_string(); if !ws.path.exists() { moved = format!("{} path has moved", "warning:".bold().yellow()); } } Err(error) => { path = format!("{} {}", "warning:".bold().yellow(), error); } } let name = name.unwrap_or_else(|| format!("{} invalid UTF-8", "warning:".bold().yellow())); grid.add(name.into()); grid.add(path.into()); grid.add(moved.into()); } print!("{}", grid.fit_into_columns(3)); } ("shell", Some(matches)) => { if matches.subcommand_matches("bash").is_some() { println!("{}", shell::BASH); } else if matches.subcommand_matches("fish").is_some() { println!("{}", shell::FISH); } else if matches.subcommand_matches("powershell").is_some() { println!("{}", shell::POWERSHELL) } else if let Some(matches) = matches.subcommand_matches("cmd") { let path: path::PathBuf = path_to_binary_or_arg(&matches); let mut file: fs::File = fs::OpenOptions::new() .read(false) .write(true) .create(true) .append(false) .truncate(true) .open(&path) .unwrap_or_exit(&format!( "Could not create batch file at {}", path.tilde_format() )); file.write_fmt(format_args!("{}", shell::CMD)) .unwrap_or_exit("Could not write to batch file"); println!("Wrote {}", path.tilde_format()); } } _ => {} } }
function_block-full_function
[ { "content": "pub fn cli() -> App<'static, 'static> {\n\n App::new(\"workspace\")\n\n .version(crate_version!())\n\n .about(\"A command-line project manager\")\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .global_setting(AppSettings::ColoredHelp)\n\n .global_setting(AppSettings::ColorAlways)\n\n .arg(\n\n Arg::with_name(\"verbose\")\n\n .long(\"verbose\")\n\n .short(\"v\")\n\n .help(\"Causes verbose output to be logged\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"shell-wrapper\")\n\n .long(\"--from-shell-wrapper\")\n\n .hidden(true)\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"open\")\n", "file_path": "src/app.rs", "rank": 0, "score": 85605.04443672765 }, { "content": "pub trait Exit<T> {\n\n fn unwrap_or_exit(self, message: &str) -> T;\n\n}\n\n\n\nimpl<T> Exit<T> for Option<T> {\n\n fn unwrap_or_exit(self, message: &str) -> T {\n\n match self {\n\n Some(value) => value,\n\n None => {\n\n error!(\"{}\", message);\n\n process::exit(1)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T, E: Display> Exit<T> for Result<T, E> {\n\n fn unwrap_or_exit(self, message: &str) -> T {\n\n match self {\n\n Ok(value) => value,\n\n Err(verbose_error) => {\n\n error!(\"{}\", message);\n\n log!(\"{}\", verbose_error);\n\n process::exit(1)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/exit.rs", "rank": 1, "score": 58803.88039599328 }, { "content": "pub trait Tilde {\n\n fn tilde_format(&self) -> String;\n\n}\n\n\n\nimpl Tilde for PathBuf {\n\n fn tilde_format(&self) -> String {\n\n let path = self.display().to_string();\n\n let home_dir = match dirs::home_dir() {\n\n Some(dir) => dir.display().to_string(),\n\n None => String::new(),\n\n };\n\n if path.starts_with(home_dir.as_str()) {\n\n path.replacen(home_dir.as_str(), \"~\", 1)\n\n } else {\n\n path\n\n }\n\n }\n\n}\n", "file_path": "src/tilde.rs", "rank": 2, "score": 52062.126620370436 }, { "content": "fn is_default<T: Default + PartialEq>(t: &T) -> bool {\n\n t == &T::default()\n\n}\n", "file_path": "src/workspace.rs", "rank": 3, "score": 33810.085611320654 }, { "content": "pub const BASH: &str = r#\"function ws {\n\n while read -r line; do\n\n if [[ \"$line\" == RUN\\>* ]]; then\n\n eval \"${line:4}\" < /dev/tty;\n\n else\n\n echo \"$line\";\n\n fi;\n\n done < <( workspace --from-shell-wrapper \"$@\" );\n\n}\"#;\n\n\n\npub const FISH: &str = r#\"function ws\n\n workspace --from-shell-wrapper $argv | while read line\n\n if set command (string replace -r \"^RUN>\" \"\" $line)\n\n eval $command < /dev/tty\n\n else\n\n echo $line\n\n end\n\n end\n\nend\"#;\n\n\n", "file_path": "src/shell.rs", "rank": 4, "score": 21122.959953951657 }, { "content": "pub const POWERSHELL: &str = r#\"function ws {\n\n workspace --from-shell-wrapper $args | % {\n\n if ($_ -match \"^RUN>\") {\n\n . 
([scriptblock]::Create($_.Substring(4)))\n\n } else {\n\n Write-Output $_\n\n }\n\n }\n\n}\"#;\n\n\n\npub const CMD: &str = r#\"@ECHO off\n\nFOR /F \"tokens=* delims=\" %%G IN ('workspace --from-shell-wrapper %*') DO (\n\n CALL :subroutine \"%%G\"\n\n)\n\nGOTO :EOF\n\n\n\n:subroutine\n\n SET \"temp=%~1\"\n\n IF \"%temp:~0,4%\" == \"RUN>\" (\n\n CALL %temp:~4%\n\n ) ELSE (\n\n ECHO %~1\n\n )\n\n GOTO :EOF\"#;\n", "file_path": "src/shell.rs", "rank": 5, "score": 21122.91512999673 }, { "content": "use crate::VERBOSE;\n\nuse colored::Colorize;\n\nuse std::fmt::Display;\n\nuse std::process;\n\n\n", "file_path": "src/exit.rs", "rank": 6, "score": 21120.355778312914 }, { "content": "use std::path::PathBuf;\n\n\n", "file_path": "src/tilde.rs", "rank": 7, "score": 21119.78007670609 }, { "content": " confirm_once!();\n\n }\n\n };\n\n ($prompt:expr$(,$arg:expr)*) => {\n\n confirm!{\n\n warn!(concat!($prompt, \"? [y/n]\")$(,$arg)*);\n\n }\n\n };\n\n}\n\n\n\n// Dependencies: Exit (src/exit.rs)\n\nmacro_rules! confirm_once {\n\n () => {\n\n let mut response = String::new();\n\n ::std::io::stdin()\n\n .read_line(&mut response)\n\n .unwrap_or_exit(\"Could not read line\");\n\n response = response.to_lowercase();\n\n let response: &str = response.trim();\n\n if response == \"y\" || response == \"yes\" {\n\n break;\n\n }\n\n if response == \"n\" || response == \"no\" {\n\n println!(\"Aborting\");\n\n return;\n\n }\n\n };\n\n}\n", "file_path": "src/macros.rs", "rank": 8, "score": 20749.538055216923 }, { "content": "macro_rules! warn {\n\n ($message:expr$(,$arg:expr)*) => {\n\n eprintln!(concat!(\"{} \", $message), \"warning:\".yellow().bold()$(, $arg)*);\n\n };\n\n}\n\n\n\nmacro_rules! indent_warn {\n\n ($message:expr$(,$arg:expr)*) => {\n\n eprintln!(concat!(\" \", $message)$(, $arg)*);\n\n };\n\n}\n\n\n\n// Dependencies: VERBOSE: bool, colored::Colorize\n\nmacro_rules! log {\n\n ($message:expr$(,$arg:expr)*) => {\n\n unsafe {\n\n if VERBOSE {\n\n println!(concat!(\"{} \", $message), \"info:\".bright_blue().bold()$(, $arg)*);\n\n }\n\n }\n", "file_path": "src/macros.rs", "rank": 9, "score": 20747.732818993496 }, { "content": " };\n\n}\n\n\n\n// Dependencies: log!\n\nmacro_rules! skip {\n\n ($condition:expr) => {\n\n if $condition {\n\n continue;\n\n }\n\n };\n\n ($condition:expr, $message:expr) => {\n\n if $condition {\n\n log!(\"{}\", $message);\n\n continue;\n\n }\n\n };\n\n}\n\n\n\n// Dependencies: skip!\n\nmacro_rules! skip_err {\n", "file_path": "src/macros.rs", "rank": 10, "score": 20746.4616499985 }, { "content": "macro_rules! run {\n\n ($command:expr$(,$arg:expr)*) => {\n\n println!(concat!(\"RUN>\", $command)$(, $arg)*);\n\n };\n\n}\n\n\n\n// Dependencies: colored::Colorize\n\nmacro_rules! error {\n\n ($message:expr$(,$arg:expr)*) => {\n\n eprintln!(concat!(\"{} \", $message), \"error:\".red().bold()$(, $arg)*);\n\n };\n\n}\n\n\n\nmacro_rules! indent_error {\n\n ($message:expr$(,$arg:expr)*) => {\n\n eprintln!(concat!(\" \", $message)$(, $arg)*);\n\n };\n\n}\n\n\n\n// Dependencies: colored::Colorize\n", "file_path": "src/macros.rs", "rank": 11, "score": 20746.399494874386 }, { "content": " ($result:expr) => {\n\n skip!($result.is_err(), $result.unwrap_err());\n\n };\n\n}\n\n\n\n// Dependencies: skip!\n\nmacro_rules! skip_none {\n\n ($option:expr) => {\n\n skip!($option.is_none());\n\n };\n\n ($option:expr, $message:expr) => {\n\n skip!($option.is_none(), $message);\n\n };\n\n}\n\n\n\n// Dependencies: warn!, confirm_once!\n\nmacro_rules! 
confirm {\n\n ($confirm:stmt;) => {\n\n loop {\n\n $confirm\n", "file_path": "src/macros.rs", "rank": 12, "score": 20746.174036796216 }, { "content": " .short(\"y\")\n\n .help(\"Skips confirmation prompt\"),\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"list\")\n\n .alias(\"ls\")\n\n .about(\"Lists all workspaces\"),\n\n )\n\n .subcommand({\n\n SubCommand::with_name(\"shell\")\n\n .about(\"Sets up `ws` in your shell\")\n\n .setting(AppSettings::ArgRequiredElseHelp)\n\n .subcommand(\n\n SubCommand::with_name(\"bash\")\n\n .about(\"Returns a bash function to source in your bashrc\")\n\n .long_about(\n\n \"Returns a bash function to source in your bashrc with \\nsource <(workspace shell bash)\"\n\n ),\n\n )\n", "file_path": "src/app.rs", "rank": 13, "score": 20491.665817378438 }, { "content": " .subcommand(\n\n SubCommand::with_name(\"fish\")\n\n .about(\"Returns a fish function to source in your fish.config\")\n\n .long_about(\n\n \"Returns a fish function to source in your fish.config with \\nworkspace shell fish | source -\"\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"powershell\")\n\n .alias(\"PowerShell\")\n\n .alias(\"posh\")\n\n .about(\"Returns a PowerShell function to source in your shell profile\")\n\n .long_about(\n\n \"Returns a PowerShell function to source in your shell profile with \\nInvoke-Expression \\\"$(workspace shell powershell)\\\"\"\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"cmd\")\n\n .about(\"Creates a cmd batch file\")\n\n .long_about(\n\n \"Creates a cmd batch file. Unless PATH is specified, it will be created in the same folder as the workspace binary\",\n\n )\n\n .arg(Arg::with_name(\"PATH\")),\n\n )\n\n })\n\n}\n", "file_path": "src/app.rs", "rank": 14, "score": 20490.31991884614 }, { "content": "use clap::*;\n\n\n", "file_path": "src/app.rs", "rank": 15, "score": 20489.115450634235 }, { "content": " .required(true),\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"edit\")\n\n .about(\"Edits a workspace\")\n\n .arg(\n\n Arg::with_name(\"NAME\")\n\n .help(\"Name of the workspace to edit\")\n\n .required(true),\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"rename\")\n\n .alias(\"mv\")\n\n .about(\"Renames a workspace\")\n\n .arg(\n\n Arg::with_name(\"OLD_NAME\")\n\n .help(\"Name of the workspace to rename\")\n\n .required(true),\n", "file_path": "src/app.rs", "rank": 16, "score": 20488.61824071425 }, { "content": " .about(\"Opens a workspace\")\n\n .arg(\n\n Arg::with_name(\"NAME\")\n\n .help(\"Name of the workspace to open\")\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"directory\")\n\n .help(\"Only change the directory\")\n\n .short(\"d\")\n\n .long(\"directory\"),\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"add\")\n\n .alias(\"new\")\n\n .about(\"Creates a new workspace in this directory\")\n\n .arg(\n\n Arg::with_name(\"NAME\")\n\n .help(\"Name of the new workspace\")\n", "file_path": "src/app.rs", "rank": 17, "score": 20488.596702870997 }, { "content": " )\n\n .arg(\n\n Arg::with_name(\"NEW_NAME\")\n\n .help(\"New name of the workspace\")\n\n .required(true),\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"delete\")\n\n .alias(\"remove\")\n\n .alias(\"rm\")\n\n .about(\"Deletes a workspace\")\n\n .arg(\n\n Arg::with_name(\"NAME\")\n\n .help(\"Name of the workspace to delete\")\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"yes\")\n\n .long(\"yes\")\n", "file_path": "src/app.rs", "rank": 18, "score": 20488.464352957242 }, { "content": "use 
crate::exit::Exit;\n\nuse crate::tilde::Tilde;\n\nuse crate::VERBOSE;\n\n\n\nuse std::env;\n\nuse std::fs;\n\nuse std::io::{self, Read, Write};\n\nuse std::path::PathBuf;\n\nuse std::process::{self, Stdio};\n\n\n\nuse colored::Colorize;\n\nuse failure::Fail;\n\nuse serde_derive::{Deserialize, Serialize};\n\n\n\n#[serde(deny_unknown_fields)]\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Workspace {\n\n pub path: PathBuf,\n\n #[serde(default, skip_serializing_if = \"is_default\")]\n\n pub tabs: Vec<String>,\n", "file_path": "src/workspace.rs", "rank": 19, "score": 19002.151616617193 }, { "content": " let mut path = Self::folder_path();\n\n path.push(name);\n\n path.set_extension(\"yaml\");\n\n path\n\n }\n\n\n\n fn folder_path() -> PathBuf {\n\n let mut path = dirs::config_dir().unwrap_or_exit(\"Could not find configuration directory\");\n\n path.push(\"workspace\");\n\n\n\n if !path.exists() {\n\n fs::create_dir(&path).unwrap_or_exit(&format!(\n\n \"Could not create directory {}\",\n\n path.tilde_format()\n\n ));\n\n }\n\n\n\n path\n\n }\n\n}\n", "file_path": "src/workspace.rs", "rank": 20, "score": 18999.25228434949 }, { "content": " let serialized = serde_yaml::to_string(self).unwrap();\n\n file.write_fmt(format_args!(\"{}\", serialized))\n\n .unwrap_or_exit(ERR_MESSAGE);\n\n }\n\n\n\n pub fn edit(name: &str) {\n\n let path = Self::file_path(name);\n\n let editor = env::var(\"EDITOR\").unwrap_or_else(|_| {\n\n env::var(\"VISUAL\").unwrap_or_exit(\"Please set $EDITOR or $VISUAL to edit workspaces\")\n\n });\n\n run!(\"{} {}\", editor, path.display());\n\n }\n\n\n\n pub fn delete(name: &str) {\n\n let path = Self::file_path(name);\n\n fs::remove_file(path).unwrap_or_exit(\"Could not delete workspace data\");\n\n }\n\n\n\n pub fn exists(name: &str) -> bool {\n\n Self::file_path(name).exists()\n", "file_path": "src/workspace.rs", "rank": 21, "score": 18999.23824954049 }, { "content": " log!(\"{}\", result.unwrap_err())\n\n }\n\n }\n\n } else {\n\n error!(\"Please set $BROWSER to open browser tabs\")\n\n }\n\n }\n\n }\n\n\n\n pub fn write(&self, name: &str) {\n\n const ERR_MESSAGE: &str = \"Could not write workspace data\";\n\n\n\n let path = Self::file_path(name);\n\n let mut file = fs::OpenOptions::new()\n\n .read(false)\n\n .write(true)\n\n .create(true)\n\n .open(path)\n\n .unwrap_or_exit(ERR_MESSAGE);\n\n\n", "file_path": "src/workspace.rs", "rank": 22, "score": 18998.11753782002 }, { "content": " #[serde(default, skip_serializing_if = \"is_default\")]\n\n pub commands: Commands,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Default, PartialEq)]\n\npub struct Commands {\n\n #[serde(default, skip_serializing_if = \"is_default\")]\n\n pub local: Vec<String>,\n\n #[serde(default, skip_serializing_if = \"is_default\")]\n\n pub external: Vec<String>,\n\n #[serde(default, skip_serializing_if = \"is_default\")]\n\n pub background: Vec<String>,\n\n}\n\n\n\nimpl Workspace {\n\n pub fn open(&self, dir_only: bool) {\n\n run!(\"cd {}\", self.path.display());\n\n if dir_only {\n\n return;\n\n }\n", "file_path": "src/workspace.rs", "rank": 23, "score": 18997.82790121425 }, { "content": " fn paths() -> Vec<PathBuf> {\n\n let entries =\n\n fs::read_dir(Self::folder_path()).unwrap_or_exit(\"Could not find workspace data\");\n\n let mut paths: Vec<PathBuf> = Vec::new();\n\n\n\n for entry in entries {\n\n skip_err!(entry);\n\n let entry = entry.unwrap();\n\n let path = entry.path();\n\n\n\n skip_err!(entry.file_type());\n\n let file_type = entry.file_type().unwrap();\n\n skip!(\n\n 
!file_type.is_file(),\n\n format!(\"Skipping {} because it's not a file\", path.tilde_format())\n\n );\n\n\n\n skip_none!(\n\n path.extension(),\n\n format!(\n", "file_path": "src/workspace.rs", "rank": 24, "score": 18997.327902293688 }, { "content": " \"Skipping {} because it has no file extension\",\n\n path.tilde_format()\n\n )\n\n );\n\n let extension = path.extension().unwrap();\n\n skip!(\n\n extension.to_string_lossy() != \"yaml\",\n\n format!(\n\n \"Skipping {} because it's not a YAML file\",\n\n path.tilde_format()\n\n )\n\n );\n\n\n\n paths.push(entry.path());\n\n }\n\n\n\n paths\n\n }\n\n\n\n pub fn file_path(name: &str) -> PathBuf {\n", "file_path": "src/workspace.rs", "rank": 25, "score": 18996.563591325666 }, { "content": " .collect()\n\n }\n\n\n\n fn parse(path: &PathBuf) -> Result<Workspace, Error> {\n\n let content: String = Self::read(&path)?;\n\n let ws: Workspace = serde_yaml::from_str(&content)?;\n\n Ok(ws)\n\n }\n\n\n\n fn read(path: &PathBuf) -> io::Result<String> {\n\n let mut content: String = String::new();\n\n\n\n fs::OpenOptions::new()\n\n .read(true)\n\n .open(&path)?\n\n .read_to_string(&mut content)?;\n\n\n\n Ok(content)\n\n }\n\n\n", "file_path": "src/workspace.rs", "rank": 26, "score": 18996.377966274475 }, { "content": " }\n\n\n\n pub fn get(name: &str) -> Option<Result<Workspace, Error>> {\n\n let path = Self::file_path(name);\n\n if path.exists() {\n\n Some(Self::parse(&path))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn all() -> Vec<(Option<String>, Result<Workspace, Error>)> {\n\n Self::paths()\n\n .into_iter()\n\n .map(|path| {\n\n // Safe to unwrap here, because paths() cannot contain a file without a stem\n\n let name = path.file_stem().unwrap().to_str().map(str::to_owned);\n\n (name, path)\n\n })\n\n .map(|(name, path)| (name, Self::parse(&path)))\n", "file_path": "src/workspace.rs", "rank": 27, "score": 18996.133495006878 }, { "content": "\n\n#[derive(Fail, Debug)]\n\npub enum Error {\n\n #[fail(display = \"Could not read workspace data\")]\n\n Read(#[cause] io::Error),\n\n #[fail(display = \"Could not parse workspace data\")]\n\n Parse(#[cause] serde_yaml::Error),\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(cause: io::Error) -> Error {\n\n Error::Read(cause)\n\n }\n\n}\n\n\n\nimpl From<serde_yaml::Error> for Error {\n\n fn from(cause: serde_yaml::Error) -> Error {\n\n Error::Parse(cause)\n\n }\n\n}\n\n\n", "file_path": "src/workspace.rs", "rank": 28, "score": 18995.737421048303 }, { "content": " }\n\n } else {\n\n error!(\"Please set $TERMINAL to run external commands\");\n\n }\n\n }\n\n\n\n if !&self.commands.background.is_empty() {\n\n if let Ok(shell) = env::var(\"SHELL\") {\n\n for command in &self.commands.background {\n\n let result = process::Command::new(&shell)\n\n .arg(\"-c\")\n\n .arg(command)\n\n .current_dir(&self.path)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn();\n\n\n\n if result.is_err() {\n\n error!(\"Could not run command: {}\", command);\n", "file_path": "src/workspace.rs", "rank": 29, "score": 18995.129134358027 }, { "content": " log!(\"{}\", result.unwrap_err());\n\n }\n\n }\n\n } else {\n\n error!(\"Please set $SHELL to run commands in the background.\");\n\n }\n\n }\n\n\n\n if !self.tabs.is_empty() {\n\n if let Ok(browser) = env::var(\"BROWSER\") {\n\n for tab in &self.tabs {\n\n let result = process::Command::new(&browser)\n\n .arg(tab)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn();\n\n\n\n if result.is_err() 
{\n\n error!(\"Could not open tab: {}\", tab);\n", "file_path": "src/workspace.rs", "rank": 30, "score": 18994.242796038237 }, { "content": "\n\n for command in &self.commands.local {\n\n run!(\"{}\", command);\n\n }\n\n\n\n if !self.commands.external.is_empty() {\n\n if let Ok(terminal) = env::var(\"TERMINAL\") {\n\n for command in &self.commands.external {\n\n let result = process::Command::new(&terminal)\n\n .arg(command)\n\n .current_dir(&self.path)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn();\n\n\n\n if result.is_err() {\n\n error!(\"Could not run command: {}\", command);\n\n log!(\"{}\", result.unwrap_err());\n\n }\n", "file_path": "src/workspace.rs", "rank": 31, "score": 18992.851473117797 }, { "content": "## workspace [![build status](https://travis-ci.com/matthias-t/workspace.svg?branch=master)](https://travis-ci.com/matthias-t/workspace) [![crates.io](https://img.shields.io/crates/v/workspace.svg)](https://crates.io/crates/workspace)\n\n\n\n`ws` is a CLI to manage and interpret small YAML files that specify tasks to open a project like opening an editor, launching a server or visiting a chat or documentation in the browser. It can be used to efficiently switch between work and side projects.\n\n\n\n### Installation\n\n\n\n```bash\n\ncargo install workspace\n\n```\n\n\n\nThen setup the `ws` command in your shell:\n\n\n\n- **bash**: Add this line to your `.bashrc`\n\n\n\n ```bash\n\n eval $(workspace shell bash)\n\n ```\n\n\n\n- **fish**: Add this line to your `config.fish`\n\n\n\n ```fish\n\n workspace shell fish | source -\n\n ```\n\n\n\n- **PowerShell**: Add this line to your `profile.ps1`\n\n\n\n ```powershell\n\n Invoke-Expression \"$(workspace shell posh)\"\n\n ```\n\n\n\n> `workspace shell` prints a shell function `ws` that delegates output from `workspace` but intercepts commands to run. This lets you change the directory and run commands directly in the shell, e.g. 
if they need user input.\n\n\n", "file_path": "README.md", "rank": 32, "score": 11051.442267814098 }, { "content": "### [`0.4.1`](https://github.com/matthias-t/workspace/commit/48bd22e8079f0ea32a0a3127c37294c1fd9bab51)\n\n- small style changes\n\n- improve the `README.md`\n\n\n\n### [`0.4.0`](https://github.com/matthias-t/workspace/commit/48bd22e8079f0ea32a0a3127c37294c1fd9bab51)\n\n- switch to `YAML`\n\n- add documentation about workspace format\n\n- error on unknown workspace fields\n\n- `ws add` doesn't generate empty lists and tables anymore\n\n- improve some help messages\n\n- print warnings to `stderr`\n\n- show a warning when the binary is used directly\n\n- improve code style, modernize imports\n\n- keep a `CHANGELOG.md`\n\n\n\n### [`0.3.0`](https://github.com/matthias-t/workspace/commit/7b09d1948816439b7c598f92dc0535f0b2ab101a)\n\n- instead of the shell wrappers, `std::process` is now used to open tabs or run commands in a new terminal\n\n- new `commands.background` field: each background command is the argument of a new shell process\n\n- updated the project description\n\n\n\n### [`0.2.2`](https://github.com/matthias-t/workspace/commit/a5d0aad79c12a809cbab90bddbf5155aac526d7d)\n\n- paths above home directory are now shortened with a tilde `~`\n\n- updated the project description and README.md\n\n\n\n### [`0.2.1`](https://github.com/matthias-t/workspace/commit/fc4532683b6be21cd51efe3596aa64e4132136e1)\n\nfix a bug that made the `add` subcommand panic\n\n\n\n### [`0.2.0`](https://github.com/matthias-t/workspace/commit/d380b6924e4df26cf85ff8e842d95b1b2c2f0ce8)\n\n- new workspace field: `tabs`, a list of strings to be opened with `$BROWSER`\n\n- `commands` is now `commands.local` and there is a new `commands.external` field\n\n\n\n### [`0.1.0`](https://github.com/matthias-t/workspace/commit/1ace6469b076889a7114484f56724fdd533585c2)\n\ninitial release\n", "file_path": "CHANGELOG.md", "rank": 33, "score": 11050.905032299772 }, { "content": "### Documentation\n\n\n\nFor the CLI, see:\n\n```\n\nws --help\n\n```\n\n\n\nWorkspaces can have the following fields:\n\n\n\n- `path`, list of strings\n\n\n\n path to the workspace\n\n\n\n- `tabs`, list of strings\n\n\n\n tabs to open in `$BROWSER`\n\n\n\n- `commands`, table\n\n\n\n - `local`, list of strings\n\n\n\n commands to execute in the current shell\n\n\n\n - `background`, list of strings\n\n\n\n commands to execute as background processes\n\n\n\n - `external`, list of strings\n\n\n\n commands to execute in a new `$TERMINAL`\n\n\n\n> Note: `path` is mandatory and created automatically by `ws new`\n\n\n\nFor example, this is the workspace I use for my blog:\n\n\n\n```\n\npath: /home/matthias/code/web/blog/\n\n\n\ncommands:\n\n local:\n\n - git status\n\n - sudo systemctl start nginx\n\n background:\n\n - code -r .\n\n external:\n\n - gulp\n\n\n\ntabs:\n\n- https://developer.mozilla.org/en-US/\n\n- localhost\n\n```\n\n\n\nIt will `cd` into `~/code/web/blog/`, print the git status, open the directory\n\nin visual studio code, start the `gulp` build in a new terminal, launch `nginx`\n\nto serve the files and open `localhost` and MDN in the browser.\n", "file_path": "README.md", "rank": 34, "score": 11049.204186942494 } ]
Rust
cli/src/forge.rs
Genysys/foundry
0527eb95ce9c17101b434a0626ba3861836c0a9e
use ethers::{ providers::Provider, solc::{remappings::Remapping, ArtifactOutput, Project}, }; use evm_adapters::{ sputnik::{vicinity, ForkMemoryBackend, PRECOMPILES_MAP}, FAUCET_ACCOUNT, }; use regex::Regex; use sputnik::backend::Backend; use structopt::StructOpt; use forge::MultiContractRunnerBuilder; use ansi_term::Colour; use ethers::types::U256; mod forge_opts; use forge_opts::{EvmType, Opts, Subcommands}; use crate::forge_opts::{Dependency, FullContractInfo}; use std::{collections::HashMap, convert::TryFrom, process::Command, str::FromStr, sync::Arc}; mod cmd; mod utils; #[tracing::instrument(err)] fn main() -> eyre::Result<()> { utils::subscriber(); let opts = Opts::from_args(); match opts.sub { Subcommands::Test { opts, env, json, pattern, evm_type, fork_url, fork_block_number, initial_balance, sender, ffi, verbosity, allow_failure, } => { let cfg = proptest::test_runner::Config { failure_persistence: None, ..Default::default() }; let fuzzer = proptest::test_runner::TestRunner::new(cfg); let project = Project::try_from(&opts)?; let builder = MultiContractRunnerBuilder::default() .fuzzer(fuzzer) .initial_balance(initial_balance) .sender(sender); match evm_type { #[cfg(feature = "sputnik-evm")] EvmType::Sputnik => { use evm_adapters::sputnik::Executor; use sputnik::backend::MemoryBackend; let mut cfg = opts.evm_version.sputnik_cfg(); cfg.create_contract_limit = None; let vicinity = if let Some(ref url) = fork_url { let provider = Provider::try_from(url.as_str())?; let rt = tokio::runtime::Runtime::new().expect("could not start tokio rt"); rt.block_on(vicinity(&provider, fork_block_number))? } else { env.sputnik_state() }; let mut backend = MemoryBackend::new(&vicinity, Default::default()); let faucet = backend.state_mut().entry(*FAUCET_ACCOUNT).or_insert_with(Default::default); faucet.balance = U256::MAX; let backend: Box<dyn Backend> = if let Some(ref url) = fork_url { let provider = Provider::try_from(url.as_str())?; let init_state = backend.state().clone(); let backend = ForkMemoryBackend::new( provider, backend, fork_block_number, init_state, ); Box::new(backend) } else { Box::new(backend) }; let backend = Arc::new(backend); let precompiles = PRECOMPILES_MAP.clone(); let evm = Executor::new_with_cheatcodes( backend, env.gas_limit, &cfg, &precompiles, ffi, ); test(builder, project, evm, pattern, json, verbosity, allow_failure)?; } #[cfg(feature = "evmodin-evm")] EvmType::EvmOdin => { use evm_adapters::evmodin::EvmOdin; use evmodin::tracing::NoopTracer; let revision = opts.evm_version.evmodin_cfg(); let host = env.evmodin_state(); let evm = EvmOdin::new(host, env.gas_limit, revision, NoopTracer); test(builder, project, evm, pattern, json, verbosity, allow_failure)?; } } } Subcommands::Build { opts } => { let project = Project::try_from(&opts)?; let output = project.compile()?; if output.has_compiler_errors() { eyre::bail!(output.to_string()) } else if output.is_unchanged() { println!("no files changed, compilation skippped."); } else { println!("success."); } } Subcommands::VerifyContract { contract, address, constructor_args } => { let FullContractInfo { path, name } = contract; let rt = tokio::runtime::Runtime::new().expect("could not start tokio rt"); rt.block_on(cmd::verify::run(path, name, address, constructor_args))?; } Subcommands::Create { contract: _, verify: _ } => { unimplemented!("Not yet implemented") } Subcommands::Update { lib } => { let repo = git2::Repository::open(".")?; if let Some(lib) = lib { println!("Updating submodule {:?}", lib); repo.find_submodule( 
&lib.into_os_string().into_string().expect("invalid submodule path"), )? .update(true, None)?; } else { Command::new("git") .args(&["submodule", "update", "--init", "--recursive"]) .spawn()? .wait()?; } } Subcommands::Install { dependencies } => { install(std::env::current_dir()?, dependencies)?; } Subcommands::Remappings { lib_paths, root } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); let root = std::fs::canonicalize(root)?; let lib_paths = if lib_paths.is_empty() { vec![root.join("lib")] } else { lib_paths }; let remappings: Vec<_> = lib_paths .iter() .map(|path| Remapping::find_many(&path).unwrap()) .flatten() .collect(); remappings.iter().for_each(|x| println!("{}", x)); } Subcommands::Init { root, template } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); if !root.exists() { std::fs::create_dir_all(&root)?; } let root = std::fs::canonicalize(root)?; if let Some(ref template) = template { println!("Initializing {} from {}...", root.display(), template); Command::new("git") .args(&["clone", template, &root.display().to_string()]) .spawn()? .wait()?; } else { println!("Initializing {}...", root.display()); let src = root.join("src"); let test = src.join("test"); std::fs::create_dir_all(&test)?; let lib = root.join("lib"); std::fs::create_dir(&lib)?; let contract_path = src.join("Contract.sol"); std::fs::write(contract_path, include_str!("../../assets/ContractTemplate.sol"))?; let contract_path = test.join("Contract.t.sol"); std::fs::write(contract_path, include_str!("../../assets/ContractTemplate.t.sol"))?; Command::new("git").arg("init").current_dir(&root).spawn()?.wait()?; Command::new("git").args(&["add", "."]).current_dir(&root).spawn()?.wait()?; Command::new("git") .args(&["commit", "-m", "chore: forge init"]) .current_dir(&root) .spawn()? .wait()?; Dependency::from_str("https://github.com/dapphub/ds-test") .and_then(|dependency| install(root, vec![dependency]))?; } println!("Done."); } Subcommands::Completions { shell } => { Subcommands::clap().gen_completions_to("forge", shell, &mut std::io::stdout()) } Subcommands::Clean { root } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); utils::cleanup(root)?; } } Ok(()) } fn test<A: ArtifactOutput + 'static, S: Clone, E: evm_adapters::Evm<S>>( builder: MultiContractRunnerBuilder, project: Project<A>, evm: E, pattern: Regex, json: bool, verbosity: u8, allow_failure: bool, ) -> eyre::Result<HashMap<String, HashMap<String, forge::TestResult>>> { let mut runner = builder.build(project, evm)?; let mut exit_code = 0; let results = runner.test(pattern)?; if json { let res = serde_json::to_string(&results)?; println!("{}", res); } else { for (i, (contract_name, tests)) in results.iter().enumerate() { if i > 0 { println!() } if !tests.is_empty() { let term = if tests.len() > 1 { "tests" } else { "test" }; println!("Running {} {} for {}", tests.len(), term, contract_name); } for (name, result) in tests { let status = if result.success { Colour::Green.paint("[PASS]") } else { exit_code = -1; let txt = match (&result.reason, &result.counterexample) { (Some(ref reason), Some(ref counterexample)) => { format!( "[FAIL. Reason: {}. Counterexample: {}]", reason, counterexample ) } (None, Some(ref counterexample)) => { format!("[FAIL. Counterexample: {}]", counterexample) } (Some(ref reason), None) => { format!("[FAIL. 
Reason: {}]", reason) } (None, None) => "[FAIL]".to_string(), }; Colour::Red.paint(txt) }; println!( "{} {} (gas: {})", status, name, result .gas_used .map(|x| x.to_string()) .unwrap_or_else(|| "[fuzztest]".to_string()) ); } if verbosity > 1 { println!(); for (name, result) in tests { let status = if result.success { "Success" } else { "Failure" }; println!("{}: {}", status, name); println!(); for log in &result.logs { println!(" {}", log); } println!(); } } } } if allow_failure { exit_code = 0; } std::process::exit(exit_code); } fn install(root: impl AsRef<std::path::Path>, dependencies: Vec<Dependency>) -> eyre::Result<()> { let libs = std::path::Path::new("lib"); dependencies.iter().try_for_each(|dep| -> eyre::Result<_> { let path = libs.join(&dep.name); println!("Installing {} in {:?}, (url: {}, tag: {:?})", dep.name, path, dep.url, dep.tag); Command::new("git") .args(&["submodule", "add", &dep.url, &path.display().to_string()]) .current_dir(&root) .spawn()? .wait()?; Command::new("git") .args(&["submodule", "update", "--init", "--recursive", &path.display().to_string()]) .current_dir(&root) .spawn()? .wait()?; let message = if let Some(ref tag) = dep.tag { Command::new("git") .args(&["checkout", "--recurse-submodules", tag]) .current_dir(&path) .spawn()? .wait()?; Command::new("git").args(&["add", &path.display().to_string()]).spawn()?.wait()?; format!("forge install: {}\n\n{}", dep.name, tag) } else { format!("forge install: {}", dep.name) }; Command::new("git").args(&["commit", "-m", &message]).current_dir(&root).spawn()?.wait()?; Ok(()) }) }
use ethers::{ providers::Provider, solc::{remappings::Remapping, ArtifactOutput, Project}, }; use evm_adapters::{ sputnik::{vicinity, ForkMemoryBackend, PRECOMPILES_MAP}, FAUCET_ACCOUNT, }; use regex::Regex; use sputnik::backend::Backend; use structopt::StructOpt; use forge::MultiContractRunnerBuilder; use ansi_term::Colour; use ethers::types::U256; mod forge_opts; use forge_opts::{EvmType, Opts, Subcommands}; use crate::forge_opts::{Dependency, FullContractInfo}; use std::{collections::HashMap, convert::TryFrom, process::Command, str::FromStr, sync::Arc}; mod cmd; mod utils; #[tracing::instrument(err)] fn main() -> eyre::Result<()> { utils::subscriber(); let opts = Opts::from_args(); match opts.sub { Subcommands::Test { opts, env, json, pattern, evm_type, fork_url, fork_block_number, initial_balance, sender, ffi, verbosity, allow_failure, } => { let cfg = proptest::test_runner::Config { failure_persistence: None, ..Default::default() }; let fuzzer = proptest::test_runner::TestRunner::new(cfg); let project = Project::try_from(&opts)?; let builder = MultiContractRunnerBuilder::default() .fuzzer(fuzzer) .initial_balance(initial_balance) .sender(sender); match evm_type { #[cfg(feature = "sputnik-evm")] EvmType::Sputnik => { use evm_adapters::sputnik::Executor; use sputnik::backend::MemoryBackend; let mut cfg = opts.evm_version.sputnik_cfg(); cfg.create_contract_limit = None; let vicinity = if let Some(ref url) = fork_url { let provider = Provider::try_from(url.as_str())?; let rt = tokio::runtime::Runtime::new().expect("could not start tokio rt"); rt.block_on(vicinity(&provider, fork_block_number))? } else { env.sputnik_state() }; let mut backend = MemoryBackend::new(&vicinity, Default::default()); let faucet = backend.state_mut().entry(*FAUCET_ACCOUNT).or_insert_with(Default::default); faucet.balance = U256::MAX; let backend: Box<dyn Backend> = if let Some(ref url) = fork_url { let provider = Provider::try_from(url.as_str())?; let init_state = backend.state().clone(); let backend = ForkMemoryBackend::new( provider, backend, fork_block_number, init_state, ); Box::new(backend) } else { Box::new(backend) }; let backend = Arc::new(backend); let precompiles = PRECOMPILES_MAP.clone(); let evm = Executor::new_with_cheatcodes( backend, env.gas_limit, &cfg, &precompiles, ffi, ); test(builder, project, evm, pattern, json, verbosity, allow_failure)?; } #[cfg(feature = "evmodin-evm")] EvmType::EvmOdin => { use evm_adapters::evmodin::EvmOdin; use evmodin::tracing::NoopTracer; let revision = opts.evm_version.evmodin_cfg(); let host = env.evmodin_state(); let evm = EvmOdin::new(host, env.gas_limit, revision, NoopTracer); test(builder, project, evm, pattern, json, verbosity, allow_failure)?; } } } Subcommands::Build { opts } => { let project = Project::try_from(&opts)?; let output = project.compile()?; if output.has_compiler_errors() { eyre::bail!(output.to_string()) } else if output.is_unchanged() { println!("no files changed, compilation skippped."); } else { println!("success."); } } Subcommands::VerifyContract { contract, address, constructor_args } => { let FullContractInfo { path, name } = contract; let rt = tokio::runtime::Runtime::new().expect("could not start tokio rt"); rt.block_on(cmd::verify::run(path, name, address, constructor_args))?; } Subcommands::Create { contract: _, verify: _ } => { unimplemented!("Not yet implemented") } Subcommands::Update { lib } => { let repo = git2::Repository::open(".")?; if let Some(lib) = lib { println!("Updating submodule {:?}", lib); repo.find_submodule( 
&lib.into_os_string().into_string().expect("invalid submodule path"), )? .update(true, None)?; } else { Command::new("git") .args(&["submodule", "update", "--init", "--recursive"]) .spawn()? .wait()?; } } Subcommands::Install { dependencies } => { install(std::env::current_dir()?, dependencies)?; } Subcommands::Remappings { lib_paths, root } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); let root = std::fs::canonicalize(root)?; let lib_paths = if lib_paths.is_empty() { vec![root.join("lib")] } else { lib_paths }; let remappings: Vec<_> = lib_paths .iter() .map(|path| Remapping::find_many(&path).unwrap()) .flatten() .collect(); remappings.iter().for_each(|x| println!("{}", x)); } Subcommands::Init { root, template } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); if !root.exists() { std::fs::create_dir_all(&root)?; } let root = std::fs::canonicalize(root)?; if let Some(ref template) = template { println!("Initializing {} from {}...", root.display(), template); Command::new("git") .args(&["clone", template, &root.display().to_string()]) .spawn()? .wait()?; } else { println!("Initializing {}...", root.display()); let src = root.join("src"); let test = src.join("test"); std::fs::create_dir_all(&test)?; let lib = root.join("lib"); std::fs::create_dir(&lib)?; let contract_path = src.join("Contract.sol"); std::fs::write(contract_path, include_str!("../../assets/ContractTemplate.sol"))?; let contract_path = test.join("Contract.t.sol"); std::fs::write(contract_path, include_str!("../../assets/ContractTemplate.t.sol"))?; Command::new("git").arg("init").current_dir(&root).spawn()?.wait()?; Command::new("git").args(&["add", "."]).current_dir(&root).spawn()?.wait()?; Command::new("git") .args(&["commit", "-m", "chore: forge init"]) .current_dir(&root) .spawn()? .wait()?; Dependency::from_str("https://github.com/dapphub/ds-test") .and_then(|dependency| install(root, vec![dependency]))?; } println!("Done."); } Subcommands::Completions { shell } => { Subcommands::clap().gen_completions_to("forge", shell, &mut std::io::stdout()) } Subcommands::Clean { root } => { let root = root.unwrap_or_else(|| std::env::current_dir().unwrap()); utils::cleanup(root)?; } } Ok(()) } fn test<A: ArtifactOutput + 'static, S: Clone, E: evm_adapters::Evm<S>>( builder: MultiContractRunnerBuilder, project: Project<A>, evm: E, pattern: Regex, json: bool, verbosity: u8, allow_failure: bool, ) -> eyre::Result<HashMap<String, HashMap<String, forge::TestResult>>> { let mut runner = builder.build(project, evm)?; let mut exit_code = 0; let results = runner.test(pattern)?; if json { let res = serde_json::to_string(&results)?; println!("{}", res); } else { for (i, (contract_name, tests)) in results.iter().enumerate() { if i > 0 { println!() } if !tests.is_empty() { let term = if tests.len() > 1 { "tests" } else { "test" }; println!("Running {} {} for {}", tests.len(), term, contract_name); } for (name, result) in tests { let status = if result.success { Colour::Green.paint("[PASS]") } else { exit_code = -1; let txt = match (&result.reason, &result.counterexample) { (Some(ref reason), Some(ref counterexample)) => { format!( "[FAIL. Reason: {}. Counterexample: {}]", reason, counte
fn install(root: impl AsRef<std::path::Path>, dependencies: Vec<Dependency>) -> eyre::Result<()> { let libs = std::path::Path::new("lib"); dependencies.iter().try_for_each(|dep| -> eyre::Result<_> { let path = libs.join(&dep.name); println!("Installing {} in {:?}, (url: {}, tag: {:?})", dep.name, path, dep.url, dep.tag); Command::new("git") .args(&["submodule", "add", &dep.url, &path.display().to_string()]) .current_dir(&root) .spawn()? .wait()?; Command::new("git") .args(&["submodule", "update", "--init", "--recursive", &path.display().to_string()]) .current_dir(&root) .spawn()? .wait()?; let message = if let Some(ref tag) = dep.tag { Command::new("git") .args(&["checkout", "--recurse-submodules", tag]) .current_dir(&path) .spawn()? .wait()?; Command::new("git").args(&["add", &path.display().to_string()]).spawn()?.wait()?; format!("forge install: {}\n\n{}", dep.name, tag) } else { format!("forge install: {}", dep.name) }; Command::new("git").args(&["commit", "-m", &message]).current_dir(&root).spawn()?.wait()?; Ok(()) }) }
rexample ) } (None, Some(ref counterexample)) => { format!("[FAIL. Counterexample: {}]", counterexample) } (Some(ref reason), None) => { format!("[FAIL. Reason: {}]", reason) } (None, None) => "[FAIL]".to_string(), }; Colour::Red.paint(txt) }; println!( "{} {} (gas: {})", status, name, result .gas_used .map(|x| x.to_string()) .unwrap_or_else(|| "[fuzztest]".to_string()) ); } if verbosity > 1 { println!(); for (name, result) in tests { let status = if result.success { "Success" } else { "Failure" }; println!("{}: {}", status, name); println!(); for log in &result.logs { println!(" {}", log); } println!(); } } } } if allow_failure { exit_code = 0; } std::process::exit(exit_code); }
function_block-function_prefixed
[ { "content": "/// Tries to extract the `Contract` in the `DAPP_JSON` file\n\npub fn find_dapp_json_contract(path: &str, name: &str) -> eyre::Result<Contract> {\n\n let dapp_json = dapp_json_path();\n\n let mut value: serde_json::Value = serde_json::from_reader(std::fs::File::open(&dapp_json)?)\n\n .wrap_err(\"Failed to read DAPP_JSON artifacts\")?;\n\n\n\n let contracts = value[\"contracts\"]\n\n .as_object_mut()\n\n .wrap_err_with(|| format!(\"No `contracts` found in `{}`\", dapp_json.display()))?;\n\n\n\n let contract = if let serde_json::Value::Object(mut contract) = contracts[path].take() {\n\n contract\n\n .remove(name)\n\n .wrap_err_with(|| format!(\"No contract found at `.contract.{}.{}`\", path, name))?\n\n } else {\n\n let key = format!(\"{}:{}\", path, name);\n\n contracts\n\n .remove(&key)\n\n .wrap_err_with(|| format!(\"No contract found at `.contract.{}`\", key))?\n\n };\n\n\n\n Ok(serde_json::from_value(contract)?)\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 1, "score": 245195.4654767519 }, { "content": "/// Given an ABI encoded error string with the function signature `Error(string)`, it decodes\n\n/// it and returns the revert error message.\n\npub fn decode_revert(error: &[u8]) -> std::result::Result<String, ethers_core::abi::Error> {\n\n let error = error.strip_prefix(&ethers_core::utils::id(\"Error(string)\")).unwrap_or(error);\n\n if !error.is_empty() {\n\n Ok(abi::decode(&[abi::ParamType::String], error)?[0].to_string())\n\n } else {\n\n Ok(\"No revert reason found\".to_owned())\n\n }\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 4, "score": 190677.38925554312 }, { "content": "pub fn cleanup(root: PathBuf) -> eyre::Result<()> {\n\n std::fs::remove_dir_all(root.join(\"cache\"))?;\n\n std::fs::remove_dir_all(root.join(\"out\"))?;\n\n Ok(())\n\n}\n", "file_path": "cli/src/utils.rs", "rank": 5, "score": 189269.44499537925 }, { "content": "pub fn decode_revert(error: &[u8]) -> Result<String> {\n\n Ok(abi::decode(&[abi::ParamType::String], &error[4..])?[0].to_string())\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test_helpers {\n\n use ethers::{\n\n prelude::Lazy,\n\n solc::{CompilerOutput, Project, ProjectPathsConfig},\n\n };\n\n\n\n pub static COMPILED: Lazy<CompilerOutput> = Lazy::new(|| {\n\n // NB: should we add a test-helper function that makes creating these\n\n // ephemeral projects easier?\n\n let paths =\n\n ProjectPathsConfig::builder().root(\"testdata\").sources(\"testdata\").build().unwrap();\n\n let project = Project::builder().paths(paths).ephemeral().no_artifacts().build().unwrap();\n\n project.compile().unwrap().output()\n\n });\n\n}\n", "file_path": "forge/src/lib.rs", "rank": 6, "score": 187319.28303008073 }, { "content": "fn parse_name_or_address(s: &str) -> eyre::Result<NameOrAddress> {\n\n Ok(if s.starts_with(\"0x\") {\n\n NameOrAddress::Address(s.parse::<Address>()?)\n\n } else {\n\n NameOrAddress::Name(s.into())\n\n })\n\n}\n\n\n", "file_path": "cli/src/cast_opts.rs", "rank": 7, "score": 182745.5363996303 }, { "content": "/// Given a function and a vector of string arguments, it proceeds to convert the args to ethabi\n\n/// Tokens and then ABI encode them.\n\npub fn encode_args(func: &Function, args: &[impl AsRef<str>]) -> Result<Vec<u8>> {\n\n let params = func\n\n .inputs\n\n .iter()\n\n .zip(args)\n\n .map(|(input, arg)| (&input.kind, arg.as_ref()))\n\n .collect::<Vec<_>>();\n\n let tokens = parse_tokens(params, true)?;\n\n Ok(func.encode_input(&tokens)?)\n\n}\n", "file_path": "utils/src/lib.rs", "rank": 8, "score": 156760.9489700176 }, 
{ "content": "/// The path to where the contract artifacts are stored\n\npub fn dapp_json_path() -> PathBuf {\n\n PathBuf::from(DAPP_JSON)\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 9, "score": 147645.83487993365 }, { "content": "/// Given a function signature string, it tries to parse it as a `Function`\n\npub fn get_func(sig: &str) -> Result<Function> {\n\n // TODO: Make human readable ABI better / more minimal\n\n let abi = parse_abi(&[sig])?;\n\n // get the function\n\n let (_, func) =\n\n abi.functions.iter().next().ok_or_else(|| eyre::eyre!(\"function name not found\"))?;\n\n let func = func.get(0).ok_or_else(|| eyre::eyre!(\"functions array empty\"))?;\n\n Ok(func.clone())\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 10, "score": 145311.08075944026 }, { "content": "/// Given a gas value and a calldata array, it subtracts the calldata cost from the\n\n/// gas value, as well as the 21k base gas cost for all transactions.\n\npub fn remove_extra_costs(gas: U256, calldata: &[u8]) -> U256 {\n\n let mut calldata_cost = 0;\n\n for i in calldata {\n\n if *i != 0 {\n\n // TODO: Check if EVM pre-eip2028 and charge 64\n\n calldata_cost += 16\n\n } else {\n\n calldata_cost += 8;\n\n }\n\n }\n\n gas - calldata_cost - BASE_TX_COST\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 11, "score": 137341.26316642208 }, { "content": "type StorageFuture<Err> = Pin<Box<dyn Future<Output = (Result<H256, Err>, Address, H256)> + Send>>;\n\n\n", "file_path": "evm-adapters/src/sputnik/forked_backend/cache.rs", "rank": 12, "score": 136838.82466123442 }, { "content": "/// Given a k/v serde object, it pretty prints its keys and values as a table.\n\npub fn to_table(value: serde_json::Value) -> String {\n\n match value {\n\n serde_json::Value::String(s) => s,\n\n serde_json::Value::Object(map) => {\n\n let mut s = String::new();\n\n for (k, v) in map.iter() {\n\n s.push_str(&format!(\"{: <20} {}\\n\", k, v));\n\n }\n\n s\n\n }\n\n _ => \"\".to_owned(),\n\n }\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 13, "score": 130006.95666735801 }, { "content": "type PrecompileFn =\n\n fn(&[u8], Option<u64>, &sputnik::Context, bool) -> Result<PrecompileOutput, PrecompileFailure>;\n\n\n\n/// Precompiled contracts which should be provided when instantiating the EVM.\n\npub static PRECOMPILES: Lazy<revm_precompiles::Precompiles> = Lazy::new(|| {\n\n // We use the const to immediately choose the latest revision of available\n\n // precompiles. Is this wrong maybe?\n\n revm_precompiles::Precompiles::new::<3>()\n\n});\n\n\n\n// https://github.com/rust-blockchain/evm-tests/blob/d53b17989db45d76b5876b33db63bcaf367a53fa/jsontests/src/state.rs#L55\n\n// We need to do this because closures can only be coerced to `fn` types if they do not capture any\n\n// variables\n\nmacro_rules! 
precompile_entry {\n\n ($map:expr, $index:expr) => {\n\n let x: fn(\n\n &[u8],\n\n Option<u64>,\n\n &Context,\n\n bool,\n", "file_path": "evm-adapters/src/sputnik/mod.rs", "rank": 14, "score": 128136.30449886173 }, { "content": "// helper for creating an exit type\n\nfn evm_error(retdata: &str) -> Capture<(ExitReason, Vec<u8>), Infallible> {\n\n Capture::Exit((\n\n ExitReason::Revert(ExitRevert::Reverted),\n\n ethers::abi::encode(&[Token::String(retdata.to_owned())]),\n\n ))\n\n}\n\n\n\nimpl<'a, 'b, B: Backend, P: PrecompileSet> CheatcodeStackExecutor<'a, 'b, B, P> {\n\n /// Given a transaction's calldata, it tries to parse it a console call and print the call\n\n fn console_log(&mut self, input: Vec<u8>) -> Capture<(ExitReason, Vec<u8>), Infallible> {\n\n let decoded = match ConsoleCalls::decode(&input) {\n\n Ok(inner) => inner,\n\n Err(err) => return evm_error(&err.to_string()),\n\n };\n\n self.console_logs.push(decoded.to_string());\n\n Capture::Exit((ExitReason::Succeed(ExitSucceed::Stopped), vec![]))\n\n }\n\n\n\n /// Given a transaction's calldata, it tries to parse it as an [`HEVM cheatcode`](super::HEVM)\n\n /// call and modify the state accordingly.\n", "file_path": "evm-adapters/src/sputnik/cheatcodes/cheatcode_handler.rs", "rank": 15, "score": 127185.12958373158 }, { "content": "/// Parses string input as Token against the expected ParamType\n\npub fn parse_tokens<'a, I: IntoIterator<Item = (&'a ParamType, &'a str)>>(\n\n params: I,\n\n lenient: bool,\n\n) -> eyre::Result<Vec<Token>> {\n\n params\n\n .into_iter()\n\n .map(|(param, value)| {\n\n if lenient {\n\n LenientTokenizer::tokenize(param, value)\n\n } else {\n\n StrictTokenizer::tokenize(param, value)\n\n }\n\n })\n\n .collect::<Result<_, _>>()\n\n .wrap_err(\"Failed to parse tokens\")\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 16, "score": 123655.28628901219 }, { "content": "/// Reads the `ETHERSCAN_API_KEY` env variable\n\npub fn etherscan_api_key() -> eyre::Result<String> {\n\n std::env::var(\"ETHERSCAN_API_KEY\").map_err(|err| match err {\n\n VarError::NotPresent => {\n\n eyre::eyre!(\n\n r#\"\n\n You need an Etherscan Api Key to verify contracts.\n\n Create one at https://etherscan.io/myapikey\n\n Then export it with \\`export ETHERSCAN_API_KEY=xxxxxxxx'\"#\n\n )\n\n }\n\n VarError::NotUnicode(err) => {\n\n eyre::eyre!(\"Invalid `ETHERSCAN_API_KEY`: {:?}\", err)\n\n }\n\n })\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 17, "score": 121316.02597197307 }, { "content": "fn parse_slot(s: &str) -> eyre::Result<H256> {\n\n Ok(if s.starts_with(\"0x\") {\n\n let padded = format!(\"{:0>64}\", s.strip_prefix(\"0x\").unwrap());\n\n H256::from_str(&padded)?\n\n } else {\n\n H256::from_low_u64_be(u64::from_str(s)?)\n\n })\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\npub struct Opts {\n\n #[structopt(subcommand)]\n\n pub sub: Subcommands,\n\n}\n\n\n\n#[derive(StructOpt, Debug, Clone)]\n\npub struct EthereumOpts {\n\n #[structopt(\n\n env = \"ETH_RPC_URL\",\n\n short,\n", "file_path": "cli/src/cast_opts.rs", "rank": 18, "score": 119189.03344837845 }, { "content": "/// Extension trait over [`Backend`] which provides additional methods for interacting with the\n\n/// state\n\npub trait BackendExt: Backend {\n\n fn set_storage(&mut self, address: Address, slot: H256, value: H256);\n\n}\n\n\n\nimpl<'a> BackendExt for MemoryBackend<'a> {\n\n fn set_storage(&mut self, address: Address, slot: H256, value: H256) {\n\n let account = self.state_mut().entry(address).or_insert_with(Default::default);\n\n let slot = 
account.storage.entry(slot).or_insert_with(Default::default);\n\n *slot = value;\n\n }\n\n}\n\n\n\nethers::contract::abigen!(\n\n HEVM,\n\n r#\"[\n\n roll(uint256)\n\n warp(uint256)\n\n store(address,bytes32,bytes32)\n\n load(address,bytes32)(bytes32)\n\n ffi(string[])(bytes)\n", "file_path": "evm-adapters/src/sputnik/cheatcodes/mod.rs", "rank": 19, "score": 118633.82909331791 }, { "content": "/// The rpc url to use\n\n/// If the `ETH_RPC_URL` is not present, it falls back to the default `http://127.0.0.1:8545`\n\npub fn rpc_url() -> String {\n\n std::env::var(\"ETH_RPC_URL\").unwrap_or_else(|_| LOCAL_RPC_URL.to_string())\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 20, "score": 117804.47621584203 }, { "content": "/// Runs the provided precompile against the input data.\n\npub fn exec(\n\n builtin: &revm_precompiles::Precompile,\n\n input: &[u8],\n\n gas_limit: u64,\n\n) -> Result<PrecompileOutput, PrecompileFailure> {\n\n let res = match builtin {\n\n revm_precompiles::Precompile::Standard(func) => func(input, gas_limit),\n\n revm_precompiles::Precompile::Custom(func) => func(input, gas_limit),\n\n };\n\n match res {\n\n Ok(res) => {\n\n let logs = res\n\n .logs\n\n .into_iter()\n\n .map(|log| sputnik::backend::Log {\n\n topics: log.topics,\n\n data: log.data.to_vec(),\n\n address: log.address,\n\n })\n\n .collect();\n", "file_path": "evm-adapters/src/sputnik/mod.rs", "rank": 21, "score": 115214.74874467692 }, { "content": "fn parse_block_id(s: &str) -> eyre::Result<BlockId> {\n\n Ok(match s {\n\n \"earliest\" => BlockId::Number(BlockNumber::Earliest),\n\n \"latest\" => BlockId::Number(BlockNumber::Latest),\n\n s if s.starts_with(\"0x\") => BlockId::Hash(H256::from_str(s)?),\n\n s => BlockId::Number(BlockNumber::Number(u64::from_str(s)?.into())),\n\n })\n\n}\n\n\n", "file_path": "cli/src/cast_opts.rs", "rank": 22, "score": 113197.01909638746 }, { "content": "// TODO: Any reason this should be an async trait?\n\n/// Low-level abstraction layer for interfacing with various EVMs. Once instantiated, one\n\n/// only needs to specify the transaction parameters\n\npub trait Evm<State> {\n\n /// The returned reason type from an EVM (Success / Revert/ Stopped etc.)\n\n type ReturnReason: std::fmt::Debug + PartialEq;\n\n\n\n /// Gets the revert reason type\n\n fn revert() -> Self::ReturnReason;\n\n\n\n /// Whether a return reason should be considered successful\n\n fn is_success(reason: &Self::ReturnReason) -> bool;\n\n /// Whether a return reason should be considered failing\n\n fn is_fail(reason: &Self::ReturnReason) -> bool;\n\n\n\n /// Sets the provided contract bytecode at the corresponding addresses\n\n fn initialize_contracts<I: IntoIterator<Item = (Address, Bytes)>>(&mut self, contracts: I);\n\n\n\n /// Gets a reference to the current state of the EVM\n\n fn state(&self) -> &State;\n\n\n\n /// Sets the balance at the specified address\n\n fn set_balance(&mut self, address: Address, amount: U256);\n", "file_path": "evm-adapters/src/lib.rs", "rank": 23, "score": 98422.40879206889 }, { "content": "pub mod cache;\n\npub use cache::{new_shared_cache, MemCache, SharedBackend, SharedCache};\n\npub mod rpc;\n\npub use rpc::ForkMemoryBackend;\n", "file_path": "evm-adapters/src/sputnik/forked_backend/mod.rs", "rank": 24, "score": 98366.5364651795 }, { "content": "/// Helper trait for exposing additional functionality over EVMOdin Hosts\n\npub trait HostExt: Host {\n\n /// Gets the bytecode at the specified address. 
`None` if the specified address\n\n /// is not a contract account.\n\n fn get_code(&self, address: &Address) -> Option<&bytes::Bytes>;\n\n /// Sets the bytecode at the specified address to the provided value.\n\n fn set_code(&mut self, address: Address, code: bytes::Bytes);\n\n /// Sets the account's balance to the provided value.\n\n fn set_balance(&mut self, address: Address, balance: U256);\n\n}\n\n\n\nimpl<S: HostExt, Tr: Tracer> Evm<S> for EvmOdin<S, Tr> {\n\n type ReturnReason = StatusCode;\n\n\n\n fn revert() -> Self::ReturnReason {\n\n StatusCode::Revert\n\n }\n\n\n\n fn is_success(reason: &Self::ReturnReason) -> bool {\n\n matches!(reason, StatusCode::Success)\n\n }\n", "file_path": "evm-adapters/src/evmodin.rs", "rank": 25, "score": 97978.57505544298 }, { "content": "/// Initializes a tracing Subscriber for logging\n\npub fn subscriber() {\n\n tracing_subscriber::FmtSubscriber::builder()\n\n // .with_timer(tracing_subscriber::fmt::time::uptime())\n\n .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())\n\n .init();\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 26, "score": 95730.45892606996 }, { "content": "fn strip_0x(s: &str) -> &str {\n\n s.strip_prefix(\"0x\").unwrap_or(s)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::SimpleCast as Cast;\n\n\n\n #[test]\n\n fn calldata_uint() {\n\n assert_eq!(\n\n \"0xb3de648b0000000000000000000000000000000000000000000000000000000000000001\",\n\n Cast::calldata(\"f(uint a)\", &[\"1\"]).unwrap().as_str()\n\n );\n\n }\n\n\n\n #[test]\n\n fn calldata_bool() {\n\n assert_eq!(\n\n \"0x6fae94120000000000000000000000000000000000000000000000000000000000000000\",\n\n Cast::calldata(\"bar(bool)\", &[\"false\"]).unwrap().as_str()\n\n );\n\n }\n\n}\n", "file_path": "cast/src/lib.rs", "rank": 27, "score": 89814.40084066473 }, { "content": "fn unwrap_or_stdin<T>(what: Option<T>) -> eyre::Result<T>\n\nwhere\n\n T: FromStr + Send + Sync,\n\n T::Err: Send + Sync + std::error::Error + 'static,\n\n{\n\n Ok(match what {\n\n Some(what) => what,\n\n None => {\n\n let input = std::io::stdin();\n\n let mut what = String::new();\n\n input.read_line(&mut what)?;\n\n T::from_str(&what.replace(\"\\n\", \"\"))?\n\n }\n\n })\n\n}\n\n\n\nasync fn cast_send<M: Middleware, F: Into<NameOrAddress>, T: Into<NameOrAddress>>(\n\n provider: M,\n\n from: F,\n\n to: T,\n", "file_path": "cli/src/cast.rs", "rank": 28, "score": 88288.70047137805 }, { "content": "/// Request variants that are executed by the provider\n\nenum ProviderRequest<Err> {\n\n Account(AccountFuture<Err>),\n\n Storage(StorageFuture<Err>),\n\n}\n\n\n\n/// The Request type the Backend listens for\n", "file_path": "evm-adapters/src/sputnik/forked_backend/cache.rs", "rank": 29, "score": 88110.45652081851 }, { "content": "#![doc = include_str!(\"../README.md\")]\n\nuse ethers_core::{\n\n abi::{\n\n self, parse_abi,\n\n token::{LenientTokenizer, StrictTokenizer, Tokenizer},\n\n AbiParser, Function, ParamType, Token,\n\n },\n\n types::*,\n\n};\n\nuse eyre::{Result, WrapErr};\n\n\n\nconst BASE_TX_COST: u64 = 21000;\n\n\n\n/// Helper trait for converting types to Functions. 
Helpful for allowing the `call`\n\n/// function on the EVM to be generic over `String`, `&str` and `Function`.\n", "file_path": "utils/src/lib.rs", "rank": 30, "score": 86235.66604965612 }, { "content": " let cfg = Config::istanbul();\n\n let compiled = COMPILED.find(\"GreeterTest\").expect(\"could not find contract\");\n\n let vicinity = new_vicinity();\n\n let backend = new_backend(&vicinity, Default::default());\n\n\n\n let precompiles = PRECOMPILES_MAP.clone();\n\n let mut evm = Executor::new(12_000_000, &cfg, &backend, &precompiles);\n\n let (addr, _, _, _) =\n\n evm.deploy(Address::zero(), compiled.bin.unwrap().clone(), 0.into()).unwrap();\n\n\n\n let mut runner =\n\n ContractRunner::new(&mut evm, compiled.abi.as_ref().unwrap(), addr, None, &[]);\n\n\n\n let mut cfg = FuzzConfig::default();\n\n cfg.failure_persistence = None;\n\n let mut fuzzer = TestRunner::new(cfg);\n\n let results = runner\n\n .run_tests(&Regex::from_str(\"testFuzz.*\").unwrap(), Some(&mut fuzzer))\n\n .unwrap();\n\n for (_, res) in results {\n", "file_path": "forge/src/runner.rs", "rank": 31, "score": 85270.81856800966 }, { "content": " Self {\n\n evm,\n\n contract,\n\n address,\n\n init_logs,\n\n state: PhantomData,\n\n sender: sender.unwrap_or_default(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, S: Clone, E: Evm<S>> ContractRunner<'a, S, E> {\n\n /// Runs all tests for a contract whose names match the provided regular expression\n\n pub fn run_tests(\n\n &mut self,\n\n regex: &Regex,\n\n fuzzer: Option<&mut TestRunner>,\n\n ) -> Result<HashMap<String, TestResult>> {\n\n tracing::info!(\"starting tests\");\n\n let start = Instant::now();\n", "file_path": "forge/src/runner.rs", "rank": 32, "score": 85270.43647269398 }, { "content": " assert!(!res.success);\n\n assert!(res.counterexample.is_some());\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_fuzzing_ok() {\n\n let cfg = Config::istanbul();\n\n let compiled = COMPILED.find(\"GreeterTest\").expect(\"could not find contract\");\n\n let vicinity = new_vicinity();\n\n let backend = new_backend(&vicinity, Default::default());\n\n\n\n let precompiles = PRECOMPILES_MAP.clone();\n\n let mut evm = Executor::new(u64::MAX, &cfg, &backend, &precompiles);\n\n let (addr, _, _, _) =\n\n evm.deploy(Address::zero(), compiled.bin.unwrap().clone(), 0.into()).unwrap();\n\n\n\n let mut runner =\n\n ContractRunner::new(&mut evm, compiled.abi.as_ref().unwrap(), addr, None, &[]);\n\n\n", "file_path": "forge/src/runner.rs", "rank": 33, "score": 85266.31519645562 }, { "content": " let mut evm = Executor::new(12_000_000, &cfg, &backend, &precompiles);\n\n let (addr, _, _, _) =\n\n evm.deploy(Address::zero(), compiled.bin.unwrap().clone(), 0.into()).unwrap();\n\n\n\n let mut runner =\n\n ContractRunner::new(&mut evm, compiled.abi.as_ref().unwrap(), addr, None, &[]);\n\n\n\n let mut cfg = FuzzConfig::default();\n\n cfg.failure_persistence = None;\n\n let mut fuzzer = TestRunner::new(cfg);\n\n let results = runner\n\n .run_tests(&Regex::from_str(\"testGreeting\").unwrap(), Some(&mut fuzzer))\n\n .unwrap();\n\n assert!(results[\"testGreeting()\"].success);\n\n assert!(results[\"testGreeting(string)\"].success);\n\n assert!(results[\"testGreeting(string,string)\"].success);\n\n }\n\n\n\n #[test]\n\n fn test_fuzzing_counterexamples() {\n", "file_path": "forge/src/runner.rs", "rank": 34, "score": 85265.58896743589 }, { "content": " let mut cfg = FuzzConfig::default();\n\n cfg.failure_persistence = None;\n\n let fuzzer = TestRunner::new(cfg);\n\n let func = get_func(\"testStringFuzz(string)\").unwrap();\n\n let 
res = runner.run_fuzz_test(&func, true, fuzzer.clone()).unwrap();\n\n assert!(res.success);\n\n assert!(res.counterexample.is_none());\n\n }\n\n\n\n #[test]\n\n fn test_fuzz_shrinking() {\n\n let cfg = Config::istanbul();\n\n let compiled = COMPILED.find(\"GreeterTest\").expect(\"could not find contract\");\n\n let vicinity = new_vicinity();\n\n let backend = new_backend(&vicinity, Default::default());\n\n\n\n let precompiles = PRECOMPILES_MAP.clone();\n\n let mut evm = Executor::new(12_000_000, &cfg, &backend, &precompiles);\n\n let (addr, _, _, _) =\n\n evm.deploy(Address::zero(), compiled.bin.unwrap().clone(), 0.into()).unwrap();\n", "file_path": "forge/src/runner.rs", "rank": 35, "score": 85264.97474033645 }, { "content": " let needs_setup = self.contract.functions().any(|func| func.name == \"setUp\");\n\n let test_fns = self\n\n .contract\n\n .functions()\n\n .into_iter()\n\n .filter(|func| func.name.starts_with(\"test\"))\n\n .filter(|func| regex.is_match(&func.name))\n\n .collect::<Vec<_>>();\n\n\n\n let init_state = self.evm.state().clone();\n\n\n\n // run all unit tests\n\n let unit_tests = test_fns\n\n .iter()\n\n .filter(|func| func.inputs.is_empty())\n\n .map(|func| {\n\n // Before each test run executes, ensure we're at our initial state.\n\n self.evm.reset(init_state.clone());\n\n let result = self.run_test(func, needs_setup)?;\n\n Ok((func.signature(), result))\n", "file_path": "forge/src/runner.rs", "rank": 36, "score": 85261.15826828884 }, { "content": "\n\n #[test]\n\n #[ignore]\n\n fn test_runner() {\n\n let revision = Revision::Istanbul;\n\n let compiled = COMPILED.find(\"GreeterTest\").expect(\"could not find contract\");\n\n\n\n let host = MockedHost::default();\n\n\n\n let gas_limit = 12_000_000;\n\n let evm = EvmOdin::new(host, gas_limit, revision, NoopTracer);\n\n super::test_runner(evm, compiled);\n\n }\n\n }\n\n\n\n pub fn test_runner<S: Clone, E: Evm<S>>(mut evm: E, compiled: CompactContractRef) {\n\n let (addr, _, _, _) =\n\n evm.deploy(Address::zero(), compiled.bin.unwrap().clone(), 0.into()).unwrap();\n\n\n\n let mut runner =\n\n ContractRunner::new(&mut evm, compiled.abi.as_ref().unwrap(), addr, None, &[]);\n\n\n\n let res = runner.run_tests(&\".*\".parse().unwrap(), None).unwrap();\n\n assert!(!res.is_empty());\n\n assert!(res.iter().all(|(_, result)| result.success));\n\n }\n\n}\n", "file_path": "forge/src/runner.rs", "rank": 37, "score": 85260.36173721115 }, { "content": " cfg.failure_persistence = None;\n\n // we reduce the shrinking iters and observe a larger result\n\n cfg.max_shrink_iters = 5;\n\n let fuzzer = TestRunner::new(cfg);\n\n let res = runner.run_fuzz_test(&func, true, fuzzer).unwrap();\n\n assert!(!res.success);\n\n\n\n // get the non-shrunk result\n\n let counterexample = res.counterexample.unwrap();\n\n let args =\n\n counterexample.args.into_iter().map(|x| x.into_uint().unwrap()).collect::<Vec<_>>();\n\n let product_without_shrinking = args[0].saturating_mul(args[1]);\n\n assert!(product_without_shrinking > product_with_shrinking.into());\n\n }\n\n }\n\n\n\n mod evmodin {\n\n use super::*;\n\n use ::evmodin::{tracing::NoopTracer, util::mocked_host::MockedHost, Revision};\n\n use evm_adapters::evmodin::EvmOdin;\n", "file_path": "forge/src/runner.rs", "rank": 38, "score": 85258.41324281754 }, { "content": "\n\n #[test]\n\n fn test_runner() {\n\n let cfg = Config::istanbul();\n\n let compiled = COMPILED.find(\"GreeterTest\").expect(\"could not find contract\");\n\n let vicinity = new_vicinity();\n\n let backend = new_backend(&vicinity, 
Default::default());\n\n let precompiles = PRECOMPILES_MAP.clone();\n\n let evm = Executor::new(12_000_000, &cfg, &backend, &precompiles);\n\n super::test_runner(evm, compiled);\n\n }\n\n\n\n #[test]\n\n fn test_function_overriding() {\n\n let cfg = Config::istanbul();\n\n let compiled = COMPILED.find(\"GreeterTest\").expect(\"could not find contract\");\n\n let vicinity = new_vicinity();\n\n let backend = new_backend(&vicinity, Default::default());\n\n\n\n let precompiles = PRECOMPILES_MAP.clone();\n", "file_path": "forge/src/runner.rs", "rank": 39, "score": 85256.84477457519 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::test_helpers::COMPILED;\n\n use ethers::solc::artifacts::CompactContractRef;\n\n use evm::Config;\n\n use evm_adapters::sputnik::PRECOMPILES_MAP;\n\n\n\n mod sputnik {\n\n use std::str::FromStr;\n\n\n\n use evm_adapters::sputnik::{\n\n helpers::{new_backend, new_vicinity},\n\n Executor,\n\n };\n\n use foundry_utils::get_func;\n\n use proptest::test_runner::Config as FuzzConfig;\n\n\n\n use super::*;\n", "file_path": "forge/src/runner.rs", "rank": 40, "score": 85256.03256525975 }, { "content": " #[tracing::instrument(name = \"fuzz-test\", skip_all, fields(name = %func.signature()))]\n\n pub fn run_fuzz_test(\n\n &mut self,\n\n func: &Function,\n\n setup: bool,\n\n runner: TestRunner,\n\n ) -> Result<TestResult> {\n\n let start = Instant::now();\n\n let should_fail = func.name.starts_with(\"testFail\");\n\n tracing::debug!(func = ?func.signature(), should_fail, \"fuzzing\");\n\n\n\n // call the setup function in each test to reset the test's state.\n\n if setup {\n\n self.evm.setup(self.address)?;\n\n }\n\n\n\n // instantiate the fuzzed evm in line\n\n let evm = FuzzedExecutor::new(self.evm, runner, self.sender);\n\n let result = evm.fuzz(func, self.address, should_fail);\n\n\n", "file_path": "forge/src/runner.rs", "rank": 41, "score": 85255.7354654415 }, { "content": "\n\n let mut runner =\n\n ContractRunner::new(&mut evm, compiled.abi.as_ref().unwrap(), addr, None, &[]);\n\n\n\n let mut cfg = FuzzConfig::default();\n\n cfg.failure_persistence = None;\n\n let fuzzer = TestRunner::new(cfg);\n\n let func = get_func(\"function testShrinking(uint256 x, uint256 y) public\").unwrap();\n\n let res = runner.run_fuzz_test(&func, true, fuzzer.clone()).unwrap();\n\n assert!(!res.success);\n\n\n\n // get the counterexample with shrinking enabled by default\n\n let counterexample = res.counterexample.unwrap();\n\n let product_with_shrinking: u64 =\n\n // casting to u64 here is safe because the shrunk result is always gonna be small\n\n // enough to fit in a u64, whereas as seen below, that's not possible without\n\n // shrinking\n\n counterexample.args.into_iter().map(|x| x.into_uint().unwrap().as_u64()).product();\n\n\n\n let mut cfg = FuzzConfig::default();\n", "file_path": "forge/src/runner.rs", "rank": 42, "score": 85254.30574732643 }, { "content": " (status, None, gas_used, logs)\n\n }\n\n Err(err) => match err {\n\n EvmError::Execution { reason, gas_used, logs: execution_logs } => {\n\n logs.extend(execution_logs);\n\n (E::revert(), Some(reason), gas_used, logs)\n\n }\n\n err => {\n\n tracing::error!(?err);\n\n return Err(err.into())\n\n }\n\n },\n\n };\n\n let success = self.evm.check_success(self.address, &status, should_fail);\n\n let duration = Instant::now().duration_since(start);\n\n tracing::debug!(?duration, %success, %gas_used);\n\n\n\n Ok(TestResult { success, reason, gas_used: Some(gas_used), counterexample: None, logs })\n\n 
}\n\n\n", "file_path": "forge/src/runner.rs", "rank": 43, "score": 85252.85879516024 }, { "content": " let (success, counterexample) = match result {\n\n Ok(_) => (true, None),\n\n Err(TestError::Fail(_, value)) => {\n\n // skip the function selector when decoding\n\n let args = func.decode_input(&value.as_ref()[4..])?;\n\n let counterexample = CounterExample { calldata: value.clone(), args };\n\n tracing::info!(\"Found minimal failing case: {}\", hex::encode(&value));\n\n (false, Some(counterexample))\n\n }\n\n result => panic!(\"Unexpected result: {:?}\", result),\n\n };\n\n\n\n let duration = Instant::now().duration_since(start);\n\n tracing::debug!(?duration, %success);\n\n\n\n // TODO: How can we have proptest also return us the gas_used and the revert reason\n\n // from that call?\n\n Ok(TestResult { success, reason: None, gas_used: None, counterexample, logs: vec![] })\n\n }\n\n}\n", "file_path": "forge/src/runner.rs", "rank": 44, "score": 85251.29122455085 }, { "content": " if !map.is_empty() {\n\n let successful = map.iter().filter(|(_, tst)| tst.success).count();\n\n let duration = Instant::now().duration_since(start);\n\n tracing::info!(?duration, \"done. {}/{} successful\", successful, map.len());\n\n }\n\n Ok(map)\n\n }\n\n\n\n #[tracing::instrument(name = \"test\", skip_all, fields(name = %func.signature()))]\n\n pub fn run_test(&mut self, func: &Function, setup: bool) -> Result<TestResult> {\n\n let start = Instant::now();\n\n // the expected result depends on the function name\n\n // DAppTools' ds-test will not revert inside its `assertEq`-like functions\n\n // which allows to test multiple assertions in 1 test function while also\n\n // preserving logs.\n\n let should_fail = func.name.starts_with(\"testFail\");\n\n tracing::debug!(func = ?func.signature(), should_fail, \"unit-testing\");\n\n\n\n let mut logs = self.init_logs.to_vec();\n\n\n", "file_path": "forge/src/runner.rs", "rank": 45, "score": 85251.096053949 }, { "content": "use ethers::{\n\n abi::{Abi, Function, Token},\n\n types::{Address, Bytes},\n\n};\n\n\n\nuse evm_adapters::{fuzz::FuzzedExecutor, Evm, EvmError};\n\n\n\nuse eyre::{Context, Result};\n\nuse regex::Regex;\n\nuse std::{collections::HashMap, fmt, time::Instant};\n\n\n\nuse proptest::test_runner::{TestError, TestRunner};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct CounterExample {\n\n pub calldata: Bytes,\n\n // Token does not implement Serde (lol), so we just serialize the calldata\n\n #[serde(skip)]\n\n pub args: Vec<Token>,\n", "file_path": "forge/src/runner.rs", "rank": 46, "score": 85249.24628556322 }, { "content": " /// The deployed contract's ABI\n\n pub contract: &'a Abi,\n\n /// The deployed contract's address\n\n pub address: Address,\n\n /// The address which will be used as the `from` field in all EVM calls\n\n pub sender: Address,\n\n /// Any logs emitted in the constructor of the specific contract\n\n pub init_logs: &'a [String],\n\n // need to constrain the trait generic\n\n state: PhantomData<S>,\n\n}\n\n\n\nimpl<'a, S, E> ContractRunner<'a, S, E> {\n\n pub fn new(\n\n evm: &'a mut E,\n\n contract: &'a Abi,\n\n address: Address,\n\n sender: Option<Address>,\n\n init_logs: &'a [String],\n\n ) -> Self {\n", "file_path": "forge/src/runner.rs", "rank": 47, "score": 85247.71130721063 }, { "content": " })\n\n .collect::<Result<HashMap<_, _>>>()?;\n\n\n\n let map = if let Some(fuzzer) = fuzzer {\n\n let fuzz_tests = test_fns\n\n .iter()\n\n .filter(|func| 
!func.inputs.is_empty())\n\n .map(|func| {\n\n let result = self.run_fuzz_test(func, needs_setup, fuzzer.clone())?;\n\n Ok((func.signature(), result))\n\n })\n\n .collect::<Result<HashMap<_, _>>>()?;\n\n\n\n let mut map = unit_tests;\n\n map.extend(fuzz_tests);\n\n map\n\n } else {\n\n unit_tests\n\n };\n\n\n", "file_path": "forge/src/runner.rs", "rank": 48, "score": 85245.91440110798 }, { "content": " pub gas_used: Option<u64>,\n\n\n\n /// Minimal reproduction test case for failing fuzz tests\n\n pub counterexample: Option<CounterExample>,\n\n\n\n /// Any captured & parsed as strings logs along the test's execution which should\n\n /// be printed to the user.\n\n pub logs: Vec<String>,\n\n}\n\n\n\nuse std::marker::PhantomData;\n\n\n\npub struct ContractRunner<'a, S, E> {\n\n /// Mutable reference to the EVM type.\n\n /// This is a temporary hack to work around the mutability restrictions of\n\n /// [`proptest::TestRunner::run`] which takes a `Fn` preventing interior mutability. [See also](https://github.com/gakonst/dapptools-rs/pull/44).\n\n /// Wrapping it like that allows the `test` function to gain mutable access regardless and\n\n /// since we don't use any parallelized fuzzing yet the `test` function has exclusive access of\n\n /// the mutable reference over time of its existence.\n\n pub evm: &'a mut E,\n", "file_path": "forge/src/runner.rs", "rank": 49, "score": 85245.18165309819 }, { "content": " // call the setup function in each test to reset the test's state.\n\n if setup {\n\n tracing::trace!(\"setting up\");\n\n let setup_logs = self\n\n .evm\n\n .setup(self.address)\n\n .wrap_err(format!(\"could not setup during {} test\", func.signature()))?\n\n .1;\n\n logs.extend_from_slice(&setup_logs);\n\n }\n\n\n\n let (status, reason, gas_used, logs) = match self.evm.call::<(), _, _>(\n\n self.sender,\n\n self.address,\n\n func.clone(),\n\n (),\n\n 0.into(),\n\n ) {\n\n Ok((_, status, gas_used, execution_logs)) => {\n\n logs.extend(execution_logs);\n", "file_path": "forge/src/runner.rs", "rank": 50, "score": 85245.11812112055 }, { "content": "}\n\n\n\nimpl fmt::Display for CounterExample {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"calldata=0x{}, args={:?}\", hex::encode(&self.calldata), self.args)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct TestResult {\n\n /// Whether the test case was successful. This means that the transaction executed\n\n /// properly, or that there was a revert and that the test was expected to fail\n\n /// (prefixed with `testFail`)\n\n pub success: bool,\n\n\n\n /// If there was a revert, this field will be populated. Note that the test can\n\n /// still be successful (i.e self.success == true) when it's expected to fail.\n\n pub reason: Option<String>,\n\n\n\n /// The gas used during execution\n", "file_path": "forge/src/runner.rs", "rank": 51, "score": 85240.46430485982 }, { "content": "mod runner;\n\npub use runner::{ContractRunner, TestResult};\n\n\n\nmod multi_runner;\n\npub use multi_runner::{MultiContractRunner, MultiContractRunnerBuilder};\n\n\n\nuse ethers::abi;\n\nuse eyre::Result;\n\n\n", "file_path": "forge/src/lib.rs", "rank": 52, "score": 84968.50415376764 }, { "content": "//! 
Verify contract source on etherscan\n\n\n\nuse crate::utils;\n\n\n\nuse cast::SimpleCast;\n\nuse ethers::{\n\n abi::{Address, Function, FunctionExt},\n\n core::types::Chain,\n\n prelude::Provider,\n\n providers::Middleware,\n\n};\n\nuse ethers_etherscan::{contract::VerifyContract, Client};\n\nuse eyre::ContextCompat;\n\nuse std::convert::TryFrom;\n\n\n\n/// Run the verify command to submit the contract's source code for verification on etherscan\n\npub async fn run(\n\n path: String,\n\n name: String,\n\n address: Address,\n", "file_path": "cli/src/cmd/verify.rs", "rank": 53, "score": 83251.69641380665 }, { "content": " .as_u64();\n\n\n\n let contract = utils::find_dapp_json_contract(&path, &name)?;\n\n std::fs::write(\"meta.json\", serde_json::to_string_pretty(&contract).unwrap()).unwrap();\n\n let metadata = contract.metadata.wrap_err(\"No compiler version found\")?;\n\n let compiler_version = format!(\"v{}\", metadata.compiler.version);\n\n let mut constructor_args = None;\n\n if let Some(constructor) = contract.abi.unwrap().constructor {\n\n // convert constructor into function\n\n #[allow(deprecated)]\n\n let fun = Function {\n\n name: \"constructor\".to_string(),\n\n inputs: constructor.inputs,\n\n outputs: vec![],\n\n constant: false,\n\n state_mutability: Default::default(),\n\n };\n\n\n\n constructor_args = Some(SimpleCast::calldata(fun.abi_signature(), &args)?);\n\n } else if !args.is_empty() {\n", "file_path": "cli/src/cmd/verify.rs", "rank": 54, "score": 83246.72363247641 }, { "content": " eyre::bail!(\"No constructor found but contract arguments provided\")\n\n }\n\n\n\n let chain = match chain {\n\n 1 => Chain::Mainnet,\n\n 3 => Chain::Ropsten,\n\n 4 => Chain::Rinkeby,\n\n 5 => Chain::Goerli,\n\n 42 => Chain::Kovan,\n\n 100 => Chain::XDai,\n\n _ => eyre::bail!(\"unexpected chain {}\", chain),\n\n };\n\n let etherscan = Client::new(chain, etherscan_api_key)\n\n .map_err(|err| eyre::eyre!(\"Failed to create etherscan client: {}\", err))?;\n\n\n\n let source = std::fs::read_to_string(&path)?;\n\n\n\n let contract = VerifyContract::new(address, source, compiler_version)\n\n .constructor_arguments(constructor_args)\n\n .optimization(metadata.settings.optimizer.enabled.unwrap_or_default())\n", "file_path": "cli/src/cmd/verify.rs", "rank": 55, "score": 83242.44017535284 }, { "content": " .runs(metadata.settings.optimizer.runs.unwrap_or_default() as u32);\n\n\n\n let resp = etherscan\n\n .submit_contract_verification(&contract)\n\n .await\n\n .map_err(|err| eyre::eyre!(\"Failed to submit contract verification: {}\", err))?;\n\n\n\n if resp.status == \"0\" {\n\n if resp.message == \"Contract source code already verified\" {\n\n println!(\"Contract source code already verified.\");\n\n Ok(())\n\n } else {\n\n eyre::bail!(\n\n \"Encountered an error verifying this contract:\\nResponse: `{}`\\nDetails: `{}`\",\n\n resp.message,\n\n resp.result\n\n );\n\n }\n\n } else {\n\n println!(\n", "file_path": "cli/src/cmd/verify.rs", "rank": 56, "score": 83240.80579572354 }, { "content": " r#\"Submitted contract for verification:\n\n Response: `{}`\n\n GUID: `{}`\n\n url: {}#code\"#,\n\n resp.message,\n\n resp.result,\n\n etherscan.address_url(address)\n\n );\n\n Ok(())\n\n }\n\n}\n", "file_path": "cli/src/cmd/verify.rs", "rank": 57, "score": 83240.34137389086 }, { "content": " args: Vec<String>,\n\n) -> eyre::Result<()> {\n\n let etherscan_api_key = utils::etherscan_api_key()?;\n\n let rpc_url = utils::rpc_url();\n\n let provider = Provider::try_from(rpc_url)?;\n\n let chain = provider\n\n 
.get_chainid()\n\n .await\n\n .map_err(|err| {\n\n eyre::eyre!(\n\n r#\"Please make sure that you are running a local Ethereum node:\n\n For example, try running either `parity' or `geth --rpc'.\n\n You could also try connecting to an external Ethereum node:\n\n For example, try `export ETH_RPC_URL=https://mainnet.infura.io'.\n\n If you have an Infura API key, add it to the end of the URL.\n\n\n\n Error: {}\"#,\n\n err\n\n )\n\n })?\n", "file_path": "cli/src/cmd/verify.rs", "rank": 58, "score": 83235.13032608622 }, { "content": "//! Subcommands for dapp\n\n\n\npub mod verify;\n", "file_path": "cli/src/cmd/mod.rs", "rank": 59, "score": 83032.80804355006 }, { "content": "/// Create a new shareable state cache.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use evm_adapters::sputnik::{MemCache,new_shared_cache};\n\n/// let cache = new_shared_cache(MemCache::default());\n\n/// ```\n\npub fn new_shared_cache<T>(cache: T) -> SharedCache<T> {\n\n Arc::new(RwLock::new(cache))\n\n}\n\n\n", "file_path": "evm-adapters/src/sputnik/forked_backend/cache.rs", "rank": 60, "score": 82497.33825288343 }, { "content": "/// Given a function, it returns a proptest strategy which generates valid abi-encoded calldata\n\n/// for that function's input types.\n\npub fn fuzz_calldata(func: &Function) -> impl Strategy<Value = Bytes> + '_ {\n\n // We need to compose all the strategies generated for each parameter in all\n\n // possible combinations\n\n let strats = func.inputs.iter().map(|input| fuzz_param(&input.kind)).collect::<Vec<_>>();\n\n\n\n strats.prop_map(move |tokens| {\n\n tracing::trace!(input = ?tokens);\n\n func.encode_input(&tokens).unwrap().into()\n\n })\n\n}\n\n\n\n/// The max length of arrays we fuzz for is 256.\n\nconst MAX_ARRAY_LEN: usize = 256;\n\n\n", "file_path": "evm-adapters/src/fuzz.rs", "rank": 61, "score": 81651.01682075675 }, { "content": " address: Address,\n\n init_logs: &[String],\n\n pattern: &Regex,\n\n ) -> Result<HashMap<String, TestResult>> {\n\n let mut runner =\n\n ContractRunner::new(&mut self.evm, contract, address, self.sender, init_logs);\n\n runner.run_tests(pattern, self.fuzzer.as_mut())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use ethers::solc::ProjectPathsConfig;\n\n use std::path::PathBuf;\n\n\n\n fn project() -> Project {\n\n let root = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\")).join(\"testdata\");\n\n\n\n let paths = ProjectPathsConfig::builder().root(&root).sources(&root).build().unwrap();\n", "file_path": "forge/src/multi_runner.rs", "rank": 62, "score": 81475.24471210278 }, { "content": " /// The address which will be used as the `from` field in all EVM calls\n\n sender: Option<Address>,\n\n /// Market type for the EVM state being used\n\n state: PhantomData<S>,\n\n}\n\n\n\nimpl<E, S> MultiContractRunner<E, S>\n\nwhere\n\n E: Evm<S>,\n\n S: Clone,\n\n{\n\n pub fn test(&mut self, pattern: Regex) -> Result<HashMap<String, HashMap<String, TestResult>>> {\n\n // TODO: Convert to iterator, ideally parallel one?\n\n let contracts = std::mem::take(&mut self.contracts);\n\n let results = contracts\n\n .iter()\n\n .map(|(name, (abi, address, logs))| {\n\n let result = self.run_tests(name, abi, *address, logs, &pattern)?;\n\n Ok((name.clone(), result))\n\n })\n", "file_path": "forge/src/multi_runner.rs", "rank": 63, "score": 81461.24936764858 }, { "content": "\n\n let project = Project::builder()\n\n // need to add the ilb path here. would it be better placed in the ProjectPathsConfig\n\n // instead? 
what is the `libs` modifier useful for then? linked libraries?\n\n .allowed_path(root.join(\"../../evm-adapters/testdata\"))\n\n .paths(paths)\n\n .ephemeral()\n\n .no_artifacts()\n\n .build()\n\n .unwrap();\n\n\n\n project\n\n }\n\n\n\n fn runner<S: Clone, E: Evm<S>>(evm: E) -> MultiContractRunner<E, S> {\n\n MultiContractRunnerBuilder::default().build(project(), evm).unwrap()\n\n }\n\n\n\n fn test_multi_runner<S: Clone, E: Evm<S>>(evm: E) {\n\n let mut runner = runner(evm);\n", "file_path": "forge/src/multi_runner.rs", "rank": 64, "score": 81454.30435890944 }, { "content": "#[derive(Debug, Default)]\n\npub struct MultiContractRunnerBuilder {\n\n /// The fuzzer to be used for running fuzz tests\n\n pub fuzzer: Option<TestRunner>,\n\n /// The address which will be used to deploy the initial contracts and send all\n\n /// transactions\n\n pub sender: Option<Address>,\n\n /// The initial balance for each one of the deployed smart contracts\n\n pub initial_balance: U256,\n\n}\n\n\n\nimpl MultiContractRunnerBuilder {\n\n /// Given an EVM, proceeds to return a runner which is able to execute all tests\n\n /// against that evm\n\n pub fn build<A, E, S>(\n\n self,\n\n project: Project<A>,\n\n mut evm: E,\n\n ) -> Result<MultiContractRunner<E, S>>\n\n where\n", "file_path": "forge/src/multi_runner.rs", "rank": 65, "score": 81453.56390179668 }, { "content": "use crate::{runner::TestResult, ContractRunner};\n\nuse evm_adapters::Evm;\n\n\n\nuse ethers::{\n\n abi::Abi,\n\n prelude::ArtifactOutput,\n\n solc::{Artifact, Project},\n\n types::{Address, U256},\n\n};\n\n\n\nuse proptest::test_runner::TestRunner;\n\nuse regex::Regex;\n\n\n\nuse eyre::{Context, Result};\n\nuse std::{\n\n collections::{BTreeMap, HashMap},\n\n marker::PhantomData,\n\n};\n\n\n\n/// Builder used for instantiating the multi-contract runner\n", "file_path": "forge/src/multi_runner.rs", "rank": 66, "score": 81453.27318309285 }, { "content": " use evm_adapters::sputnik::{\n\n helpers::{new_backend, new_vicinity},\n\n Executor, PRECOMPILES_MAP,\n\n };\n\n\n\n #[test]\n\n fn test_sputnik_debug_logs() {\n\n let config = Config::istanbul();\n\n let gas_limit = 12_500_000;\n\n let env = new_vicinity();\n\n let backend = new_backend(&env, Default::default());\n\n // important to instantiate the VM with cheatcodes\n\n let precompiles = PRECOMPILES_MAP.clone();\n\n let evm =\n\n Executor::new_with_cheatcodes(backend, gas_limit, &config, &precompiles, false);\n\n\n\n let mut runner = runner(evm);\n\n let results = runner.test(Regex::new(\".*\").unwrap()).unwrap();\n\n\n\n let reasons = results[\"DebugLogsTest\"]\n", "file_path": "forge/src/multi_runner.rs", "rank": 67, "score": 81452.17056494832 }, { "content": " .iter()\n\n .map(|(name, res)| (name, res.logs.clone()))\n\n .collect::<HashMap<_, _>>();\n\n dbg!(&reasons);\n\n assert_eq!(\n\n reasons[&\"test1()\".to_owned()],\n\n vec![\"constructor\".to_owned(), \"setUp\".to_owned(), \"one\".to_owned()]\n\n );\n\n assert_eq!(\n\n reasons[&\"test2()\".to_owned()],\n\n vec![\"constructor\".to_owned(), \"setUp\".to_owned(), \"two\".to_owned()]\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_sputnik_multi_runner() {\n\n let config = Config::istanbul();\n\n let gas_limit = 12_500_000;\n\n let env = new_vicinity();\n\n let backend = new_backend(&env, Default::default());\n\n let precompiles = PRECOMPILES_MAP.clone();\n\n let evm = Executor::new(gas_limit, &config, &backend, &precompiles);\n\n test_multi_runner(evm);\n\n }\n\n }\n\n\n\n // TODO: Add EvmOdin tests once we get the Mocked Host working\n\n}\n", 
"file_path": "forge/src/multi_runner.rs", "rank": 68, "score": 81449.61605396795 }, { "content": " self.initial_balance = initial_balance;\n\n self\n\n }\n\n\n\n pub fn fuzzer(mut self, fuzzer: TestRunner) -> Self {\n\n self.fuzzer = Some(fuzzer);\n\n self\n\n }\n\n}\n\n\n\n/// A multi contract runner receives a set of contracts deployed in an EVM instance and proceeds\n\n/// to run all test functions in these contracts.\n\npub struct MultiContractRunner<E, S> {\n\n /// Mapping of contract name to compiled bytecode, deployed address and logs emitted during\n\n /// deployment\n\n contracts: BTreeMap<String, (Abi, Address, Vec<String>)>,\n\n /// The EVM instance used in the test runner\n\n evm: E,\n\n /// The fuzzer which will be used to run parametric tests (w/ non-0 solidity args)\n\n fuzzer: Option<TestRunner>,\n", "file_path": "forge/src/multi_runner.rs", "rank": 69, "score": 81447.99749929272 }, { "content": " evm.set_balance(addr, initial_balance);\n\n Ok((name, (abi, addr, logs)))\n\n })\n\n .collect::<Result<BTreeMap<_, _>>>()?;\n\n\n\n Ok(MultiContractRunner {\n\n contracts,\n\n evm,\n\n state: PhantomData,\n\n sender: self.sender,\n\n fuzzer: self.fuzzer,\n\n })\n\n }\n\n\n\n pub fn sender(mut self, sender: Address) -> Self {\n\n self.sender = Some(sender);\n\n self\n\n }\n\n\n\n pub fn initial_balance(mut self, initial_balance: U256) -> Self {\n", "file_path": "forge/src/multi_runner.rs", "rank": 70, "score": 81447.29073342001 }, { "content": " // TODO: Can we remove the static? It's due to the `into_artifacts()` call below\n\n A: ArtifactOutput + 'static,\n\n E: Evm<S>,\n\n {\n\n let output = project.compile()?;\n\n if output.has_compiler_errors() {\n\n // return the diagnostics error back to the user.\n\n eyre::bail!(output.to_string())\n\n } else if output.is_unchanged() {\n\n println!(\"no files changed, compilation skippped.\");\n\n } else {\n\n println!(\"success.\");\n\n }\n\n\n\n let sender = self.sender.unwrap_or_default();\n\n let initial_balance = self.initial_balance;\n\n\n\n // This is just the contracts compiled, but we need to merge this with the read cached\n\n // artifacts\n\n let contracts = output.into_artifacts();\n", "file_path": "forge/src/multi_runner.rs", "rank": 71, "score": 81446.6817445426 }, { "content": " let results = runner.test(Regex::new(\".*\").unwrap()).unwrap();\n\n\n\n // 6 contracts being built\n\n assert_eq!(results.keys().len(), 5);\n\n for (_, contract_tests) in results {\n\n assert_ne!(contract_tests.keys().len(), 0);\n\n assert!(contract_tests.iter().all(|(_, result)| result.success));\n\n }\n\n\n\n // can also filter\n\n let only_gm = runner.test(Regex::new(\"testGm.*\").unwrap()).unwrap();\n\n assert_eq!(only_gm.len(), 1);\n\n\n\n assert_eq!(only_gm[\"GmTest\"].len(), 1);\n\n assert!(only_gm[\"GmTest\"][\"testGm()\"].success);\n\n }\n\n\n\n mod sputnik {\n\n use super::*;\n\n use evm::Config;\n", "file_path": "forge/src/multi_runner.rs", "rank": 72, "score": 81440.84702247738 }, { "content": " .filter_map(|x: Result<_>| x.ok())\n\n .filter_map(|(name, res)| if res.is_empty() { None } else { Some((name, res)) })\n\n .collect::<HashMap<_, _>>();\n\n\n\n self.contracts = contracts;\n\n\n\n Ok(results)\n\n }\n\n\n\n // The _name field is unused because we only want it for tracing\n\n #[tracing::instrument(\n\n name = \"contract\",\n\n skip_all,\n\n err,\n\n fields(name = %_name)\n\n )]\n\n fn run_tests(\n\n &mut self,\n\n _name: &str,\n\n contract: &Abi,\n", "file_path": "forge/src/multi_runner.rs", "rank": 73, "score": 81438.9618436903 }, 
{ "content": " let contracts: BTreeMap<String, (Abi, Address, Vec<String>)> = contracts\n\n .map(|(fname, contract)| {\n\n let (abi, bytecode) = contract.into_inner();\n\n (fname, abi.unwrap(), bytecode.unwrap())\n\n })\n\n // Only take contracts with empty constructors.\n\n .filter(|(_, abi, _)| {\n\n abi.constructor.as_ref().map(|c| c.inputs.is_empty()).unwrap_or(true)\n\n })\n\n // Only take contracts which contain a `test` function\n\n .filter(|(_, abi, _)| abi.functions().any(|func| func.name.starts_with(\"test\")))\n\n // deploy the contracts\n\n .map(|(name, abi, bytecode)| {\n\n let span = tracing::trace_span!(\"deploying\", ?name);\n\n let _enter = span.enter();\n\n\n\n let (addr, _, _, logs) = evm\n\n .deploy(sender, bytecode, 0.into())\n\n .wrap_err(format!(\"could not deploy {}\", name))?;\n\n\n", "file_path": "forge/src/multi_runner.rs", "rank": 74, "score": 81431.22065523337 }, { "content": "\n\n #[structopt(\n\n help = \"if set to true, the process will exit with an exit code = 0, even if the tests fail\",\n\n long,\n\n env = \"FORGE_ALLOW_FAILURE\"\n\n )]\n\n allow_failure: bool,\n\n },\n\n\n\n #[structopt(about = \"build your smart contracts\")]\n\n #[structopt(alias = \"b\")]\n\n Build {\n\n #[structopt(flatten)]\n\n opts: BuildOpts,\n\n },\n\n\n\n #[structopt(alias = \"u\", about = \"fetches all upstream lib changes\")]\n\n Update {\n\n #[structopt(\n\n help = \"the submodule name of the library you want to update (will update all if none is provided)\"\n", "file_path": "cli/src/forge_opts.rs", "rank": 75, "score": 81237.0554373659 }, { "content": " )]\n\n lib: Option<PathBuf>,\n\n },\n\n\n\n #[structopt(alias = \"i\", about = \"installs one or more dependencies as git submodules\")]\n\n Install {\n\n #[structopt(\n\n help = \"the submodule name of the library you want to update (will update all if none is provided)\"\n\n )]\n\n dependencies: Vec<Dependency>,\n\n },\n\n\n\n #[structopt(\n\n alias = \"r\",\n\n about = \"prints the automatically inferred remappings for this repository\"\n\n )]\n\n Remappings {\n\n #[structopt(help = \"the project's root path, default being the current directory\", long)]\n\n root: Option<PathBuf>,\n\n #[structopt(help = \"the paths where your libraries are installed\", long)]\n", "file_path": "cli/src/forge_opts.rs", "rank": 76, "score": 81234.24078062529 }, { "content": " root.join(artifacts)\n\n } else {\n\n root.join(\"out\")\n\n };\n\n\n\n // 4. 
Set where the libraries are going to be read from\n\n // default to the lib path being the `lib/` dir\n\n let lib_paths =\n\n if opts.lib_paths.is_empty() { vec![root.join(\"lib\")] } else { opts.lib_paths.clone() };\n\n\n\n // get all the remappings corresponding to the lib paths\n\n let mut remappings: Vec<_> =\n\n lib_paths.iter().map(|path| Remapping::find_many(&path).unwrap()).flatten().collect();\n\n\n\n // extend them with the once manually provided in the opts\n\n remappings.extend_from_slice(&opts.remappings);\n\n\n\n // extend them with the one via the env vars\n\n if let Some(ref env) = opts.remappings_env {\n\n remappings.extend(remappings_from_newline(env))\n", "file_path": "cli/src/forge_opts.rs", "rank": 77, "score": 81234.19823749767 }, { "content": "use structopt::StructOpt;\n\n\n\nuse ethers::{\n\n solc::{remappings::Remapping, Project, ProjectPathsConfig},\n\n types::{Address, U256},\n\n};\n\nuse std::{path::PathBuf, str::FromStr};\n\n\n\n#[derive(Debug, StructOpt)]\n\npub struct Opts {\n\n #[structopt(subcommand)]\n\n pub sub: Subcommands,\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"forge\")]\n\n#[structopt(about = \"Build, test, fuzz, formally verify, debug & deploy solidity contracts.\")]\n\n#[allow(clippy::large_enum_variant)]\n\npub enum Subcommands {\n\n #[structopt(about = \"test your smart contracts\")]\n", "file_path": "cli/src/forge_opts.rs", "rank": 78, "score": 81233.46015448273 }, { "content": " #[structopt(alias = \"t\")]\n\n Test {\n\n #[structopt(help = \"print the test results in json format\", long, short)]\n\n json: bool,\n\n\n\n #[structopt(flatten)]\n\n env: Env,\n\n\n\n #[structopt(\n\n long = \"--match\",\n\n short = \"-m\",\n\n help = \"only run test methods matching regex\",\n\n default_value = \".*\"\n\n )]\n\n pattern: regex::Regex,\n\n\n\n #[structopt(flatten)]\n\n opts: BuildOpts,\n\n\n\n #[structopt(\n", "file_path": "cli/src/forge_opts.rs", "rank": 79, "score": 81231.80074041986 }, { "content": " let mut paths_builder =\n\n ProjectPathsConfig::builder().root(&root).sources(contracts).artifacts(artifacts);\n\n\n\n if !remappings.is_empty() {\n\n paths_builder = paths_builder.remappings(remappings);\n\n }\n\n\n\n let paths = paths_builder.build()?;\n\n\n\n // build the project w/ allowed paths = root and all the libs\n\n let mut builder =\n\n Project::builder().paths(paths).allowed_path(&root).allowed_paths(lib_paths);\n\n\n\n if opts.no_auto_detect {\n\n builder = builder.no_auto_detect();\n\n }\n\n\n\n let project = builder.build()?;\n\n\n\n // if `--force` is provided, it proceeds to remove the cache\n", "file_path": "cli/src/forge_opts.rs", "rank": 80, "score": 81231.02012647495 }, { "content": " split.next().ok_or_else(|| eyre::eyre!(\"no dependency path was provided\"))?.to_string();\n\n let name = url\n\n .split('/')\n\n .last()\n\n .ok_or_else(|| eyre::eyre!(\"no dependency name found\"))?\n\n .to_string();\n\n let tag = split.next().map(ToString::to_string);\n\n\n\n Ok(Dependency { name, url, tag })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parses_dependencies() {\n\n [\n\n (\"gakonst/lootloose\", \"https://github.com/gakonst/lootloose\", None),\n", "file_path": "cli/src/forge_opts.rs", "rank": 81, "score": 81229.79632414317 }, { "content": "}\n\n\n\nimpl std::convert::TryFrom<&BuildOpts> for Project {\n\n type Error = eyre::Error;\n\n\n\n /// Defaults to converting to DAppTools-style repo layout, but can be customized.\n\n fn try_from(opts: &BuildOpts) -> 
eyre::Result<Project> {\n\n // 1. Set the root dir\n\n let root = opts.root.clone().unwrap_or_else(|| std::env::current_dir().unwrap());\n\n let root = std::fs::canonicalize(&root)?;\n\n\n\n // 2. Set the contracts dir\n\n let contracts = if let Some(ref contracts) = opts.contracts {\n\n root.join(contracts)\n\n } else {\n\n root.join(\"src\")\n\n };\n\n\n\n // 3. Set the output dir\n\n let artifacts = if let Some(ref artifacts) = opts.out_path {\n", "file_path": "cli/src/forge_opts.rs", "rank": 82, "score": 81227.04065658875 }, { "content": " #[structopt(\n\n help = \"the initial balance of each deployed test contract\",\n\n long,\n\n default_value = \"0xffffffffffffffffffffffff\"\n\n )]\n\n initial_balance: U256,\n\n\n\n #[structopt(\n\n help = \"the address which will be executing all tests\",\n\n long,\n\n default_value = \"0x0000000000000000000000000000000000000000\",\n\n env = \"DAPP_TEST_ADDRESS\"\n\n )]\n\n sender: Address,\n\n\n\n #[structopt(help = \"enables the FFI cheatcode\", long)]\n\n ffi: bool,\n\n\n\n #[structopt(help = \"verbosity of 'forge test' output (0-3)\", long, default_value = \"0\")]\n\n verbosity: u8,\n", "file_path": "cli/src/forge_opts.rs", "rank": 83, "score": 81226.08193967484 }, { "content": "\n\n #[structopt(alias = \"i\", about = \"initializes a new forge repository\")]\n\n Init {\n\n #[structopt(help = \"the project's root path, default being the current directory\")]\n\n root: Option<PathBuf>,\n\n #[structopt(help = \"optional solidity template to start from\", long, short)]\n\n template: Option<String>,\n\n },\n\n\n\n Completions {\n\n #[structopt(help = \"the shell you are using\")]\n\n shell: structopt::clap::Shell,\n\n },\n\n\n\n #[structopt(about = \"removes the build artifacts and cache directories\")]\n\n Clean {\n\n #[structopt(help = \"the project's root path, default being the current directory\", long)]\n\n root: Option<PathBuf>,\n\n },\n\n}\n", "file_path": "cli/src/forge_opts.rs", "rank": 84, "score": 81225.8004295092 }, { "content": " // and recompile the contracts.\n\n if opts.force {\n\n crate::utils::cleanup(root)?;\n\n }\n\n\n\n Ok(project)\n\n }\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\npub struct BuildOpts {\n\n #[structopt(help = \"the project's root path, default being the current directory\", long)]\n\n pub root: Option<PathBuf>,\n\n\n\n #[structopt(\n\n help = \"the directory relative to the root under which the smart contrats are\",\n\n long,\n\n short\n\n )]\n\n #[structopt(env = \"DAPP_SRC\")]\n", "file_path": "cli/src/forge_opts.rs", "rank": 85, "score": 81224.72336076481 }, { "content": " pub contracts: Option<PathBuf>,\n\n\n\n #[structopt(help = \"the remappings\", long, short)]\n\n pub remappings: Vec<ethers::solc::remappings::Remapping>,\n\n #[structopt(long = \"remappings-env\", env = \"DAPP_REMAPPINGS\")]\n\n pub remappings_env: Option<String>,\n\n\n\n #[structopt(help = \"the paths where your libraries are installed\", long)]\n\n pub lib_paths: Vec<PathBuf>,\n\n\n\n #[structopt(help = \"path to where the contract artifacts are stored\", long = \"out\", short)]\n\n pub out_path: Option<PathBuf>,\n\n\n\n #[structopt(help = \"choose the evm version\", long, default_value = \"london\")]\n\n pub evm_version: EvmVersion,\n\n\n\n #[structopt(\n\n help = \"if set to true, skips auto-detecting solc and uses what is in the user's $PATH \",\n\n long\n\n )]\n", "file_path": "cli/src/forge_opts.rs", "rank": 86, "score": 81224.30206013792 }, { "content": "\n\n/// Represents the common dapp argument pattern for 
`<path>:<contractname>` where `<path>:` is\n\n/// optional.\n\n#[derive(Clone, Debug)]\n\npub struct ContractInfo {\n\n /// Location of the contract\n\n pub path: Option<String>,\n\n /// Name of the contract\n\n pub name: String,\n\n}\n\n\n\nimpl FromStr for ContractInfo {\n\n type Err = eyre::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let mut iter = s.rsplit(':');\n\n let name = iter.next().unwrap().to_string();\n\n let path = iter.next().map(str::to_string);\n\n Ok(Self { path, name })\n\n }\n", "file_path": "cli/src/forge_opts.rs", "rank": 87, "score": 81223.22607549638 }, { "content": " lib_paths: Vec<PathBuf>,\n\n },\n\n\n\n #[structopt(about = \"build your smart contracts. Requires `ETHERSCAN_API_KEY` to be set.\")]\n\n VerifyContract {\n\n #[structopt(help = \"contract source info `<path>:<contractname>`\")]\n\n contract: FullContractInfo,\n\n #[structopt(help = \"the address of the contract to verify.\")]\n\n address: Address,\n\n #[structopt(help = \"constructor args calldata arguments.\")]\n\n constructor_args: Vec<String>,\n\n },\n\n\n\n #[structopt(alias = \"c\", about = \"deploy a compiled contract\")]\n\n Create {\n\n #[structopt(help = \"contract source info `<path>:<contractname>` or `<contractname>`\")]\n\n contract: ContractInfo,\n\n #[structopt(long, help = \"verify on Etherscan\")]\n\n verify: bool,\n\n },\n", "file_path": "cli/src/forge_opts.rs", "rank": 88, "score": 81222.46512139268 }, { "content": "///\n\n/// A dependency can be provided as a raw URL, or as a path to a Github repository\n\n/// e.g. `org-name/repo-name`\n\n///\n\n/// Providing a ref can be done in the following 3 ways:\n\n/// * branch: master\n\n/// * tag: v0.1.1\n\n/// * commit: 8e8128\n\n///\n\n/// Non Github URLs must be provided with an https:// prefix.\n\n/// Adding dependencies as local paths is not supported yet.\n\n#[derive(Clone, Debug)]\n\npub struct Dependency {\n\n /// The name of the dependency\n\n pub name: String,\n\n /// The url to the git repository corresponding to the dependency\n\n pub url: String,\n\n /// Optional tag corresponding to a Git SHA, tag, or branch.\n\n pub tag: Option<String>,\n\n}\n", "file_path": "cli/src/forge_opts.rs", "rank": 89, "score": 81220.53688662729 }, { "content": "}\n\n\n\n/// Represents the common dapp argument pattern `<path>:<contractname>`\n\n#[derive(Clone, Debug)]\n\npub struct FullContractInfo {\n\n /// Location of the contract\n\n pub path: String,\n\n /// Name of the contract\n\n pub name: String,\n\n}\n\n\n\nimpl FromStr for FullContractInfo {\n\n type Err = eyre::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let (path, name) = s\n\n .split_once(':')\n\n .ok_or_else(|| eyre::eyre!(\"Expected `<path>:<contractname>`, got `{}`\", s))?;\n\n Ok(Self { path: path.to_string(), name: name.to_string() })\n\n }\n", "file_path": "cli/src/forge_opts.rs", "rank": 90, "score": 81218.26822519704 }, { "content": " pub no_auto_detect: bool,\n\n\n\n #[structopt(\n\n help = \"force recompilation of the project, deletes the cache and artifacts folders\",\n\n long\n\n )]\n\n pub force: bool,\n\n}\n\n#[derive(Clone, Debug)]\n\npub enum EvmType {\n\n #[cfg(feature = \"sputnik-evm\")]\n\n Sputnik,\n\n #[cfg(feature = \"evmodin-evm\")]\n\n EvmOdin,\n\n}\n\n\n\nimpl FromStr for EvmType {\n\n type Err = eyre::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n", "file_path": "cli/src/forge_opts.rs", "rank": 91, "score": 81216.55227332973 }, { "content": "\n\n #[structopt(help = \"the block.number value 
during EVM execution\", long, default_value = \"0\")]\n\n #[structopt(env = \"DAPP_TEST_NUMBER\")]\n\n pub block_number: u64,\n\n\n\n #[structopt(\n\n help = \"the block.difficulty value during EVM execution\",\n\n long,\n\n default_value = \"0\"\n\n )]\n\n pub block_difficulty: u64,\n\n\n\n #[structopt(help = \"the block.gaslimit value during EVM execution\", long)]\n\n pub block_gas_limit: Option<u64>,\n\n // TODO: Add configuration option for base fee.\n\n}\n\n\n\n#[cfg(feature = \"sputnik-evm\")]\n\nuse sputnik::backend::MemoryVicinity;\n\n\n", "file_path": "cli/src/forge_opts.rs", "rank": 92, "score": 81216.48930666553 }, { "content": " long,\n\n short,\n\n help = \"the EVM type you want to use (e.g. sputnik, evmodin)\",\n\n default_value = \"sputnik\"\n\n )]\n\n evm_type: EvmType,\n\n\n\n #[structopt(\n\n help = \"fetch state over a remote instead of starting from empty state\",\n\n long,\n\n short\n\n )]\n\n #[structopt(alias = \"rpc-url\")]\n\n #[structopt(env = \"ETH_RPC_URL\")]\n\n fork_url: Option<String>,\n\n\n\n #[structopt(help = \"pins the block number for the state fork\", long)]\n\n #[structopt(env = \"DAPP_FORK_BLOCK\")]\n\n fork_block_number: Option<u64>,\n\n\n", "file_path": "cli/src/forge_opts.rs", "rank": 93, "score": 81215.21868344683 }, { "content": " Ok(match s.to_lowercase().as_str() {\n\n #[cfg(feature = \"sputnik-evm\")]\n\n \"sputnik\" => EvmType::Sputnik,\n\n #[cfg(feature = \"evmodin-evm\")]\n\n \"evmodin\" => EvmType::EvmOdin,\n\n other => eyre::bail!(\"unknown EVM type {}\", other),\n\n })\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum EvmVersion {\n\n Frontier,\n\n Istanbul,\n\n Berlin,\n\n London,\n\n}\n\n\n\n#[cfg(feature = \"sputnik-evm\")]\n\nuse sputnik::Config;\n", "file_path": "cli/src/forge_opts.rs", "rank": 94, "score": 81214.26307982151 }, { "content": "\n\nconst GITHUB: &str = \"github.com\";\n\nconst VERSION_SEPARATOR: char = '@';\n\n\n\nimpl FromStr for Dependency {\n\n type Err = eyre::Error;\n\n fn from_str(dependency: &str) -> Result<Self, Self::Err> {\n\n // TODO: Is there a better way to normalize these paths to having a\n\n // `https://github.com/` prefix?\n\n let path = if dependency.starts_with(\"https://\") {\n\n dependency.to_string()\n\n } else if dependency.starts_with(GITHUB) {\n\n format!(\"https://{}\", dependency)\n\n } else {\n\n format!(\"https://{}/{}\", GITHUB, dependency)\n\n };\n\n\n\n // everything after the \"@\" should be considered the version\n\n let mut split = path.split(VERSION_SEPARATOR);\n\n let url =\n", "file_path": "cli/src/forge_opts.rs", "rank": 95, "score": 81213.98833815448 }, { "content": "#[cfg(feature = \"evmodin-evm\")]\n\nuse evmodin::util::mocked_host::MockedHost;\n\n\n\nimpl Env {\n\n #[cfg(feature = \"sputnik-evm\")]\n\n pub fn sputnik_state(&self) -> MemoryVicinity {\n\n MemoryVicinity {\n\n chain_id: self.chain_id.into(),\n\n\n\n gas_price: self.gas_price.into(),\n\n origin: self.tx_origin,\n\n\n\n block_coinbase: self.block_coinbase,\n\n block_number: self.block_number.into(),\n\n block_timestamp: self.block_timestamp.into(),\n\n block_difficulty: self.block_difficulty.into(),\n\n block_base_fee_per_gas: self.block_base_fee_per_gas.into(),\n\n block_gas_limit: self.block_gas_limit.unwrap_or(self.gas_limit).into(),\n\n block_hashes: Vec::new(),\n\n }\n", "file_path": "cli/src/forge_opts.rs", "rank": 96, "score": 81213.16882238841 }, { "content": " }\n\n\n\n // extend them with the one via the requirements.txt\n\n if let Ok(ref remap) = 
std::fs::read_to_string(root.join(\"remappings.txt\")) {\n\n remappings.extend(remappings_from_newline(remap))\n\n }\n\n\n\n // helper function for parsing newline-separated remappings\n\n fn remappings_from_newline(remappings: &str) -> impl Iterator<Item = Remapping> + '_ {\n\n remappings.split('\\n').filter(|x| !x.is_empty()).map(|x| {\n\n Remapping::from_str(x)\n\n .unwrap_or_else(|_| panic!(\"could not parse remapping: {}\", x))\n\n })\n\n }\n\n\n\n // remove any potential duplicates\n\n remappings.sort_unstable();\n\n remappings.dedup();\n\n\n\n // build the path\n", "file_path": "cli/src/forge_opts.rs", "rank": 97, "score": 81212.90486319512 }, { "content": " help = \"the tx.origin value during EVM execution\",\n\n long,\n\n default_value = \"0x0000000000000000000000000000000000000000\"\n\n )]\n\n pub tx_origin: Address,\n\n\n\n #[structopt(\n\n help = \"the block.coinbase value during EVM execution\",\n\n long,\n\n // TODO: It'd be nice if we could use Address::zero() here.\n\n default_value = \"0x0000000000000000000000000000000000000000\"\n\n )]\n\n pub block_coinbase: Address,\n\n #[structopt(\n\n help = \"the block.timestamp value during EVM execution\",\n\n long,\n\n default_value = \"0\",\n\n env = \"DAPP_TEST_TIMESTAMP\"\n\n )]\n\n pub block_timestamp: u64,\n", "file_path": "cli/src/forge_opts.rs", "rank": 98, "score": 81212.90920301252 }, { "content": "\n\n#[cfg(feature = \"evmodin-evm\")]\n\nuse evmodin::Revision;\n\n\n\nimpl EvmVersion {\n\n #[cfg(feature = \"sputnik-evm\")]\n\n pub fn sputnik_cfg(self) -> Config {\n\n use EvmVersion::*;\n\n match self {\n\n Frontier => Config::frontier(),\n\n Istanbul => Config::istanbul(),\n\n Berlin => Config::berlin(),\n\n London => Config::london(),\n\n }\n\n }\n\n\n\n #[cfg(feature = \"evmodin-evm\")]\n\n pub fn evmodin_cfg(self) -> Revision {\n\n use EvmVersion::*;\n\n match self {\n", "file_path": "cli/src/forge_opts.rs", "rank": 99, "score": 81211.24362533184 } ]
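Note on the retrieved context above: several of the quoted snippets document foundry_utils::parse_tokens, which turns human-readable strings into ABI Tokens checked against expected ParamTypes. The following is only an illustrative sketch of calling it, based on the signature quoted earlier in this context list; the dependency setup, the eyre error type in the return position, and the concrete ParamType values are assumptions for illustration and are not part of the dataset.

// Sketch only: assumes foundry_utils and ethers-core are available as dependencies.
use ethers_core::abi::ParamType;
use foundry_utils::parse_tokens;

fn tokenize_example() -> eyre::Result<()> {
    let amount_ty = ParamType::Uint(256);
    let note_ty = ParamType::String;
    // lenient = true selects the LenientTokenizer, which accepts plain decimal input.
    let tokens = parse_tokens(vec![(&amount_ty, "42"), (&note_ty, "gm")], true)?;
    assert_eq!(tokens.len(), 2);
    Ok(())
}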
Rust
graphannis/src/annis/db/token_helper.rs
corpus-tools/graphANNIS
6b1bf752a33f851f3fffe8e49e1e72cfc29b60f5
use crate::{
    annis::{
        db::{
            aql::model::{AnnotationComponentType, TOKEN_KEY},
            AnnotationStorage,
        },
        errors::GraphAnnisError,
    },
    errors::Result,
    graph::GraphStorage,
    AnnotationGraph,
};
use graphannis_core::{
    graph::ANNIS_NS,
    types::{Component, NodeID},
};

use std::collections::HashSet;
use std::sync::Arc;

#[derive(Clone)]
pub struct TokenHelper<'a> {
    node_annos: &'a dyn AnnotationStorage<NodeID>,
    left_edges: Arc<dyn GraphStorage>,
    right_edges: Arc<dyn GraphStorage>,
    cov_edges: Vec<Arc<dyn GraphStorage>>,
}

lazy_static! {
    static ref COMPONENT_LEFT: Component<AnnotationComponentType> = {
        Component::new(
            AnnotationComponentType::LeftToken,
            ANNIS_NS.into(),
            "".into(),
        )
    };
    static ref COMPONENT_RIGHT: Component<AnnotationComponentType> = {
        Component::new(
            AnnotationComponentType::RightToken,
            ANNIS_NS.into(),
            "".into(),
        )
    };
}

pub fn necessary_components(db: &AnnotationGraph) -> HashSet<Component<AnnotationComponentType>> {
    let mut result = HashSet::default();
    result.insert(COMPONENT_LEFT.clone());
    result.insert(COMPONENT_RIGHT.clone());
    result.extend(
        db.get_all_components(Some(AnnotationComponentType::Coverage), None)
            .into_iter(),
    );
    result
}

impl<'a> TokenHelper<'a> {
    pub fn new(graph: &'a AnnotationGraph) -> Result<TokenHelper<'a>> {
        let cov_edges: Vec<Arc<dyn GraphStorage>> = graph
            .get_all_components(Some(AnnotationComponentType::Coverage), None)
            .into_iter()
            .filter_map(|c| graph.get_graphstorage(&c))
            .filter(|gs| {
                if let Some(stats) = gs.get_statistics() {
                    stats.nodes > 0
                } else {
                    true
                }
            })
            .collect();

        let left_edges = graph.get_graphstorage(&COMPONENT_LEFT).ok_or_else(|| {
            GraphAnnisError::ImpossibleSearch(
                "LeftToken component is missing (needed for all text coverage related operators)"
                    .to_string(),
            )
        })?;
        let right_edges = graph.get_graphstorage(&COMPONENT_RIGHT).ok_or_else(|| {
            GraphAnnisError::ImpossibleSearch(
                "RightToken component is missing (needed for all text coverage related operators)"
                    .to_string(),
            )
        })?;

        Ok(TokenHelper {
            node_annos: graph.get_node_annos(),
            left_edges,
            right_edges,
            cov_edges,
        })
    }

    pub fn get_gs_coverage(&self) -> &Vec<Arc<dyn GraphStorage>> {
        &self.cov_edges
    }

    pub fn get_gs_left_token(&self) -> &dyn GraphStorage {
        self.left_edges.as_ref()
    }

    pub fn get_gs_right_token_(&self) -> &dyn GraphStorage {
        self.right_edges.as_ref()
    }

    pub fn is_token(&self, id: NodeID) -> Result<bool> {
        if self.node_annos.has_value_for_item(&id, &TOKEN_KEY)? {
            let has_outgoing = self.has_outgoing_coverage_edges(id)?;
            Ok(!has_outgoing)
        } else {
            Ok(false)
        }
    }

    pub fn has_outgoing_coverage_edges(&self, id: NodeID) -> Result<bool> {
        for c in self.cov_edges.iter() {
            if c.has_outgoing_edges(id)? {
                return Ok(true);
            }
        }
        Ok(false)
    }

    pub fn right_token_for(&self, n: NodeID) -> Result<Option<NodeID>> {
        if self.is_token(n)? {
            Ok(Some(n))
        } else {
            let mut out = self.right_edges.get_outgoing_edges(n);
            match out.next() {
                Some(out) => Ok(Some(out?)),
                None => Ok(None),
            }
        }
    }

    pub fn left_token_for(&self, n: NodeID) -> Result<Option<NodeID>> {
        if self.is_token(n)? {
            Ok(Some(n))
        } else {
            let mut out = self.left_edges.get_outgoing_edges(n);
            match out.next() {
                Some(out) => Ok(Some(out?)),
                None => Ok(None),
            }
        }
    }

    pub fn left_right_token_for(&self, n: NodeID) -> Result<(Option<NodeID>, Option<NodeID>)> {
        if self.is_token(n)? {
            Ok((Some(n), Some(n)))
        } else {
            let out_left = match self.left_edges.get_outgoing_edges(n).next() {
                Some(out) => Some(out?),
                None => None,
            };
            let out_right = match self.right_edges.get_outgoing_edges(n).next() {
                Some(out) => Some(out?),
                None => None,
            };
            Ok((out_left, out_right))
        }
    }
}
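As a reading aid for the token_helper.rs source above: the sketch below shows one way the TokenHelper API could be driven from inside the graphannis crate. It is illustrative only; the function name, the example graph and node, and the crate-internal import paths are assumptions and not part of the dataset row.

// Sketch only: assumes crate-internal visibility of TokenHelper and an
// AnnotationGraph that was loaded elsewhere.
use crate::annis::db::token_helper::TokenHelper;
use crate::{errors::Result, AnnotationGraph};
use graphannis_core::types::NodeID;

fn describe_node(graph: &AnnotationGraph, node: NodeID) -> Result<()> {
    // Construction fails if the LeftToken/RightToken components are missing.
    let helper = TokenHelper::new(graph)?;
    if helper.is_token(node)? {
        println!("{} is itself a token", node);
    } else {
        // Resolve the leftmost and rightmost token covered by this node.
        let (left, right) = helper.left_right_token_for(node)?;
        println!("node {} spans tokens {:?} .. {:?}", node, left, right);
    }
    Ok(())
}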
use crate::{
    annis::{
        db::{
            aql::model::{AnnotationComponentType, TOKEN_KEY},
            AnnotationStorage,
        },
        errors::GraphAnnisError,
    },
    errors::Result,
    graph::GraphStorage,
    AnnotationGraph,
};
use graphannis_core::{
    graph::ANNIS_NS,
    types::{Component, NodeID},
};
use std::collections::HashSet;
use std::sync::Arc;

#[derive(Clone)]
pub struct TokenHelper<'a> {
    node_annos: &'a dyn AnnotationStorage<NodeID>,
    left_edges: Arc<dyn GraphStorage>,
    right_edges: Arc<dyn GraphStorage>,
    cov_edges: Vec<Arc<dyn GraphStorage>>,
}

lazy_static! {
    static ref COMPONENT_LEFT: Component<AnnotationComponentType> = {
        Component::new(
            AnnotationComponentType::LeftToken,
            ANNIS_NS.into(),
            "".into(),
        )
    };
    static ref COMPONENT_RIGHT: Component<AnnotationComponentType> = {
        Component::new(
            AnnotationComponentType::RightToken,
            ANNIS_NS.into(),
            "".into(),
        )
    };
}

pub fn necessary_components(db: &AnnotationGraph) -> HashSet<Component<AnnotationComponentType>> {
    let mut result = HashSet::default();
    result.insert(COMPONENT_LEFT.clone());
    result.insert(COMPONENT_RIGHT.clone());
    result.extend(
        db.get_all_components(Some(AnnotationComponentType::Coverage), None)
            .into_iter(),
    );
    result
}

impl<'a> TokenHelper<'a> {
    pub fn new(graph: &'a AnnotationGraph) -> Result<TokenHelper<'a>> {
        let cov_edges: Vec<Arc<dyn GraphStorage>> = graph
            .get_all_components(Some(AnnotationComponentType::Coverage), None)
            .into_iter()
            .filter_map(|c| graph.get_graphstorage(&c))
            .filter(|gs| {
                if let Some(stats) = gs.get_statistics() {
                    stats.nodes > 0
                } else {
                    true
                }
            })
            .collect();

        let left_edges = graph.get_graphstorage(&COMPONENT_LEFT).ok_or_else(|| {
            GraphAnnisError::ImpossibleSearch(
                "LeftToken component is missing (needed for all text coverage related operators)"
                    .to_string(),
            )
        })?;

        let right_edges = graph.get_graphstorage(&COMPONENT_RIGHT).ok_or_else(|| {
            GraphAnnisError::ImpossibleSearch(
                "RightToken component is missing (needed for all text coverage related operators)"
                    .to_string(),
            )
        })?;

        Ok(TokenHelper {
            node_annos: graph.get_node_annos(),
            left_edges,
            right_edges,
            cov_edges,
        })
    }

    pub fn get_gs_coverage(&self) -> &Vec<Arc<dyn GraphStorage>> {
        &self.cov_edges
    }

    pub fn get_gs_left_token(&self) -> &dyn GraphStorage {
        self.left_edges.as_ref()
    }

    pub fn get_gs_right_token_(&self) -> &dyn GraphStorage {
        self.right_edges.as_ref()
    }

    pub fn is_token(&self, id: NodeID) -> Result<bool> {
        if self.node_annos.has_value_for_item(&id, &TOKEN_KEY)? {
            let has_outgoing = self.has_outgoing_coverage_edges(id)?;
            Ok(!has_outgoing)
        } else {
            Ok(false)
        }
    }

    pub fn has_outgoing_coverage_edges(&self, id: NodeID) -> Result<bool> {
        for c in self.cov_edges.iter() {
            if c.has_outgoing_edges(id)? {
                return Ok(true);
            }
        }
        Ok(false)
    }

    pub fn right_token_for(&self, n: NodeID) -> Result<Option<NodeID>> {
        if self.is_token(n)? {
            Ok(Some(n))
        } else {
            let mut out = self.right_edges.get_outgoing_edges(n);
            match out.next() {
                Some(out) => Ok(Some(out?)),
                None => Ok(None),
            }
        }
    }

    pub fn left_token_for(&self, n: NodeID) -> Result<Option<NodeID>> {
        if self.is_token(n)? {
            Ok(Some(n))
        } else {
            let mut out = self.left_edges.get_outgoing_edges(n);
            match out.next() {
                Some(out) => Ok(Some(out?)),
                None => Ok(None),
            }
        }
    }

    pub fn left_right_token_for(&self, n: NodeID) -> Result<(Option<NodeID>, Option<NodeID>)> {
        if self.is_token(n)? {
            Ok((Some(n), Some(n)))
        } else {
            let out_left = match self.left_edges.get_outgoing_edges(n).next() {
                Some(out) => Some(out?),
                None => None,
            };
            let out_right =
; Ok((out_left, out_right)) } } }
match self.right_edges.get_outgoing_edges(n).next() { Some(out) => Some(out?), None => None, }
if_condition
[ { "content": "pub fn compare_match_by_text_pos(\n\n m1: &Match,\n\n m2: &Match,\n\n node_annos: &dyn AnnotationStorage<NodeID>,\n\n token_helper: Option<&TokenHelper>,\n\n gs_order: Option<&dyn GraphStorage>,\n\n collation: CollationType,\n\n quirks_mode: bool,\n\n) -> Result<Ordering> {\n\n if m1.node == m2.node {\n\n // same node, use annotation name and namespace to compare\n\n Ok(m1.anno_key.cmp(&m2.anno_key))\n\n } else {\n\n // get the node paths and names\n\n let m1_anno_val = node_annos.get_value_for_item(&m1.node, &NODE_NAME_KEY)?;\n\n let m2_anno_val = node_annos.get_value_for_item(&m2.node, &NODE_NAME_KEY)?;\n\n\n\n if let (Some(m1_anno_val), Some(m2_anno_val)) = (m1_anno_val, m2_anno_val) {\n\n let (m1_path, m1_name) = split_path_and_nodename(&m1_anno_val);\n\n let (m2_path, m2_name) = split_path_and_nodename(&m2_anno_val);\n", "file_path": "graphannis/src/annis/db/sort_matches.rs", "rank": 1, "score": 301504.08762200474 }, { "content": "pub fn compare_matchgroup_by_text_pos(\n\n m1: &[Match],\n\n m2: &[Match],\n\n node_annos: &dyn AnnotationStorage<NodeID>,\n\n token_helper: Option<&TokenHelper>,\n\n gs_order: Option<&dyn GraphStorage>,\n\n collation: CollationType,\n\n reverse_path: bool,\n\n) -> Result<Ordering> {\n\n for i in 0..std::cmp::min(m1.len(), m2.len()) {\n\n let element_cmp = compare_match_by_text_pos(\n\n &m1[i],\n\n &m2[i],\n\n node_annos,\n\n token_helper,\n\n gs_order,\n\n collation,\n\n reverse_path,\n\n )?;\n\n if element_cmp != Ordering::Equal {\n\n return Ok(element_cmp);\n\n }\n\n }\n\n // Sort longer vectors (\"more specific\") before shorter ones\n\n // This originates from the old SQL based system, where an \"unfilled\" match position had the NULL value.\n\n // NULL values where sorted *after* the ones with actual values. 
In practice, this means the more specific\n\n // matches come first.\n\n Ok(m2.len().cmp(&m1.len()))\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/sort_matches.rs", "rank": 2, "score": 289553.3268019365 }, { "content": "/// Create update events for the following corpus structure:\n\n///\n\n/// ```\n\n/// rootCorpus\n\n/// / \\\n\n/// \tsubCorpus1 subCorpus2\n\n/// \t/ \\ / \\\n\n/// doc1 doc2 doc3 doc4\n\n/// ```\n\npub fn create_corpus_structure(update: &mut GraphUpdate) {\n\n update\n\n .add_event(UpdateEvent::AddNode {\n\n node_name: \"root\".to_string(),\n\n node_type: \"corpus\".to_string(),\n\n })\n\n .unwrap();\n\n\n\n update\n\n .add_event(UpdateEvent::AddNode {\n\n node_name: \"root/subCorpus1\".to_string(),\n\n node_type: \"corpus\".to_string(),\n\n })\n\n .unwrap();\n\n update\n\n .add_event(UpdateEvent::AddEdge {\n\n source_node: \"root/subCorpus1\".to_string(),\n\n target_node: \"root\".to_string(),\n\n layer: \"\".to_string(),\n\n component_type: \"PartOf\".to_string(),\n", "file_path": "graphannis/src/annis/db/example_generator.rs", "rank": 3, "score": 283847.80661366845 }, { "content": "/// Create update events for the following corpus structure:\n\n///\n\n/// ```\n\n/// rootCorpus\n\n/// |\n\n/// docc1\n\n/// ```\n\npub fn create_corpus_structure_simple(update: &mut GraphUpdate) {\n\n update\n\n .add_event(UpdateEvent::AddNode {\n\n node_name: \"root\".to_string(),\n\n node_type: \"corpus\".to_string(),\n\n })\n\n .unwrap();\n\n\n\n update\n\n .add_event(UpdateEvent::AddNode {\n\n node_name: \"root/doc1\".to_string(),\n\n node_type: \"corpus\".to_string(),\n\n })\n\n .unwrap();\n\n\n\n update\n\n .add_event(UpdateEvent::AddEdge {\n\n source_node: \"root/doc1\".to_string(),\n\n target_node: \"root\".to_string(),\n\n layer: \"\".to_string(),\n\n component_type: \"PartOf\".to_string(),\n\n component_name: \"\".to_string(),\n\n })\n\n .unwrap();\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/example_generator.rs", "rank": 4, "score": 279327.14514444454 }, { "content": "fn load_component_from_disk(component_path: &Path) -> Result<Arc<dyn GraphStorage>> {\n\n // load component into memory\n\n let impl_path = PathBuf::from(component_path).join(\"impl.cfg\");\n\n let mut f_impl = std::fs::File::open(impl_path)?;\n\n let mut impl_name = String::new();\n\n f_impl.read_to_string(&mut impl_name)?;\n\n\n\n let gs = registry::deserialize(&impl_name, component_path)?;\n\n\n\n Ok(gs)\n\n}\n\n\n", "file_path": "core/src/graph/mod.rs", "rank": 5, "score": 265427.1817078096 }, { "content": "pub fn create_from_info(info: &GSInfo) -> Result<Arc<dyn GraphStorage>> {\n\n (info.constructor)()\n\n}\n\n\n", "file_path": "core/src/graph/storage/registry.rs", "rank": 6, "score": 264739.12455276935 }, { "content": "/// Creates éxample token objects. 
If a document name is given, the\n\n/// token objects are attached to it.\n\n///\n\n/// The example tokens are\n\n/// - Is\n\n/// - this\n\n/// - example\n\n/// - more\n\n/// - complicated\n\n/// - than\n\n/// - it\n\n/// - appears\n\n/// - to\n\n/// - be\n\n/// - ?\n\n/// \n\npub fn create_tokens(update: &mut GraphUpdate, document_name: Option<&str>) {\n\n let prefix = if let Some(document_name) = document_name {\n\n format!(\"{}#\", document_name)\n\n } else {\n\n \"\".to_string()\n\n };\n\n\n\n let token_strings = vec![\n\n \"Is\",\n\n \"this\",\n\n \"example\",\n\n \"more\",\n\n \"complicated\",\n\n \"than\",\n\n \"it\",\n\n \"appears\",\n\n \"to\",\n\n \"be\",\n\n \"?\",\n\n ];\n", "file_path": "graphannis/src/annis/db/example_generator.rs", "rank": 7, "score": 258111.38521480456 }, { "content": "pub fn deserialize(impl_name: &str, location: &Path) -> Result<Arc<dyn GraphStorage>> {\n\n let info = REGISTRY\n\n .get(impl_name)\n\n .ok_or_else(|| GraphAnnisCoreError::UnknownGraphStorageImpl(impl_name.to_string()))?;\n\n (info.deserialize_func)(location)\n\n}\n", "file_path": "core/src/graph/storage/registry.rs", "rank": 8, "score": 252927.67825717397 }, { "content": "fn calculate_binary_outputsize(op: &dyn BinaryOperatorBase, num_tuples: usize) -> Result<usize> {\n\n let output = match op.estimation_type()? {\n\n EstimationType::Selectivity(selectivity) => {\n\n let num_tuples = num_tuples as f64;\n\n if let Some(edge_sel) = op.edge_anno_selectivity()? {\n\n (num_tuples * selectivity * edge_sel).round() as usize\n\n } else {\n\n (num_tuples * selectivity).round() as usize\n\n }\n\n }\n\n EstimationType::Min => num_tuples,\n\n };\n\n // always assume at least one output item otherwise very small selectivity can fool the planner\n\n Ok(std::cmp::max(output, 1))\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/exec/filter.rs", "rank": 9, "score": 251634.16504131473 }, { "content": "pub fn make_span(update: &mut GraphUpdate, node_name: &str, covered_token_names: &[&str]) {\n\n for c in covered_token_names {\n\n update\n\n .add_event(UpdateEvent::AddEdge {\n\n source_node: node_name.to_string(),\n\n target_node: c.to_string(),\n\n layer: \"\".to_string(),\n\n component_type: \"Coverage\".to_string(),\n\n component_name: \"\".to_string(),\n\n })\n\n .unwrap();\n\n }\n\n}\n", "file_path": "graphannis/src/annis/db/example_generator.rs", "rank": 10, "score": 243327.25159166794 }, { "content": "#[derive(Serialize, Deserialize, Clone, MallocSizeOf)]\n\nstruct Text {\n\n name: String,\n\n val: String,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 11, "score": 241140.53343766098 }, { "content": "/// Make sure all items of the complete vector are sorted by the given comparision function.\n\npub fn sort<T, F>(items: &mut Vec<T>, order_func: F) -> Result<()>\n\nwhere\n\n T: Send,\n\n F: Fn(&T, &T) -> Result<std::cmp::Ordering>,\n\n{\n\n let item_len = items.len();\n\n if item_len > 0 {\n\n quicksort(items, item_len, &order_func)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "graphannis/src/annis/util/quicksort.rs", "rank": 12, "score": 234714.97609742667 }, { "content": "/// Make sure that the first `n` items of the complete vector are sorted by the given comparision function.\n\n///\n\n/// This returns the original items and it is guaranteed that the items (0..n) are\n\n/// sorted and that all of these items are smaller or equal to the n-th item.\n\npub fn sort_first_n_items<T, F>(items: &mut Vec<T>, n: usize, order_func: F) -> Result<()>\n\nwhere\n\n T: Send,\n\n F: Fn(&T, 
&T) -> Result<std::cmp::Ordering>,\n\n{\n\n let item_len = items.len();\n\n if item_len > 0 {\n\n quicksort(items, n, &order_func)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "graphannis/src/annis/util/quicksort.rs", "rank": 13, "score": 234047.44371136872 }, { "content": "fn compare_corpora(g1: &AnnotationGraph, g2: &AnnotationGraph, rhs_remove_annis_coverage: bool) {\n\n // Check all nodes and node annotations exist in both corpora\n\n let nodes1: Vec<String> = g1\n\n .get_node_annos()\n\n .exact_anno_search(Some(\"annis\"), \"node_name\", ValueSearch::Any)\n\n .filter_map(|m| m.unwrap().extract_annotation(g1.get_node_annos()).unwrap())\n\n .map(|a| a.val.into())\n\n .sorted()\n\n .collect();\n\n let nodes2: Vec<String> = g2\n\n .get_node_annos()\n\n .exact_anno_search(Some(\"annis\"), \"node_name\", ValueSearch::Any)\n\n .filter_map(|m| m.unwrap().extract_annotation(g1.get_node_annos()).unwrap())\n\n .map(|a| a.val.into())\n\n .sorted()\n\n .collect();\n\n assert_eq!(&nodes1, &nodes2);\n\n\n\n let nodes1: Vec<NodeID> = nodes1\n\n .into_iter()\n", "file_path": "graphannis/src/annis/db/corpusstorage/tests.rs", "rank": 14, "score": 233294.43817874693 }, { "content": "/// Make sure that the first `n` items of the complete vector are sorted by the given comparision function.\n\n///\n\n/// This returns the original items and it is guaranteed that the items (0..n) are\n\n/// sorted and that all of these items are smaller or equal to the n-th item.\n\npub fn sort_first_n_items_parallel<T, F>(items: &mut Vec<T>, n: usize, order_func: F) -> Result<()>\n\nwhere\n\n T: Send,\n\n F: Fn(&T, &T) -> Result<std::cmp::Ordering> + Sync,\n\n{\n\n let item_len = items.len();\n\n if item_len > 0 {\n\n quicksort_parallel(items, n, &order_func)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "graphannis/src/annis/util/quicksort.rs", "rank": 15, "score": 231036.1286975568 }, { "content": "fn component_type_from_short_name(short_type: &str) -> Result<AnnotationComponentType> {\n\n match short_type {\n\n \"c\" => Ok(AnnotationComponentType::Coverage),\n\n \"d\" => Ok(AnnotationComponentType::Dominance),\n\n \"p\" => Ok(AnnotationComponentType::Pointing),\n\n \"o\" => Ok(AnnotationComponentType::Ordering),\n\n _ => Err(RelAnnisError::InvalidComponentShortName(short_type.to_string()).into()),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_escape_field() {\n\n assert_eq!(escape_field(\"ab\\\\$c\"), \"ab$c\");\n\n assert_eq!(escape_field(\"ab\\\\\\\\cd\\\\\\\\\"), \"ab\\\\cd\\\\\",);\n\n assert_eq!(escape_field(\"ab\\\\'cd\\\\te\"), \"ab'cd\\te\");\n\n assert_eq!(escape_field(\"a\\\\n\"), \"a\\n\");\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 16, "score": 229424.45900938782 }, { "content": "/// Creates a new error from the internal type\n\npub fn new(err: Box<dyn StdError>) -> *mut ErrorList {\n\n Box::into_raw(Box::new(create_error_list(err)))\n\n}\n\n\n\n/// Returns the number of errors in the list.\n\n#[no_mangle]\n\npub extern \"C\" fn annis_error_size(ptr: *const ErrorList) -> size_t {\n\n vec_size(ptr)\n\n}\n\n\n\n/// Get the message for the error at position `i` in the list.\n\n#[no_mangle]\n\npub extern \"C\" fn annis_error_get_msg(ptr: *const ErrorList, i: size_t) -> *const c_char {\n\n let item = vec_get(ptr, i);\n\n if item.is_null() {\n\n return std::ptr::null();\n\n }\n\n let err: &Error = cast_const(item);\n\n err.msg.as_ptr()\n\n}\n", "file_path": "capi/src/cerror.rs", "rank": 17, "score": 227878.8974636815 }, { "content": "pub fn 
parse(query_as_aql: &str, quirks_mode: bool) -> Result<Disjunction> {\n\n let ast = AQL_PARSER.with(|p| p.parse(query_as_aql));\n\n match ast {\n\n Ok(ast) => {\n\n let offsets = get_line_offsets(query_as_aql);\n\n\n\n // make sure AST is in DNF\n\n let ast: ast::Expr = ast.simplify_via_laws();\n\n let ast = get_alternatives_from_dnf(ast);\n\n\n\n let mut legacy_meta_search: Vec<(NodeSearchSpec, ast::Pos)> = Vec::new();\n\n if quirks_mode {\n\n for conjunction in &ast {\n\n for literal in conjunction {\n\n if let ast::Literal::LegacyMetaSearch { spec, pos } = literal {\n\n legacy_meta_search.push((spec.clone(), pos.clone()));\n\n }\n\n }\n\n }\n\n }\n", "file_path": "graphannis/src/annis/db/aql/mod.rs", "rank": 18, "score": 227683.05276637588 }, { "content": "fn iter_next<T>(ptr: *mut Box<dyn Iterator<Item = Result<T>>>, err: *mut *mut ErrorList) -> *mut T {\n\n let it: &mut Box<dyn Iterator<Item = Result<T>>> = cast_mut(ptr);\n\n if let Some(v) = it.next() {\n\n if let Some(v) = map_cerr(v, err) {\n\n return Box::into_raw(Box::new(v));\n\n }\n\n }\n\n std::ptr::null_mut()\n\n}\n\n\n\n/// Returns a pointer to the next node ID for the iterator given by the `ptr` argument\n\n/// or `NULL` if iterator is empty.\n\n///\n\n/// - `err` - Pointer to a list of errors. If any error occured, this list will be non-empty.\n\n#[no_mangle]\n\npub extern \"C\" fn annis_iter_nodeid_next(\n\n ptr: *mut IterPtr<NodeID>,\n\n err: *mut *mut ErrorList,\n\n) -> *mut NodeID {\n\n iter_next(ptr, err)\n\n}\n\n\n", "file_path": "capi/src/data.rs", "rank": 19, "score": 221893.1424492829 }, { "content": "fn create_token_leaf_filter(g: &AnnotationGraph) -> MatchValueFilterFunc {\n\n let cov_gs: Vec<Arc<dyn GraphStorage>> = g\n\n .get_all_components(Some(AnnotationComponentType::Coverage), None)\n\n .into_iter()\n\n .filter_map(|c| g.get_graphstorage(&c))\n\n .filter(|gs| {\n\n if let Some(stats) = gs.get_statistics() {\n\n stats.nodes > 0\n\n } else {\n\n true\n\n }\n\n })\n\n .collect();\n\n\n\n let filter_func: MatchValueFilterFunc = Box::new(move |m, _| {\n\n for cov in cov_gs.iter() {\n\n if cov.get_outgoing_edges(m.node).next().is_some() {\n\n return Ok(false);\n\n }\n\n }\n", "file_path": "graphannis/src/annis/db/exec/nodesearch.rs", "rank": 20, "score": 219758.3283508812 }, { "content": "fn current_inprogress_changeset(changesets: &mut Vec<ChangeSet>) -> Result<&mut ChangeSet> {\n\n let needs_new_changeset = if let Some(c) = changesets.last_mut() {\n\n match c {\n\n ChangeSet::InProgress { .. } => false,\n\n ChangeSet::Finished { .. 
} => true,\n\n }\n\n } else {\n\n true\n\n };\n\n\n\n if needs_new_changeset {\n\n // Create a new changeset\n\n let outfile = NamedTempFile::new()?;\n\n let table_builder = TableBuilder::new(sstable::Options::default(), outfile.reopen()?);\n\n let c = ChangeSet::InProgress {\n\n table_builder: Box::new(table_builder),\n\n outfile,\n\n };\n\n changesets.push(c);\n\n }\n", "file_path": "core/src/graph/update.rs", "rank": 21, "score": 219723.23710258762 }, { "content": "fn finish_all_changesets(changesets: &mut Vec<ChangeSet>) -> Result<()> {\n\n // Remove all changesets from the vector and finish them\n\n let finished: Result<Vec<ChangeSet>> = changesets\n\n .drain(..)\n\n .map(|c| match c {\n\n ChangeSet::InProgress {\n\n table_builder,\n\n outfile,\n\n } => {\n\n table_builder.finish()?;\n\n // Re-open as table\n\n let file = outfile.reopen()?;\n\n let size = file.metadata()?.len();\n\n let table = Table::new(sstable::Options::default(), Box::new(file), size as usize)?;\n\n Ok(ChangeSet::Finished { table })\n\n }\n\n ChangeSet::Finished { table } => Ok(ChangeSet::Finished { table }),\n\n })\n\n .collect();\n\n // Re-add the finished changesets\n\n changesets.extend(finished?);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "core/src/graph/update.rs", "rank": 22, "score": 218304.23909612192 }, { "content": "fn get_read_or_error<'a>(lock: &'a RwLockReadGuard<CacheEntry>) -> Result<&'a AnnotationGraph> {\n\n if let CacheEntry::Loaded(ref db) = &**lock {\n\n Ok(db)\n\n } else {\n\n Err(GraphAnnisError::LoadingGraphFailed {\n\n name: \"\".to_string(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/corpusstorage.rs", "rank": 23, "score": 216919.44905128097 }, { "content": "type FindIterator<'a> = Box<dyn Iterator<Item = Result<MatchGroup>> + 'a>;\n\n\n\nimpl CorpusStorage {\n\n /// Create a new instance with a maximum size for the internal corpus cache.\n\n ///\n\n /// - `db_dir` - The path on the filesystem where the corpus storage content is located. 
Must be an existing directory.\n\n /// - `cache_strategy`: A strategy for clearing the cache.\n\n /// - `use_parallel_joins` - If `true` parallel joins are used by the system, using all available cores.\n\n pub fn with_cache_strategy(\n\n db_dir: &Path,\n\n cache_strategy: CacheStrategy,\n\n use_parallel_joins: bool,\n\n ) -> Result<CorpusStorage> {\n\n init_locale();\n\n\n\n let query_config = aql::Config { use_parallel_joins };\n\n\n\n #[allow(clippy::mutex_atomic)]\n\n let active_background_workers = Arc::new((Mutex::new(0), Condvar::new()));\n\n let cs = CorpusStorage {\n", "file_path": "graphannis/src/annis/db/corpusstorage.rs", "rank": 24, "score": 216636.98275816953 }, { "content": "/// Load a c corpus in the legacy relANNIS format from the specified `path`.\n\n///\n\n/// Returns a tuple consisting of the corpus name and the extracted annotation graph.\n\npub fn load<F>(\n\n path: &Path,\n\n disk_based: bool,\n\n progress_callback: F,\n\n) -> Result<(String, AnnotationGraph, CorpusConfiguration)>\n\nwhere\n\n F: Fn(&str),\n\n{\n\n // convert to path\n\n let path = PathBuf::from(path);\n\n if path.is_dir() && path.exists() {\n\n // check if this is the ANNIS 3.3 import format\n\n let annis_version_path = path.join(\"annis.version\");\n\n let is_annis_33 = if annis_version_path.exists() {\n\n let mut file = File::open(&annis_version_path)?;\n\n let mut version_str = std::string::String::new();\n\n file.read_to_string(&mut version_str)?;\n\n\n\n version_str == \"3.3\"\n\n } else {\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 25, "score": 215564.2822429138 }, { "content": "pub fn create_token_node(\n\n update: &mut GraphUpdate,\n\n node_name: &str,\n\n token_value: &str,\n\n parent_document: Option<&str>,\n\n) {\n\n update\n\n .add_event(UpdateEvent::AddNode {\n\n node_name: node_name.to_string(),\n\n node_type: \"node\".to_string(),\n\n })\n\n .unwrap();\n\n update\n\n .add_event(UpdateEvent::AddNodeLabel {\n\n node_name: node_name.to_string(),\n\n anno_ns: \"annis\".to_string(),\n\n anno_name: \"tok\".to_string(),\n\n anno_value: token_value.to_string(),\n\n })\n\n .unwrap();\n", "file_path": "graphannis/src/annis/db/example_generator.rs", "rank": 26, "score": 213078.80153368376 }, { "content": "fn make_unary_operator_spec(op: ast::UnaryOpSpec) -> Arc<dyn UnaryOperatorSpec> {\n\n match op {\n\n ast::UnaryOpSpec::Arity(spec) => Arc::new(spec),\n\n }\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/aql/mod.rs", "rank": 27, "score": 210340.22320692224 }, { "content": "pub fn get_line_and_column_for_pos(\n\n pos: usize,\n\n offset_to_line: &BTreeMap<usize, usize>,\n\n) -> LineColumn {\n\n // get the offset for the position by searching for all offsets smaller than the position and taking the last one\n\n offset_to_line\n\n .range(..=pos)\n\n .rev()\n\n .map(|(offset, line)| {\n\n // column starts with 1 at line offset\n\n let column: usize = pos - offset + 1;\n\n LineColumn {\n\n line: *line,\n\n column,\n\n }\n\n })\n\n .next()\n\n .unwrap_or(LineColumn { line: 0, column: 0 })\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/aql/mod.rs", "rank": 28, "score": 209519.36123601894 }, { "content": "fn calculate_unary_outputsize(op: &dyn UnaryOperator, num_tuples: usize) -> usize {\n\n let output = match op.estimation_type() {\n\n EstimationType::Selectivity(selectivity) => {\n\n let num_tuples = num_tuples as f64;\n\n (num_tuples * selectivity).round() as usize\n\n }\n\n EstimationType::Min => num_tuples,\n\n };\n\n // always assume at least one output item otherwise 
very small selectivity can fool the planner\n\n std::cmp::max(output, 1)\n\n}\n\n\n\nimpl<'a> Filter<'a> {\n\n pub fn new_binary(\n\n exec: Box<dyn ExecutionNode<Item = Result<MatchGroup>> + 'a>,\n\n lhs_idx: usize,\n\n rhs_idx: usize,\n\n op_entry: BinaryOperatorEntry<'a>,\n\n ) -> Result<Filter<'a>> {\n\n let desc = if let Some(orig_desc) = exec.get_desc() {\n", "file_path": "graphannis/src/annis/db/exec/filter.rs", "rank": 29, "score": 207447.39248408406 }, { "content": "pub fn default_deserialize_gs<GS>(location: &Path) -> Result<GS>\n\nwhere\n\n for<'de> GS: std::marker::Sized + Deserialize<'de>,\n\n{\n\n let data_path = location.join(\"component.bin\");\n\n let f_data = std::fs::File::open(data_path)?;\n\n let input = std::io::BufReader::new(f_data);\n\n\n\n let result = bincode::deserialize_from(input)?;\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "core/src/graph/storage/mod.rs", "rank": 30, "score": 206992.06637753645 }, { "content": "pub fn create_writeable<CT: ComponentType>(\n\n graph: &Graph<CT>,\n\n orig: Option<&dyn GraphStorage>,\n\n) -> Result<Arc<dyn GraphStorage>> {\n\n if graph.disk_based {\n\n let mut result = DiskAdjacencyListStorage::new()?;\n\n if let Some(orig) = orig {\n\n result.copy(graph.get_node_annos(), orig)?;\n\n }\n\n Ok(Arc::from(result))\n\n } else {\n\n let mut result = AdjacencyListStorage::new();\n\n if let Some(orig) = orig {\n\n result.copy(graph.get_node_annos(), orig)?;\n\n }\n\n Ok(Arc::from(result))\n\n }\n\n}\n\n\n", "file_path": "core/src/graph/storage/registry.rs", "rank": 31, "score": 206064.31057366426 }, { "content": "fn component_to_relative_path<CT: ComponentType>(c: &Component<CT>) -> PathBuf {\n\n let mut p = PathBuf::new();\n\n p.push(\"gs\");\n\n p.push(c.get_type().to_string());\n\n p.push(if c.layer.is_empty() {\n\n DEFAULT_EMPTY_LAYER\n\n } else {\n\n &c.layer\n\n });\n\n p.push(c.name.as_str());\n\n p\n\n}\n\n\n", "file_path": "core/src/graph/mod.rs", "rank": 32, "score": 205594.30591839695 }, { "content": "struct ArityOperator {\n\n graphstorages: Vec<Arc<dyn GraphStorage>>,\n\n allowed_range: RangeSpec,\n\n}\n\n\n\nimpl std::fmt::Display for ArityOperator {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \":arity={}\", self.allowed_range)\n\n }\n\n}\n\n\n\nimpl UnaryOperator for ArityOperator {\n\n fn filter_match(&self, m: &Match) -> Result<bool> {\n\n let mut children: FxHashSet<NodeID> = FxHashSet::default();\n\n for gs in self.graphstorages.iter() {\n\n for out in gs.get_outgoing_edges(m.node) {\n\n let out = out?;\n\n children.insert(out);\n\n }\n\n }\n", "file_path": "graphannis/src/annis/db/aql/operators/arity.rs", "rank": 33, "score": 204763.09960734908 }, { "content": "fn get_adjacencylist_impl<CT: ComponentType>(db: &Graph<CT>, stats: &GraphStatistic) -> GSInfo {\n\n if db.disk_based {\n\n create_info_diskadjacency()\n\n } else {\n\n // check if a large percentage of nodes are part of the graph storage\n\n if let Ok(Some(largest_node_id)) = db.node_annos.get_largest_item() {\n\n if stats.max_fan_out <= 1 && (stats.nodes as f64 / largest_node_id as f64) >= 0.75 {\n\n return create_info::<DenseAdjacencyListStorage>();\n\n }\n\n }\n\n\n\n create_info::<AdjacencyListStorage>()\n\n }\n\n}\n\n\n", "file_path": "core/src/graph/storage/registry.rs", "rank": 34, "score": 202596.1905016259 }, { "content": "#[derive(Clone, PartialEq, Eq, Hash, MallocSizeOf, PartialOrd, Ord, Serialize, Deserialize)]\n\nstruct TextKey {\n\n id: u32,\n\n corpus_ref: Option<u32>,\n\n}\n\n\n\nimpl KeySerializer for 
TextKey {\n\n fn create_key(&self) -> KeyVec {\n\n let mut result = KeyVec::new();\n\n result.extend(self.id.to_be_bytes());\n\n if let Some(corpus_ref) = self.corpus_ref {\n\n result.extend(corpus_ref.to_be_bytes());\n\n }\n\n result\n\n }\n\n\n\n fn parse_key(\n\n key: &[u8],\n\n ) -> std::result::Result<Self, Box<dyn std::error::Error + Send + Sync>> {\n\n let id_size = std::mem::size_of::<u32>();\n\n let id = u32::from_be_bytes(key[0..id_size].try_into()?);\n\n let corpus_ref = if key.len() == id_size * 2 {\n\n Some(u32::from_be_bytes(key[id_size..].try_into()?))\n\n } else {\n\n None\n\n };\n\n\n\n Ok(TextKey { id, corpus_ref })\n\n }\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 35, "score": 201972.2104705001 }, { "content": "struct PreparationResult {\n\n query: Disjunction,\n\n db_entry: Arc<RwLock<CacheEntry>>,\n\n}\n\n\n\n/// Definition of a single attribute of a frequency query.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct FrequencyDefEntry {\n\n /// The namespace of the annotation from which the attribute value is generated.\n\n #[serde(default)]\n\n pub ns: Option<String>,\n\n /// The name of the annotation from which the attribute value is generated.\n\n pub name: String,\n\n /// The name of the query node from which the attribute value is generated.\n\n pub node_ref: String,\n\n}\n\n\n\nimpl FromStr for FrequencyDefEntry {\n\n type Err = GraphAnnisError;\n\n fn from_str(s: &str) -> std::result::Result<FrequencyDefEntry, Self::Err> {\n", "file_path": "graphannis/src/annis/db/corpusstorage.rs", "rank": 36, "score": 201856.3064072727 }, { "content": "fn create_lockfile_for_directory(db_dir: &Path) -> Result<File> {\n\n std::fs::create_dir_all(&db_dir).map_err(|e| CorpusStorageError::LockCorpusDirectory {\n\n path: db_dir.to_string_lossy().to_string(),\n\n source: e,\n\n })?;\n\n let lock_file_path = db_dir.join(\"db.lock\");\n\n // check if we can get the file lock\n\n let lock_file = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(true)\n\n .open(lock_file_path.as_path())\n\n .map_err(|e| CorpusStorageError::LockCorpusDirectory {\n\n path: db_dir.to_string_lossy().to_string(),\n\n source: e,\n\n })?;\n\n lock_file\n\n .try_lock_exclusive()\n\n .map_err(|e| CorpusStorageError::LockCorpusDirectory {\n\n path: db_dir.to_string_lossy().to_string(),\n\n source: e,\n\n })?;\n\n\n\n Ok(lock_file)\n\n}\n", "file_path": "graphannis/src/annis/db/corpusstorage.rs", "rank": 37, "score": 201296.9204700286 }, { "content": "pub fn default_serialize_gs<GS>(gs: &GS, location: &Path) -> Result<()>\n\nwhere\n\n GS: Serialize,\n\n{\n\n let data_path = location.join(\"component.bin\");\n\n let f_data = std::fs::File::create(&data_path)?;\n\n let mut writer = std::io::BufWriter::new(f_data);\n\n bincode::serialize_into(&mut writer, gs)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "core/src/graph/storage/mod.rs", "rank": 38, "score": 200218.5164373244 }, { "content": "pub fn get_optimal_impl_heuristic<CT: ComponentType>(\n\n db: &Graph<CT>,\n\n stats: &GraphStatistic,\n\n) -> GSInfo {\n\n if stats.max_depth <= 1 {\n\n // if we don't have any deep graph structures an adjencency list is always fasted (and has no overhead)\n\n return get_adjacencylist_impl(db, stats);\n\n } else if stats.rooted_tree {\n\n if stats.max_fan_out <= 1 {\n\n return get_linear_by_size(stats);\n\n } else {\n\n return get_prepostorder_by_size(stats);\n\n }\n\n // it might be still wise to use pre/post order if the graph is \"almost\" a tree, thus\n\n // does not have 
many exceptions\n\n } else if !stats.cyclic && stats.dfs_visit_ratio <= 1.03 {\n\n // there is no more than 3% overhead\n\n // TODO: how to determine the border?\n\n return get_prepostorder_by_size(stats);\n\n }\n\n\n\n // fallback\n\n get_adjacencylist_impl(db, stats)\n\n}\n\n\n", "file_path": "core/src/graph/storage/registry.rs", "rank": 39, "score": 199148.70616964152 }, { "content": "/// Takes a match identifier (which includes the matched annotation name) and returns the node name.\n\npub fn node_names_from_match(match_line: &str) -> Vec<String> {\n\n let mut result = Vec::default();\n\n\n\n for m in match_line.split_whitespace() {\n\n let elements: Vec<&str> = m.splitn(3, \"::\").collect();\n\n if let Some(last_element) = elements.last() {\n\n result.push(last_element.to_string());\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct TimeoutCheck {\n\n start_time: Instant,\n\n timeout: Option<Duration>,\n\n}\n\n\n\nimpl TimeoutCheck {\n", "file_path": "graphannis/src/annis/util/mod.rs", "rank": 40, "score": 198734.1518425605 }, { "content": "fn insert_info<GS: 'static>(registry: &mut HashMap<String, GSInfo>)\n\nwhere\n\n for<'de> GS: GraphStorage + Default + Deserialize<'de>,\n\n{\n\n let info = create_info::<GS>();\n\n registry.insert(info.id.clone(), info);\n\n}\n\n\n", "file_path": "core/src/graph/storage/registry.rs", "rank": 41, "score": 198145.13010249328 }, { "content": "struct TextPosTable {\n\n token_by_left_textpos: DiskMap<TextProperty, NodeID>,\n\n token_by_right_textpos: DiskMap<TextProperty, NodeID>,\n\n // maps a token index to an node ID\n\n token_by_index: DiskMap<TextProperty, NodeID>,\n\n // maps a token node id to the token index\n\n token_to_index: DiskMap<NodeID, TextProperty>,\n\n //// Map as node to it's \"left\" value.\n\n //// This is used for alignment and can be the token or character index.\n\n node_to_left: DiskMap<NodeID, TextProperty>,\n\n /// Map as node to it's \"right\" value.\n\n //// This is used for alignment and can be the token or character index.\n\n node_to_right: DiskMap<NodeID, TextProperty>,\n\n /// Map a node to its left character index.\n\n node_to_left_char: DiskMap<NodeID, TextProperty>,\n\n /// Map a node to its right character index.\n\n node_to_right_char: DiskMap<NodeID, TextProperty>,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 42, "score": 197842.841570695 }, { "content": "#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, MallocSizeOf, Serialize, Deserialize)]\n\nstruct NodeByTextEntry {\n\n text_id: u32,\n\n corpus_ref: u32,\n\n node_id: NodeID,\n\n}\n\n\n\nimpl KeySerializer for NodeByTextEntry {\n\n fn create_key(&self) -> KeyVec {\n\n let mut result = KeyVec::new();\n\n result.extend(self.text_id.to_be_bytes());\n\n result.extend(self.corpus_ref.to_be_bytes());\n\n result.extend(self.node_id.to_be_bytes());\n\n result\n\n }\n\n\n\n fn parse_key(\n\n key: &[u8],\n\n ) -> std::result::Result<Self, Box<dyn std::error::Error + Send + Sync>> {\n\n let u32_size = std::mem::size_of::<u32>();\n\n let text_id = u32::from_be_bytes(key[0..u32_size].try_into()?);\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 43, "score": 197842.841570695 }, { "content": "struct LoadRankResult {\n\n components_by_pre: DiskMap<u32, Component<AnnotationComponentType>>,\n\n edges_by_pre: DiskMap<u32, Edge>,\n\n text_coverage_edges: DiskMap<Edge, bool>,\n\n /// Some rank entries have NULL as parent: we don't add an edge but remember the component name\n\n /// for re-creating omitted coverage 
edges with the correct name.\n\n component_for_parentless_target_node: DiskMap<NodeID, Component<AnnotationComponentType>>,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 44, "score": 197729.79159065627 }, { "content": "struct LoadNodeResult {\n\n nodes_by_text: DiskMap<NodeByTextEntry, bool>,\n\n id_to_node_name: DiskMap<NodeID, String>,\n\n textpos_table: TextPosTable,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 45, "score": 197729.79159065627 }, { "content": "#[derive(Clone)]\n\nstruct Near<'a> {\n\n gs_order: Arc<dyn GraphStorage>,\n\n tok_helper: TokenHelper<'a>,\n\n spec: NearSpec,\n\n}\n\n\n\nimpl BinaryOperatorSpec for NearSpec {\n\n fn necessary_components(\n\n &self,\n\n db: &AnnotationGraph,\n\n ) -> HashSet<Component<AnnotationComponentType>> {\n\n let component_order = Component::new(\n\n AnnotationComponentType::Ordering,\n\n ANNIS_NS.into(),\n\n self.segmentation\n\n .as_ref()\n\n .map_or_else(smartstring::alias::String::default, |s| s.into()),\n\n );\n\n\n\n let mut v = HashSet::default();\n", "file_path": "graphannis/src/annis/db/aql/operators/near.rs", "rank": 46, "score": 193849.67860507444 }, { "content": "struct NodeTabParseResult {\n\n nodes_by_text: DiskMap<NodeByTextEntry, bool>,\n\n missing_seg_span: DiskMap<NodeID, String>,\n\n id_to_node_name: DiskMap<NodeID, String>,\n\n textpos_table: TextPosTable,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 47, "score": 193801.61965697558 }, { "content": "struct LoadNodeAndCorpusResult {\n\n toplevel_corpus_name: String,\n\n id_to_node_name: DiskMap<NodeID, String>,\n\n textpos_table: TextPosTable,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 48, "score": 193801.61965697558 }, { "content": "pub fn import<CT: ComponentType, R: Read, F>(\n\n input: R,\n\n disk_based: bool,\n\n progress_callback: F,\n\n) -> Result<(Graph<CT>, Option<String>)>\n\nwhere\n\n F: Fn(&str),\n\n{\n\n // Always buffer the read operations\n\n let mut input = BufReader::new(input);\n\n let mut g = Graph::new(disk_based)?;\n\n let mut updates = GraphUpdate::default();\n\n let mut edge_updates = GraphUpdate::default();\n\n\n\n // read in all nodes and edges, collecting annotation keys on the fly\n\n progress_callback(\"reading GraphML\");\n\n let config = read_graphml::<CT, BufReader<R>, F>(\n\n &mut input,\n\n &mut updates,\n\n &mut edge_updates,\n", "file_path": "core/src/graph/serialization/graphml.rs", "rank": 49, "score": 189594.00403169155 }, { "content": "#[derive(Debug)]\n\nstruct UnaryOperatorSpecEntry {\n\n op: Arc<dyn UnaryOperatorSpec>,\n\n idx: usize,\n\n}\n\n\n\npub struct BinaryOperatorEntry<'a> {\n\n pub op: BinaryOperator<'a>,\n\n pub args: BinaryOperatorArguments,\n\n}\n\n\n\npub struct UnaryOperatorEntry<'a> {\n\n pub op: Box<dyn UnaryOperator + 'a>,\n\n pub node_nr: usize,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct NodeSearchSpecEntry {\n\n pub var: String,\n\n pub spec: NodeSearchSpec,\n\n pub optional: bool,\n", "file_path": "graphannis/src/annis/db/aql/conjunction.rs", "rank": 50, "score": 189568.32488523962 }, { "content": "#[derive(Debug)]\n\nstruct BinaryOperatorSpecEntry {\n\n op: Arc<dyn BinaryOperatorSpec>,\n\n args: BinaryOperatorArguments,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/aql/conjunction.rs", "rank": 51, "score": 189568.32488523962 }, { "content": "fn compare_document_path(\n\n p1: &str,\n\n p2: &str,\n\n collation: CollationType,\n\n quirks_mode: bool,\n\n) -> std::cmp::Ordering {\n\n let it1 
= p1.split('/').filter(|s| !s.is_empty());\n\n let it2 = p2.split('/').filter(|s| !s.is_empty());\n\n\n\n if quirks_mode {\n\n // only use the document name in quirks mode and make sure it is decoded from a possible percentage encoding\n\n let path1: Vec<&str> = it1.collect();\n\n let path2: Vec<&str> = it2.collect();\n\n if let (Some(doc1), Some(doc2)) = (path1.last(), path2.last()) {\n\n let doc1: Cow<str> =\n\n percent_encoding::percent_decode(doc1.as_bytes()).decode_utf8_lossy();\n\n let doc2: Cow<str> =\n\n percent_encoding::percent_decode(doc2.as_bytes()).decode_utf8_lossy();\n\n let string_cmp = compare_string(&doc1, &doc2, collation);\n\n if string_cmp != std::cmp::Ordering::Equal {\n", "file_path": "graphannis/src/annis/db/sort_matches.rs", "rank": 52, "score": 189512.10665059197 }, { "content": "fn update_components_for_nodes(\n\n node2component: &mut BTreeMap<usize, usize>,\n\n from: usize,\n\n to: usize,\n\n) {\n\n if from == to {\n\n // nothing todo\n\n return;\n\n }\n\n\n\n let mut node_ids_to_update: Vec<usize> = Vec::new();\n\n for (k, v) in node2component.iter() {\n\n if *v == from {\n\n node_ids_to_update.push(*k);\n\n }\n\n }\n\n\n\n // set the component id for each node of the other component\n\n for nid in &node_ids_to_update {\n\n node2component.insert(*nid, to);\n\n }\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/aql/conjunction.rs", "rank": 53, "score": 189477.67148518658 }, { "content": "fn partition<T, F>(items: &mut [T], order_func: &F) -> Result<usize>\n\nwhere\n\n F: Fn(&T, &T) -> Result<std::cmp::Ordering>,\n\n{\n\n let r = items.len() - 1;\n\n\n\n let mut i = 0;\n\n\n\n for j in 0..(items.len() - 1) {\n\n let comparision = order_func(&items[j], &items[r])?;\n\n match comparision {\n\n std::cmp::Ordering::Less | std::cmp::Ordering::Equal => {\n\n items.swap(i, j);\n\n i += 1;\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n items.swap(i, r);\n", "file_path": "graphannis/src/annis/util/quicksort.rs", "rank": 54, "score": 188923.0479129291 }, { "content": "struct NonExistingUnaryOperatorIndex<'a> {\n\n target: NodeSearchSpec,\n\n negated_op: Box<dyn BinaryOperatorIndex + 'a>,\n\n graph: &'a AnnotationGraph,\n\n op_estimation: EstimationType,\n\n}\n\n\n\nimpl<'a> Display for NonExistingUnaryOperatorIndex<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \" !{} {}\", self.negated_op, self.target)?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<'a> UnaryOperator for NonExistingUnaryOperatorIndex<'a> {\n\n fn filter_match(&self, m: &graphannis_core::annostorage::Match) -> Result<bool> {\n\n // Extract the annotation keys for the matches\n\n let it = self.negated_op.retrieve_matches(m).fuse().map(|m| match m {\n\n Ok(m) => Ok(m.node),\n\n Err(e) => {\n", "file_path": "graphannis/src/annis/db/aql/operators/non_existing.rs", "rank": 55, "score": 188021.95913656504 }, { "content": "struct NonExistingUnaryOperatorFilter<'a> {\n\n target: NodeSearchSpec,\n\n target_left: bool,\n\n negated_op: BinaryOperator<'a>,\n\n graph: &'a AnnotationGraph,\n\n op_estimation: EstimationType,\n\n}\n\n\n\nimpl<'a> Display for NonExistingUnaryOperatorFilter<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n if self.target_left {\n\n write!(f, \" !({} {} x)\", self.target, self.negated_op)?;\n\n } else {\n\n write!(f, \" !(x {} {})\", self.negated_op, self.target,)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<'a> UnaryOperator for NonExistingUnaryOperatorFilter<'a> {\n", "file_path": 
"graphannis/src/annis/db/aql/operators/non_existing.rs", "rank": 56, "score": 188021.95913656504 }, { "content": "fn parse_text_tab<F>(\n\n path: &Path,\n\n is_annis_33: bool,\n\n progress_callback: &F,\n\n) -> Result<DiskMap<TextKey, Text>>\n\nwhere\n\n F: Fn(&str),\n\n{\n\n let mut text_tab_path = PathBuf::from(path);\n\n text_tab_path.push(if is_annis_33 {\n\n \"text.annis\"\n\n } else {\n\n \"text.tab\"\n\n });\n\n\n\n progress_callback(&format!(\n\n \"loading {}\",\n\n text_tab_path.to_str().unwrap_or_default()\n\n ));\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 57, "score": 186558.62963878008 }, { "content": "fn load_component_tab<F>(\n\n path: &Path,\n\n is_annis_33: bool,\n\n progress_callback: &F,\n\n) -> Result<BTreeMap<u32, Component<AnnotationComponentType>>>\n\nwhere\n\n F: Fn(&str),\n\n{\n\n let mut component_tab_path = PathBuf::from(path);\n\n component_tab_path.push(if is_annis_33 {\n\n \"component.annis\"\n\n } else {\n\n \"component.tab\"\n\n });\n\n\n\n progress_callback(&format!(\n\n \"loading {}\",\n\n component_tab_path.to_str().unwrap_or_default()\n\n ));\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 58, "score": 186489.87314601487 }, { "content": "fn add_annotation_key(keys: &mut BTreeMap<String, AnnoKey>, attributes: Attributes) -> Result<()> {\n\n // resolve the ID to the fully qualified annotation name\n\n let mut id: Option<String> = None;\n\n let mut anno_key: Option<AnnoKey> = None;\n\n\n\n for att in attributes {\n\n let att = att?;\n\n\n\n let att_value = String::from_utf8_lossy(&att.value);\n\n\n\n match att.key {\n\n b\"id\" => {\n\n id = Some(att_value.to_string());\n\n }\n\n b\"attr.name\" => {\n\n let (ns, name) = split_qname(att_value.as_ref());\n\n anno_key = Some(AnnoKey {\n\n ns: ns.unwrap_or(\"\").into(),\n\n name: name.into(),\n\n });\n", "file_path": "core/src/graph/serialization/graphml.rs", "rank": 59, "score": 186198.48903515114 }, { "content": "fn randomized_partition<T, F>(items: &mut [T], order_func: &F) -> Result<usize>\n\nwhere\n\n F: Fn(&T, &T) -> Result<std::cmp::Ordering>,\n\n{\n\n let items_len = items.len();\n\n if items_len == 0 {\n\n Ok(0)\n\n } else {\n\n let mut rng = rand::thread_rng();\n\n let i = rng.gen_range(0..items_len);\n\n items.swap(items_len - 1, i);\n\n partition(items, order_func)\n\n }\n\n}\n\n\n", "file_path": "graphannis/src/annis/util/quicksort.rs", "rank": 60, "score": 186122.25030701715 }, { "content": "struct BaseEdgeOp {\n\n gs: Vec<Arc<dyn GraphStorage>>,\n\n spec: BaseEdgeOpSpec,\n\n max_nodes_estimate: usize,\n\n inverse: bool,\n\n}\n\n\n\nimpl BaseEdgeOp {\n\n pub fn new(db: &AnnotationGraph, spec: BaseEdgeOpSpec) -> Result<BaseEdgeOp> {\n\n let mut gs: Vec<Arc<dyn GraphStorage>> = Vec::new();\n\n for c in &spec.components {\n\n let gs_for_component = db.get_graphstorage(c).ok_or_else(|| {\n\n GraphAnnisError::ImpossibleSearch(format!(\"Component {} does not exist\", &c))\n\n })?;\n\n gs.push(gs_for_component);\n\n }\n\n Ok(BaseEdgeOp {\n\n gs,\n\n spec,\n\n max_nodes_estimate: db.get_node_annos().guess_max_count(\n", "file_path": "graphannis/src/annis/db/aql/operators/edge_op.rs", "rank": 61, "score": 186007.45043832384 }, { "content": "fn make_binary_operator_spec(\n\n op: ast::BinaryOpSpec,\n\n spec_left: NodeSearchSpec,\n\n spec_right: NodeSearchSpec,\n\n) -> Result<Arc<dyn BinaryOperatorSpec>> {\n\n let op_spec: Arc<dyn BinaryOperatorSpec> = match op {\n\n ast::BinaryOpSpec::Dominance(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::Pointing(spec) => 
Arc::new(spec),\n\n ast::BinaryOpSpec::Precedence(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::Near(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::Overlap(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::IdenticalCoverage(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::PartOfSubCorpus(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::Inclusion(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::LeftAlignment(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::RightAlignment(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::IdenticalNode(spec) => Arc::new(spec),\n\n ast::BinaryOpSpec::ValueComparison(cmp) => match cmp {\n\n ast::ComparisonOperator::Equal => Arc::new(EqualValueSpec {\n\n spec_left,\n", "file_path": "graphannis/src/annis/db/aql/mod.rs", "rank": 62, "score": 185214.75531509225 }, { "content": "fn postgresql_import_reader(path: &Path) -> std::result::Result<csv::Reader<File>, csv::Error> {\n\n csv::ReaderBuilder::new()\n\n .has_headers(false)\n\n .delimiter(b'\\t')\n\n .quote(0) // effectivly disable quoting\n\n .from_path(path)\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 63, "score": 183258.95578419924 }, { "content": "fn calculate_automatic_coverage_edges<F>(\n\n updates: &mut GraphUpdate,\n\n load_node_and_corpus_result: &LoadNodeAndCorpusResult,\n\n load_rank_result: &LoadRankResult,\n\n progress_callback: &F,\n\n) -> Result<()>\n\nwhere\n\n F: Fn(&str),\n\n{\n\n // add explicit coverage edges for each node in the special annis namespace coverage component\n\n progress_callback(\"calculating the automatically generated Coverage edges\");\n\n\n\n for item in load_node_and_corpus_result\n\n .textpos_table\n\n .node_to_left\n\n .iter()?\n\n {\n\n let (n, textprop) = item?;\n\n // Do not calculate automatic coverage edges for token\n\n if textprop.segmentation.is_empty()\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 64, "score": 182841.26311301492 }, { "content": "#[derive(Clone, Debug)]\n\nstruct BaseEdgeOpSpec {\n\n pub components: Vec<Component<AnnotationComponentType>>,\n\n pub dist: RangeSpec,\n\n pub edge_anno: Option<EdgeAnnoSearchSpec>,\n\n pub is_reflexive: bool,\n\n pub op_str: Option<String>,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/aql/operators/edge_op.rs", "rank": 65, "score": 182606.2209849332 }, { "content": "fn check_edge_annotation(\n\n edge_anno: &Option<EdgeAnnoSearchSpec>,\n\n gs: &dyn GraphStorage,\n\n source: NodeID,\n\n target: NodeID,\n\n) -> Result<bool> {\n\n match edge_anno {\n\n Some(EdgeAnnoSearchSpec::ExactValue { ns, name, val }) => {\n\n for a in gs\n\n .get_anno_storage()\n\n .get_annotations_for_item(&Edge { source, target })?\n\n {\n\n if name != &a.key.name {\n\n continue;\n\n }\n\n if let Some(template_ns) = ns {\n\n if template_ns != &a.key.ns {\n\n continue;\n\n }\n\n }\n", "file_path": "graphannis/src/annis/db/aql/operators/edge_op.rs", "rank": 66, "score": 181665.28777824092 }, { "content": "/// Classic implementation of a quicksort algorithm, see Cormen et al. 2009 \"Introduction to Algorithms\" p. 
170ff\n\n/// for the specific algorithm used as a base here.\n\n///\n\n/// The algorithm has been modified to accept a `max_size` parameter which allows to abort the algorithm\n\n/// if at least `max_size` items at the beginning of the vector have been sorted.\n\n///\n\n/// The algorithm used a randomized pivot element.\n\nfn quicksort<T, F>(items: &mut [T], max_size: usize, order_func: &F) -> Result<()>\n\nwhere\n\n F: Fn(&T, &T) -> Result<std::cmp::Ordering>,\n\n{\n\n if items.len() > 1 {\n\n let q = randomized_partition(items, order_func)?;\n\n let (lo, hi) = items.split_at_mut(q);\n\n\n\n quicksort(lo, max_size, order_func)?;\n\n if q < max_size {\n\n // only sort right partition if the left partition is not large enough\n\n quicksort(hi, max_size, order_func)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "graphannis/src/annis/util/quicksort.rs", "rank": 67, "score": 180832.03229611617 }, { "content": "#[allow(clippy::borrowed_box)]\n\nfn error_kind(e: &Box<dyn StdError>) -> &'static str {\n\n if let Some(annis_err) = e.downcast_ref::<errors::GraphAnnisError>() {\n\n annis_err.into()\n\n } else {\n\n // Check for several known types\n\n if e.is::<std::io::Error>() {\n\n \"IO\"\n\n } else if e.is::<log::SetLoggerError>() {\n\n \"SetLoggerError\"\n\n } else {\n\n \"Unknown\"\n\n }\n\n }\n\n}\n\n\n", "file_path": "capi/src/cerror.rs", "rank": 68, "score": 179895.64476515193 }, { "content": "pub fn export<CT: ComponentType, W: std::io::Write, F>(\n\n graph: &Graph<CT>,\n\n graph_configuration: Option<&str>,\n\n output: W,\n\n progress_callback: F,\n\n) -> Result<()>\n\nwhere\n\n F: Fn(&str),\n\n{\n\n // Always buffer the output\n\n let output = BufWriter::new(output);\n\n let mut writer = Writer::new_with_indent(output, b' ', 4);\n\n\n\n // Add XML declaration\n\n let xml_decl = BytesDecl::new(b\"1.0\", Some(b\"UTF-8\"), None);\n\n writer.write_event(Event::Decl(xml_decl))?;\n\n\n\n // Always write the root element\n\n writer.write_event(Event::Start(BytesStart::borrowed_name(b\"graphml\")))?;\n\n\n", "file_path": "core/src/graph/serialization/graphml.rs", "rank": 69, "score": 178743.20954924976 }, { "content": "/// Classic implementation of a quicksort algorithm, see Cormen et al. 2009 \"Introduction to Algorithms\" p. 
170ff\n\n/// for the specific algorithm used as a base here.\n\n///\n\n/// The algorithm has been modified to accept a `max_size` parameter which allows to abort the algorithm\n\n/// if at least `max_size` items at the beginning of the vector have been sorted.\n\n///\n\n/// The algorithm used a randomized pivot element and is executed in parallel.\n\nfn quicksort_parallel<T, F>(items: &mut [T], max_size: usize, order_func: &F) -> Result<()>\n\nwhere\n\n T: Send,\n\n F: Fn(&T, &T) -> Result<std::cmp::Ordering> + Sync,\n\n{\n\n if items.len() > 1 {\n\n let q = randomized_partition(items, order_func)?;\n\n let (lo, hi) = items.split_at_mut(q);\n\n\n\n let result = rayon::join(\n\n || quicksort_parallel(lo, max_size, order_func),\n\n || -> Result<()> {\n\n if q < max_size {\n\n // only sort right partition if the left partition is not large enough\n\n quicksort_parallel(hi, max_size, order_func)?;\n\n }\n\n Ok(())\n\n },\n\n );\n\n // Return if any of the closures produced an error\n\n result.0?;\n\n result.1?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "graphannis/src/annis/util/quicksort.rs", "rank": 70, "score": 178246.11915499135 }, { "content": "pub fn create_error_list(e: Box<dyn StdError>) -> ErrorList {\n\n let mut result = vec![Error {\n\n msg: CString::new(e.to_string()).unwrap_or_default(),\n\n kind: CString::new(error_kind(&e)).unwrap_or_default(),\n\n }];\n\n let cause_it = CauseIterator {\n\n current: e.source(),\n\n };\n\n for e in cause_it {\n\n result.push(e)\n\n }\n\n result\n\n}\n\n\n\nimpl From<log::SetLoggerError> for Error {\n\n fn from(e: log::SetLoggerError) -> Error {\n\n if let Ok(error_msg) = CString::new(e.to_string()) {\n\n Error {\n\n msg: error_msg,\n\n kind: CString::new(\"SetLoggerError\").unwrap(),\n", "file_path": "capi/src/cerror.rs", "rank": 71, "score": 173851.5871005625 }, { "content": "fn get_corpus_path(cid: u32, corpus_table: &ParsedCorpusTable) -> Result<String> {\n\n let mut result: String = get_parent_path(cid, corpus_table)?.into();\n\n let corpus = corpus_table\n\n .corpus_by_id\n\n .get(&cid)\n\n .ok_or(RelAnnisError::CorpusNotFound(cid))?;\n\n result.push_str(\"/\");\n\n result.push_str(&corpus.normalized_name);\n\n Ok(result)\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 72, "score": 172441.37266502908 }, { "content": "type MatchCandidate = (Arc<MatchGroup>, Arc<MatchGroup>, Sender<Result<MatchGroup>>);\n\n\n\nimpl<'a> NestedLoop<'a> {\n\n pub fn new(\n\n op_entry: BinaryOperatorEntry<'a>,\n\n lhs: Box<dyn ExecutionNode<Item = Result<MatchGroup>> + 'a>,\n\n rhs: Box<dyn ExecutionNode<Item = Result<MatchGroup>> + 'a>,\n\n lhs_idx: usize,\n\n rhs_idx: usize,\n\n ) -> Result<NestedLoop<'a>> {\n\n let mut left_is_outer = true;\n\n if let (Some(desc_lhs), Some(desc_rhs)) = (lhs.get_desc(), rhs.get_desc()) {\n\n if let (&Some(ref cost_lhs), &Some(ref cost_rhs)) = (&desc_lhs.cost, &desc_rhs.cost) {\n\n if cost_lhs.output > cost_rhs.output {\n\n left_is_outer = false;\n\n }\n\n }\n\n }\n\n\n\n let processed_func = |_, out_lhs: usize, out_rhs: usize| {\n", "file_path": "graphannis/src/annis/db/exec/parallel/nestedloop.rs", "rank": 73, "score": 171317.74218345358 }, { "content": "fn find_all_children_for_or(expr: &ast::Expr, followers: &mut Vec<ast::Expr>) {\n\n match expr {\n\n Expr::Or(lhs, rhs) => {\n\n find_all_children_for_or(lhs, followers);\n\n find_all_children_for_or(rhs, followers);\n\n }\n\n _ => {\n\n // add the expression itself\n\n followers.push(expr.clone());\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"graphannis/src/annis/db/aql/mod.rs", "rank": 74, "score": 170776.18210713242 }, { "content": "fn find_all_children_for_and(expr: &ast::Expr, followers: &mut Vec<ast::Literal>) {\n\n match expr {\n\n Expr::Terminal(l) => {\n\n followers.push(l.clone());\n\n }\n\n Expr::And(lhs, rhs) => {\n\n find_all_children_for_and(lhs, followers);\n\n find_all_children_for_and(rhs, followers);\n\n }\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/aql/mod.rs", "rank": 75, "score": 170776.18210713242 }, { "content": "fn calculate_outputsize<Op: BinaryOperatorBase + ?Sized>(\n\n op: &Op,\n\n cost_lhs: &CostEstimate,\n\n cost_rhs: &CostEstimate,\n\n) -> Result<usize> {\n\n let output = match op.estimation_type()? {\n\n EstimationType::Selectivity(selectivity) => {\n\n let num_tuples = (cost_lhs.output * cost_rhs.output) as f64;\n\n if let Some(edge_sel) = op.edge_anno_selectivity()? {\n\n (num_tuples * selectivity * edge_sel).round() as usize\n\n } else {\n\n (num_tuples * selectivity).round() as usize\n\n }\n\n }\n\n EstimationType::Min => std::cmp::min(cost_lhs.output, cost_rhs.output),\n\n };\n\n // always assume at least one output item otherwise very small selectivity can fool the planner\n\n Ok(std::cmp::max(output, 1))\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/exec/mod.rs", "rank": 76, "score": 167541.42994458732 }, { "content": "pub fn list_groups(conn: &SqliteConnection) -> Result<Vec<Group>, ServiceError> {\n\n use crate::schema::corpus_groups::dsl::*;\n\n use crate::schema::groups::dsl::*;\n\n\n\n let result = conn.transaction::<_, ServiceError, _>(move || {\n\n let mut result: Vec<Group> = Vec::new();\n\n // Collect the corpora for each group name\n\n for group_name in groups.select(name).load::<String>(conn)? {\n\n let corpora = corpus_groups\n\n .select(corpus)\n\n .filter(group.eq(&group_name))\n\n .load::<String>(conn)?;\n\n result.push(Group {\n\n corpora,\n\n name: group_name.clone(),\n\n })\n\n }\n\n Ok(result)\n\n })?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "webservice/src/actions.rs", "rank": 77, "score": 167395.7276586648 }, { "content": "fn create_info<GS: 'static>() -> GSInfo\n\nwhere\n\n for<'de> GS: GraphStorage + Default + Deserialize<'de>,\n\n{\n\n // create an instance to get the name\n\n let instance = GS::default();\n\n\n\n GSInfo {\n\n id: instance.serialization_id(),\n\n constructor: || Ok(Arc::new(GS::default())),\n\n deserialize_func: |location| Ok(Arc::new(GS::load_from(location)?)),\n\n }\n\n}\n\n\n", "file_path": "core/src/graph/storage/registry.rs", "rank": 78, "score": 167063.6452521984 }, { "content": "pub fn delete_group(group_name: &str, conn: &SqliteConnection) -> Result<(), ServiceError> {\n\n use crate::schema::groups::dsl;\n\n\n\n diesel::delete(dsl::groups)\n\n .filter(dsl::name.eq(group_name))\n\n .execute(conn)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "webservice/src/actions.rs", "rank": 79, "score": 165051.66240690323 }, { "content": "pub fn add_or_replace_group(group: Group, conn: &SqliteConnection) -> Result<(), ServiceError> {\n\n use crate::schema::corpus_groups::dsl as cg_dsl;\n\n use crate::schema::groups::dsl as g_dsl;\n\n\n\n conn.transaction::<_, ServiceError, _>(move || {\n\n // Delete all group corpus relations for this group name\n\n diesel::delete(cg_dsl::corpus_groups)\n\n .filter(cg_dsl::group.eq(group.name.as_str()))\n\n .execute(conn)?;\n\n\n\n // Delete any possible group with the same name\n\n diesel::delete(g_dsl::groups)\n\n .filter(g_dsl::name.eq(&group.name))\n\n .execute(conn)?;\n\n // Insert the 
group with its name\n\n diesel::insert_into(g_dsl::groups)\n\n .values(models::Group {\n\n name: group.name.clone(),\n\n })\n\n .execute(conn)?;\n", "file_path": "webservice/src/actions.rs", "rank": 80, "score": 165051.66240690323 }, { "content": "pub fn regex_full_match(pattern: &str) -> String {\n\n let mut full_match_pattern = String::new();\n\n full_match_pattern.push_str(r\"\\A(\");\n\n full_match_pattern.push_str(pattern);\n\n full_match_pattern.push_str(r\")\\z\");\n\n\n\n full_match_pattern\n\n}\n", "file_path": "core/src/util/mod.rs", "rank": 81, "score": 164036.62107155728 }, { "content": "pub fn size_of_pathbuf(val: &Path, ops: &mut MallocSizeOfOps) -> usize {\n\n // The path uses an OsString internally, use this for the estimation\n\n val.as_os_str().size_of(ops)\n\n}\n\n\n", "file_path": "core/src/util/memory_estimation.rs", "rank": 82, "score": 162948.88381577044 }, { "content": "fn get_parent_path(cid: u32, corpus_table: &ParsedCorpusTable) -> Result<std::string::String> {\n\n let corpus = corpus_table\n\n .corpus_by_id\n\n .get(&cid)\n\n .ok_or(RelAnnisError::CorpusNotFound(cid))?;\n\n let pre = corpus.pre;\n\n let post = corpus.post;\n\n\n\n Ok(corpus_table\n\n .corpus_by_preorder\n\n .range(0..pre)\n\n .filter_map(|(_, cid)| corpus_table.corpus_by_id.get(cid))\n\n .filter(|parent_corpus| post < parent_corpus.post)\n\n .map(|parent_corpus| parent_corpus.normalized_name.clone())\n\n .join(\"/\"))\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 83, "score": 162725.22129724684 }, { "content": "fn split_path_and_nodename(full_node_name: &str) -> (&str, &str) {\n\n full_node_name\n\n .rsplit_once('#')\n\n .unwrap_or((full_node_name, \"\"))\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/sort_matches.rs", "rank": 84, "score": 160231.63092980906 }, { "content": "fn component_path<CT: ComponentType>(\n\n location: &Option<PathBuf>,\n\n c: &Component<CT>,\n\n) -> Option<PathBuf> {\n\n match location {\n\n Some(ref loc) => {\n\n let mut p = PathBuf::from(loc);\n\n // don't use the backup-folder per default\n\n p.push(\"current\");\n\n p.push(component_to_relative_path(c));\n\n Some(p)\n\n }\n\n None => None,\n\n }\n\n}\n\n\n\nimpl<CT: ComponentType> Graph<CT> {\n\n /// Create a new and empty instance without any location on the disk.\n\n pub fn new(disk_based: bool) -> Result<Self> {\n\n let node_annos: Box<dyn AnnotationStorage<NodeID>> = if disk_based {\n", "file_path": "core/src/graph/mod.rs", "rank": 85, "score": 160093.9305446043 }, { "content": "/// A binary operator that can be used in an [`IndexJoin`](crate::annis::db::exec::indexjoin::IndexJoin).\n\npub trait BinaryOperatorIndex: BinaryOperatorBase {\n\n fn retrieve_matches<'a>(&'a self, lhs: &Match) -> Box<dyn Iterator<Item = Result<Match>> + 'a>;\n\n\n\n fn as_binary_operator(&self) -> &dyn BinaryOperatorBase;\n\n}\n\n\n", "file_path": "graphannis/src/annis/operator.rs", "rank": 86, "score": 159999.4458423623 }, { "content": "fn get_fan_outs(edges: &FxHashMap<NodeID, Vec<NodeID>>) -> Vec<usize> {\n\n let mut fan_outs: Vec<usize> = Vec::new();\n\n if !edges.is_empty() {\n\n for outgoing in edges.values() {\n\n fan_outs.push(outgoing.len());\n\n }\n\n }\n\n // order the fan-outs\n\n fan_outs.sort_unstable();\n\n\n\n fan_outs\n\n}\n\n\n\nimpl Default for AdjacencyListStorage {\n\n fn default() -> Self {\n\n AdjacencyListStorage::new()\n\n }\n\n}\n\n\n\nimpl AdjacencyListStorage {\n", "file_path": "core/src/graph/storage/adjacencylist.rs", "rank": 87, "score": 159005.78473845663 }, { "content": 
"pub fn contains_regex_metacharacters(pattern: &str) -> bool {\n\n for c in pattern.chars() {\n\n if regex_syntax::is_meta_character(c) {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "graphannis/src/annis/util/mod.rs", "rank": 88, "score": 156961.95253367227 }, { "content": "#[derive(Serialize, Deserialize, Clone, MallocSizeOf)]\n\nstruct RelativePosition<PosT> {\n\n pub root: NodeID,\n\n pub pos: PosT,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, MallocSizeOf)]\n\npub struct LinearGraphStorage<PosT: NumValue> {\n\n node_to_pos: FxHashMap<NodeID, RelativePosition<PosT>>,\n\n node_chains: FxHashMap<NodeID, Vec<NodeID>>,\n\n annos: AnnoStorageImpl<Edge>,\n\n stats: Option<GraphStatistic>,\n\n}\n\n\n\nimpl<PosT> LinearGraphStorage<PosT>\n\nwhere\n\n PosT: NumValue,\n\n{\n\n pub fn new() -> LinearGraphStorage<PosT> {\n\n LinearGraphStorage {\n\n node_to_pos: FxHashMap::default(),\n", "file_path": "core/src/graph/storage/linear.rs", "rank": 89, "score": 155007.1552070649 }, { "content": "struct ParsedCorpusTable {\n\n toplevel_corpus_name: String,\n\n corpus_by_preorder: BTreeMap<u32, u32>,\n\n corpus_by_id: BTreeMap<u32, CorpusTableEntry>,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 90, "score": 154852.07726113964 }, { "content": "struct CorpusTableEntry {\n\n pre: u32,\n\n post: u32,\n\n name: String,\n\n normalized_name: String,\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 91, "score": 154852.07726113964 }, { "content": "fn add_subcorpora(\n\n updates: &mut GraphUpdate,\n\n corpus_table: &ParsedCorpusTable,\n\n node_node_result: &LoadNodeResult,\n\n texts: &DiskMap<TextKey, Text>,\n\n corpus_id_to_annos: &BTreeMap<(u32, AnnoKey), std::string::String>,\n\n is_annis_33: bool,\n\n path: &Path,\n\n) -> Result<()> {\n\n // add the toplevel corpus as node\n\n {\n\n updates.add_event(UpdateEvent::AddNode {\n\n node_name: corpus_table.toplevel_corpus_name.as_str().into(),\n\n node_type: \"corpus\".into(),\n\n })?;\n\n\n\n // save the relANNIS version as meta data attribute on the toplevel corpus\n\n updates.add_event(UpdateEvent::AddNodeLabel {\n\n node_name: corpus_table.toplevel_corpus_name.as_str().into(),\n\n anno_ns: ANNIS_NS.to_owned(),\n", "file_path": "graphannis/src/annis/db/relannis.rs", "rank": 92, "score": 153504.84635451378 }, { "content": "fn init_locale() {\n\n // use collation as defined by the environment variables (LANGUAGE, LC_*, etc.)\n\n unsafe {\n\n let locale = CString::new(\"\").unwrap_or_default();\n\n libc::setlocale(libc::LC_COLLATE, locale.as_ptr());\n\n }\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/corpusstorage.rs", "rank": 93, "score": 153504.84635451378 }, { "content": "#[test]\n\nfn delete() {\n\n let tmp = tempfile::tempdir().unwrap();\n\n let cs = CorpusStorage::with_auto_cache_size(tmp.path(), false).unwrap();\n\n // fully load a corpus\n\n let mut g = GraphUpdate::new();\n\n g.add_event(UpdateEvent::AddNode {\n\n node_name: \"test\".to_string(),\n\n node_type: \"node\".to_string(),\n\n })\n\n .unwrap();\n\n\n\n cs.apply_update(\"testcorpus\", &mut g).unwrap();\n\n cs.preload(\"testcorpus\").unwrap();\n\n cs.delete(\"testcorpus\").unwrap();\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/corpusstorage/tests.rs", "rank": 94, "score": 153504.84635451378 }, { "content": "pub trait UnaryOperator: std::fmt::Display {\n\n fn filter_match(&self, m: &Match) -> Result<bool>;\n\n\n\n fn estimation_type(&self) -> EstimationType {\n\n EstimationType::Selectivity(0.1)\n\n }\n\n}\n", 
"file_path": "graphannis/src/annis/operator.rs", "rank": 95, "score": 152649.40917305672 }, { "content": "/// Creates a byte array key from a vector of strings.\n\n///\n\n/// The strings are terminated with `\\0`.\n\npub fn create_str_vec_key(val: &[&str]) -> KeyVec {\n\n let mut result: KeyVec = KeyVec::default();\n\n for v in val {\n\n // append null-terminated string to result\n\n for b in v.as_bytes() {\n\n result.push(*b)\n\n }\n\n result.push(0);\n\n }\n\n result\n\n}\n\n\n\n/// Defines a definition of a query including its number of expected results.\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct SearchDef {\n\n pub aql: String,\n\n pub count: u64,\n\n pub name: String,\n\n pub corpus: Vec<String>,\n\n}\n\n\n", "file_path": "graphannis/src/annis/util/mod.rs", "rank": 96, "score": 152144.2733074181 }, { "content": "struct NodeDescArg {\n\n query_fragment: String,\n\n node_nr: usize,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq)]\n\npub enum NodeSearchSpec {\n\n ExactValue {\n\n ns: Option<String>,\n\n name: String,\n\n val: Option<String>,\n\n is_meta: bool,\n\n },\n\n NotExactValue {\n\n ns: Option<String>,\n\n name: String,\n\n val: String,\n\n is_meta: bool,\n\n },\n\n RegexValue {\n", "file_path": "graphannis/src/annis/db/exec/nodesearch.rs", "rank": 97, "score": 151954.37411913316 }, { "content": "type ResultIterator<'a, K, V> = Box<dyn Iterator<Item = Result<(K, V)>> + 'a>;\n\n\n\npub struct CombinedRange<'a, K, V>\n\nwhere\n\n for<'de> K: 'static + Clone + KeySerializer + Send,\n\n for<'de> V: 'static + Clone + Serialize + Deserialize<'de> + Send,\n\n{\n\n c0_iterator: Peekable<std::collections::btree_map::Range<'a, K, Option<V>>>,\n\n c1_iterator: Peekable<ResultIterator<'a, K, Option<V>>>,\n\n c2_iterator: Peekable<ResultIterator<'a, K, V>>,\n\n}\n\n\n\nimpl<'a, K, V> CombinedRange<'a, K, V>\n\nwhere\n\n for<'de> K: 'static + Clone + KeySerializer + Serialize + Deserialize<'de> + Send + Sync + Ord,\n\n for<'de> V: 'static + Clone + Serialize + Deserialize<'de> + Send + Sync,\n\n{\n\n fn new<R: RangeBounds<K> + Clone>(\n\n range: R,\n\n c0: &'a BTreeMap<K, Option<V>>,\n", "file_path": "core/src/util/disk_collections.rs", "rank": 98, "score": 151229.61863981112 }, { "content": "fn get_max_cache_size(cache_strategy: &CacheStrategy, used_cache_size: usize) -> usize {\n\n match cache_strategy {\n\n CacheStrategy::FixedMaxMemory(max_size) => *max_size * 1_000_000,\n\n CacheStrategy::PercentOfFreeMemory(max_percent) => {\n\n // get the current free space in main memory\n\n if let Ok(mem) = sys_info::mem_info() {\n\n // the free memory\n\n let free_system_mem: usize = mem.avail as usize * 1024; // mem.free is in KiB\n\n // A part of the system memory is already used by the cache.\n\n // We want x percent of the overall available memory (thus not used by us), so add the cache size\n\n let available_memory: usize = free_system_mem + used_cache_size;\n\n ((available_memory as f64) * (max_percent / 100.0)) as usize\n\n } else {\n\n // fallback to include only the last loaded corpus if free memory size is unknown\n\n 0\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "graphannis/src/annis/db/corpusstorage.rs", "rank": 99, "score": 150541.29085247876 } ]
Rust
packages/std/src/init_handle.rs
puneet2019/cosmwasm
8cf9c302e408ce175852ad6a2ab153d426b43bdd
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::fmt; use crate::coins::Coin; use crate::encoding::Binary; use crate::errors::StdResult; use crate::types::{HumanAddr, Never}; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CosmosMsg<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { Bank(BankMsg), Custom(T), Staking(StakingMsg), Wasm(WasmMsg), } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum BankMsg { Send { from_address: HumanAddr, to_address: HumanAddr, amount: Vec<Coin>, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum StakingMsg { Delegate { validator: HumanAddr, amount: Coin, }, Undelegate { validator: HumanAddr, amount: Coin, }, Withdraw { validator: HumanAddr, recipient: Option<HumanAddr>, }, Redelegate { src_validator: HumanAddr, dst_validator: HumanAddr, amount: Coin, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum WasmMsg { Execute { contract_addr: HumanAddr, msg: Binary, send: Vec<Coin>, }, Instantiate { code_id: u64, msg: Binary, send: Vec<Coin>, label: Option<String>, }, } impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<BankMsg> for CosmosMsg<T> { fn from(msg: BankMsg) -> Self { CosmosMsg::Bank(msg) } } #[cfg(feature = "staking")] impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<StakingMsg> for CosmosMsg<T> { fn from(msg: StakingMsg) -> Self { CosmosMsg::Staking(msg) } } impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<WasmMsg> for CosmosMsg<T> { fn from(msg: WasmMsg) -> Self { CosmosMsg::Wasm(msg) } } #[derive(Serialize, Deserialize, Clone, Default, Debug, PartialEq, JsonSchema)] pub struct LogAttribute { pub key: String, pub value: String, } pub fn log<K: ToString, V: ToString>(key: K, value: V) -> LogAttribute { LogAttribute { key: key.to_string(), value: value.to_string(), } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct InitResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type InitResult<U = Never> = StdResult<InitResponse<U>>; impl<T> Default for InitResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { InitResponse { messages: vec![], log: vec![], data: None, } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct HandleResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type HandleResult<U = Never> = StdResult<HandleResponse<U>>; impl<T> Default for HandleResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { HandleResponse { messages: vec![], log: vec![], data: None, } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct MigrateResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type MigrateResult<U = Never> = StdResult<MigrateResponse<U>>; impl<T> Default for MigrateResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { MigrateResponse { messages: vec![], log: vec![], 
data: None, } } } #[cfg(test)] mod test { use super::*; use crate::errors::StdError; use crate::{coins, from_slice, to_vec, Uint128}; #[test] fn log_works_for_different_types() { let expeceted = LogAttribute { key: "foo".to_string(), value: "42".to_string(), }; assert_eq!(log("foo", "42"), expeceted); assert_eq!(log("foo".to_string(), "42"), expeceted); assert_eq!(log("foo", "42".to_string()), expeceted); assert_eq!(log("foo", HumanAddr::from("42")), expeceted); assert_eq!(log("foo", Uint128(42)), expeceted); assert_eq!(log("foo", 42), expeceted); } #[test] fn can_deser_error_result() { let fail = InitResult::Err(StdError::Unauthorized { backtrace: None }); let bin = to_vec(&fail).expect("encode contract result"); println!("error: {}", std::str::from_utf8(&bin).unwrap()); let back: InitResult = from_slice(&bin).expect("decode contract result"); assert_eq!(fail, back); } #[test] fn can_deser_ok_result() { let send = InitResult::Ok(InitResponse { messages: vec![BankMsg::Send { from_address: HumanAddr("me".to_string()), to_address: HumanAddr("you".to_string()), amount: coins(1015, "earth"), } .into()], log: vec![LogAttribute { key: "action".to_string(), value: "release".to_string(), }], data: None, }); let bin = to_vec(&send).expect("encode contract result"); println!("ok: {}", std::str::from_utf8(&bin).unwrap()); let back: InitResult = from_slice(&bin).expect("decode contract result"); assert_eq!(send, back); } #[test] fn msg_from_works() { let from_address = HumanAddr("me".to_string()); let to_address = HumanAddr("you".to_string()); let amount = coins(1015, "earth"); let bank = BankMsg::Send { from_address, to_address, amount, }; let msg: CosmosMsg = bank.clone().into(); match msg { CosmosMsg::Bank(msg) => assert_eq!(bank, msg), _ => panic!("must encode in Bank variant"), } } }
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::fmt; use crate::coins::Coin; use crate::encoding::Binary; use crate::errors::StdResult; use crate::types::{HumanAddr, Never}; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CosmosMsg<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { Bank(BankMsg), Custom(T), Staking(StakingMsg), Wasm(WasmMsg), } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum BankMsg { Send { from_address: HumanAddr, to_address: HumanAddr, amount: Vec<Coin>, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum StakingMsg { Delegate { validator: HumanAddr, amount: Coin, }, Undelegate { validator: HumanAddr, amount: Coin, }, Withdraw { validator: HumanAddr, recipient: Option<HumanAddr>, }, Redelegate { src_validator: HumanAddr, dst_validator: HumanAddr, amount: Coin, }, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum WasmMsg { Execute { contract_addr: HumanAddr, msg: Binary, send: Vec<Coin>, }, Instantiate { code_id: u64, msg: Binary, send: Vec<Coin>, label: Option<String>, }, } impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<BankMsg> for CosmosMsg<T> { fn from(msg: BankMsg) -> Self { CosmosMsg::Bank(msg) } } #[cfg(feature = "staking")] impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<StakingMsg> for CosmosMsg<T> { fn from(msg: StakingMsg) -> Self { CosmosMsg::Staking(msg) } } impl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<WasmMsg> for CosmosMsg<T> { fn from(msg: WasmMsg) -> Self { CosmosMsg::Wasm(msg) } } #[derive(Serialize, Deserialize, Clone, Default, Debug, PartialEq, JsonSchema)] pub struct LogAttribute { pub key: String, pub value: String, } pub fn log<K: ToString, V: ToString>(key: K, value: V) -> LogAttribute { LogAttribute { key: key.to_string(), value: value.to_string(), } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct InitResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type InitResult<U = Never> = StdResult<InitResponse<U>>; impl<T> Default for InitResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { InitResponse { messages: vec![], log: vec![], data: None, } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct HandleResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type HandleResult<U = Never> = StdResult<HandleResponse<U>>; impl<T> Default for HandleResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { HandleResponse { messages: vec![], log: vec![], data: None, } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct MigrateResponse<T = Never> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { pub messages: Vec<CosmosMsg<T>>, pub log: Vec<LogAttribute>, pub data: Option<Binary>, } pub type MigrateResult<U = Never> = StdResult<MigrateResponse<U>>; impl<T> Default for MigrateResponse<T> where T: Clone + fmt::Debug + PartialEq + JsonSchema, { fn default() -> Self { MigrateResponse { messages: vec![], log: vec![],
=> assert_eq!(bank, msg), _ => panic!("must encode in Bank variant"), } } }
data: None, } } } #[cfg(test)] mod test { use super::*; use crate::errors::StdError; use crate::{coins, from_slice, to_vec, Uint128}; #[test] fn log_works_for_different_types() { let expeceted = LogAttribute { key: "foo".to_string(), value: "42".to_string(), }; assert_eq!(log("foo", "42"), expeceted); assert_eq!(log("foo".to_string(), "42"), expeceted); assert_eq!(log("foo", "42".to_string()), expeceted); assert_eq!(log("foo", HumanAddr::from("42")), expeceted); assert_eq!(log("foo", Uint128(42)), expeceted); assert_eq!(log("foo", 42), expeceted); } #[test] fn can_deser_error_result() { let fail = InitResult::Err(StdError::Unauthorized { backtrace: None }); let bin = to_vec(&fail).expect("encode contract result"); println!("error: {}", std::str::from_utf8(&bin).unwrap()); let back: InitResult = from_slice(&bin).expect("decode contract result"); assert_eq!(fail, back); } #[test] fn can_deser_ok_result() { let send = InitResult::Ok(InitResponse { messages: vec![BankMsg::Send { from_address: HumanAddr("me".to_string()), to_address: HumanAddr("you".to_string()), amount: coins(1015, "earth"), } .into()], log: vec![LogAttribute { key: "action".to_string(), value: "release".to_string(), }], data: None, }); let bin = to_vec(&send).expect("encode contract result"); println!("ok: {}", std::str::from_utf8(&bin).unwrap()); let back: InitResult = from_slice(&bin).expect("decode contract result"); assert_eq!(send, back); } #[test] fn msg_from_works() { let from_address = HumanAddr("me".to_string()); let to_address = HumanAddr("you".to_string()); let amount = coins(1015, "earth"); let bank = BankMsg::Send { from_address, to_address, amount, }; let msg: CosmosMsg = bank.clone().into(); match msg { CosmosMsg::Bank(msg)
random
[ { "content": "// coins is a shortcut constructor for a set of one denomination of coins\n\npub fn coins(amount: u128, denom: &str) -> Vec<Coin> {\n\n vec![coin(amount, denom)]\n\n}\n\n\n", "file_path": "packages/std/src/coins.rs", "rank": 1, "score": 333316.4643425882 }, { "content": "// coin is a shorthand constructor for Coin\n\npub fn coin(amount: u128, denom: &str) -> Coin {\n\n Coin::new(amount, denom)\n\n}\n\n\n", "file_path": "packages/std/src/coins.rs", "rank": 2, "score": 283659.9813436641 }, { "content": "pub fn to_vec<T>(data: &T) -> VmResult<Vec<u8>>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n serde_json::to_vec(data).map_err(|e| VmError::serialize_err(type_name::<T>(), e))\n\n}\n", "file_path": "packages/vm/src/serde.rs", "rank": 3, "score": 258776.6628388422 }, { "content": "pub fn to_vec<T>(data: &T) -> StdResult<Vec<u8>>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n serde_json_wasm::to_vec(data).map_err(|e| serialize_err(type_name::<T>(), e))\n\n}\n\n\n", "file_path": "packages/std/src/serde.rs", "rank": 4, "score": 258776.66283884223 }, { "content": "pub fn serialize_err<S: Into<String>, M: Display>(source: S, msg: M) -> StdError {\n\n SerializeErr {\n\n source: source.into(),\n\n msg: msg.to_string(),\n\n }\n\n .build()\n\n}\n\n\n", "file_path": "packages/std/src/errors/std_error_helpers.rs", "rank": 5, "score": 247387.47467720462 }, { "content": "pub fn generic_err<S: Into<String>>(msg: S) -> StdError {\n\n GenericErr { msg: msg.into() }.build()\n\n}\n\n\n", "file_path": "packages/std/src/errors/std_error_helpers.rs", "rank": 6, "score": 239395.00365208054 }, { "content": "pub fn to_binary<T>(data: &T) -> StdResult<Binary>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n to_vec(data).map(Binary)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use serde::Deserialize;\n\n\n\n #[derive(Serialize, Deserialize, Debug, PartialEq)]\n\n #[serde(rename_all = \"snake_case\")]\n\n enum SomeMsg {\n\n Refund {},\n\n ReleaseAll {\n\n image: String,\n\n amount: u32,\n\n time: u64,\n", "file_path": "packages/std/src/serde.rs", "rank": 7, "score": 235531.2192579653 }, { "content": "pub fn from_binary<T: DeserializeOwned>(value: &Binary) -> StdResult<T> {\n\n from_slice(value.as_slice())\n\n}\n\n\n", "file_path": "packages/std/src/serde.rs", "rank": 8, "score": 232647.50410062497 }, { "content": "/// has_coins returns true if the list of coins has at least the required amount\n\npub fn has_coins(coins: &[Coin], required: &Coin) -> bool {\n\n coins\n\n .iter()\n\n .find(|c| c.denom == required.denom)\n\n .map(|m| m.amount >= required.amount)\n\n .unwrap_or(false)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn has_coins_matches() {\n\n let wallet = vec![coin(12345, \"ETH\"), coin(555, \"BTC\")];\n\n\n\n // less than same type\n\n assert!(has_coins(&wallet, &coin(777, \"ETH\")));\n\n }\n\n}\n", "file_path": "packages/std/src/coins.rs", "rank": 9, "score": 220620.65516428964 }, { "content": "/// Sequence creates a custom Singleton to hold an empty sequence\n\npub fn sequence<'a, S: Storage>(storage: &'a mut S, key: &[u8]) -> Singleton<'a, S, u64> {\n\n Singleton::new(storage, key)\n\n}\n\n\n", "file_path": "packages/storage/src/sequence.rs", "rank": 10, "score": 217824.56427041942 }, { "content": "pub fn parse_err<T: Into<String>, M: Display>(target: T, msg: M) -> StdError {\n\n ParseErr {\n\n target: target.into(),\n\n msg: msg.to_string(),\n\n }\n\n .build()\n\n}\n\n\n", "file_path": "packages/std/src/errors/std_error_helpers.rs", "rank": 11, 
"score": 216098.80167894933 }, { "content": "/// Api are callbacks to system functions defined outside of the wasm modules.\n\n/// This is a trait to allow Mocks in the test code.\n\n///\n\n/// Currently it just supports address conversion, we could add eg. crypto functions here.\n\n/// These should all be pure (stateless) functions. If you need state, you probably want\n\n/// to use the Querier.\n\n///\n\n/// We can use feature flags to opt-in to non-essential methods\n\n/// for backwards compatibility in systems that don't have them all.\n\npub trait Api: Copy + Clone + Send {\n\n fn canonical_address(&self, human: &HumanAddr) -> StdResult<CanonicalAddr>;\n\n fn human_address(&self, canonical: &CanonicalAddr) -> StdResult<HumanAddr>;\n\n}\n\n\n\n/// A short-hand alias for the two-level query result (1. accessing the contract, 2. executing query in the contract)\n\npub type QuerierResult = SystemResult<StdResult<Binary>>;\n\n\n", "file_path": "packages/std/src/traits.rs", "rank": 12, "score": 211759.58385624766 }, { "content": "/// Api are callbacks to system functions defined outside of the wasm modules.\n\n/// This is a trait to allow Mocks in the test code.\n\n///\n\n/// Currently it just supports address conversion, we could add eg. crypto functions here.\n\n/// These should all be pure (stateless) functions. If you need state, you probably want\n\n/// to use the Querier.\n\n///\n\n/// We can use feature flags to opt-in to non-essential methods\n\n/// for backwards compatibility in systems that don't have them all.\n\npub trait Api: Copy + Clone + Send {\n\n fn canonical_address(&self, human: &HumanAddr) -> FfiResult<CanonicalAddr>;\n\n fn human_address(&self, canonical: &CanonicalAddr) -> FfiResult<HumanAddr>;\n\n}\n\n\n\n/// A short-hand alias for the three-level query result\n\n/// 1. Passing the query message to the backend\n\n/// 2. Accessing the contract\n\n/// 3. Executing query in the contract\n\npub type QuerierResult = FfiResult<(SystemResult<StdResult<Binary>>, u64)>;\n\n\n", "file_path": "packages/vm/src/traits.rs", "rank": 13, "score": 211759.58385624766 }, { "content": "pub fn to_snake_case(name: &str) -> String {\n\n let mut out = String::new();\n\n for (index, ch) in name.char_indices() {\n\n if index != 0 && ch.is_uppercase() {\n\n out.push('_');\n\n }\n\n out.push(ch.to_ascii_lowercase());\n\n }\n\n out\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn to_snake_case_leaves_snake_case_untouched() {\n\n assert_eq!(to_snake_case(\"\"), \"\");\n\n assert_eq!(to_snake_case(\"a\"), \"a\");\n\n assert_eq!(to_snake_case(\"abc\"), \"abc\");\n", "file_path": "packages/schema/src/casing.rs", "rank": 14, "score": 199098.15094327237 }, { "content": "/// do_query should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\npub fn do_query<T: DeserializeOwned + JsonSchema>(\n\n query_fn: &dyn Fn(\n\n &Extern<ExternalStorage, ExternalApi, ExternalQuerier>,\n\n T,\n\n ) -> StdResult<QueryResponse>,\n\n msg_ptr: u32,\n\n) -> u32 {\n\n let res: QueryResult = _do_query(query_fn, msg_ptr as *mut c_void);\n\n let v = to_vec(&res).unwrap();\n\n release_buffer(v) as u32\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 15, "score": 196823.83382706883 }, { "content": "/// currval returns the last value returned by nextval. 
If the sequence has never been used,\n\n/// then it will return 0.\n\npub fn currval<S: Storage>(seq: &Singleton<S, u64>) -> StdResult<u64> {\n\n Ok(seq.may_load()?.unwrap_or_default())\n\n}\n\n\n", "file_path": "packages/storage/src/sequence.rs", "rank": 16, "score": 193921.37662913813 }, { "content": "/// Returns a box of a Region, which can be sent over a call to extern\n\n/// note that this DOES NOT take ownership of the data, and we MUST NOT consume_region\n\n/// the resulting data.\n\n/// The Box must be dropped (with scope), but not the data\n\npub fn build_region(data: &[u8]) -> Box<Region> {\n\n let data_ptr = data.as_ptr() as usize;\n\n build_region_from_components(\n\n u32::try_from(data_ptr).expect(\"pointer doesn't fit in u32\"),\n\n u32::try_from(data.len()).expect(\"length doesn't fit in u32\"),\n\n u32::try_from(data.len()).expect(\"length doesn't fit in u32\"),\n\n )\n\n}\n\n\n", "file_path": "packages/std/src/memory.rs", "rank": 17, "score": 193302.8840334154 }, { "content": "#[inline]\n\nfn concat(namespace: &[u8], key: &[u8]) -> Vec<u8> {\n\n let mut k = namespace.to_vec();\n\n k.extend_from_slice(key);\n\n k\n\n}\n\n\n\n#[cfg(feature = \"iterator\")]\n\npub(crate) fn range_with_prefix<'a, S: ReadonlyStorage>(\n\n storage: &'a S,\n\n namespace: &[u8],\n\n start: Option<&[u8]>,\n\n end: Option<&[u8]>,\n\n order: Order,\n\n) -> StdResult<Box<dyn Iterator<Item = KV> + 'a>> {\n\n // prepare start, end with prefix\n\n let start = match start {\n\n Some(s) => concat(namespace, s),\n\n None => namespace.to_vec(),\n\n };\n\n let end = match end {\n", "file_path": "packages/storage/src/namespace_helpers.rs", "rank": 18, "score": 192230.15761213182 }, { "content": "#[cfg(feature = \"iterator\")]\n\n#[inline]\n\nfn trim(namespace: &[u8], key: &[u8]) -> Vec<u8> {\n\n key[namespace.len()..].to_vec()\n\n}\n\n\n\n/// Returns a new vec of same length and last byte incremented by one\n\n/// If last bytes are 255, we handle overflow up the chain.\n\n/// If all bytes are 255, this returns wrong data - but that is never possible as a namespace\n", "file_path": "packages/storage/src/namespace_helpers.rs", "rank": 19, "score": 192230.15761213182 }, { "content": "/// Get how many more gas units can be used in the instance.\n\npub fn get_gas_left(instance: &WasmerInstance) -> u64 {\n\n let used = metering::get_points_used(instance);\n\n // when running out of gas, get_points_used can exceed GAS_LIMIT\n\n GAS_LIMIT.saturating_sub(used)\n\n}\n", "file_path": "packages/vm/src/backends/singlepass.rs", "rank": 20, "score": 191950.7074621459 }, { "content": "/// Get how many more gas units can be used in the instance.\n\npub fn get_gas_left(_instance: &WasmerInstance) -> u64 {\n\n FAKE_GAS_AVAILABLE\n\n}\n", "file_path": "packages/vm/src/backends/cranelift.rs", "rank": 21, "score": 191950.7074621459 }, { "content": "/// Takes a comma-separated string, splits it by commas, removes empty elements and returns a set of features.\n\n/// This can be used e.g. 
to initialize the cache.\n\npub fn features_from_csv(csv: &str) -> HashSet<String> {\n\n HashSet::from_iter(\n\n csv.split(',')\n\n .map(|x| x.trim().to_string())\n\n .filter(|f| !f.is_empty()),\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/features.rs", "rank": 22, "score": 190857.5641533296 }, { "content": "/// Calculates the raw key prefix for a given namespace as documented\n\n/// in https://github.com/webmaster128/key-namespacing#length-prefixed-keys\n\npub fn to_length_prefixed(namespace: &[u8]) -> Vec<u8> {\n\n let mut out = Vec::with_capacity(namespace.len() + 2);\n\n out.extend_from_slice(&encode_length(namespace));\n\n out.extend_from_slice(namespace);\n\n out\n\n}\n\n\n", "file_path": "packages/storage/src/length_prefixed.rs", "rank": 23, "score": 190661.77429302532 }, { "content": "#[cfg(feature = \"default-cranelift\")]\n\npub fn try_consume_gas<S: Storage, Q: Querier>(_ctx: &mut Ctx, _used_gas: u64) -> VmResult<()> {\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 24, "score": 190443.1063111659 }, { "content": "#[cfg(feature = \"default-singlepass\")]\n\npub fn try_consume_gas<S: Storage, Q: Querier>(ctx: &mut Ctx, used_gas: u64) -> VmResult<()> {\n\n use crate::backends::{get_gas_left, set_gas_limit};\n\n\n\n let ctx_data = get_context_data_mut::<S, Q>(ctx);\n\n if let Some(mut instance_ptr) = ctx_data.wasmer_instance {\n\n let instance = unsafe { instance_ptr.as_mut() };\n\n let gas_state = &mut ctx_data.gas_state;\n\n\n\n let wasmer_used_gas = gas_state.get_gas_used_in_wasmer(get_gas_left(instance));\n\n\n\n gas_state.use_gas(used_gas);\n\n // These lines reduce the amount of gas available to wasmer\n\n // so it can not consume gas that was consumed externally.\n\n let new_limit = gas_state.get_gas_left(wasmer_used_gas);\n\n // This tells wasmer how much more gas it can consume from this point in time.\n\n set_gas_limit(instance, new_limit);\n\n\n\n if gas_state.externally_used_gas + wasmer_used_gas > gas_state.gas_limit {\n\n Err(VmError::GasDepletion)\n\n } else {\n\n Ok(())\n\n }\n\n } else {\n\n Err(VmError::uninitialized_context_data(\"wasmer_instance\"))\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 25, "score": 190443.1063111659 }, { "content": "pub fn custom_query_execute(query: &CustomQuery) -> StdResult<Binary> {\n\n let msg = match query {\n\n CustomQuery::Ping {} => \"pong\".to_string(),\n\n CustomQuery::Capital { text } => text.to_uppercase(),\n\n };\n\n to_binary(&CustomResponse { msg })\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use cosmwasm_std::{from_binary, Querier, QueryRequest};\n\n\n\n #[test]\n\n fn custom_query_execute_ping() {\n\n let res = custom_query_execute(&CustomQuery::Ping {}).unwrap();\n\n let response: CustomResponse = from_binary(&res).unwrap();\n\n assert_eq!(response.msg, \"pong\");\n\n }\n\n\n", "file_path": "contracts/reflect/src/testing.rs", "rank": 26, "score": 189975.01241028158 }, { "content": "/// nextval increments the counter by 1 and returns the new value.\n\n/// On the first time it is called (no sequence info in db) it will return 1.\n\npub fn nextval<S: Storage>(seq: &mut Singleton<S, u64>) -> StdResult<u64> {\n\n let val = currval(&seq)? 
+ 1;\n\n seq.save(&val)?;\n\n Ok(val)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use cosmwasm_std::testing::MockStorage;\n\n\n\n #[test]\n\n fn walk_through_sequence() {\n\n let mut store = MockStorage::new();\n\n let mut seq = sequence(&mut store, b\"seq\");\n\n\n\n assert_eq!(currval(&seq).unwrap(), 0);\n\n assert_eq!(nextval(&mut seq).unwrap(), 1);\n\n assert_eq!(nextval(&mut seq).unwrap(), 2);\n\n assert_eq!(nextval(&mut seq).unwrap(), 3);\n", "file_path": "packages/storage/src/sequence.rs", "rank": 27, "score": 189695.11344489356 }, { "content": "/// Implementation for check_wasm, based on static analysis of the bytecode.\n\n/// This is used for code upload, to perform check before compiling the Wasm.\n\npub fn required_features_from_module(module: &Module) -> HashSet<String> {\n\n match module.export_section() {\n\n None => HashSet::new(),\n\n Some(export_section) => {\n\n HashSet::from_iter(export_section.entries().iter().filter_map(|entry| {\n\n if let Internal::Function(_) = entry.internal() {\n\n let name = entry.field();\n\n if name.starts_with(REQUIRES_PREFIX) && name.len() > REQUIRES_PREFIX.len() {\n\n let (_, required_feature) = name.split_at(REQUIRES_PREFIX.len());\n\n return Some(required_feature.to_string());\n\n }\n\n }\n\n None\n\n }))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "packages/vm/src/features.rs", "rank": 28, "score": 188576.96275106427 }, { "content": "/// Calculates the raw key prefix for a given nested namespace\n\n/// as documented in https://github.com/webmaster128/key-namespacing#nesting\n\npub fn to_length_prefixed_nested(namespaces: &[&[u8]]) -> Vec<u8> {\n\n let mut size = 0;\n\n for &namespace in namespaces {\n\n size += namespace.len() + 2;\n\n }\n\n\n\n let mut out = Vec::with_capacity(size);\n\n for &namespace in namespaces {\n\n out.extend_from_slice(&encode_length(namespace));\n\n out.extend_from_slice(namespace);\n\n }\n\n out\n\n}\n\n\n", "file_path": "packages/storage/src/length_prefixed.rs", "rank": 29, "score": 188385.86745315592 }, { "content": "/// Similar to alloc, but instead of creating a new vector it consumes an existing one and returns\n\n/// a pointer to the Region (preventing the memory from being freed until explicitly called later).\n\n///\n\n/// The resulting Region has capacity = length, i.e. the buffer's capacity is ignored.\n\npub fn release_buffer(buffer: Vec<u8>) -> *mut c_void {\n\n let region = build_region(&buffer);\n\n mem::forget(buffer);\n\n Box::into_raw(region) as *mut c_void\n\n}\n\n\n\n/// Return the data referenced by the Region and\n\n/// deallocates the Region (and the vector when finished).\n\n/// Warning: only use this when you are sure the caller will never use (or free) the Region later\n\n///\n\n/// # Safety\n\n///\n\n/// The ptr must refer to a valid Region, which was previously returned by alloc,\n\n/// and not yet deallocated. This call will deallocate the Region and return an owner vector\n\n/// to the caller containing the referenced data.\n\n///\n\n/// Naturally, calling this function twice on the same pointer will double deallocate data\n\n/// and lead to a crash. 
Make sure to call it exactly once (either consuming the input in\n\n/// the wasm code OR deallocating the buffer from the caller).\n\npub unsafe fn consume_region(ptr: *mut c_void) -> Vec<u8> {\n", "file_path": "packages/std/src/memory.rs", "rank": 30, "score": 187528.9677418001 }, { "content": "pub fn from_slice<T: DeserializeOwned>(value: &[u8]) -> StdResult<T> {\n\n serde_json_wasm::from_slice(value).map_err(|e| parse_err(type_name::<T>(), e))\n\n}\n\n\n", "file_path": "packages/std/src/serde.rs", "rank": 31, "score": 186207.5550244017 }, { "content": "pub fn required_features_from_wasmer_instance(wasmer_instance: &WasmerInstance) -> HashSet<String> {\n\n HashSet::from_iter(wasmer_instance.exports().filter_map(|(mut name, export)| {\n\n if let Export::Function { .. } = export {\n\n if name.starts_with(REQUIRES_PREFIX) && name.len() > REQUIRES_PREFIX.len() {\n\n let required_feature = name.split_off(REQUIRES_PREFIX.len());\n\n return Some(required_feature);\n\n }\n\n }\n\n None\n\n }))\n\n}\n\n\n", "file_path": "packages/vm/src/features.rs", "rank": 32, "score": 182287.12342440002 }, { "content": "/// Just set sender and sent funds for the message. The rest uses defaults.\n\n/// The sender will be canonicalized internally to allow developers pasing in human readable senders.\n\n/// This is intended for use in test code only.\n\npub fn mock_env<T: Api, U: Into<HumanAddr>>(api: &T, sender: U, sent: &[Coin]) -> Env {\n\n Env {\n\n block: BlockInfo {\n\n height: 12_345,\n\n time: 1_571_797_419,\n\n chain_id: \"cosmos-testnet-14002\".to_string(),\n\n },\n\n message: MessageInfo {\n\n sender: api.canonical_address(&sender.into()).unwrap(),\n\n sent_funds: sent.to_vec(),\n\n },\n\n contract: ContractInfo {\n\n address: api\n\n .canonical_address(&HumanAddr::from(MOCK_CONTRACT_ADDR))\n\n .unwrap(),\n\n },\n\n }\n\n}\n\n\n\n/// The same type as cosmwasm-std's QuerierResult, but easier to reuse in\n", "file_path": "packages/std/src/mock.rs", "rank": 33, "score": 181176.68724449308 }, { "content": "/// Set the amount of gas units that can be used in the instance.\n\npub fn set_gas_limit(_instance: &mut WasmerInstance, _limit: u64) {}\n\n\n", "file_path": "packages/vm/src/backends/cranelift.rs", "rank": 34, "score": 181160.21748790971 }, { "content": "/// Set the amount of gas units that can be used in the instance.\n\npub fn set_gas_limit(instance: &mut WasmerInstance, limit: u64) {\n\n let used = GAS_LIMIT.saturating_sub(limit);\n\n metering::set_points_used(instance, used)\n\n}\n\n\n", "file_path": "packages/vm/src/backends/singlepass.rs", "rank": 35, "score": 181160.21748790971 }, { "content": "/// Just set sender and sent funds for the message. 
The rest uses defaults.\n\n/// The sender will be canonicalized internally to allow developers pasing in human readable senders.\n\n/// This is intended for use in test code only.\n\npub fn mock_env<T: Api, U: Into<HumanAddr>>(api: &T, sender: U, sent: &[Coin]) -> Env {\n\n Env {\n\n block: BlockInfo {\n\n height: 12_345,\n\n time: 1_571_797_419,\n\n chain_id: \"cosmos-testnet-14002\".to_string(),\n\n },\n\n message: MessageInfo {\n\n sender: api.canonical_address(&sender.into()).unwrap(),\n\n sent_funds: sent.to_vec(),\n\n },\n\n contract: ContractInfo {\n\n address: api\n\n .canonical_address(&HumanAddr::from(MOCK_CONTRACT_ADDR))\n\n .unwrap(),\n\n },\n\n }\n\n}\n\n\n\n/// MockQuerier holds an immutable table of bank balances\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 36, "score": 179454.00998227962 }, { "content": "pub fn invalid_utf8<S: Display>(msg: S) -> StdError {\n\n InvalidUtf8 {\n\n msg: msg.to_string(),\n\n }\n\n .build()\n\n}\n\n\n", "file_path": "packages/std/src/errors/std_error_helpers.rs", "rank": 37, "score": 179241.98930678406 }, { "content": "pub fn invalid_base64<S: Display>(msg: S) -> StdError {\n\n InvalidBase64 {\n\n msg: msg.to_string(),\n\n }\n\n .build()\n\n}\n\n\n", "file_path": "packages/std/src/errors/std_error_helpers.rs", "rank": 38, "score": 179241.98930678406 }, { "content": "pub fn not_found<S: Into<String>>(kind: S) -> StdError {\n\n NotFound { kind: kind.into() }.build()\n\n}\n\n\n", "file_path": "packages/std/src/errors/std_error_helpers.rs", "rank": 39, "score": 178258.30014398298 }, { "content": "pub fn setup_context<S: Storage, Q: Querier>(gas_limit: u64) -> (*mut c_void, fn(*mut c_void)) {\n\n (\n\n create_unmanaged_context_data::<S, Q>(gas_limit),\n\n destroy_unmanaged_context_data::<S, Q>,\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 40, "score": 177245.42580201878 }, { "content": "/// A prepared and sufficiently large memory Region is expected at ptr that points to pre-allocated memory.\n\n///\n\n/// Returns number of bytes written on success.\n\npub fn write_region(ctx: &Ctx, ptr: u32, data: &[u8]) -> VmResult<()> {\n\n let mut region = get_region(ctx, ptr);\n\n\n\n let region_capacity = region.capacity as usize;\n\n if data.len() > region_capacity {\n\n return Err(VmError::region_too_small(region_capacity, data.len()));\n\n }\n\n\n\n let memory = ctx.memory(0);\n\n match WasmPtr::<u8, Array>::new(region.offset).deref(memory, 0, region.capacity) {\n\n Some(cells) => {\n\n // In case you want to do some premature optimization, this shows how to cast a `&'mut [Cell<u8>]` to `&mut [u8]`:\n\n // https://github.com/wasmerio/wasmer/blob/0.13.1/lib/wasi/src/syscalls/mod.rs#L79-L81\n\n for i in 0..data.len() {\n\n cells[i].set(data[i])\n\n }\n\n region.length = data.len() as u32;\n\n set_region(ctx, ptr, region);\n\n Ok(())\n\n },\n\n None => panic!(\n\n \"Error dereferencing region {:?} in wasm memory of size {}. 
This typically happens when the given pointer does not point to a Region struct.\",\n\n region,\n\n memory.size().bytes().0\n\n ),\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/memory.rs", "rank": 41, "score": 175619.84401523392 }, { "content": "/// Checks if the data is valid wasm and compatibility with the CosmWasm API (imports and exports)\n\npub fn check_wasm(wasm_code: &[u8], supported_features: &HashSet<String>) -> VmResult<()> {\n\n let module = match deserialize_buffer(&wasm_code) {\n\n Ok(deserialized) => deserialized,\n\n Err(err) => {\n\n return Err(VmError::static_validation_err(format!(\n\n \"Wasm bytecode could not be deserialized. Deserialization error: \\\"{}\\\"\",\n\n err\n\n )));\n\n }\n\n };\n\n check_wasm_memories(&module)?;\n\n check_wasm_exports(&module)?;\n\n check_wasm_imports(&module)?;\n\n check_wasm_features(&module, supported_features)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/compatability.rs", "rank": 42, "score": 174325.67526701055 }, { "content": "fn sample_validator<U: Into<HumanAddr>>(addr: U) -> Validator {\n\n Validator {\n\n address: addr.into(),\n\n commission: Decimal::percent(3),\n\n max_commission: Decimal::percent(10),\n\n max_change_rate: Decimal::percent(1),\n\n }\n\n}\n\n\n", "file_path": "contracts/staking/tests/integration.rs", "rank": 43, "score": 172701.8193463739 }, { "content": "/// Expects a (fixed size) Region struct at ptr, which is read. This links to the\n\n/// memory region, which is copied in the second step.\n\n/// Errors if the length of the region exceeds `max_length`.\n\npub fn read_region(ctx: &Ctx, ptr: u32, max_length: usize) -> VmResult<Vec<u8>> {\n\n let region = get_region(ctx, ptr);\n\n\n\n if region.length > to_u32(max_length)? {\n\n return Err(VmError::region_length_too_big(\n\n region.length as usize,\n\n max_length,\n\n ));\n\n }\n\n\n\n let memory = ctx.memory(0);\n\n match WasmPtr::<u8, Array>::new(region.offset).deref(memory, 0, region.length) {\n\n Some(cells) => {\n\n // In case you want to do some premature optimization, this shows how to cast a `&'mut [Cell<u8>]` to `&mut [u8]`:\n\n // https://github.com/wasmerio/wasmer/blob/0.13.1/lib/wasi/src/syscalls/mod.rs#L79-L81\n\n let len = region.length as usize;\n\n let mut result = vec![0u8; len];\n\n for i in 0..len {\n\n result[i] = cells[i].get();\n\n }\n", "file_path": "packages/vm/src/memory.rs", "rank": 44, "score": 164723.17144924955 }, { "content": "pub fn typed<S: Storage, T>(storage: &mut S) -> TypedStorage<S, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n\n TypedStorage::new(storage)\n\n}\n\n\n", "file_path": "packages/storage/src/typed.rs", "rank": 45, "score": 159227.9935196932 }, { "content": "#[cfg(feature = \"iterator\")]\n\npub fn maybe_read_region(ctx: &Ctx, ptr: u32, max_length: usize) -> VmResult<Option<Vec<u8>>> {\n\n if ptr == 0 {\n\n Ok(None)\n\n } else {\n\n read_region(ctx, ptr, max_length).map(Some)\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/memory.rs", "rank": 46, "score": 159008.27884816495 }, { "content": "/// reinvest will withdraw all pending rewards,\n\n/// then issue a callback to itself via _bond_all_tokens\n\n/// to reinvest the new earnings (and anything else that accumulated)\n\npub fn reinvest<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n) -> StdResult<HandleResponse> {\n\n let contract_addr = deps.api.human_address(&env.contract.address)?;\n\n let invest = invest_info_read(&deps.storage).load()?;\n\n let msg = to_binary(&HandleMsg::_BondAllTokens {})?;\n\n\n\n 
// and bond them to the validator\n\n let res = HandleResponse {\n\n messages: vec![\n\n StakingMsg::Withdraw {\n\n validator: invest.validator,\n\n recipient: Some(contract_addr.clone()),\n\n }\n\n .into(),\n\n WasmMsg::Execute {\n\n contract_addr,\n\n msg,\n\n send: vec![],\n\n }\n\n .into(),\n\n ],\n\n log: vec![],\n\n data: None,\n\n };\n\n Ok(res)\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 47, "score": 157453.88346176263 }, { "content": "pub fn handle<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: HandleMsg,\n\n) -> StdResult<HandleResponse> {\n\n match msg {\n\n HandleMsg::Transfer { recipient, amount } => transfer(deps, env, recipient, amount),\n\n HandleMsg::Bond {} => bond(deps, env),\n\n HandleMsg::Unbond { amount } => unbond(deps, env, amount),\n\n HandleMsg::Claim {} => claim(deps, env),\n\n HandleMsg::Reinvest {} => reinvest(deps, env),\n\n HandleMsg::_BondAllTokens {} => _bond_all_tokens(deps, env),\n\n }\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 48, "score": 157449.39224391567 }, { "content": "pub fn claim<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n) -> StdResult<HandleResponse> {\n\n // find how many tokens the contract has\n\n let contract_human = deps.api.human_address(&env.contract.address)?;\n\n let invest = invest_info_read(&deps.storage).load()?;\n\n let mut balance = deps\n\n .querier\n\n .query_balance(&contract_human, &invest.bond_denom)?;\n\n if balance.amount < invest.min_withdrawal {\n\n return Err(generic_err(\n\n \"Insufficient balance in contract to process claim\",\n\n ));\n\n }\n\n\n\n // check how much to send - min(balance, claims[sender]), and reduce the claim\n\n let sender_raw = env.message.sender;\n\n let mut to_send = balance.amount;\n\n claims(&mut deps.storage).update(sender_raw.as_slice(), |claim| {\n", "file_path": "contracts/staking/src/contract.rs", "rank": 49, "score": 157449.39224391567 }, { "content": "pub fn init<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n // ensure the validator is registered\n\n let vals = deps.querier.query_validators()?;\n\n if !vals.iter().any(|v| v.address == msg.validator) {\n\n return Err(generic_err(format!(\n\n \"{} is not in the current validator set\",\n\n msg.validator\n\n )));\n\n }\n\n\n\n let token = TokenInfoResponse {\n\n name: msg.name,\n\n symbol: msg.symbol,\n\n decimals: msg.decimals,\n\n };\n\n token_info(&mut deps.storage).save(&token)?;\n", "file_path": "contracts/staking/src/contract.rs", "rank": 50, "score": 157449.39224391567 }, { "content": "pub fn transfer<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n recipient: HumanAddr,\n\n send: Uint128,\n\n) -> StdResult<HandleResponse> {\n\n let rcpt_raw = deps.api.canonical_address(&recipient)?;\n\n let sender_raw = env.message.sender;\n\n\n\n let mut accounts = balances(&mut deps.storage);\n\n accounts.update(sender_raw.as_slice(), |balance: Option<Uint128>| {\n\n balance.unwrap_or_default() - send\n\n })?;\n\n accounts.update(rcpt_raw.as_slice(), |balance: Option<Uint128>| {\n\n Ok(balance.unwrap_or_default() + send)\n\n })?;\n\n\n\n let res = HandleResponse {\n\n messages: vec![],\n\n log: vec![\n\n log(\"action\", \"transfer\"),\n\n log(\"from\", deps.api.human_address(&sender_raw)?),\n\n log(\"to\", recipient),\n\n log(\"amount\", send),\n\n ],\n\n data: None,\n\n };\n\n 
Ok(res)\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 51, "score": 157449.39224391567 }, { "content": "pub fn query<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n msg: QueryMsg,\n\n) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::TokenInfo {} => query_token_info(deps),\n\n QueryMsg::Investment {} => query_investment(deps),\n\n QueryMsg::Balance { address } => query_balance(deps, address),\n\n QueryMsg::Claims { address } => query_claims(deps, address),\n\n }\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 52, "score": 157449.39224391567 }, { "content": "pub fn unbond<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n amount: Uint128,\n\n) -> StdResult<HandleResponse> {\n\n let sender_raw = env.message.sender;\n\n\n\n let invest = invest_info_read(&deps.storage).load()?;\n\n // ensure it is big enough to care\n\n if amount < invest.min_withdrawal {\n\n return Err(generic_err(format!(\n\n \"Must unbond at least {} {}\",\n\n invest.min_withdrawal, invest.bond_denom\n\n )));\n\n }\n\n // calculate tax and remainer to unbond\n\n let tax = amount * invest.exit_tax;\n\n\n\n // deduct all from the account\n\n let mut accounts = balances(&mut deps.storage);\n", "file_path": "contracts/staking/src/contract.rs", "rank": 53, "score": 157449.39224391567 }, { "content": "pub fn bond<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n) -> StdResult<HandleResponse> {\n\n let sender_raw = env.message.sender;\n\n\n\n // ensure we have the proper denom\n\n let invest = invest_info_read(&deps.storage).load()?;\n\n // payment finds the proper coin (or throws an error)\n\n let payment = env\n\n .message\n\n .sent_funds\n\n .iter()\n\n .find(|x| x.denom == invest.bond_denom)\n\n .ok_or_else(|| generic_err(format!(\"No {} tokens sent\", &invest.bond_denom)))?;\n\n\n\n // re-calculate bonded to ensure we have real values\n\n let contract_addr = deps.api.human_address(&env.contract.address)?;\n\n // bonded is the total number of tokens we have delegated from this address\n\n let bonded = get_bonded(&deps.querier, &contract_addr)?;\n", "file_path": "contracts/staking/src/contract.rs", "rank": 54, "score": 157449.39224391567 }, { "content": "pub fn typed_read<S: ReadonlyStorage, T>(storage: &S) -> ReadonlyTypedStorage<S, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n\n ReadonlyTypedStorage::new(storage)\n\n}\n\n\n\npub struct TypedStorage<'a, S: Storage, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n\n storage: &'a mut S,\n\n // see https://doc.rust-lang.org/std/marker/struct.PhantomData.html#unused-type-parameters for why this is needed\n\n data: PhantomData<&'a T>,\n\n}\n\n\n\nimpl<'a, S: Storage, T> TypedStorage<'a, S, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n", "file_path": "packages/storage/src/typed.rs", "rank": 55, "score": 157192.9753295204 }, { "content": "// singleton is a helper function for less verbose usage\n\npub fn singleton<'a, S: Storage, T>(storage: &'a mut S, key: &[u8]) -> Singleton<'a, S, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n\n Singleton::new(storage, key)\n\n}\n\n\n", "file_path": "packages/storage/src/singleton.rs", "rank": 56, "score": 157047.12502615564 }, { "content": "pub fn _bond_all_tokens<S: Storage, A: Api, Q: Querier>(\n\n deps: &mut Extern<S, A, Q>,\n\n env: Env,\n\n) -> StdResult<HandleResponse> {\n\n // this is just meant as a call-back to ourself\n\n if env.message.sender != env.contract.address 
{\n\n return Err(unauthorized());\n\n }\n\n\n\n // find how many tokens we have to bond\n\n let contract_human = deps.api.human_address(&env.contract.address)?;\n\n let invest = invest_info_read(&deps.storage).load()?;\n\n let mut balance = deps\n\n .querier\n\n .query_balance(contract_human, &invest.bond_denom)?;\n\n\n\n // we deduct pending claims from our account balance before reinvesting.\n\n // if there is not enough funds, we just return a no-op\n\n match total_supply(&mut deps.storage).update(|mut supply| {\n\n balance.amount = (balance.amount - supply.claims)?;\n", "file_path": "contracts/staking/src/contract.rs", "rank": 57, "score": 155081.02536387276 }, { "content": "pub fn query_investment<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<Binary> {\n\n let invest = invest_info_read(&deps.storage).load()?;\n\n let supply = total_supply_read(&deps.storage).load()?;\n\n\n\n let res = InvestmentResponse {\n\n owner: deps.api.human_address(&invest.owner)?,\n\n exit_tax: invest.exit_tax,\n\n validator: invest.validator,\n\n min_withdrawal: invest.min_withdrawal,\n\n token_supply: supply.issued,\n\n staked_tokens: coin(supply.bonded.u128(), &invest.bond_denom),\n\n nominal_value: if supply.issued.is_zero() {\n\n FALLBACK_RATIO\n\n } else {\n\n Decimal::from_ratio(supply.bonded, supply.issued)\n\n },\n\n };\n\n to_binary(&res)\n", "file_path": "contracts/staking/src/contract.rs", "rank": 58, "score": 155081.02536387276 }, { "content": "pub fn query_claims<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n address: HumanAddr,\n\n) -> StdResult<Binary> {\n\n let address_raw = deps.api.canonical_address(&address)?;\n\n let claims = claims_read(&deps.storage)\n\n .may_load(address_raw.as_slice())?\n\n .unwrap_or_default();\n\n to_binary(&ClaimsResponse { claims })\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 59, "score": 155081.02536387276 }, { "content": "pub fn query_balance<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n address: HumanAddr,\n\n) -> StdResult<Binary> {\n\n let address_raw = deps.api.canonical_address(&address)?;\n\n let balance = balances_read(&deps.storage)\n\n .may_load(address_raw.as_slice())?\n\n .unwrap_or_default();\n\n to_binary(&BalanceResponse { balance })\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 60, "score": 155081.02536387276 }, { "content": "pub fn from_slice<'a, T>(value: &'a [u8]) -> VmResult<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n serde_json::from_slice(value).map_err(|e| VmError::parse_err(type_name::<T>(), e))\n\n}\n\n\n", "file_path": "packages/vm/src/serde.rs", "rank": 61, "score": 153831.5775051936 }, { "content": "pub fn query_token_info<S: Storage, A: Api, Q: Querier>(\n\n deps: &Extern<S, A, Q>,\n\n) -> StdResult<Binary> {\n\n let info = token_info_read(&deps.storage).load()?;\n\n to_binary(&info)\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 62, "score": 152812.87298060567 }, { "content": "#[test]\n\nfn initialization_with_missing_validator() {\n\n let mut ext = mock_dependencies(20, &[]);\n\n ext.querier\n\n .with_staking(\"stake\", &[sample_validator(\"john\")], &[]);\n\n let mut deps = Instance::from_code(WASM, ext, 500_000).unwrap();\n\n\n\n let creator = HumanAddr::from(\"creator\");\n\n let msg = InitMsg {\n\n name: \"Cool Derivative\".to_string(),\n\n symbol: \"DRV\".to_string(),\n\n decimals: 9,\n\n validator: HumanAddr::from(\"my-validator\"),\n\n exit_tax: Decimal::percent(2),\n\n 
min_withdrawal: Uint128(50),\n\n };\n\n let env = mock_env(&deps.api, &creator, &[]);\n\n\n\n // make sure we can init with this\n\n let res: StdResult<InitResponse> = init(&mut deps, env, msg.clone());\n\n match res.unwrap_err() {\n\n StdError::GenericErr { msg, .. } => {\n\n assert_eq!(msg, \"my-validator is not in the current validator set\")\n\n }\n\n _ => panic!(\"expected unregistered validator error\"),\n\n }\n\n}\n\n\n", "file_path": "contracts/staking/tests/integration.rs", "rank": 63, "score": 150860.4624998776 }, { "content": "fn create_unmanaged_context_data<S: Storage, Q: Querier>(gas_limit: u64) -> *mut c_void {\n\n let data = ContextData::<S, Q> {\n\n gas_state: GasState::with_limit(gas_limit),\n\n storage: None,\n\n storage_readonly: true,\n\n querier: None,\n\n wasmer_instance: None,\n\n #[cfg(feature = \"iterator\")]\n\n iterators: HashMap::new(),\n\n #[cfg(not(feature = \"iterator\"))]\n\n iterators: PhantomData::default(),\n\n };\n\n let heap_data = Box::new(data); // move from stack to heap\n\n Box::into_raw(heap_data) as *mut c_void // give up ownership\n\n}\n\n\n", "file_path": "packages/vm/src/context.rs", "rank": 64, "score": 147513.6644298584 }, { "content": "/// claims are the claims to money being unbonded\n\npub fn claims<S: Storage>(storage: &mut S) -> Bucket<S, Uint128> {\n\n bucket(PREFIX_CLAIMS, storage)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 65, "score": 147401.03605380646 }, { "content": "/// balances are state of the erc20 tokens\n\npub fn balances<S: Storage>(storage: &mut S) -> Bucket<S, Uint128> {\n\n bucket(PREFIX_BALANCE, storage)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 66, "score": 147401.03605380646 }, { "content": "pub fn total_supply<S: Storage>(storage: &mut S) -> Singleton<S, Supply> {\n\n singleton(storage, KEY_TOTAL_SUPPLY)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 67, "score": 145315.13171942372 }, { "content": "pub fn claims_read<S: ReadonlyStorage>(storage: &S) -> ReadonlyBucket<S, Uint128> {\n\n bucket_read(PREFIX_CLAIMS, storage)\n\n}\n\n\n\n/// Investment info is fixed at initialization, and is used to control the function of the contract\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct InvestmentInfo {\n\n /// owner created the contract and takes a cut\n\n pub owner: CanonicalAddr,\n\n /// this is the denomination we can stake (and only one we accept for payments)\n\n pub bond_denom: String,\n\n /// this is how much the owner takes as a cut when someone unbonds\n\n pub exit_tax: Decimal,\n\n /// All tokens are bonded to this validator\n\n /// FIXME: humanize/canonicalize address doesn't work for validator addrresses\n\n pub validator: HumanAddr,\n\n /// This is the minimum amount we will pull out to reinvest, as well as a minumum\n\n /// that can be unbonded (to avoid needless staking tx)\n\n pub min_withdrawal: Uint128,\n\n}\n", "file_path": "contracts/staking/src/state.rs", "rank": 68, "score": 144874.40208231274 }, { "content": "pub fn balances_read<S: ReadonlyStorage>(storage: &S) -> ReadonlyBucket<S, Uint128> {\n\n bucket_read(PREFIX_BALANCE, storage)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 69, "score": 144874.40208231274 }, { "content": "/// The BTreeMap specific key-value pair reference type, as returned by BTreeMap<Vec<u8>, T>::range.\n\n/// This is internal as it can change any time if the map implementation is swapped out.\n\ntype BTreeMapPairRef<'a, T = Vec<u8>> = (&'a 
Vec<u8>, &'a T);\n\n\n", "file_path": "packages/std/src/storage.rs", "rank": 70, "score": 144212.9605668046 }, { "content": "/// The BTreeMap specific key-value pair reference type, as returned by BTreeMap<Vec<u8>, T>::range.\n\n/// This is internal as it can change any time if the map implementation is swapped out.\n\ntype BTreeMapPairRef<'a, T = Vec<u8>> = (&'a Vec<u8>, &'a T);\n\n\n\npub struct StorageTransaction<'a, S: ReadonlyStorage> {\n\n /// read-only access to backing storage\n\n storage: &'a S,\n\n /// these are local changes not flushed to backing storage\n\n local_state: BTreeMap<Vec<u8>, Delta>,\n\n /// a log of local changes not yet flushed to backing storage\n\n rep_log: RepLog,\n\n}\n\n\n\nimpl<'a, S: ReadonlyStorage> StorageTransaction<'a, S> {\n\n pub fn new(storage: &'a S) -> Self {\n\n StorageTransaction {\n\n storage,\n\n local_state: BTreeMap::new(),\n\n rep_log: RepLog::new(),\n\n }\n\n }\n\n\n", "file_path": "packages/storage/src/transactions.rs", "rank": 71, "score": 144212.9605668046 }, { "content": "pub fn invest_info<S: Storage>(storage: &mut S) -> Singleton<S, InvestmentInfo> {\n\n singleton(storage, KEY_INVESTMENT)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 72, "score": 143312.22210344355 }, { "content": "pub fn underflow<U: ToString>(minuend: U, subtrahend: U) -> StdError {\n\n Underflow {\n\n minuend: minuend.to_string(),\n\n subtrahend: subtrahend.to_string(),\n\n }\n\n .build()\n\n}\n\n\n", "file_path": "packages/std/src/errors/std_error_helpers.rs", "rank": 73, "score": 143297.31832739533 }, { "content": "pub fn total_supply_read<S: ReadonlyStorage>(storage: &S) -> ReadonlySingleton<S, Supply> {\n\n singleton_read(storage, KEY_TOTAL_SUPPLY)\n\n}\n", "file_path": "contracts/staking/src/state.rs", "rank": 74, "score": 142949.6304691674 }, { "content": "/// The BTreeMap specific key-value pair reference type, as returned by BTreeMap<Vec<u8>, T>::range.\n\n/// This is internal as it can change any time if the map implementation is swapped out.\n\ntype BTreeMapPairRef<'a, T = Vec<u8>> = (&'a Vec<u8>, &'a T);\n\n\n", "file_path": "packages/vm/src/testing/storage.rs", "rank": 75, "score": 142921.84294583555 }, { "content": "pub fn token_info<S: Storage>(storage: &mut S) -> Singleton<S, TokenInfoResponse> {\n\n singleton(storage, KEY_TOKEN_INFO)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 76, "score": 141387.45049029822 }, { "content": "pub fn invest_info_read<S: ReadonlyStorage>(storage: &S) -> ReadonlySingleton<S, InvestmentInfo> {\n\n singleton_read(storage, KEY_INVESTMENT)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 77, "score": 141098.51183577554 }, { "content": "// handle mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn handle<\n\n S: Storage + 'static,\n\n A: Api + 'static,\n\n Q: Querier + 'static,\n\n T: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n>(\n\n instance: &mut Instance<S, A, Q>,\n\n env: Env,\n\n msg: T,\n\n) -> HandleResult<U> {\n\n let serialized_msg = to_vec(&msg)?;\n\n call_handle(instance, &env, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 78, "score": 140753.56271749362 }, { "content": "// init mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// 
this is inefficient here, but only used in test code\n\npub fn init<\n\n S: Storage + 'static,\n\n A: Api + 'static,\n\n Q: Querier + 'static,\n\n T: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n>(\n\n instance: &mut Instance<S, A, Q>,\n\n env: Env,\n\n msg: T,\n\n) -> InitResult<U> {\n\n let serialized_msg = to_vec(&msg)?;\n\n call_init(instance, &env, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 79, "score": 140753.56271749362 }, { "content": "// query mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn query<\n\n S: Storage + 'static,\n\n A: Api + 'static,\n\n Q: Querier + 'static,\n\n T: Serialize + JsonSchema,\n\n>(\n\n instance: &mut Instance<S, A, Q>,\n\n msg: T,\n\n) -> StdResult<QueryResponse> {\n\n let serialized_msg = to_vec(&msg)?;\n\n call_query(instance, &serialized_msg).expect(\"VM error\")\n\n}\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 80, "score": 140753.56271749362 }, { "content": "// migrate mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn migrate<\n\n S: Storage + 'static,\n\n A: Api + 'static,\n\n Q: Querier + 'static,\n\n T: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n>(\n\n instance: &mut Instance<S, A, Q>,\n\n env: Env,\n\n msg: T,\n\n) -> MigrateResult<U> {\n\n let serialized_msg = to_vec(&msg)?;\n\n call_migrate(instance, &env, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 81, "score": 140753.56271749362 }, { "content": "/// All external requirements that can be injected for unit tests.\n\n/// It sets the given balance for the contract itself, nothing else\n\npub fn mock_dependencies(\n\n canonical_length: usize,\n\n contract_balance: &[Coin],\n\n) -> Extern<MockStorage, MockApi, MockQuerier> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n Extern {\n\n storage: MockStorage::default(),\n\n api: MockApi::new(canonical_length),\n\n querier: MockQuerier::new(&[(&contract_addr, contract_balance)]),\n\n }\n\n}\n\n\n", "file_path": "packages/std/src/mock.rs", "rank": 82, "score": 140745.02656538296 }, { "content": "pub fn token_info_read<S: ReadonlyStorage>(storage: &S) -> ReadonlySingleton<S, TokenInfoResponse> {\n\n singleton_read(storage, KEY_TOKEN_INFO)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 83, "score": 139316.89796129253 }, { "content": "/// Initializes the querier along with the mock_dependencies.\n\n/// Sets all balances provided (yoy must explicitly set contract balance if desired)\n\npub fn mock_dependencies_with_balances(\n\n canonical_length: usize,\n\n balances: &[(&HumanAddr, &[Coin])],\n\n) -> Extern<MockStorage, MockApi, MockQuerier> {\n\n Extern {\n\n storage: MockStorage::default(),\n\n api: MockApi::new(canonical_length),\n\n querier: MockQuerier::new(balances),\n\n }\n\n}\n\n\n\n// Use MemoryStorage implementation (which is valid in non-testcode)\n\n// We can later make simplifications here if needed\n\npub type MockStorage = MemoryStorage;\n\n\n\n// MockPrecompiles zero pads all human addresses to make them fit the canonical_length\n\n// it trims off zeros for the reverse 
operation.\n\n// not really smart, but allows us to see a difference (and consistent length for canonical adddresses)\n\n#[derive(Copy, Clone)]\n\npub struct MockApi {\n", "file_path": "packages/std/src/mock.rs", "rank": 84, "score": 138862.7657554365 }, { "content": "/// All external requirements that can be injected for unit tests.\n\n/// It sets the given balance for the contract itself, nothing else\n\npub fn mock_dependencies(\n\n canonical_length: usize,\n\n contract_balance: &[Coin],\n\n) -> Extern<MockStorage, MockApi, MockQuerier> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n Extern {\n\n storage: MockStorage::default(),\n\n api: MockApi::new(canonical_length),\n\n querier: MockQuerier::new(&[(&contract_addr, contract_balance)]),\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 85, "score": 138862.7657554365 }, { "content": "pub fn mock_instance(\n\n wasm: &[u8],\n\n contract_balance: &[Coin],\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n contract_balance: Some(contract_balance),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 86, "score": 138862.7657554365 }, { "content": "pub fn set_storage_readonly<S: Storage, Q: Querier>(ctx: &mut Ctx, new_value: bool) {\n\n let mut context_data = get_context_data_mut::<S, Q>(ctx);\n\n context_data.storage_readonly = new_value;\n\n}\n\n\n\n/// Add the iterator to the context's data. A new ID is assigned and returned.\n\n/// IDs are guaranteed to be in the range [0, 2**31-1], i.e. fit in the non-negative part if type i32.\n", "file_path": "packages/vm/src/context.rs", "rank": 87, "score": 138453.47732469888 }, { "content": "pub fn do_remove<S: Storage, Q: Querier>(ctx: &mut Ctx, key_ptr: u32) -> VmResult<()> {\n\n if is_storage_readonly::<S, Q>(ctx) {\n\n return Err(VmError::write_access_denied());\n\n }\n\n\n\n let key = read_region(ctx, key_ptr, MAX_LENGTH_DB_KEY)?;\n\n let used_gas = with_storage_from_context::<S, Q, _, _>(ctx, |store| Ok(store.remove(&key)?))?;\n\n try_consume_gas::<S, Q>(ctx, used_gas)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/imports.rs", "rank": 88, "score": 137075.6355559235 }, { "content": "/// A drop-in replacement for cosmwasm_std::testing::mock_dependencies\n\n/// this uses our CustomQuerier.\n\npub fn mock_dependencies_with_custom_querier(\n\n canonical_length: usize,\n\n contract_balance: &[Coin],\n\n) -> Extern<MockStorage, MockApi, MockQuerier<CustomQuery>> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n let custom_querier: MockQuerier<CustomQuery> =\n\n MockQuerier::new(&[(&contract_addr, contract_balance)])\n\n .with_custom_handler(|query| Ok(custom_query_execute(&query)));\n\n Extern {\n\n storage: MockStorage::default(),\n\n api: MockApi::new(canonical_length),\n\n querier: custom_querier,\n\n }\n\n}\n\n\n", "file_path": "contracts/reflect/src/testing.rs", "rank": 89, "score": 137072.00977628573 }, { "content": "pub fn mock_instance_with_options(\n\n wasm: &[u8],\n\n options: MockInstanceOptions,\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n check_wasm(wasm, &options.supported_features).unwrap();\n\n let contract_address = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n\n\n // merge balances\n\n let mut balances = options.balances.to_vec();\n\n if let Some(contract_balance) = options.contract_balance {\n\n // Remove old entry if exists\n\n if let Some(pos) = 
balances.iter().position(|item| *item.0 == contract_address) {\n\n balances.remove(pos);\n\n }\n\n balances.push((&contract_address, contract_balance));\n\n }\n\n\n\n let api = if let Some(error_message) = options.error_message {\n\n MockApi::new_failing(options.canonical_address_length, error_message)\n\n } else {\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 90, "score": 137067.5138127851 }, { "content": "pub fn mock_instance_with_balances(\n\n wasm: &[u8],\n\n balances: &[(&HumanAddr, &[Coin])],\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n balances,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 91, "score": 137067.5138127851 }, { "content": "/// A drop-in replacement for cosmwasm_vm::testing::mock_dependencies\n\n/// that supports CustomQuery.\n\npub fn mock_dependencies_with_custom_querier(\n\n canonical_length: usize,\n\n contract_balance: &[Coin],\n\n) -> Extern<MockStorage, MockApi, MockQuerier<CustomQuery>> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n let custom_querier: MockQuerier<CustomQuery> =\n\n MockQuerier::new(&[(&contract_addr, contract_balance)])\n\n .with_custom_handler(|query| Ok(custom_query_execute(query)));\n\n\n\n Extern {\n\n storage: MockStorage::default(),\n\n api: MockApi::new(canonical_length),\n\n querier: custom_querier,\n\n }\n\n}\n\n\n", "file_path": "contracts/reflect/tests/integration.rs", "rank": 92, "score": 137067.5138127851 }, { "content": "/// Initializes the querier along with the mock_dependencies.\n\n/// Sets all balances provided (yoy must explicitly set contract balance if desired)\n\npub fn mock_dependencies_with_balances(\n\n canonical_length: usize,\n\n balances: &[(&HumanAddr, &[Coin])],\n\n) -> Extern<MockStorage, MockApi, MockQuerier> {\n\n Extern {\n\n storage: MockStorage::default(),\n\n api: MockApi::new(canonical_length),\n\n querier: MockQuerier::new(balances),\n\n }\n\n}\n\n\n\n// MockPrecompiles zero pads all human addresses to make them fit the canonical_length\n\n// it trims off zeros for the reverse operation.\n\n// not really smart, but allows us to see a difference (and consistent length for canonical adddresses)\n\n#[derive(Copy, Clone)]\n\npub struct MockApi {\n\n canonical_length: usize,\n\n error_message: Option<&'static str>,\n\n}\n\n\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 93, "score": 137067.5138127851 }, { "content": "fn _do_query<T: DeserializeOwned + JsonSchema>(\n\n query_fn: &dyn Fn(\n\n &Extern<ExternalStorage, ExternalApi, ExternalQuerier>,\n\n T,\n\n ) -> StdResult<QueryResponse>,\n\n msg_ptr: *mut c_void,\n\n) -> StdResult<QueryResponse> {\n\n let msg: Vec<u8> = unsafe { consume_region(msg_ptr) };\n\n\n\n let msg: T = from_slice(&msg)?;\n\n let deps = make_dependencies();\n\n query_fn(&deps, msg)\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 94, "score": 135468.45021400426 }, { "content": "pub fn mock_instance_with_gas_limit(\n\n wasm: &[u8],\n\n gas_limit: u64,\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n gas_limit,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct MockInstanceOptions<'a> {\n\n // dependencies\n\n pub canonical_address_length: usize,\n\n pub balances: &'a [(&'a HumanAddr, &'a [Coin])],\n\n /// This option is merged into balances and might override an existing 
value\n\n pub contract_balance: Option<&'a [Coin]>,\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 95, "score": 135353.3739571646 }, { "content": "pub fn mock_instance_with_failing_api(\n\n wasm: &[u8],\n\n contract_balance: &[Coin],\n\n error_message: &'static str,\n\n) -> Instance<MockStorage, MockApi, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n contract_balance: Some(contract_balance),\n\n error_message: Some(error_message),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 96, "score": 135353.3739571646 }, { "content": "/// Reads a storage entry from the VM's storage into Wasm memory\n\npub fn do_read<S: Storage, Q: Querier>(ctx: &mut Ctx, key_ptr: u32) -> VmResult<u32> {\n\n let key = read_region(ctx, key_ptr, MAX_LENGTH_DB_KEY)?;\n\n // `Ok(expr?)` used to convert the error variant.\n\n let (value, used_gas): (Option<Vec<u8>>, u64) =\n\n with_storage_from_context::<S, Q, _, _>(ctx, |store| Ok(store.get(&key)?))?;\n\n try_consume_gas::<S, Q>(ctx, used_gas)?;\n\n\n\n let out_data = match value {\n\n Some(data) => data,\n\n None => return Ok(0),\n\n };\n\n\n\n let out_ptr = with_func_from_context::<S, Q, u32, u32, _, _>(ctx, \"allocate\", |allocate| {\n\n let out_size = to_u32(out_data.len())?;\n\n let ptr = allocate.call(out_size)?;\n\n if ptr == 0 {\n\n return Err(CommunicationError::zero_address().into());\n\n }\n\n Ok(ptr)\n\n })?;\n\n write_region(ctx, out_ptr, &out_data)?;\n\n Ok(out_ptr)\n\n}\n\n\n", "file_path": "packages/vm/src/imports.rs", "rank": 97, "score": 134003.5934135776 }, { "content": "/// do_handle should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\npub fn do_handle<T, U>(\n\n handle_fn: &dyn Fn(\n\n &mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>,\n\n Env,\n\n T,\n\n ) -> HandleResult<U>,\n\n env_ptr: u32,\n\n msg_ptr: u32,\n\n) -> u32\n\nwhere\n\n T: DeserializeOwned + JsonSchema,\n\n U: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,\n\n{\n\n let res: HandleResult<U> =\n\n _do_handle(handle_fn, env_ptr as *mut c_void, msg_ptr as *mut c_void);\n\n let v = to_vec(&res).unwrap();\n\n release_buffer(v) as u32\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 98, "score": 133671.39027831625 }, { "content": "/// do_init should be wrapped in an external \"C\" export, containing a contract-specific function as arg\n\npub fn do_init<T, U>(\n\n init_fn: &dyn Fn(\n\n &mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>,\n\n Env,\n\n T,\n\n ) -> InitResult<U>,\n\n env_ptr: u32,\n\n msg_ptr: u32,\n\n) -> u32\n\nwhere\n\n T: DeserializeOwned + JsonSchema,\n\n U: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,\n\n{\n\n let res: InitResult<U> = _do_init(init_fn, env_ptr as *mut c_void, msg_ptr as *mut c_void);\n\n let v = to_vec(&res).unwrap();\n\n release_buffer(v) as u32\n\n}\n\n\n", "file_path": "packages/std/src/exports.rs", "rank": 99, "score": 133671.39027831625 } ]
Rust
libllama/src/dbgcore.rs
HIDE810/llama
380326dd946cee1769711999b9c12aa09dfb1e99
use std::sync; use cpu::{self, v5, v6}; pub use cpu::irq::{IrqType9, IrqClient}; use cpu::caches::Ops; use hwcore; use io; #[derive(Clone)] pub struct DbgCore { hw: sync::Arc<sync::Mutex<hwcore::HwCore>> } impl DbgCore { pub fn bind(hw: hwcore::HwCore) -> DbgCore { DbgCore { hw: sync::Arc::new(sync::Mutex::new(hw)), } } pub fn ctx<'a>(&'a mut self, which: ActiveCpu) -> DbgContext<'a> { DbgContext { active_cpu: which, hwcore: self.hw.lock().unwrap() } } } pub struct DbgContext<'a> { active_cpu: ActiveCpu, hwcore: sync::MutexGuard<'a, hwcore::HwCore> } impl<'a> DbgContext<'a> { pub fn pause(&mut self) { self.hwcore.stop(); } pub fn resume(&mut self) { self.hwcore.start(); } pub fn running(&mut self) -> bool { self.hwcore.running() } pub fn hwcore(&self) -> &hwcore::HwCore { &*self.hwcore } pub fn hwcore_mut(&mut self) -> &mut hwcore::HwCore { &mut *self.hwcore } pub fn hw9<'b>(&'b mut self) -> DbgHw9Context<'b> { use std::sync::PoisonError; use hwcore::Hardware9; let print_regs = |p: PoisonError<sync::MutexGuard<'_, Hardware9>>| { let hw9 = p.into_inner(); let s = format!("Internal error!\nCPU register state:\n\ gpregs: {:#X?}\n\ cpsr: {:#X?}\n\ last 1024 instruction addresses:\n\ {:#X?}", hw9.arm9.regs, hw9.arm9.cpsr.val, hw9.arm9.last_instructions); panic!("{}", s); }; DbgHw9Context { hw: self.hwcore.hardware9.lock().unwrap_or_else(print_regs) } } pub fn hw11<'b>(&'b mut self) -> DbgHw11Context<'b> { use std::sync::PoisonError; use hwcore::Hardware11; let print_regs = |p: PoisonError<sync::MutexGuard<'_, Hardware11>>| { let hw11 = p.into_inner(); let s = format!("Internal error!\nCPU register state:\n\ gpregs: {:#X?}\n\ cpsr: {:#X?}\n\ last 1024 instruction addresses:\n\ {:#X?}", hw11.arm11.regs, hw11.arm11.cpsr.val, hw11.arm11.last_instructions); panic!("{}", s); }; DbgHw11Context { hw: self.hwcore.hardware11.lock().unwrap_or_else(print_regs) } } pub fn hw<'b>(&'b mut self) -> Box<dyn HwCtx + 'b> { match self.active_cpu { ActiveCpu::Arm9 => Box::new(self.hw9()), ActiveCpu::Arm11 => Box::new(self.hw11()) } } pub fn trigger_irq(&mut self, irq: IrqType9) { self.hwcore_mut().irq_tx.assert(irq); } } #[derive(Copy, Clone, Eq, PartialEq)] pub enum ActiveCpu { Arm9, Arm11 } #[allow(non_camel_case_types)] pub enum CpuRef<'a> { v5(&'a cpu::Cpu<v5>), v6(&'a cpu::Cpu<v6>), } #[allow(non_camel_case_types)] pub enum CpuMut<'a> { v5(&'a mut cpu::Cpu<v5>), v6(&'a mut cpu::Cpu<v6>), } macro_rules! 
any_cpu { ($self:expr, mut $ident:ident; $code:block) => { match $self.cpu_mut() { CpuMut::v5($ident) => $code, CpuMut::v6($ident) => $code } }; ($self:expr, ref $ident:ident; $code:block) => { match $self.cpu_ref() { CpuRef::v5($ident) => $code, CpuRef::v6($ident) => $code } }; } pub trait HwCtx { fn cpu_ref(&self) -> CpuRef; fn cpu_mut(&mut self) -> CpuMut; fn read_mem(&mut self, address: u32, bytes: &mut [u8]) -> Result<(), String> { any_cpu!(self, mut cpu; { cpu.mpu.icache_invalidate(); cpu.mpu.dcache_invalidate(); cpu.mpu.main_mem_mut().debug_read_buf(address, bytes) }) } fn write_mem(&mut self, address: u32, bytes: &[u8]) { any_cpu!(self, mut cpu; { cpu.mpu.icache_invalidate(); cpu.mpu.dcache_invalidate(); cpu.mpu.main_mem_mut().write_buf(address, bytes); }) } fn read_reg(&self, reg: usize) -> u32 { any_cpu!(self, ref cpu; { cpu.regs[reg] }) } fn write_reg(&mut self, reg: usize, value: u32) { any_cpu!(self, mut cpu; { cpu.regs[reg] = value; }) } fn read_cpsr(&self) -> u32 { any_cpu!(self, ref cpu; { cpu.cpsr.val }) } fn write_cpsr(&mut self, value: u32) { any_cpu!(self, mut cpu; { cpu.cpsr.val = value; let mode_num = cpu.cpsr.mode.get(); cpu.regs.swap(cpu::Mode::from_num(mode_num)); }) } fn pause_addr(&self) -> u32 { any_cpu!(self, ref cpu; { cpu.regs[15] - cpu.get_pc_offset() }) } fn branch_to(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.branch(addr); }) } fn is_thumb(&self) -> bool { any_cpu!(self, ref cpu; { cpu.cpsr.thumb_bit.get() == 1 }) } fn step(&mut self) { any_cpu!(self, mut cpu; { cpu.run(1); }) } fn set_breakpoint(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.breakpoints.insert(addr); }) } fn has_breakpoint(&mut self, addr: u32) -> bool { any_cpu!(self, ref cpu; { cpu.breakpoints.contains(&addr) }) } fn del_breakpoint(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.breakpoints.remove(&addr); }) } } pub struct DbgHw9Context<'a> { hw: sync::MutexGuard<'a, hwcore::Hardware9> } impl<'a> DbgHw9Context<'a> { pub fn io9_devices(&self) -> &io::IoRegsArm9 { self.hw.io9() } pub fn io_shared_devices(&self) -> &io::IoRegsShared { self.hw.io_shared() } } impl<'a> HwCtx for DbgHw9Context<'a> { fn cpu_ref(&self) -> CpuRef { CpuRef::v5(&self.hw.arm9) } fn cpu_mut(&mut self) -> CpuMut { CpuMut::v5(&mut self.hw.arm9) } } pub struct DbgHw11Context<'a> { hw: sync::MutexGuard<'a, hwcore::Hardware11> } impl<'a> DbgHw11Context<'a> { pub fn io11_devices(&self) -> &io::IoRegsArm11 { self.hw.io11() } pub fn io_shared_devices(&self) -> &io::IoRegsShared { self.hw.io_shared() } } impl<'a> HwCtx for DbgHw11Context<'a> { fn cpu_ref(&self) -> CpuRef { CpuRef::v6(&self.hw.arm11) } fn cpu_mut(&mut self) -> CpuMut { CpuMut::v6(&mut self.hw.arm11) } }
use std::sync; use cpu::{self, v5, v6}; pub use cpu::irq::{IrqType9, IrqClient}; use cpu::caches::Ops; use hwcore; use io; #[derive(Clone)] pub struct DbgCore { hw: sync::Arc<sync::Mutex<hwcore::HwCore>> } impl DbgCore { pub fn bind(hw: hwcore::HwCore) -> DbgCore { DbgCore { hw: sync::Arc::new(sync::Mutex::new(hw)), } } pub fn ctx<'a>(&'a mut self, which: ActiveCpu) -> DbgContext<'a> { DbgContext { active_cpu: which, hwcore: self.hw.lock().unwrap() } } } pub struct DbgContext<'a> { active_cpu: ActiveCpu, hwcore: sync::MutexGuard<'a, hwcore::HwCore> } impl<'a> DbgContext<'a> { pub fn pause(&mut self) { self.hwcore.stop(); } pub fn resume(&mut self) { self.hwcore.start(); } pub fn running(&mut self) -> bool { self.hwcore.running() } pub fn hwcore(&self) -> &hwcore::HwCore { &*self.hwcore } pub fn hwcore_mut(&mut self) -> &mut hwcore::HwCore { &mut *self.hwcore } pub fn hw9<'b>(&'b mut self) -> DbgHw9Context<'b> { use std::sync::PoisonError; use hwcore::Hardware9; let print_regs = |p: PoisonError<sync::MutexGuard<'_, Hardware9>>| { let hw9 = p.into_inner(); let s = format!("Internal error!\nCPU register state:\n\ gpregs: {:#X?}\n\ cpsr: {:#X?}\n\ last 1024 instruction addresses:\n\ {:#X?}", hw9.arm9.regs, hw9.arm9.cpsr.val, hw9.arm9.last_instructions); panic!("{}", s); }; DbgHw9Context { hw: self.hwcore.hardware9.lock().unwrap_or_else(print_regs) } } pub fn hw11<'b>(&'b mut self) -> DbgHw11Context<'b> { use std::sync::PoisonError; use hwcore::Hardware11; let print_regs = |p: PoisonError<sync::MutexGuard<'_, Hardware11>>| { let hw11 = p.into_inner(); let s = format!("Internal error!\nCPU register state:\n\ gpregs: {:#X?}\n\ cpsr: {:#X?}\n\ last 1024 instruction addresses:\n\ {:#X?}", hw11.arm11.regs, hw11.arm11.cpsr.val, hw11.arm11.last_instructions); panic!("{}", s); }; DbgHw11Context { hw: self.hwcore.hardw
de:block) => { match $self.cpu_ref() { CpuRef::v5($ident) => $code, CpuRef::v6($ident) => $code } }; } pub trait HwCtx { fn cpu_ref(&self) -> CpuRef; fn cpu_mut(&mut self) -> CpuMut; fn read_mem(&mut self, address: u32, bytes: &mut [u8]) -> Result<(), String> { any_cpu!(self, mut cpu; { cpu.mpu.icache_invalidate(); cpu.mpu.dcache_invalidate(); cpu.mpu.main_mem_mut().debug_read_buf(address, bytes) }) } fn write_mem(&mut self, address: u32, bytes: &[u8]) { any_cpu!(self, mut cpu; { cpu.mpu.icache_invalidate(); cpu.mpu.dcache_invalidate(); cpu.mpu.main_mem_mut().write_buf(address, bytes); }) } fn read_reg(&self, reg: usize) -> u32 { any_cpu!(self, ref cpu; { cpu.regs[reg] }) } fn write_reg(&mut self, reg: usize, value: u32) { any_cpu!(self, mut cpu; { cpu.regs[reg] = value; }) } fn read_cpsr(&self) -> u32 { any_cpu!(self, ref cpu; { cpu.cpsr.val }) } fn write_cpsr(&mut self, value: u32) { any_cpu!(self, mut cpu; { cpu.cpsr.val = value; let mode_num = cpu.cpsr.mode.get(); cpu.regs.swap(cpu::Mode::from_num(mode_num)); }) } fn pause_addr(&self) -> u32 { any_cpu!(self, ref cpu; { cpu.regs[15] - cpu.get_pc_offset() }) } fn branch_to(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.branch(addr); }) } fn is_thumb(&self) -> bool { any_cpu!(self, ref cpu; { cpu.cpsr.thumb_bit.get() == 1 }) } fn step(&mut self) { any_cpu!(self, mut cpu; { cpu.run(1); }) } fn set_breakpoint(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.breakpoints.insert(addr); }) } fn has_breakpoint(&mut self, addr: u32) -> bool { any_cpu!(self, ref cpu; { cpu.breakpoints.contains(&addr) }) } fn del_breakpoint(&mut self, addr: u32) { any_cpu!(self, mut cpu; { cpu.breakpoints.remove(&addr); }) } } pub struct DbgHw9Context<'a> { hw: sync::MutexGuard<'a, hwcore::Hardware9> } impl<'a> DbgHw9Context<'a> { pub fn io9_devices(&self) -> &io::IoRegsArm9 { self.hw.io9() } pub fn io_shared_devices(&self) -> &io::IoRegsShared { self.hw.io_shared() } } impl<'a> HwCtx for DbgHw9Context<'a> { fn cpu_ref(&self) -> CpuRef { CpuRef::v5(&self.hw.arm9) } fn cpu_mut(&mut self) -> CpuMut { CpuMut::v5(&mut self.hw.arm9) } } pub struct DbgHw11Context<'a> { hw: sync::MutexGuard<'a, hwcore::Hardware11> } impl<'a> DbgHw11Context<'a> { pub fn io11_devices(&self) -> &io::IoRegsArm11 { self.hw.io11() } pub fn io_shared_devices(&self) -> &io::IoRegsShared { self.hw.io_shared() } } impl<'a> HwCtx for DbgHw11Context<'a> { fn cpu_ref(&self) -> CpuRef { CpuRef::v6(&self.hw.arm11) } fn cpu_mut(&mut self) -> CpuMut { CpuMut::v6(&mut self.hw.arm11) } }
are11.lock().unwrap_or_else(print_regs) } } pub fn hw<'b>(&'b mut self) -> Box<dyn HwCtx + 'b> { match self.active_cpu { ActiveCpu::Arm9 => Box::new(self.hw9()), ActiveCpu::Arm11 => Box::new(self.hw11()) } } pub fn trigger_irq(&mut self, irq: IrqType9) { self.hwcore_mut().irq_tx.assert(irq); } } #[derive(Copy, Clone, Eq, PartialEq)] pub enum ActiveCpu { Arm9, Arm11 } #[allow(non_camel_case_types)] pub enum CpuRef<'a> { v5(&'a cpu::Cpu<v5>), v6(&'a cpu::Cpu<v6>), } #[allow(non_camel_case_types)] pub enum CpuMut<'a> { v5(&'a mut cpu::Cpu<v5>), v6(&'a mut cpu::Cpu<v6>), } macro_rules! any_cpu { ($self:expr, mut $ident:ident; $code:block) => { match $self.cpu_mut() { CpuMut::v5($ident) => $code, CpuMut::v6($ident) => $code } }; ($self:expr, ref $ident:ident; $co
random
[ { "content": "/// Controls debugger behavior based on user-provided commands\n\n///\n\n/// `command`: Iterator over &str items\n\npub fn handle<'a, It>(active_cpu: &mut ActiveCpu, debugger: &mut dbgcore::DbgCore, mut command: It)\n\n where It: Iterator<Item=&'a str> {\n\n\n\n match command.next() {\n\n Some(\"asm\") => cmd_asm(*active_cpu, debugger, command),\n\n Some(\"brk\") => cmd_brk(*active_cpu, debugger, command),\n\n Some(\"btn\") => cmd_btn(*active_cpu, debugger, command),\n\n Some(\"fbdmp\") => cmd_fbdmp(*active_cpu, debugger, command),\n\n Some(\"irq\") => cmd_irq(*active_cpu, debugger, command),\n\n Some(\"keydmp\") => cmd_keydmp(*active_cpu, debugger, command),\n\n Some(\"mem\") => cmd_mem(*active_cpu, debugger, command),\n\n Some(\"reg\") => cmd_reg(*active_cpu, debugger, command),\n\n Some(\"run\") => { debugger.ctx(*active_cpu).resume() },\n\n Some(\"step\") => cmd_step(*active_cpu, debugger, command),\n\n\n\n Some(\"cpu\") => {\n\n match command.next() {\n\n Some(\"arm9\") => *active_cpu = ActiveCpu::Arm9,\n\n Some(\"arm11\") => *active_cpu = ActiveCpu::Arm11,\n\n _ => error!(\"Expected `cpu <arm9|arm11>\")\n", "file_path": "llama-ui/commands.rs", "rank": 0, "score": 313972.88307156577 }, { "content": "fn arm9_run(client: &msgs::Client<Message>, hardware: &mut Hardware9) -> bool {\n\n let reason = 't: loop {\n\n for msg in client.try_iter() {\n\n match msg {\n\n Message::Quit => return false,\n\n Message::SuspendEmulation => {\n\n break 't cpu::BreakReason::Trapped\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n if let reason @ cpu::BreakReason::Breakpoint = hardware.arm9.run(1000) {\n\n info!(\"Breakpoint hit @ 0x{:X}!\", hardware.arm9.regs[15] - hardware.arm9.get_pc_offset());\n\n client.send(Message::Arm11Halted(reason));\n\n break 't reason\n\n }\n\n };\n\n\n\n client.send(Message::Arm9Halted(reason));\n\n true\n\n}\n\n\n", "file_path": "libllama/src/hwcore.rs", "rank": 1, "score": 287958.47946583916 }, { "content": "fn arm11_run(client: &msgs::Client<Message>, hardware: &mut Hardware11) -> bool {\n\n let reason = 't: loop {\n\n for msg in client.try_iter() {\n\n match msg {\n\n Message::Quit => return false,\n\n Message::SuspendEmulation => {\n\n break 't cpu::BreakReason::Trapped\n\n }\n\n Message::HidUpdate(btn) => {\n\n let io_shared = &hardware.io_shared().hid;\n\n io::hid::update_pad(&mut io_shared.lock(), btn);\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n if let reason @ cpu::BreakReason::Breakpoint = hardware.arm11.run(1000) {\n\n info!(\"Breakpoint hit @ 0x{:X}!\", hardware.arm11.regs[15] - hardware.arm11.get_pc_offset());\n\n client.send(Message::Arm9Halted(reason));\n\n break 't reason\n\n }\n\n };\n\n\n\n client.send(Message::Arm11Halted(reason));\n\n\n\n true\n\n}\n\n\n", "file_path": "libllama/src/hwcore.rs", "rank": 2, "score": 287958.4794658392 }, { "content": "/// Prints registers to the screen based on provided register name\n\n/// Command format: \"reg [register name]\"\n\n///\n\n/// `args`: Iterator over &str items\n\nfn cmd_reg<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)\n\n where It: Iterator<Item=&'a str> {\n\n let mut ctx = debugger.ctx(active_cpu);\n\n let hw = ctx.hw();\n\n\n\n let print_reg = |reg_num| info!(\"R{} = 0x{:08X}\", reg_num, hw.read_reg(reg_num));\n\n let print_cpsr = || info!(\"CPSR = 0x{:08X}\", hw.read_cpsr());\n\n\n\n let reg_str = match args.next() {\n\n Some(arg) => arg.to_owned().to_lowercase(),\n\n None => {\n\n for i in 0..16 {\n\n print_reg(i);\n\n }\n\n print_cpsr();\n\n return;\n\n }\n\n };\n\n\n\n 
match reg_str.as_str() {\n", "file_path": "llama-ui/commands.rs", "rank": 3, "score": 256912.3958467156 }, { "content": "/// Prints disassembly for the next instruction\n\n/// Command format: \"asm [address hex]\"\n\n///\n\n/// `args`: Iterator over &str items\n\nfn cmd_asm<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)\n\n where It: Iterator<Item=&'a str> {\n\n\n\n use capstone::Capstone;\n\n use capstone::arch::BuildsCapstone;\n\n use capstone::arch::arm::ArchMode;\n\n let _ = args;\n\n\n\n let mut ctx = debugger.ctx(active_cpu);\n\n let mut hw = ctx.hw();\n\n\n\n let pause_addr = match args.next().map(from_hex) {\n\n Some(Ok(x)) => x,\n\n Some(Err(_)) => { error!(\"Could not parse hex value!\"); return }\n\n None => hw.pause_addr(),\n\n };\n\n\n\n let cpu_mode = if hw.is_thumb() {\n\n ArchMode::Thumb\n\n } else {\n", "file_path": "llama-ui/commands.rs", "rank": 4, "score": 256911.33129019054 }, { "content": "/// Adds CPU breakpoint at instruction address\n\n/// Command format: \"brk <address hex>\"\n\n///\n\n/// `args`: Iterator over &str items\n\nfn cmd_brk<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)\n\n where It: Iterator<Item=&'a str> {\n\n\n\n let addr_str = match args.next() {\n\n Some(arg) => from_hex(arg),\n\n None => { info!(\"Usage: `brk <addr>\"); return }\n\n };\n\n\n\n // Check for from_hex errors\n\n let addr = match addr_str {\n\n Ok(x) => x,\n\n _ => { error!(\"Could not parse hex value!\"); return }\n\n };\n\n\n\n info!(\"Toggling breakpoint at 0x{:X}\", addr);\n\n\n\n let mut ctx = debugger.ctx(active_cpu);\n\n let mut hw = ctx.hw();\n\n\n\n if !hw.has_breakpoint(addr) {\n\n hw.set_breakpoint(addr);\n\n } else {\n\n hw.del_breakpoint(addr);\n\n }\n\n}\n\n\n", "file_path": "llama-ui/commands.rs", "rank": 5, "score": 256911.28331012992 }, { "content": "/// Prints memory to the screen based on provided address, number of bytes\n\n/// Command format: \"mem <start address hex> [# bytes hex]\"\n\n///\n\n/// `args`: Iterator over &str items\n\nfn cmd_mem<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)\n\n where It: Iterator<Item=&'a str> {\n\n\n\n // Tuple: (u32: start, u32: num)\n\n let arg_res = match (args.next(), args.next()) {\n\n (Some(ss), Some(ns)) => from_hex(ss).and_then(|s| Ok((s, from_hex(ns)?))),\n\n (Some(ss), None) => from_hex(ss).and_then(|s| Ok((s, 1))),\n\n (None, _) => { info!(\"Usage: `mem <start> [num] [outfile.bin]\"); return }\n\n };\n\n\n\n // Check for from_hex errors, validate `num` input\n\n let (start, num) = match arg_res {\n\n Ok((s, n)) if n > 0 => (s, n),\n\n Ok((s, _)) => (s, 1),\n\n _ => { error!(\"Could not parse hex value!\"); return }\n\n };\n\n\n\n trace!(\"Printing {} bytes of RAM starting at 0x{:08X}\", num, start);\n\n\n\n let mut ctx = debugger.ctx(active_cpu);\n", "file_path": "llama-ui/commands.rs", "rank": 6, "score": 256907.41800035274 }, { "content": "/// Triggers the specified IRQ\n\n/// Command format: \"irq <type>\"\n\n///\n\n/// `args`: Iterator over &str items\n\nfn cmd_irq<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)\n\n where It: Iterator<Item=&'a str> {\n\n\n\n let irq_ty = match args.next() {\n\n Some(arg) => arg.to_lowercase(),\n\n None => { info!(\"Usage: `irq <type>\"); return }\n\n };\n\n\n\n let irq = match irq_ty.as_str() {\n\n \"timer0\" => dbgcore::IrqType9::Timer0,\n\n \"timer1\" => dbgcore::IrqType9::Timer1,\n\n \"timer2\" => dbgcore::IrqType9::Timer2,\n\n \"timer3\" => 
dbgcore::IrqType9::Timer3,\n\n _ => { error!(\"Unimplemented/unknown IRQ type `{}`\", irq_ty); return }\n\n };\n\n\n\n info!(\"Triggering IRQ {}\", irq_ty);\n\n\n\n let mut ctx = debugger.ctx(active_cpu);\n\n ctx.trigger_irq(irq);\n\n}\n\n\n", "file_path": "llama-ui/commands.rs", "rank": 7, "score": 256907.41800035274 }, { "content": "pub fn cond_passed(cond_opcode: u32, cpsr: &Psr::Bf) -> bool {\n\n match cond_opcode {\n\n 0b0000 => return cpsr.z_bit.get() == 1, // EQ\n\n 0b0001 => return cpsr.z_bit.get() == 0, // NE\n\n 0b0010 => return cpsr.c_bit.get() == 1, // CS\n\n 0b0011 => return cpsr.c_bit.get() == 0, // CC\n\n 0b0100 => return cpsr.n_bit.get() == 1, // MI\n\n 0b0101 => return cpsr.n_bit.get() == 0, // PL\n\n 0b0110 => return cpsr.v_bit.get() == 1, // VS\n\n 0b0111 => return cpsr.v_bit.get() == 0, // VC\n\n 0b1000 => { // HI\n\n return (cpsr.c_bit.get() == 1) && (cpsr.z_bit.get() == 0)\n\n },\n\n 0b1001 => { // LS\n\n return (cpsr.c_bit.get() == 0) || (cpsr.z_bit.get() == 1)\n\n },\n\n 0b1010 => { // GE\n\n return cpsr.n_bit.get() == cpsr.v_bit.get()\n\n },\n\n 0b1011 => { // LT\n", "file_path": "libllama/src/cpu/interpreter_arm.rs", "rank": 8, "score": 252582.43601851288 }, { "content": "/// Dumps framebuffer to file\n\n/// Command format: \"fbdmp\"\n\n///\n\n/// `args`: Unused\n\nfn cmd_fbdmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)\n\n where It: Iterator<Item=&'a str> {\n\n\n\n use libllama::io::gpu;\n\n\n\n let mut ctx = debugger.ctx(active_cpu);\n\n let fb_state = {\n\n let hw = ctx.hw11();\n\n let gpu = &hw.io11_devices().gpu;\n\n let fb_state = gpu::fb_state(&*gpu.borrow());\n\n fb_state\n\n };\n\n\n\n let mut fbs = libllama::hwcore::Framebuffers::default();\n\n ctx.hwcore().copy_framebuffers(&mut fbs, &fb_state);\n\n\n\n info!(\"Dumping framebuffers to disk in CWD...\");\n\n\n\n let mut top = File::create(\"./fb-top.bin\")\n\n .expect(\"Could not create fb-top.bin file!\");\n\n top.write_all(fbs.top_screen.as_slice())\n\n .expect(\"Could not write top framebuffer!\");\n\n\n\n let mut bot = File::create(\"./fb-bot.bin\")\n\n .expect(\"Could not create fb-bot.bin file!\");\n\n bot.write_all(fbs.bot_screen.as_slice())\n\n .expect(\"Could not write bottom framebuffer!\");\n\n}\n\n\n", "file_path": "llama-ui/commands.rs", "rank": 9, "score": 245442.33124307817 }, { "content": "/// Sets AES key-dumping state\n\n/// Command format: \"keydmp\"\n\n///\n\n/// `args`: Unused\n\nfn cmd_keydmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)\n\n where It: Iterator<Item=&'a str> {\n\n\n\n use libllama::io::aes;\n\n\n\n let mut ctx = debugger.ctx(active_cpu);\n\n let hw = ctx.hw9();\n\n let key_slots = {\n\n let aes = &hw.io9_devices().aes;\n\n aes::dump_keys(&*aes.borrow())\n\n };\n\n\n\n info!(\"Dumping AES keys to disk...\");\n\n\n\n use libllama::fs;\n\n fs::create_file(fs::LlamaFile::AesKeyDb, |file| {\n\n for k in key_slots.iter() {\n\n if let Err(x) = file.write_all(&k.data) {\n\n error!(\"Failed to write to aeskeydb file; {:?}\", x);\n\n return\n\n }\n\n }\n\n }).unwrap();\n\n}\n\n\n", "file_path": "llama-ui/commands.rs", "rank": 10, "score": 245442.33124307817 }, { "content": "/// Runs one instruction on the CPU\n\n/// Command format: \"step\"\n\n///\n\n/// `args`: Unused\n\nfn cmd_step<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, args: It)\n\n where It: Iterator<Item=&'a str> {\n\n let _ = args;\n\n let mut ctx = debugger.ctx(active_cpu);\n\n let mut hw = ctx.hw();\n\n\n\n hw.step();\n\n}\n\n\n", 
"file_path": "llama-ui/commands.rs", "rank": 11, "score": 243471.94517981727 }, { "content": "fn should_xfer(dev: &mut NdmaChannel) -> bool {\n\n let chan_cnt = RegChannelCnt::new(dev.chan_cnt.get());\n\n if chan_cnt.enabled.get() == 0 {\n\n return false;\n\n }\n\n let startup_dev = startup_mode(&chan_cnt);\n\n let mut xns = dev._internal_state.connections.borrow_mut();\n\n\n\n let mode = (chan_cnt.immed_mode.get(), chan_cnt.repeat_mode.get());\n\n match mode {\n\n (0, 0) => {\n\n let bus = xns.buses.get_mut(&startup_dev)\n\n .expect(&format!(\"Could not find NDMA bus for device 0x{:X}\", startup_dev));\n\n bus.observe()\n\n }\n\n (1, 0) => {\n\n // Immediate mode\n\n true\n\n }\n\n (0, 1) => {\n\n unimplemented!()\n\n }\n\n _ => panic!(\"Attempted to use impossible channel mode immed+repeat\")\n\n }\n\n}\n\n\n", "file_path": "libllama/src/io/ndma.rs", "rank": 12, "score": 240411.61437385128 }, { "content": "fn _write_fb_pointers(cpu: &mut cpu::Cpu<v5>) {\n\n // Initialize framebuffer data to be b9s compatible\n\n cpu.mpu.dmem_write(0xFFF00000, 0x18000000u32);\n\n cpu.mpu.dmem_write(0xFFF00004, 0x18000000u32);\n\n cpu.mpu.dmem_write(0x18000000, 0x18000010u32);\n\n cpu.mpu.dmem_write(0x18000008, 0x1808CA10u32);\n\n cpu.regs[0] = 2;\n\n cpu.regs[1] = 0xFFF00000;\n\n}\n\n\n", "file_path": "libllama/src/hwcore.rs", "rank": 13, "score": 236605.53911870607 }, { "content": "pub fn schedule(dev: &mut NdmaDevice) {\n\n for channel in dev._internal_state.channels.iter_mut() {\n\n process_channel(channel);\n\n }\n\n}\n\n\n\niodevice!(NdmaDevice, {\n\n internal_state: NdmaDeviceState;\n\n regs: {\n\n 0x000 => global_cnt: u32 { }\n\n }\n\n ranges: {\n\n 0x004;0xE0 => {\n\n // Remap addresses for individual channel registers\n\n read_effect = |dev: &mut NdmaDevice, buf_pos: usize, dest: &mut [u8]| {\n\n let channel = buf_pos / 0x1C;\n\n let new_buf_pos = buf_pos % 0x1C + 4; // As if the pos was for channel 0\n\n dev._internal_state.channels[channel].read_reg(new_buf_pos, dest);\n\n };\n\n write_effect = |dev: &mut NdmaDevice, buf_pos: usize, src: &[u8]| {\n\n let channel = buf_pos / 0x1C;\n\n let new_buf_pos = buf_pos % 0x1C + 4; // As if the pos was for channel 0\n\n dev._internal_state.channels[channel].write_reg(new_buf_pos, src);\n\n };\n\n }\n\n }\n\n});\n", "file_path": "libllama/src/io/ndma.rs", "rank": 14, "score": 236240.89500730572 }, { "content": "pub fn schedule(xdma: &mut XdmaDevice) {\n\n if xdma._internal_state.manager.running {\n\n replace_active_thread(xdma, None);\n\n if active_thread(xdma).running {\n\n interpreter::run_active_thread(xdma);\n\n }\n\n }\n\n for thread in 0..8 {\n\n replace_active_thread(xdma, Some(thread));\n\n if active_thread(xdma).running {\n\n interpreter::run_active_thread(xdma);\n\n }\n\n }\n\n}\n\n\n", "file_path": "libllama/src/io/xdma.rs", "rank": 15, "score": 236240.89500730572 }, { "content": "fn advance_state_machine(dev: &mut I2cDevice) -> bool {\n\n let byte = dev.data.get();\n\n let state = &mut dev._internal_state;\n\n let next_input = match state.next_input {\n\n I2cByteExpected::DeviceSelect => {\n\n state.device = byte & 0xFE;\n\n\n\n trace!(\"Selected I2C device 0x{:02X}\", byte);\n\n if byte & 1 == 0 {\n\n I2cByteExpected::RegisterSelect\n\n } else {\n\n I2cByteExpected::DataRead\n\n }\n\n }\n\n I2cByteExpected::RegisterSelect => {\n\n state.register = byte;\n\n trace!(\"Selected I2C register 0x{:02X}\", byte);\n\n I2cByteExpected::DataWrite\n\n }\n\n I2cByteExpected::DataWrite => {\n", "file_path": "libllama/src/io/i2c.rs", "rank": 16, "score": 
234382.05836246823 }, { "content": "pub fn switch(_dev: &mut EmmcDevice) {\n\n warn!(\"STUBBED: SDMMC CMD6 SWITCH!\");\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 17, "score": 233188.21754099225 }, { "content": "pub fn stop_transmission(dev: &mut EmmcDevice) {\n\n emmc::get_active_card(dev).kill_transfer();\n\n emmc::clear_status(dev, Status1::RxReady);\n\n emmc::clear_status(dev, Status1::TxRq);\n\n emmc::clear_status(dev, Status32::RxReady);\n\n emmc::clear_status(dev, Status32::_TxRq);\n\n warn!(\"STUBBED: SDMMC CMD12 STOP_TRANSMISSION!\");\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 18, "score": 230260.6866817755 }, { "content": "pub fn set_blocklen(_dev: &mut EmmcDevice) {\n\n warn!(\"STUBBED: SDMMC CMD16 SET_BLOCKLEN!\");\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 19, "score": 230260.6866817755 }, { "content": "pub fn app_cmd(dev: &mut EmmcDevice) {\n\n emmc::get_active_card(dev).csr.app_cmd.set(1);\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 20, "score": 230260.6866817755 }, { "content": "pub fn get_scr(dev: &mut EmmcDevice) {\n\n warn!(\"STUBBED: SDMMC ACMD51 GET_SCR!\");\n\n assert!(dev.data16_blk_len.get() == 8);\n\n emmc::trigger_status(dev, Status1::RxReady);\n\n emmc::get_active_card(dev).make_transfer(TransferLoc::RegScr, TransferType::Read, 1);\n\n}\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 21, "score": 230260.6866817755 }, { "content": "pub fn get_ssr(dev: &mut EmmcDevice) {\n\n warn!(\"STUBBED: SDMMC ACMD13 GET_SSR!\");\n\n assert!(dev.data16_blk_len.get() == 64);\n\n emmc::trigger_status(dev, Status1::RxReady);\n\n emmc::get_active_card(dev).make_transfer(TransferLoc::RegSsr, TransferType::Read, 1);\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 22, "score": 230260.6866817755 }, { "content": "pub fn pad(dev: &mut HidDevice) -> u16 {\n\n !dev.pad.get()\n\n}\n\n\n\niodevice!(HidDevice, {\n\n regs: {\n\n 0x000 => pad: u16 {\n\n default = !0;\n\n write_bits = 0;\n\n }\n\n 0x002 => unk: u16 {\n\n read_effect = |_| trace!(\"STUBBED: Read from unknown HID+0x2 register!\");\n\n write_effect = |_| warn!(\"STUBBED: Write to unknown HID+0x2 register!\");\n\n }\n\n }\n\n});\n", "file_path": "libllama/src/io/hid.rs", "rank": 23, "score": 229765.57978953928 }, { "content": "/// Toggles or displays button state\n\n/// Command format: \"btn [button name] [up/down]\"\n\n///\n\n/// `args`: Iterator over &str items\n\nfn cmd_btn<'a, It>(_active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)\n\n where It: Iterator<Item=&'a str> {\n\n use libllama::io::hid;\n\n\n\n let mut ctx = debugger.ctx(ActiveCpu::Arm11);\n\n let hw = ctx.hw11();\n\n let io_shared = &hw.io_shared_devices().hid;\n\n\n\n let btn_map = [\n\n (\"a\", hid::Button::A),\n\n (\"b\", hid::Button::B),\n\n (\"x\", hid::Button::X),\n\n (\"y\", hid::Button::Y),\n\n (\"l\", hid::Button::L),\n\n (\"r\", hid::Button::R),\n\n (\"up\", hid::Button::Up),\n\n (\"down\", hid::Button::Down),\n\n (\"left\", hid::Button::Left),\n\n (\"right\", hid::Button::Right),\n\n (\"start\", hid::Button::Start),\n", "file_path": "llama-ui/commands.rs", "rank": 24, "score": 229139.72008697077 }, { "content": "pub fn instr_msr<V: Version>(cpu: &mut Cpu<V>, data: arm::Msr1::Bf, immediate: bool) -> cpu::InstrStatus {\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let field_mask = data.field_mask.get();\n\n let shifter_operand = data.shifter_operand.get();\n\n\n\n 
let val = if immediate {\n\n let immed_8 = bits!(shifter_operand, 0:7);\n\n let rotate_imm = bits!(shifter_operand, 8:11);\n\n immed_8.rotate_right(rotate_imm * 2)\n\n } else {\n\n cpu.regs[bits!(shifter_operand, 0:3) as usize]\n\n };\n\n\n\n let unalloc_mask = if V::is::<v5>() { 0x07FFFF00u32 } else { 0x06F0FC00 };\n\n let user_mask = if V::is::<v5>() { 0xF8000000u32 } else { 0xF80F0200 };\n\n let priv_mask = if V::is::<v5>() { 0x0000000Fu32 } else { 0x000001DF };\n\n let state_mask = if V::is::<v5>() { 0x00000020u32 } else { 0x01000020 };\n", "file_path": "libllama/src/cpu/instructions_arm/program_status.rs", "rank": 25, "score": 227916.31845210042 }, { "content": "pub fn set_relative_addr(dev: &mut EmmcDevice) {\n\n let reladdr = emmc::get_params_u16(dev)[1];\n\n emmc::get_active_card(dev).rca = reladdr;\n\n emmc::get_active_card(dev).set_state(CardState::Stby);\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 26, "score": 227450.7613000204 }, { "content": "pub fn go_idle_state(dev: &mut EmmcDevice) {\n\n for card in dev._internal_state.cards.iter_mut() {\n\n card.reset(false);\n\n }\n\n warn!(\"STUBBED: SDMMC CMD0 GO_IDLE_STATE!\");\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 27, "score": 227450.7613000204 }, { "content": "pub fn set_bus_width(_dev: &mut EmmcDevice) {\n\n warn!(\"STUBBED: SDMMC ACMD6 SET_BUS_WIDTH!\");\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 28, "score": 227450.7613000204 }, { "content": "pub fn select_deselect_card(dev: &mut EmmcDevice) {\n\n emmc::get_active_card(dev).set_state(CardState::Tran);\n\n warn!(\"STUBBED: SDMMC CMD7 SELECT_DESELECT_CARD!\");\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 29, "score": 227450.7613000204 }, { "content": "pub fn instr_mul64_accumulate<V: Version>(cpu: &mut Cpu<V>, data: arm::Umlal::Bf, signed: bool) -> cpu::InstrStatus {\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let rd_hi = data.rd_hi.get() as usize;\n\n let rd_lo = data.rd_lo.get() as usize;\n\n\n\n let base_val = cpu.regs[data.rm.get() as usize];\n\n let multiplier = cpu.regs[data.rs.get() as usize];\n\n\n\n let mul_val = if signed {\n\n // Double cast to ensure sign extension\n\n ((base_val as i32) as i64).wrapping_mul((multiplier as i32) as i64) as u64\n\n } else {\n\n (base_val as u64).wrapping_mul(multiplier as u64)\n\n };\n\n\n\n let val_lo = wrapping_sum!(mul_val as u32, cpu.regs[rd_lo]);\n\n let val_lo_carry = checked_sum!(mul_val as u32, cpu.regs[rd_lo]).is_none();\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 30, "score": 225681.21899133714 }, { "content": "pub fn set_clr_card_detect(_dev: &mut EmmcDevice) {\n\n warn!(\"STUBBED: SDMMC ACMD42 SET_CLR_CARD_DETECT!\");\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 31, "score": 224751.4942239481 }, { "content": "pub fn send_if_cond(dev: &mut EmmcDevice) -> u32 {\n\n let out = emmc::get_params_u32(dev);\n\n warn!(\"STUBBED: SDMMC CMD8 SEND_IF_COND!\");\n\n out\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 32, "score": 224028.12354856747 }, { "content": "pub fn send_csd(dev: &mut EmmcDevice) -> u128 {\n\n let csd = emmc::get_active_card(dev).csd;\n\n emmc::get_active_card(dev).set_state(CardState::Ident);\n\n return csd.val;\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 33, "score": 224028.12354856744 }, { "content": "pub fn all_send_cid(dev: &mut EmmcDevice) -> u128 {\n\n let cid = 
emmc::get_active_card(dev).cid;\n\n emmc::get_active_card(dev).set_state(CardState::Ident);\n\n return cid.val;\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 34, "score": 224028.12354856744 }, { "content": "pub fn send_op_cond(dev: &mut EmmcDevice) -> u32 {\n\n let ocr = emmc::get_params_u32(dev);\n\n emmc::get_active_card(dev).set_state(CardState::Ready);\n\n warn!(\"STUBBED: SDMMC CMD1 SEND_OP_COND!\");\n\n return ocr | (1 << 31);\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 35, "score": 221328.8564724952 }, { "content": "pub fn get_relative_addr(dev: &mut EmmcDevice) -> u16 {\n\n let rca = emmc::get_active_card(dev).rca + 1;\n\n emmc::get_active_card(dev).rca = rca;\n\n emmc::get_active_card(dev).set_state(CardState::Stby);\n\n rca\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 36, "score": 221328.8564724952 }, { "content": "pub fn app_send_op_cond(dev: &mut EmmcDevice) -> u32 {\n\n let voltages = emmc::get_params_u32(dev) & 0xFFF;\n\n emmc::get_active_card(dev).set_state(CardState::Ready);\n\n warn!(\"STUBBED: SDMMC ACMD41 SD_SEND_OP_COND!\");\n\n return voltages | (1 << 31);\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 37, "score": 218733.8371404251 }, { "content": "pub fn update_pad(dev: &mut HidDevice, change: ButtonState) {\n\n let mut current_pad = dev.pad.get();\n\n match change {\n\n ButtonState::Pressed(b) => current_pad &= !(1 << b as u32),\n\n ButtonState::Released(b) => current_pad |= 1 << b as u32\n\n }\n\n dev.pad.set_unchecked(current_pad);\n\n}\n\n\n", "file_path": "libllama/src/io/hid.rs", "rank": 38, "score": 218156.02186745015 }, { "content": "pub fn allow_trace(yes: bool) {\n\n TRACE_ENABLED.store(yes, Ordering::Relaxed);\n\n}\n", "file_path": "llama-ui/uilog.rs", "rank": 39, "score": 215656.6996208183 }, { "content": "pub fn prepare_multi_transfer(dev: &mut EmmcDevice, ttype: TransferType) {\n\n let file_offset = emmc::get_params_u32(&*dev);\n\n\n\n let block_count = if emmc::use_32bit(dev) {\n\n match ttype {\n\n TransferType::Read => emmc::trigger_status(dev, Status32::RxReady),\n\n TransferType::Write => {\n\n // TODO: no trigger_status?\n\n dev._internal_state.dma_out.trigger();\n\n }\n\n }\n\n dev.data32_blk_cnt.get()\n\n } else {\n\n match ttype {\n\n TransferType::Read => emmc::trigger_status(dev, Status1::RxReady),\n\n TransferType::Write => emmc::trigger_status(dev, Status1::TxRq)\n\n }\n\n dev.data16_blk_cnt.get()\n\n };\n\n\n\n let card = &mut emmc::get_active_card(dev);\n\n card.make_transfer(TransferLoc::Storage, ttype, block_count);\n\n card.seek(SeekFrom::Start(file_offset as u64)).unwrap();\n\n trace!(\"Seeking SDMMC pointer to offset 0x{:08X}!\", file_offset);\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/cmds.rs", "rank": 40, "score": 213064.30616653696 }, { "content": "pub fn handle_cmd(dev: &mut EmmcDevice, cmd_index: u16) {\n\n handle_any_cmd(dev, &CMDS, cmd_index);\n\n}\n\n\n", "file_path": "libllama/src/io/emmc/mode_sd.rs", "rank": 41, "score": 213064.30616653693 }, { "content": "pub fn handle_acmd(dev: &mut EmmcDevice, cmd_index: u16) {\n\n handle_any_cmd(dev, &ACMDS, cmd_index);\n\n}\n", "file_path": "libllama/src/io/emmc/mode_sd.rs", "rank": 42, "score": 213064.30616653693 }, { "content": "fn use_32bit(dev: &EmmcDevice) -> bool {\n\n let d16ctl = RegData16Ctl::new(dev.data16_ctl.get());\n\n let d32ctl = RegData32Ctl::new(dev.data32_ctl.get());\n\n d16ctl.use_32bit.get() == 1 && d32ctl.use_32bit.get() == 1\n\n}\n\n\n", "file_path": 
"libllama/src/io/emmc/mod.rs", "rank": 43, "score": 211575.92061098362 }, { "content": "fn reg_fifo_mod(dev: &mut EmmcDevice, transfer_type: TransferType, is_32bit: bool) {\n\n let fifo_size = if use_32bit(dev) {\n\n dev.data32_blk_len.get()\n\n } else {\n\n dev.data16_blk_len.get()\n\n };\n\n\n\n let should_stop = {\n\n let transfer = match get_active_card(dev).get_transfer_mut() {\n\n Some(t) => t,\n\n None => return\n\n };\n\n assert_eq!(transfer.ty, transfer_type);\n\n\n\n trace!(\"{} SD FIFO! blocks left: {}, fifo pos: {}\",\n\n match transfer_type {\n\n TransferType::Read => \"Reading from\",\n\n TransferType::Write => \"Writing to\"\n\n },\n\n transfer.blocks_left, transfer.fifo_pos);\n", "file_path": "libllama/src/io/emmc/mod.rs", "rank": 44, "score": 209076.49094955332 }, { "content": "pub fn getreg<V: Version>(cpu: &Cpu<V>, pc_advanced: bool, num: usize) -> u32 {\n\n if num != 15 { cpu.regs[num] }\n\n else if pc_advanced { cpu.regs[15] + 4 }\n\n else { cpu.regs[15] }\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 45, "score": 197193.69696747945 }, { "content": "fn addressing_mode_inner(p_bit: bool, u_bit: bool, w_bit: bool, rn_val: u32, num_registers: u32) -> (u32, u32) {\n\n let (addr, wb) = match (p_bit, u_bit) {\n\n (false, true) => (rn_val, rn_val + num_registers * 4), // Increment after\n\n (true, true) => (rn_val + 4, rn_val + num_registers * 4), // Increment before\n\n (false, false) => (rn_val - num_registers * 4 + 4, rn_val - num_registers * 4), // Decrement after\n\n (true, false) => (rn_val - num_registers * 4, rn_val - num_registers * 4) // Decrement before\n\n };\n\n\n\n if !w_bit {\n\n (addr, addr)\n\n } else {\n\n (addr, wb)\n\n }\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/load_store_multiple.rs", "rank": 46, "score": 194904.5683825086 }, { "content": "pub fn from_u128(mut num: u128) -> [u8; 16] {\n\n let mut data = [0u8; 0x10];\n\n for b in data.iter_mut().rev() {\n\n *b = num as u8;\n\n num >>= 8;\n\n }\n\n data\n\n}\n\n\n", "file_path": "libllama/src/utils/bytes.rs", "rank": 47, "score": 193525.8841369694 }, { "content": "pub fn handle_clock_update(timer_states: &TimerStates, clock_diff: u64, irq_tx: &mut irq::IrqSyncClient) {\n\n // Update global counter\n\n let ctr = timer_states.global_counter.get();\n\n let new_ctr = ctr + clock_diff;\n\n timer_states.global_counter.set(new_ctr);\n\n\n\n // Check if we have any work to do\n\n if !past_deadline(timer_states) {\n\n return\n\n }\n\n\n\n let mut timers = timer_states.all.borrow_mut();\n\n if let Cycles::CountUp(_) = timers[0].val_cycles {\n\n panic!(\"Don't know how to handle TIMER0 as a count-up timer!\");\n\n }\n\n\n\n // Update individual timers\n\n let mut prev_overflowed = false;\n\n for (index, timer) in timers.iter_mut().enumerate() {\n\n if !timer.started {\n", "file_path": "libllama/src/io/timer.rs", "rank": 48, "score": 191601.7392398492 }, { "content": "pub fn and<V: Version>(cpu: &mut Cpu<V>, data: arm::And::Bf) -> cpu::InstrStatus {\n\n instr_bitwise(cpu, data, ProcessInstrBitOp::And)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 49, "score": 187787.83737432954 }, { "content": "pub fn b_1<V: Version>(cpu: &mut Cpu<V>, data: thumb::B1::Bf) -> cpu::InstrStatus {\n\n let offset_8 = data.signed_imm_8.get();\n\n let cond = data.cond.get();\n\n\n\n if !cpu::cond_passed(cond as u32, &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let addr = (cpu.regs[15] as i32 + (sign_extend32(offset_8 
as u32, 8) << 1)) as u32;\n\n cpu.branch(addr);\n\n cpu::InstrStatus::Branched\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/branch.rs", "rank": 50, "score": 187787.83737432954 }, { "content": "pub fn and<V: Version>(cpu: &mut Cpu<V>, data: thumb::And::Bf) -> cpu::InstrStatus {\n\n instr_bitwise(cpu, data, ProcessInstrBitOp::And)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 51, "score": 187787.83737432954 }, { "content": "pub fn uxtb<V: Version>(cpu: &mut Cpu<V>, data: arm::Uxtb::Bf) -> cpu::InstrStatus {\n\n assert!( V::is::<cpu::v6>() );\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let rm = cpu.regs[data.rm.get() as usize];\n\n let rot = 8 * data.rot.get() as usize;\n\n\n\n let val = (rm >> rot) & 0xFF;\n\n cpu.regs[data.rd.get() as usize] = val;\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/media.rs", "rank": 52, "score": 185692.8236686897 }, { "content": "pub fn rev<V: Version>(cpu: &mut Cpu<V>, data: arm::Rev::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v6>());\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let rn = cpu.regs[data.rn.get() as usize];\n\n let out = rn.swap_bytes();\n\n cpu.regs[data.rd.get() as usize] = out;\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/media.rs", "rank": 53, "score": 185692.8236686897 }, { "content": "pub fn bkpt<V: Version>(_cpu: &mut Cpu<V>, data: arm::Bkpt::Bf) -> cpu::InstrStatus {\n\n let brk_num = data.immed_lo.get() | (data.immed_hi.get() << 4);\n\n panic!(\"Hit breakpoint instruction! (#{})\", brk_num);\n\n}\n", "file_path": "libllama/src/cpu/instructions_arm/misc.rs", "rank": 54, "score": 185692.8236686897 }, { "content": "pub fn branch<V: Version>(cpu: &mut Cpu<V>, data: thumb::Branch::Bf) -> cpu::InstrStatus {\n\n let offset_11 = data.offset_11.get();\n\n\n\n match data.h_bits.get() {\n\n 0b00 => {\n\n let addr = (cpu.regs[15] as i32 + (sign_extend32(offset_11 as u32, 11) << 1)) as u32;\n\n cpu.branch(addr);\n\n cpu::InstrStatus::Branched\n\n },\n\n 0b01 => {\n\n let addr = (cpu.regs[14] + (offset_11 << 1) as u32) & 0xFFFFFFFC;\n\n cpu.regs[14] = (cpu.regs[15] - 2) as u32 | 1;\n\n cpu.cpsr.thumb_bit.set(0);\n\n cpu.branch(addr);\n\n cpu::InstrStatus::Branched\n\n },\n\n 0b10 => {\n\n cpu.regs[14] = (cpu.regs[15] as i32 + (sign_extend32(offset_11 as u32, 11) << 12)) as u32;\n\n cpu::InstrStatus::InBlock\n\n },\n\n 0b11 => {\n\n let addr = cpu.regs[14] + (offset_11 << 1) as u32;\n\n cpu.regs[14] = (cpu.regs[15] - 2) as u32 | 1;\n\n cpu.branch(addr);\n\n cpu::InstrStatus::Branched\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/branch.rs", "rank": 55, "score": 185692.8236686897 }, { "content": "pub fn bbl<V: Version>(cpu: &mut Cpu<V>, data: arm::Bbl::Bf) -> cpu::InstrStatus {\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let signed_imm_24 = data.signed_imm_24.get();\n\n\n\n if data.link_bit.get() == 1 {\n\n cpu.regs[14] = cpu.regs[15] - 4;\n\n }\n\n\n\n let pc = cpu.regs[15];\n\n cpu.branch(((pc as i32) + (sign_extend32(signed_imm_24, 24) << 2)) as u32);\n\n\n\n cpu::InstrStatus::Branched\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/branch.rs", "rank": 56, "score": 185692.8236686897 }, { "content": "pub fn mrc<V: Version>(cpu: &mut 
Cpu<V>, data: arm::Mrc::Bf) -> cpu::InstrStatus {\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let crn = data.crn.get() as usize;\n\n let crm = data.crm.get() as usize;\n\n let opcode_1 = data.opcode_1.get() as usize;\n\n let opcode_2 = data.opcode_2.get() as usize;\n\n let rd = data.rd.get();\n\n\n\n let retval = {\n\n let coproc = cpu.get_coprocessor(data.cp_num.get() as usize);\n\n coproc.move_out(crn, crm, opcode_1, opcode_2)\n\n };\n\n\n\n if rd == 15 {\n\n cpu.cpsr.n_bit.set(bit!(retval, 31));\n\n cpu.cpsr.z_bit.set(bit!(retval, 30));\n\n cpu.cpsr.c_bit.set(bit!(retval, 29));\n\n cpu.cpsr.v_bit.set(bit!(retval, 28));\n\n } else {\n\n cpu.regs[rd as usize] = retval;\n\n }\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n", "file_path": "libllama/src/cpu/instructions_arm/coprocessor.rs", "rank": 57, "score": 185692.8236686897 }, { "content": "pub fn blx<V: Version>(cpu: &mut Cpu<V>, data: arm::Blx2::Bf) -> cpu::InstrStatus {\n\n instr_branch_exchange(cpu, arm::Bx::new(data.val), true)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/branch.rs", "rank": 58, "score": 185692.8236686897 }, { "content": "pub fn bx<V: Version>(cpu: &mut Cpu<V>, data: arm::Bx::Bf) -> cpu::InstrStatus {\n\n instr_branch_exchange(cpu, data, false)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/branch.rs", "rank": 59, "score": 185692.8236686897 }, { "content": "pub fn mcr<V: Version>(cpu: &mut Cpu<V>, data: arm::Mcr::Bf) -> cpu::InstrStatus {\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let src_val = cpu.regs[data.rd.get() as usize];\n\n let crn = data.crn.get() as usize;\n\n let crm = data.crm.get() as usize;\n\n let opcode_1 = data.opcode_1.get() as usize;\n\n let opcode_2 = data.opcode_2.get() as usize;\n\n\n\n let cp_effect = {\n\n let coproc = cpu.get_coprocessor(data.cp_num.get() as usize);\n\n coproc.move_in(crn, crm, opcode_1, opcode_2, src_val)\n\n };\n\n cp_effect(cpu);\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/coprocessor.rs", "rank": 60, "score": 185692.8236686897 }, { "content": "pub fn blx_2<V: Version>(cpu: &mut Cpu<V>, data: thumb::Blx2::Bf) -> cpu::InstrStatus {\n\n let rm = data.rm.get() | (data.h2.get() << 3);\n\n let addr = cpu.regs[rm as usize];\n\n\n\n cpu.regs[14] = (cpu.regs[15] - 2) as u32 | 1;\n\n cpu.cpsr.thumb_bit.set(bit!(addr, 0));\n\n\n\n cpu.branch(addr & 0xFFFFFFFE);\n\n cpu::InstrStatus::Branched\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/branch.rs", "rank": 61, "score": 185692.8236686897 }, { "content": "pub fn swi<V: Version>(cpu: &mut Cpu<V>, data: thumb::Swi::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v5>());\n\n let arminst: u32 = 0b111011110000000000000000_00000000\n\n | ((data.immed_8.get() as u32) << 0);\n\n cpu::instructions_arm::swi(cpu, arm::Swi::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/misc.rs", "rank": 62, "score": 185692.8236686897 }, { "content": "pub fn bkpt<V: Version>(cpu: &mut Cpu<V>, data: thumb::Bkpt::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v5>());\n\n let immed_lo = data.immed_8.get() as u32 & 0b1111;\n\n let immed_hi = data.immed_8.get() as u32 >> 4;\n\n let arminst: u32 = 0b11100001001000000000_0000_0111_0000\n\n | (immed_hi << 8)\n\n | (immed_lo << 0);\n\n cpu::instructions_arm::bkpt(cpu, arm::Bkpt::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/misc.rs", 
"rank": 63, "score": 185692.8236686897 }, { "content": "pub fn uxth<V: Version>(cpu: &mut Cpu<V>, data: arm::Uxth::Bf) -> cpu::InstrStatus {\n\n assert!( V::is::<cpu::v6>() );\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let rm = cpu.regs[data.rm.get() as usize];\n\n let rot = 8 * data.rot.get();\n\n\n\n let val = rm.rotate_right(rot) & 0xFFFF;\n\n cpu.regs[data.rd.get() as usize] = val;\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n", "file_path": "libllama/src/cpu/instructions_arm/media.rs", "rank": 64, "score": 185692.8236686897 }, { "content": "pub fn rev<V: Version>(cpu: &mut Cpu<V>, data: thumb::Rev::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v6>());\n\n let arminst: u32 = 0b1110011010111111_0000_11110011_0000\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.rn.get() as u32) << 0);\n\n cpu::instructions_arm::rev(cpu, arm::Rev::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/media.rs", "rank": 65, "score": 185692.8236686897 }, { "content": "pub fn uxth<V: Version>(cpu: &mut Cpu<V>, data: thumb::Uxth::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v6>());\n\n let arminst: u32 = 0b1110011011111111_0000_00000111_0000\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.rm.get() as u32) << 0);\n\n cpu::instructions_arm::uxth(cpu, arm::Uxth::new(arminst))\n\n}\n", "file_path": "libllama/src/cpu/instructions_thumb/media.rs", "rank": 66, "score": 185692.8236686897 }, { "content": "pub fn uxtb<V: Version>(cpu: &mut Cpu<V>, data: thumb::Uxtb::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v6>());\n\n let arminst: u32 = 0b1110011011101111_0000_00000111_0000\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.rm.get() as u32) << 0);\n\n cpu::instructions_arm::uxtb(cpu, arm::Uxtb::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/media.rs", "rank": 67, "score": 185692.8236686897 }, { "content": "pub fn bx<V: Version>(cpu: &mut Cpu<V>, data: thumb::Bx::Bf) -> cpu::InstrStatus {\n\n let addr = cpu.regs[((data.h2.get() << 3) | data.rm.get()) as usize];\n\n cpu.cpsr.thumb_bit.set(bit!(addr, 0));\n\n cpu.branch(addr & 0xFFFFFFFE);\n\n cpu::InstrStatus::Branched\n\n}\n", "file_path": "libllama/src/cpu/instructions_thumb/branch.rs", "rank": 68, "score": 185692.8236686897 }, { "content": "pub fn swi<V: Version>(cpu: &mut Cpu<V>, data: arm::Swi::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v5>());\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let next_instr = cpu.regs[15] - cpu.get_pc_offset() / 2;\n\n cpu.enter_exception(next_instr, cpu::Mode::Svc);\n\n cpu::InstrStatus::Branched\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/misc.rs", "rank": 69, "score": 185692.8236686897 }, { "content": "pub fn ldrd<V: Version>(cpu: &mut Cpu<V>, data: arm::Ldrd::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v5>());\n\n instr_load_misc(cpu, arm::Ldrh::new(data.val), MiscLsType::Doubleword)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/load_store.rs", "rank": 70, "score": 183669.08636272143 }, { "content": "pub fn add_3<V: Version>(cpu: &mut Cpu<V>, data: thumb::Add3::Bf) -> cpu::InstrStatus {\n\n let arminst: u32 = 0b111000001001_0000_0000_00000000_0000\n\n | ((data.rn.get() as u32) << 16)\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.rm.get() as u32) << 0);\n\n cpu::instructions_arm::add(cpu, arm::Add::new(arminst))\n\n}\n\n\n", "file_path": 
"libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 71, "score": 183669.08636272146 }, { "content": "pub fn asr_1<V: Version>(cpu: &mut Cpu<V>, data: thumb::Asr1::Bf) -> cpu::InstrStatus {\n\n let arminst: u32 = 0b1110000110110000_0000_00000_100_0000\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.immed_5.get() as u32) << 7)\n\n | ((data.rm.get() as u32) << 0);\n\n cpu::instructions_arm::mov(cpu, arm::Mov::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 72, "score": 183669.08636272146 }, { "content": "pub fn sub_1<V: Version>(cpu: &mut Cpu<V>, data: thumb::Sub1::Bf) -> cpu::InstrStatus {\n\n let arminst: u32 = 0b111000100101_0000_0000_000000000_000\n\n | ((data.rn.get() as u32) << 16)\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.immed_3.get() as u32) << 0);\n\n cpu::instructions_arm::sub(cpu, arm::Sub::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 73, "score": 183669.08636272146 }, { "content": "pub fn lsl_2<V: Version>(cpu: &mut Cpu<V>, data: thumb::Lsl2::Bf) -> cpu::InstrStatus {\n\n let arminst: u32 = 0b1110000110110000_0000_0000_0001_0000\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.rs.get() as u32) << 8)\n\n | ((data.rd.get() as u32) << 0);\n\n cpu::instructions_arm::mov(cpu, arm::Mov::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 74, "score": 183669.08636272146 }, { "content": "pub fn mla<V: Version>(cpu: &mut Cpu<V>, data: arm::Mla::Bf) -> cpu::InstrStatus {\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let base_val = cpu.regs[data.rm.get() as usize] as u64;\n\n let multiplier = cpu.regs[data.rs.get() as usize] as u64;\n\n let accumulated = cpu.regs[data.rn.get() as usize] as u64;\n\n let val = (base_val * multiplier + accumulated) as u32;\n\n\n\n cpu.regs[data.rd.get() as usize] = val;\n\n\n\n if data.s_bit.get() == 1 {\n\n cpu.cpsr.n_bit.set(bit!(val, 31));\n\n cpu.cpsr.z_bit.set((val == 0) as u32);\n\n };\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 75, "score": 183669.08636272146 }, { "content": "pub fn add_6<V: Version>(cpu: &mut Cpu<V>, data: thumb::Add6::Bf) -> cpu::InstrStatus {\n\n let arminst: u32 = 0b1110001010001101_0000_1111_00000000\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.immed_8.get() as u32) << 0);\n\n cpu::instructions_arm::add(cpu, arm::Add::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 76, "score": 183669.08636272146 }, { "content": "pub fn sub<V: Version>(cpu: &mut Cpu<V>, data: arm::Sub::Bf) -> cpu::InstrStatus {\n\n instr_logical(cpu, arm::Add::new(data.val), ProcessInstrLogicalOp::Sub)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 77, "score": 183669.08636272146 }, { "content": "pub fn sbc<V: Version>(cpu: &mut Cpu<V>, data: arm::Sbc::Bf) -> cpu::InstrStatus {\n\n instr_logical(cpu, arm::Add::new(data.val), ProcessInstrLogicalOp::SubCarry)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 78, "score": 183669.08636272146 }, { "content": "pub fn cmn<V: Version>(cpu: &mut Cpu<V>, data: thumb::Cmn::Bf) -> cpu::InstrStatus {\n\n let arminst: u32 = 0b111000010111_0000_0000_00000000_0000\n\n | ((data.rn.get() as u32) << 16)\n\n | ((data.rm.get() as u32) << 0);\n\n 
cpu::instructions_arm::cmn(cpu, arm::Cmn::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 79, "score": 183669.08636272146 }, { "content": "pub fn adc<V: Version>(cpu: &mut Cpu<V>, data: thumb::Adc::Bf) -> cpu::InstrStatus {\n\n let arminst: u32 = 0b111000001011_0000_0000_00000000_0000\n\n | ((data.rd.get() as u32) << 16)\n\n | ((data.rd.get() as u32) << 12)\n\n | ((data.rm.get() as u32) << 0);\n\n cpu::instructions_arm::adc(cpu, arm::Adc::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 80, "score": 183669.08636272146 }, { "content": "pub fn cps<V: Version>(cpu: &mut Cpu<V>, data: arm::Cps::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<v6>());\n\n \n\n if let cpu::Mode::Usr = cpu::Mode::from_num(cpu.cpsr.mode.get()) {\n\n return cpu::InstrStatus::InBlock\n\n }\n\n\n\n if data.imod.get() & 2 != 0 {\n\n let new = data.imod.get() & 1;\n\n if data.a_bit.get() != 0 { cpu.cpsr.disable_imp_abt.set(new) }\n\n if data.i_bit.get() != 0 { cpu.cpsr.disable_irq_bit.set(new) }\n\n if data.f_bit.get() != 0 { cpu.cpsr.disable_fiq_bit.set(new) }\n\n }\n\n if data.mmod.get() != 0 {\n\n cpu.cpsr.mode.set(data.mode.get());\n\n cpu.regs.swap(cpu::Mode::from_num(data.mode.get()));\n\n }\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/program_status.rs", "rank": 81, "score": 183669.08636272146 }, { "content": "pub fn mov_1<V: Version>(cpu: &mut Cpu<V>, data: thumb::Mov1::Bf) -> cpu::InstrStatus {\n\n let val = data.immed_8.get() as u32;\n\n\n\n cpu.cpsr.n_bit.set(bit!(val, 31));\n\n cpu.cpsr.z_bit.set((val == 0) as u32);\n\n cpu.regs[data.rd.get() as usize] = val;\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 82, "score": 183669.08636272146 }, { "content": "pub fn swpb<V: Version>(cpu: &mut Cpu<V>, data: arm::Swpb::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v5>());\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n // TODO: determine behavior based on CP15 r1 bit_U (22)\n\n let addr = cpu.regs[data.rn.get() as usize];\n\n let new_val = cpu.regs[data.rm.get() as usize];\n\n\n\n let tmp = cpu.mpu.dmem_read::<u8>(addr);\n\n cpu.mpu.dmem_write::<u8>(addr, new_val as u8);\n\n cpu.regs[data.rd.get() as usize] = tmp as u32;\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n", "file_path": "libllama/src/cpu/instructions_arm/load_store.rs", "rank": 83, "score": 183669.08636272146 }, { "content": "pub fn mul<V: Version>(cpu: &mut Cpu<V>, data: thumb::Mul::Bf) -> cpu::InstrStatus {\n\n let rm = cpu.regs[data.rm.get() as usize] as u64;\n\n let rd = cpu.regs[data.rd.get() as usize] as u64;\n\n\n\n let val = (rm * rd) as u32;\n\n cpu.regs[data.rd.get() as usize] = val;\n\n\n\n cpu.cpsr.n_bit.set(bit!(val, 31));\n\n cpu.cpsr.z_bit.set((val == 0) as u32);\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 84, "score": 183669.08636272146 }, { "content": "pub fn mrs<V: Version>(cpu: &mut Cpu<V>, data: arm::Mrs::Bf) -> cpu::InstrStatus {\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let rd = data.rd.get();\n\n let r_bit = data.r_bit.get();\n\n\n\n if r_bit == 1 {\n\n cpu.regs[rd as usize] = cpu.get_current_spsr().val;\n\n } else {\n\n cpu.regs[rd as usize] = cpu.cpsr.val;\n\n }\n\n\n\n 
cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/program_status.rs", "rank": 85, "score": 183669.08636272146 }, { "content": "pub fn cmp_3<V: Version>(cpu: &mut Cpu<V>, data: thumb::Cmp3::Bf) -> cpu::InstrStatus {\n\n let rn = data.rn.get() | (data.h1.get() << 3);\n\n let rm = data.rm.get() | (data.h2.get() << 3);\n\n let base_val = cpu.regs[rn as usize];\n\n let other = cpu.regs[rm as usize];\n\n\n\n let val = base_val - other;\n\n let carry_bit = !base_val.checked_sub(other).is_none();\n\n let overflow_bit = (base_val as i32).checked_sub(other as i32).is_none();\n\n\n\n cpu.cpsr.n_bit.set(bit!(val, 31));\n\n cpu.cpsr.z_bit.set((val == 0) as u32);\n\n cpu.cpsr.c_bit.set(carry_bit as u32);\n\n cpu.cpsr.v_bit.set(overflow_bit as u32);\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 86, "score": 183669.08636272146 }, { "content": "pub fn ldrb<V: Version>(cpu: &mut Cpu<V>, data: arm::Ldrb::Bf) -> cpu::InstrStatus {\n\n instr_load(cpu, arm::Ldr::new(data.val), true)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/load_store.rs", "rank": 87, "score": 183669.08636272143 }, { "content": "pub fn ldrsh<V: Version>(cpu: &mut Cpu<V>, data: arm::Ldrsh::Bf) -> cpu::InstrStatus {\n\n instr_load_misc(cpu, arm::Ldrh::new(data.val), MiscLsType::SignedHalfword)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/load_store.rs", "rank": 88, "score": 183669.08636272146 }, { "content": "pub fn cmp<V: Version>(cpu: &mut Cpu<V>, data: arm::Cmp::Bf) -> cpu::InstrStatus {\n\n instr_compare(cpu, data, false)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 89, "score": 183669.08636272146 }, { "content": "pub fn mvn<V: Version>(cpu: &mut Cpu<V>, data: arm::Mvn::Bf) -> cpu::InstrStatus {\n\n instr_move(cpu, arm::Mov::new(data.val), true)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 90, "score": 183669.08636272146 }, { "content": "pub fn adc<V: Version>(cpu: &mut Cpu<V>, data: arm::Adc::Bf) -> cpu::InstrStatus {\n\n instr_logical(cpu, arm::Add::new(data.val), ProcessInstrLogicalOp::AddCarry)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 91, "score": 183669.08636272146 }, { "content": "pub fn ldr<V: Version>(cpu: &mut Cpu<V>, data: arm::Ldr::Bf) -> cpu::InstrStatus {\n\n instr_load(cpu, data, false)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/load_store.rs", "rank": 92, "score": 183669.08636272146 }, { "content": "pub fn ldrex<V: Version>(cpu: &mut Cpu<V>, data: arm::Ldrex::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v6>());\n\n\n\n if !cpu::cond_passed(data.cond.get(), &cpu.cpsr) {\n\n return cpu::InstrStatus::InBlock;\n\n }\n\n\n\n let addr = cpu.regs[data.rn.get() as usize];\n\n let word = cpu.mpu.dmem_read::<u32>(addr);\n\n cpu.regs[data.rd.get() as usize] = word;\n\n\n\n warn!(\"ldrex: implemented as ldr!\");\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/load_store.rs", "rank": 93, "score": 183669.08636272146 }, { "content": "pub fn bic<V: Version>(cpu: &mut Cpu<V>, data: arm::Bic::Bf) -> cpu::InstrStatus {\n\n instr_bitwise(cpu, arm::And::new(data.val), ProcessInstrBitOp::AndNot)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 94, "score": 183669.08636272146 }, { "content": "pub fn orr<V: Version>(cpu: &mut Cpu<V>, data: thumb::Orr::Bf) 
-> cpu::InstrStatus {\n\n instr_bitwise(cpu, thumb::And::new(data.val), ProcessInstrBitOp::Or)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 95, "score": 183669.08636272146 }, { "content": "pub fn umlal<V: Version>(cpu: &mut Cpu<V>, data: arm::Umlal::Bf) -> cpu::InstrStatus {\n\n instr_mul64_accumulate(cpu, data, false)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/data_processing.rs", "rank": 96, "score": 183669.08636272146 }, { "content": "pub fn strd<V: Version>(cpu: &mut Cpu<V>, data: arm::Strd::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v5>());\n\n instr_store_misc(cpu, arm::Strh::new(data.val), MiscLsType::Doubleword)\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/load_store.rs", "rank": 97, "score": 183669.08636272146 }, { "content": "pub fn clrex<V: Version>(_cpu: &mut Cpu<V>, _data: arm::Clrex::Bf) -> cpu::InstrStatus {\n\n assert!(V::is::<cpu::v6>());\n\n warn!(\"clrex: implemented as no-op!\");\n\n\n\n cpu::InstrStatus::InBlock\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_arm/load_store.rs", "rank": 98, "score": 183669.08636272143 }, { "content": "pub fn sub_4<V: Version>(cpu: &mut Cpu<V>, data: thumb::Sub4::Bf) -> cpu::InstrStatus {\n\n let arminst: u32 = 0b1110001001001101110111110_0000000\n\n | ((data.immed_7.get() as u32) << 0);\n\n cpu::instructions_arm::sub(cpu, arm::Sub::new(arminst))\n\n}\n\n\n", "file_path": "libllama/src/cpu/instructions_thumb/data_processing.rs", "rank": 99, "score": 183669.08636272146 } ]
Rust
rooms/tests/kdbush.rs
oniproject/tto
4336c525ac39b1706e7ded9b28e4c5d55929268f
/*
mod data;

fn sq_dist(a: [f32; 2], b: [f32; 2]) -> f32 {
    let dx = a[0] - b[0];
    let dy = a[1] - b[1];
    dx * dx + dy * dy
}

static IDS: &[u32] = &[
    97,74,95,30,77,38,76,27,80,55,72,90,88,48,43,46,
    65,39,62,93, 9,96,47, 8, 3,12,15,14,21,41,36,40,
    69,56,85,78,17,71,44,19,18,13,99,24,67,33,37,49,
    54,57,98,45,23,31,66,68, 0,32, 5,51,75,73,84,35,
    81,22,61,89, 1,11,86,52,94,16, 2, 6,25,92,42,20,
    60,58,83,79,64,10,59,53,26,87, 4,63,50, 7,28,82,
    70,29,34,91,
];

static COORDS: &[(f32, f32)] = &[
    (10.0,20.0),( 6.0,22.0),(10.0,10.0),( 6.0,27.0),(20.0,42.0),(18.0,28.0),
    (11.0,23.0),(13.0,25.0),( 9.0,40.0),(26.0, 4.0),(29.0,50.0),(30.0,38.0),
    (41.0,11.0),(43.0,12.0),(43.0, 3.0),(46.0,12.0),(32.0,14.0),(35.0,15.0),
    (40.0,31.0),(33.0,18.0),(43.0,15.0),(40.0,34.0),(32.0,38.0),(33.0,34.0),
    (33.0,54.0),( 1.0,61.0),(24.0,56.0),(11.0,91.0),( 4.0,98.0),(20.0,81.0),
    (22.0,93.0),(19.0,81.0),(21.0,67.0),( 6.0,76.0),(21.0,72.0),(21.0,73.0),
    (25.0,57.0),(44.0,64.0),(47.0,66.0),(29.0,69.0),(46.0,61.0),(38.0,74.0),
    (46.0,78.0),(38.0,84.0),(32.0,88.0),(27.0,91.0),(45.0,94.0),(39.0,94.0),
    (41.0,92.0),(47.0,21.0),(47.0,29.0),(48.0,34.0),(60.0,25.0),(58.0,22.0),
    (55.0, 6.0),(62.0,32.0),(54.0, 1.0),(53.0,28.0),(54.0, 3.0),(66.0,14.0),
    (68.0, 3.0),(70.0, 5.0),(83.0, 6.0),(93.0,14.0),(99.0, 2.0),(71.0,15.0),
    (96.0,18.0),(95.0,20.0),(97.0,21.0),(81.0,23.0),(78.0,30.0),(84.0,30.0),
    (87.0,28.0),(90.0,31.0),(65.0,35.0),(53.0,54.0),(52.0,38.0),(65.0,48.0),
    (67.0,53.0),(49.0,60.0),(50.0,68.0),(57.0,70.0),(56.0,77.0),(63.0,86.0),
    (71.0,90.0),(52.0,83.0),(71.0,82.0),(72.0,81.0),(94.0,51.0),(75.0,53.0),
    (95.0,39.0),(78.0,53.0),(88.0,62.0),(84.0,72.0),(77.0,73.0),(99.0,76.0),
    (73.0,81.0),(88.0,87.0),(96.0,98.0),(96.0,82.0),
];

/*
#[test]
fn create_index() {
    let index = kdbush(points, 10);
    assert!(index.ids, ids, "ids are kd-sorted");
    assert!(index.coords, coords, "coords are kd-sorted");
}

#[test]
fn range_search() {
    let index = kdbush(points, 10);
    let result = index.range(20, 30, 50, 70);
    assert_eq!(result, &RANGE, "returns ids");
    for idx in &result {
        let p = points[idx];
        let is = p[0] < 20 || p[0] > 50 || p[1] < 30 || p[1] > 70;
        assert!(!is, "result point in range");
    }
    for idx in &IDS {
        let p = points[idx];
        let is = result.indexOf(idx) < 0 &&
            p[0] >= 20 && p[0] <= 50 && p[1] >= 30 && p[1] <= 70;
        assert!(!is, "outside point not in range");
    }
}

#[test]
fn within_search() {
    let index = KDBush::new(points, 10);
    let qp = [50, 50];
    let r = 20;
    let r2 = 20 * 20;
    let result = index.within(qp[0], qp[1], r);
    assert_eq!(result, &WITHIN, "returns ids");
    for idx in &result {
        let p = points[idx];
        let is = sq_dist(p, qp) > r2;
        assert!(!is, "result point in range");
    }
    for idx in &IDS {
        let p = points[idx];
        let is = result.index_of(idx) < 0 && sq_dist(p, qp) <= r2;
        assert!(!is, "outside point not in range");
    }
}
*/
*/

mod data;

use crate::data::*;
use rooms::index::{KDBush, SpatialIndex};

#[test]
fn range() {
    let mut index: KDBush<f32> = KDBush::new(10);
    index.fill(POINTS.iter().cloned().enumerate()
        .map(|(i, p)| (i as u32, p)));

    let mut result = Vec::new();
    index.range(RANGE_MIN, RANGE_MAX, |idx| {
        result.push(idx);
        let p = POINTS[idx as usize];
        assert!(test_range(p),
            "result point {:?} not in range {:?} {:?}", p, RANGE_MIN, RANGE_MAX);
    });

    let mut brute: Vec<_> = brute_range().collect();
    result.sort();
    brute.sort();
    assert_eq!(&result[..], &brute[..]);
}

#[test]
fn within() {
    let mut index: KDBush<f32> = KDBush::new(10);
    index.fill(POINTS.iter().cloned().enumerate()
        .map(|(i, p)| (i as u32, p)));

    let mut result = Vec::new();
    index.within(WITHIN_CENTER, WITHIN_RADIUS, |idx| {
        result.push(idx);
        let p = POINTS[idx as usize];
        assert!(test_within(p),
            "result point {:?} not in range {:?} {:?}", p, WITHIN_CENTER, WITHIN_RADIUS);
    });

    let mut brute: Vec<_> = brute_within().collect();
    result.sort();
    brute.sort();
    assert_eq!(&result[..], &brute[..]);
}
/*
mod data;

fn sq_dist(a: [f32; 2], b: [f32; 2]) -> f32 {
    let dx = a[0] - b[0];
    let dy = a[1] - b[1];
    dx * dx + dy * dy
}

static IDS: &[u32] = &[
    97,74,95,30,77,38,76,27,80,55,72,90,88,48,43,46,
    65,39,62,93, 9,96,47, 8, 3,12,15,14,21,41,36,40,
    69,56,85,78,17,71,44,19,18,13,99,24,67,33,37,49,
    54,57,98,45,23,31,66,68, 0,32, 5,51,75,73,84,35,
    81,22,61,89, 1,11,86,52,94,16, 2, 6,25,92,42,20,
    60,58,83,79,64,10,59,53,26,87, 4,63,50, 7,28,82,
    70,29,34,91,
];

static COORDS: &[(f32, f32)] = &[
    (10.0,20.0),( 6.0,22.0),(10.0,10.0),( 6.0,27.0),(20.0,42.0),(18.0,28.0),
    (11.0,23.0),(13.0,25.0),( 9.0,40.0),(26.0, 4.0),(29.0,50.0),(30.0,38.0),
    (41.0,11.0),(43.0,12.0),(43.0, 3.0),(46.0,12.0),(32.0,14.0),(35.0,15.0),
    (40.0,31.0),(33.0,18.0),(43.0,15.0),(40.0,34.0),(32.0,38.0),(33.0,34.0),
    (33.0,54.0),( 1.0,61.0),(24.0,56.0),(11.0,91.0),( 4.0,98.0),(20.0,81.0),
    (22.0,93.0),(19.0,81.0),(21.0,67.0),( 6.0,76.0),(21.0,72.0),(21.0,73.0),
    (25.0,57.0),(44.0,64.0),(47.0,66.0),(29.0,69.0),(46.0,61.0),(38.0,74.0),
    (46.0,78.0),(38.0,84.0),(32.0,88.0),(27.0,91.0),(45.0,94.0),(39.0,94.0),
    (41.0,92.0),(47.0,21.0),(47.0,29.0),(48.0,34.0),(60.0,25.0),(58.0,22.0),
    (55.0, 6.0),(62.0,32.0),(54.0, 1.0),(53.0,28.0),(54.0, 3.0),(66.0,14.0),
    (68.0, 3.0),(70.0, 5.0),(83.0, 6.0),(93.0,14.0),(99.0, 2.0),(71.0,15.0),
    (96.0,18.0),(95.0,20.0),(97.0,21.0),(81.0,23.0),(78.0,30.0),(84.0,30.0),
    (87.0,28.0),(90.0,31.0),(65.0,35.0),(53.0,54.0),(52.0,38.0),(65.0,48.0),
    (67.0,53.0),(49.0,60.0),(50.0,68.0),(57.0,70.0),(56.0,77.0),(63.0,86.0),
    (71.0,90.0),(52.0,83.0),(71.0,82.0),(72.0,81.0),(94.0,51.0),(75.0,53.0),
    (95.0,39.0),(78.0,53.0),(88.0,62.0),(84.0,72.0),(77.0,73.0),(99.0,76.0),
    (73.0,81.0),(88.0,87.0),(96.0,98.0),(96.0,82.0),
];

/*
#[test]
fn create_index() {
    let index = kdbush(points, 10);
    assert!(index.ids, ids, "ids are kd-sorted");
    assert!(index.coords, coords, "coords are kd-sorted");
}

#[test]
fn range_search() {
    let index = kdbush(points, 10);
    let result = index.range(20, 30, 50, 70);
    assert_eq!(result, &RANGE, "returns ids");
    for idx in &result {
        let p = points[idx];
        let is = p[0] < 20 || p[0] > 50 || p[1] < 30 || p[1] > 70;
        assert!(!is, "result point in range");
    }
    for idx in &IDS {
        let p = points[idx];
        let is = result.indexOf(idx) < 0 &&
            p[0] >= 20 && p[0] <= 50 && p[1] >= 30 && p[1] <= 70;
        assert!(!is, "outside point not in range");
    }
}

#[test]
fn within_search() {
    let index = KDBush::new(points, 10);
    let qp = [50, 50];
    let r = 20;
    let r2 = 20 * 20;
    let result = index.within(qp[0], qp[1], r);
    assert_eq!(result, &WITHIN, "returns ids");
    for idx in &result {
        let p = points[idx];
        let is = sq_dist(p, qp) > r2;
        assert!(!is, "result point in range");
    }
    for idx in &IDS {
        let p = points[idx];
        let is = result.index_of(idx) < 0 && sq_dist(p, qp) <= r2;
        assert!(!is, "outside point not in range");
    }
}
*/
*/

mod data;

use crate::data::*;
use rooms::index::{KDBush, SpatialIndex};

#[test]
fn range() {
    let mut index: KDBush<f32> = KDBush::new(10);
    index.fill(POINTS.iter().cloned().enumerate()
        .map(|(i, p)| (i as u32, p)));

    let mut result = Vec::new();
    index.range(RANGE_MIN, RANGE_MAX, |idx| {
        result.push(idx);
        let p = POINTS[idx as usize];
        assert!(test_range(p),
            "result point {:?} not in range {:?} {:?}", p, RANGE_MIN, RANGE_MAX);
    });

    let mut brute: Vec<_> = brute_range().collect();
    result.sort();
    brute.sort();
    assert_eq!(&result[..], &brute[..]);
}

#[test]
fn within() {
    let mut index: KDBush<f32> = KDBush::new(10);
    index.fill(POINTS.iter().cloned().enumerate()
        .map(|(i, p)| (i as u32, p)));

    let mut result = Vec::new();
    index.within(WITHIN_CENTER, WITHIN_RADIUS, |idx| {
        result.push(idx);
        let p = POINTS[idx as usize];
        assert!(test_within(p),
            "result point {:?} not in range {:?} {:?}", p, WITHIN_CENTER, WITHIN_RADIUS);
    });

    let mut brute: Vec<_> = brute_within().collect();
    result.sort();
    brute.sort();
    assert_eq!(&result[..], &brute[..]);
}
function_block-full_function
[ { "content": "pub fn test_within(p: [f32; 2]) -> bool {\n\n let dx = p[0] - WITHIN_CENTER[0];\n\n let dy = p[1] - WITHIN_CENTER[1];\n\n dx * dx + dy * dy <= WITHIN_RADIUS * WITHIN_RADIUS\n\n}\n\n\n", "file_path": "rooms/tests/data.rs", "rank": 0, "score": 279974.47262647696 }, { "content": "pub fn test_range(p: [f32; 2]) -> bool {\n\n p[0] >= RANGE_MIN[0] && p[0] <= RANGE_MAX[0] &&\n\n p[1] >= RANGE_MIN[1] && p[1] <= RANGE_MAX[1]\n\n}\n\n\n", "file_path": "rooms/tests/data.rs", "rank": 1, "score": 279974.47262647696 }, { "content": "fn within<T>(b: &mut Bencher, mut index: T)\n\n where T: SpatialIndex<f32>,\n\n{\n\n index.fill(POINTS.iter().cloned());\n\n b.iter_with_setup(|| &WITHIN[..], |within| {\n\n for r in within {\n\n index.within(r.0, r.1, |idx| { black_box(idx); });\n\n }\n\n });\n\n}\n\n\n", "file_path": "rooms/benches/indexing.rs", "rank": 2, "score": 269745.6570726534 }, { "content": "fn range<T>(b: &mut Bencher, mut index: T)\n\n where T: SpatialIndex<f32>,\n\n{\n\n index.fill(POINTS.iter().cloned());\n\n b.iter_with_setup(|| &RANGE[..], |range| {\n\n for r in range {\n\n index.range(r.0, r.1, |idx| { black_box(idx); });\n\n }\n\n });\n\n}\n\n\n", "file_path": "rooms/benches/indexing.rs", "rank": 3, "score": 247012.87109467195 }, { "content": "pub fn brute_within() -> impl Iterator<Item=u32> {\n\n POINTS.iter()\n\n .enumerate()\n\n .filter_map(|(i, &p)| {\n\n if test_within(p) {\n\n Some(i as u32)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n", "file_path": "rooms/tests/data.rs", "rank": 4, "score": 240982.34380894338 }, { "content": "pub fn brute_range() -> impl Iterator<Item=u32> {\n\n POINTS.iter()\n\n .enumerate()\n\n .filter_map(|(i, &p)| {\n\n if test_range(p) {\n\n Some(i as u32)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "rooms/tests/data.rs", "rank": 5, "score": 240982.34380894338 }, { "content": "pub fn dcubic_hermite(p0: f32, v0: f32, p1: f32, v1: f32, t: f32) -> f32 {\n\n let tt = t * t;\n\n let dh00 = 6.0 * tt - 6.0 * t;\n\n let dh10 = 3.0 * tt - 4.0 * t + 1.0;\n\n let dh01 = -6.0 * tt + 6.0 * t;\n\n let dh11 = 3.0 * tt - 2.0 * t;\n\n\n\n dh00 * p0 + dh10 * v0 + dh01 * p1 + dh11 * v1\n\n}\n\n\n", "file_path": "examples/testbed/src/util.rs", "rank": 7, "score": 225760.32402682066 }, { "content": "pub fn cubic_hermite(p0: f32, v0: f32, p1: f32, v1: f32, t: f32) -> f32 {\n\n let ti = t - 1.0;\n\n let t2 = t * t;\n\n let ti2 = ti * ti;\n\n let h00 = (1.0 + 2.0 * t) * ti2;\n\n let h10 = t * ti2;\n\n let h01 = t2 * (3.0 - 2.0 * t);\n\n let h11 = t2 * ti;\n\n\n\n h00 * p0 + h10 * v0 + h01 * p1 + h11 * v1\n\n}\n\n\n", "file_path": "examples/testbed/src/util.rs", "rank": 8, "score": 225760.32402682066 }, { "content": "fn fill<T>(b: &mut Bencher, mut index: T)\n\n where T: SpatialIndex<f32>\n\n{\n\n b.iter(|| {\n\n index.fill(POINTS.iter().cloned());\n\n black_box(&mut index);\n\n });\n\n}\n\n\n", "file_path": "rooms/benches/indexing.rs", "rank": 9, "score": 214061.049435188 }, { "content": "// A test state machine that can increment and decrement.\n\nfn exec(mut acc: u32, dt: f64, state: &mut State<TestActions, ()>) -> u32 {\n\n state.event(dt, &mut |args| {\n\n match *args.action {\n\n Inc => { acc += 1; (Success, args.dt) },\n\n Dec => { acc -= 1; (Success, args.dt) },\n\n }\n\n });\n\n acc\n\n}\n\n\n\n// Each action that terminates immediately\n\n// consumes a time of 0.0 seconds.\n\n// This makes it possible to execute one action\n\n// after another without delay or waiting for next update.\n", "file_path": "examples/testbed/src/ai/btree/tests.rs", 
"rank": 10, "score": 211421.76174454857 }, { "content": "pub fn hermite2(p0: Point2<f32>, v0: Vector2<f32>, p1: Point2<f32>, v1: Vector2<f32>, t: f32) -> Point2<f32> {\n\n let x = cubic_hermite(p0.x, v0.x, p1.x, v1.x, t);\n\n let y = cubic_hermite(p0.y, v0.y, p1.y, v1.y, t);\n\n Point2::new(x, y)\n\n}\n\n\n\npub const fn color(c: u32) -> [f32; 3] {\n\n let c = c.to_le();\n\n [\n\n ((c >> 16) & 0xFF) as f32 / 0xFF as f32,\n\n ((c >> 8) & 0xFF) as f32 / 0xFF as f32,\n\n ( c & 0xFF) as f32 / 0xFF as f32,\n\n ]\n\n}\n", "file_path": "examples/testbed/src/util.rs", "rank": 11, "score": 205189.29169775077 }, { "content": "fn new_dispatcher(name: &'static str, num_threads: usize, index: usize) -> DispatcherBuilder<'static, 'static> {\n\n DispatcherBuilder::new().with_pool(new_pool(name, num_threads, index))\n\n}\n\n*/\n\n\n", "file_path": "examples/testbed/src/ui/app.rs", "rank": 12, "score": 201246.39276676974 }, { "content": "fn sequence(children: &'static mut [impl Generator<Return=bool>]) -> impl Generator {\n\n move || {\n\n for child in children {\n\n match unsafe { child.resume() } {\n\n GeneratorState::Complete(true) => (),\n\n GeneratorState::Complete(false) => return false,\n\n GeneratorState::Yielded(_) => yield,\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "examples/testbed/src/ai/_btree/fuck.rs", "rank": 13, "score": 187758.10749788806 }, { "content": "fn selector(children: &'static mut [impl Generator<Return=bool>]) -> impl Generator {\n\n move || {\n\n for child in children {\n\n match unsafe { child.resume() } {\n\n GeneratorState::Complete(true) => return true,\n\n GeneratorState::Complete(false) => (),\n\n GeneratorState::Yielded(_) => yield,\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n\n/*\n", "file_path": "examples/testbed/src/ai/_btree/fuck.rs", "rank": 14, "score": 187758.10749788806 }, { "content": "fn draw_body<'w>(view: &mut View<'w>, iso: Isometry2<f32>, color: u32) {\n\n use std::f32::consts::FRAC_PI_2;\n\n let iso = iso * UnitComplex::from_angle(-FRAC_PI_2);\n\n view.curve_in(iso, color, true, &[\n\n Point2::new(0.0, 0.20),\n\n Point2::new(0.15, -0.10),\n\n Point2::new(0.0, 0.0),\n\n Point2::new(-0.15, -0.10),\n\n ]);\n\n}\n", "file_path": "examples/testbed/src/ui/demo.rs", "rank": 15, "score": 187034.7706211747 }, { "content": "#[inline(always)]\n\nfn memzero_slice(p: &mut [u8]) {\n\n for i in 0..p.len() {\n\n unsafe { p.as_mut_ptr().add(i).write_volatile(0) }\n\n }\n\n}\n\n\n\n/// Size of Key.\n\npub const KEY: usize = 32;\n\n\n\npub const HMAC: usize = 16;\n\n\n\n/// Nonce size for ChaCha20Poly1305 IETF in bytes.\n\npub const NONCE: usize = 12;\n\n\n\n/// Nonce size for XChaCha20Poly1305 IETF in bytes.\n\npub const XNONCE: usize = 24;\n\n\n\npub type Nonce = [u8; NONCE];\n\npub type Xnonce = [u8; XNONCE];\n\npub type Key = [u8; KEY];\n\npub type Tag = [u8; HMAC];\n\n\n\npub use self::chacha20::ChaCha20;\n\npub use self::poly1305::Poly1305;\n\npub use self::hchacha20::hchacha20;\n\n\n\n/// Performs inplace encryption using ChaCha20Poly1305 IETF.\n", "file_path": "src/crypto/mod.rs", "rank": 16, "score": 185969.5242749999 }, { "content": "fn new_pool(name: &'static str, num_threads: usize, index: usize) -> Arc<ThreadPool> {\n\n use oni_trace::register_thread;\n\n Arc::new(ThreadPoolBuilder::new()\n\n .num_threads(num_threads)\n\n .thread_name(move |n| format!(\"rayon #{} {}\", n, name))\n\n .start_handler(move |_| register_thread(Some(index), Some(index)))\n\n .build()\n\n .unwrap())\n\n}\n\n\n\n/*\n", "file_path": "examples/testbed/src/ui/app.rs", "rank": 21, 
"score": 173652.8447849246 }, { "content": "fn truncate(v: Vector2<f32>, max: f32) -> Vector2<f32> {\n\n let n = norm(&v);\n\n if n == 0.0 {\n\n zero()\n\n } else {\n\n let i = max / n;\n\n v * if i < 1.0 { i } else { 1.0 }\n\n }\n\n}\n", "file_path": "examples/testbed/src/ai/mod.rs", "rank": 22, "score": 164158.1053298225 }, { "content": "#[inline(always)]\n\nfn index(bit: usize) -> usize { bit >> 3 }\n\n\n", "file_path": "src/bitset.rs", "rank": 23, "score": 163027.3875711775 }, { "content": "fn benchmark(c: &mut Criterion) {\n\n c.bench_function(\"kdbush fill\", |b| {\n\n let index: KDBush<f32> = KDBush::new(10);\n\n fill(b, index)\n\n });\n\n c.bench_function(\"kdbush range\", |b| {\n\n let index: KDBush<f32> = KDBush::new(10);\n\n range(b, index)\n\n });\n\n c.bench_function(\"kdbush within\", |b| {\n\n let index: KDBush<f32> = KDBush::new(10);\n\n within(b, index)\n\n });\n\n}\n\n\n\ncriterion_group!(benches, benchmark);\n\ncriterion_main!(benches);\n", "file_path": "rooms/benches/indexing.rs", "rank": 24, "score": 161146.27866675143 }, { "content": "fn xopen(c: &mut [u8], ad: &[u8], t: [u8; TAG], n: &[u8; NPUB], k: &[u8; KEY]) -> Result<(), ()> {\n\n let (n, k) = AutoNonce(*n).split(k);\n\n open(c, Some(ad), &t, &n, &k)\n\n}\n\n\n", "file_path": "tests/xchacha20poly1305.rs", "rank": 25, "score": 160073.43384097313 }, { "content": "#[inline]\n\n#[cfg(not(feature = \"sodium\"))]\n\npub fn xopen(c: &mut [u8], ad: &[u8], t: &Tag, n: &[u8; XNONCE], k: &[u8; KEY]) -> Result<(), ()> {\n\n let (n, k) = AutoNonce(*n).split(k);\n\n open(c, Some(ad), &t, &n, &k)\n\n}\n\n\n", "file_path": "src/crypto/mod.rs", "rank": 26, "score": 157259.34543668822 }, { "content": "#[inline(always)]\n\nfn index(bit: usize) -> usize { bit >> 3 }\n\n\n", "file_path": "oni_reliable/src/bitset.rs", "rank": 27, "score": 156699.21023137643 }, { "content": "/// Returns `1` if `a == b` and `0` otherwise.\n\npub fn eq(a: u32, b: u32) -> isize {\n\n (u64::from(u32::from(a^b).wrapping_sub(1)) >> 63) as isize\n\n}\n", "file_path": "src/crypto/subtle.rs", "rank": 28, "score": 155545.33251883203 }, { "content": "fn hex2bin(bin: &[u8], out: &mut [u8]) {\n\n for (i, c) in bin.chunks(2).enumerate() {\n\n if c.len() != 2 { break }\n\n let c = std::str::from_utf8(c).unwrap();\n\n out[i] = u8::from_str_radix(c, 16).unwrap();\n\n }\n\n}\n\n\n", "file_path": "tests/chacha20.rs", "rank": 29, "score": 151167.42519618827 }, { "content": "pub fn crypto_random(buf: &mut [u8]) {\n\n use rand::Rng;\n\n rand::thread_rng().fill(buf)\n\n}\n\n\n", "file_path": "src/crypto/mod.rs", "rank": 30, "score": 147568.80114250083 }, { "content": "fn write_global_instant<W: Write>(w: &mut W, name: &'static str) {\n\n let ts = precise_time_ns();\n\n serde_json::to_writer(w, &Event::Instant {\n\n s: \"g\",\n\n ts: ts / 1000,\n\n base: Base {\n\n name: name.into(),\n\n cat: None,\n\n pid: 0,\n\n tid: 0,\n\n args: Args::Empty,\n\n cname: Some(colors::WHITE),\n\n },\n\n }).ok();\n\n}\n\n\n", "file_path": "oni_trace/src/lib.rs", "rank": 31, "score": 146504.1816934703 }, { "content": "#[inline(always)]\n\npub fn read_z(b: u8) -> u32 {\n\n b.trailing_zeros() + 1\n\n}\n\n\n\n/// `z >= 1 && z <= 9`\n\n#[inline(always)]\n\npub unsafe fn read_varint64_unchecked(p: *const u8, z: u32) -> u64 {\n\n #![allow(clippy::cast_ptr_alignment)]\n\n assert!(cfg!(target_endian = \"little\"), \"big endian doesn't support\");\n\n if z == 9 {\n\n (p.add(1) as *const u64).read_unaligned()\n\n } else {\n\n read_varint56_unchecked(p, z)\n\n }\n\n}\n\n\n\n/// `z >= 1 && z <= 
8`\n\n#[inline(always)]\n\npub unsafe fn read_varint56_unchecked(p: *const u8, z: u32) -> u64 {\n\n #![allow(clippy::cast_ptr_alignment)]\n\n assert!(cfg!(target_endian = \"little\"), \"big endian doesn't support\");\n\n let u = 64 - 8 * z;\n\n ((p as *const u64).read_unaligned() << u) >> (u + z)\n\n}\n\n\n", "file_path": "src/prefix_varint.rs", "rank": 33, "score": 145577.72491246852 }, { "content": "fn world_behavior(states: &mut [usize], world: &mut World) -> Status {\n\n let children = &[do_sun, do_rain];\n\n\n\n let current = &mut states[0];\n\n while let Some(child) = children.get(*current) {\n\n match child(world) {\n\n Failure => *current += 1,\n\n status => return status,\n\n }\n\n }\n\n Failure\n\n}\n\n*/\n\n\n\n\n\n/// Describes a behavior.\n\n///\n\n/// This is used for more complex event logic.\n\n/// Can also be used for game AI.\n\n#[derive(Clone, Deserialize, Serialize, PartialEq)]\n", "file_path": "examples/testbed/src/ai/_btree/tests3.rs", "rank": 34, "score": 145446.1445593021 }, { "content": "fn write_args<W: Write>(w: &mut W, args: Args) -> io::Result<()> {\n\n if let Some(args) = args {\n\n w.write_u16::<LE>(args.len() as u16)?;\n\n w.write_all(&args)?;\n\n } else {\n\n w.write_u16::<LE>(0)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "oni_trace/src/format.rs", "rank": 35, "score": 143662.1660996813 }, { "content": "fn write_time<W: Write>(w: &mut W, time: Duration) -> io::Result<()> {\n\n w.write_u64::<LE>(time.as_secs())?;\n\n w.write_u32::<LE>(time.subsec_nanos())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "oni_trace/src/format.rs", "rank": 36, "score": 143662.1660996813 }, { "content": "fn write_header<W: Write>(w: &mut W, version: &str) -> io::Result<()> {\n\n let padding = 128usize.checked_sub(version.len()).expect(\"128-byte fixed-size header\");\n\n w.write_all(version.as_bytes())?;\n\n for _ in 0..padding {\n\n w.write_u8(0)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "oni_trace/src/format.rs", "rank": 37, "score": 143662.1660996813 }, { "content": "#[inline(always)]\n\npub fn write_varint(buf: &mut [u8; 9], seq: u64, min: u32) -> &[u8] {\n\n assert!(min <= 8);\n\n let bits = (64 - (seq | 1).leading_zeros()).max(min * 7);\n\n let bytes = 1 + (bits - 1) / 7;\n\n\n\n if bits > 56 {\n\n buf[0] = 0u8;\n\n LE::write_u64(&mut buf[1..], seq);\n\n &buf[..]\n\n } else {\n\n let mut x = (2 * seq + 1) << (bytes - 1);\n\n for i in 0..bytes {\n\n buf[i as usize] = (x & 0xff) as u8;\n\n x >>= 8;\n\n }\n\n &buf[..bytes as usize]\n\n }\n\n}\n\n\n", "file_path": "src/prefix_varint.rs", "rank": 38, "score": 141134.21089797787 }, { "content": "pub fn open(m: &mut [u8], c: &[u8], mac: &[u8; 16], ad: &[u8], npub: [u8; 8], k: &[u8; 32]) -> Result<(), ()> {\n\n assert_eq!(m.len(), c.len());\n\n if let Err(()) = verify(c, mac, ad, npub, k) {\n\n m.iter_mut().for_each(|v| *v = 0);\n\n return Err(());\n\n } else {\n\n ChaCha20::stream_xor(m.as_mut_ptr(), c.as_ptr(), m.len() as u64, npub, 1, k);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/crypto/aead.rs", "rank": 39, "score": 136811.37676343674 }, { "content": "#[get(\"/\")]\n\nfn index() -> Redirect {\n\n Redirect::to(\"/index.html\")\n\n}\n\n\n", "file_path": "relay/src/main.rs", "rank": 40, "score": 134720.24190005756 }, { "content": "pub fn new_server(dispatcher: DispatcherBuilder<'static, 'static>, network: oni::Server<Socket>) -> Demo {\n\n let mut world = World::new();\n\n world.register::<Conn>();\n\n world.register::<LastSequence>();\n\n world.register::<Actor>();\n\n world.register::<NetMarker>();\n\n 
world.register::<InputBuffer>();\n\n world.register::<StateBuffer>();\n\n\n\n world.register::<StupidBot>();\n\n\n\n //world.add_resource(ServerTime::new());\n\n world.add_resource(network);\n\n world.add_resource(NetNode::new(1..0xFF00));\n\n\n\n if false {\n\n for _ in 0..120 {\n\n let pos = Point2::origin();\n\n let _e = world.create_entity()\n\n .marked::<NetMarker>()\n", "file_path": "examples/testbed/src/server/mod.rs", "rank": 41, "score": 134183.43536218334 }, { "content": "pub fn generate_id() -> usize {\n\n static NEXT_ID: AtomicUsize = AtomicUsize::new(0);\n\n NEXT_ID.fetch_add(1, Ordering::Relaxed)\n\n}\n\n\n\nuse log::{Log, Metadata, Record};\n\n\n\npub struct Logger;\n\n\n\nimpl Log for Logger {\n\n fn enabled(&self, _metadata: &Metadata) -> bool {\n\n //metadata.level() <= Level::Info\n\n true\n\n }\n\n\n\n fn log(&self, record: &Record) {\n\n let ts = precise_time_ns();\n\n if self.enabled(record.metadata()) {\n\n LOCAL.with(|profiler| match *profiler.borrow() {\n\n Some(ref profiler) => profiler.log(ts, record),\n", "file_path": "oni_trace/src/lib.rs", "rank": 42, "score": 127983.7543025901 }, { "content": "fn write_name<W: Write, S: AsRef<[u8]>>(w: &mut W, name: Option<S>) -> io::Result<()> {\n\n if let Some(name) = name {\n\n let name = name.as_ref();\n\n w.write_u16::<LE>(name.len() as u16)?;\n\n w.write_all(name)?;\n\n } else {\n\n w.write_u16::<LE>(0)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "oni_trace/src/format.rs", "rank": 43, "score": 126251.65417770331 }, { "content": "#[inline(always)]\n\nfn index_u8(bit: usize) -> usize { bit >> 3 }\n\n\n", "file_path": "examples/testbed/src/bit_io.rs", "rank": 44, "score": 125815.80448856812 }, { "content": "fn decode(mut buf: &[u8]) {\n\n let kind = buf.read_u8()\n\n let let mut nCurMsgNum = 0i64;\n\n let mut nDecodeReliablePos = 0i64;\n\n\n\n loop {\n\n if kind & 0xC0 == 0x00 {\n\n // Unreliable segment\n\n\n\n // Decode message number\n\n if nCurMsgNum == 0 {\n\n // First unreliable frame.\n\n // Message number is absolute, but only bottom N bits are sent\n\n static const char szUnreliableMsgNumOffset[] = \"unreliable msgnum\";\n\n int64 nLowerBits, nMask;\n\n if nFrameType & 0x10 {\n\n READ_32BITU( nLowerBits, szUnreliableMsgNumOffset );\n\n nMask = 0xffffffff;\n\n nCurMsgNum = NearestWithSameLowerBits( (int32)nLowerBits, m_receiverState.m_nHighestSeenMsgNum );\n\n } else {\n", "file_path": "src/frame.rs", "rank": 45, "score": 123873.05028451153 }, { "content": "/// Converts a Duration to the time in seconds in an `f32`.\n\npub fn duration_to_secs_f32(duration: Duration) -> f32 {\n\n duration.as_secs() as f32 + (duration.subsec_nanos() as f32 / 1.0e9)\n\n}\n\n\n", "file_path": "examples/testbed/src/timing.rs", "rank": 46, "score": 122008.28197247618 }, { "content": "/// Converts a time in seconds in an `f32` to a duration.\n\npub fn secs_to_duration_f32(secs: f32) -> Duration {\n\n Duration::new(secs as u64, ((secs % 1.0) * 1.0e9) as u32)\n\n}\n\n\n", "file_path": "examples/testbed/src/timing.rs", "rank": 47, "score": 122008.28197247618 }, { "content": "pub fn ietf_open(m: &mut [u8], c: &[u8], mac: &[u8; 16], ad: &[u8], npub: &[u8; 12], k: &[u8; 32]) -> Result<(), ()> {\n\n assert_eq!(m.len(), c.len());\n\n if let Err(()) = ietf_verify(c, mac, ad, npub, k) {\n\n m.iter_mut().for_each(|v| *v = 0);\n\n return Err(());\n\n } else {\n\n ChaCha20::stream_ietf_xor(m.as_mut_ptr(), c.as_ptr(), m.len() as u64, npub, 1, k);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/crypto/aead.rs", "rank": 48, "score": 120316.19966723499 }, { 
"content": "#[inline]\n\nfn sequence_bytes_required(sequence: u64) -> u32 {\n\n 1 + (64 - (sequence | 1).leading_zeros() - 1) / 8\n\n}\n\n\n\nimpl<'a> Packet<'a> {\n\n pub fn encode_close(protocol: u64, mut buf: &mut [u8], seq: u64, k: &[u8; KEY]) -> io::Result<usize> {\n\n let start_len = buf.len();\n\n\n\n let sss = sequence_bytes_required(seq);\n\n let prefix = 0b0011_0001 | ((sss - 1) as u8) << 1;\n\n buf.write_u8(prefix)?;\n\n buf.write_uint::<LE>(seq, sss as usize)?;\n\n\n\n let tag = Self::seal(protocol, &mut [], seq, prefix, k);\n\n buf.write_all(&tag)?;\n\n\n\n Ok(start_len - buf.len())\n\n }\n\n\n\n // TODO: version without encryption?\n", "file_path": "src/protocol.rs", "rank": 49, "score": 119809.465890467 }, { "content": "fn pool_dispatcher(pool: Arc<ThreadPool>) -> DispatcherBuilder<'static, 'static> {\n\n DispatcherBuilder::new().with_pool(pool)\n\n}\n\n\n\nimpl AppState {\n\n pub fn new(font: Rc<Font<'static>>) -> Self {\n\n let name = \"trace.json.gz\";\n\n let sleep = std::time::Duration::from_millis(100);\n\n let worker = oni_trace::AppendWorker::new(name, sleep);\n\n\n\n // setup a server, the player's client, and another player.\n\n\n\n let (player1, player2, server) = {\n\n use std::io::Write;\n\n use oni::{\n\n token::{PublicToken, USER},\n\n Server,\n\n Client,\n\n ServerList,\n\n };\n", "file_path": "examples/testbed/src/ui/app.rs", "rank": 50, "score": 119785.8508531012 }, { "content": "fn xseal(m: &mut [u8], ad: &[u8], n: &[u8; NPUB], k: &[u8; KEY]) -> [u8; TAG] {\n\n let (n, k) = AutoNonce(*n).split(k);\n\n seal(m, Some(ad), &n, &k)\n\n}\n\n\n", "file_path": "tests/xchacha20poly1305.rs", "rank": 51, "score": 119396.03193342348 }, { "content": "type Index = usize;\n\nconst MAGIC: Index = 600;\n\n\n\npub struct KDBush<S: Shim> {\n\n data: Vec<Entry<S>>,\n\n node_size: usize,\n\n _marker: PhantomData<S>,\n\n}\n\n\n\nimpl<S: Shim> KDBush<S> {\n\n pub fn new(node_size: usize) -> Self {\n\n Self {\n\n node_size,\n\n data: Vec::new(),\n\n _marker: PhantomData\n\n }\n\n }\n\n\n\n fn sort_kd(&mut self, left: Index, right: Index, axis: u8) {\n\n if right - left <= self.node_size {\n", "file_path": "rooms/src/index/kdbush.rs", "rank": 52, "score": 118052.03542121724 }, { "content": "#[inline]\n\n#[cfg(not(feature = \"sodium\"))]\n\npub fn xseal(m: &mut [u8], ad: &[u8], n: &[u8; XNONCE], k: &[u8; KEY]) -> Tag {\n\n let (n, k) = AutoNonce(*n).split(k);\n\n seal(m, Some(ad), &n, &k)\n\n}\n\n\n", "file_path": "src/crypto/mod.rs", "rank": 53, "score": 117230.56767175606 }, { "content": "/// Registers the current thread with the global profiler.\n\npub fn register_thread(pid: Option<usize>, sort_index: Option<usize>) {\n\n GLOBAL.lock().unwrap().register_thread(pid.unwrap_or(0), sort_index);\n\n}\n\n\n\npub macro location() {\n\n if $crate::TRACE_LOC {\n\n $crate::Args::Location { module: module_path!(), file: file!(), line: line!() }\n\n } else {\n\n $crate::Args::Empty\n\n }\n\n}\n\n\n\npub macro instant {\n\n (json $name:expr, $value:expr) => {\n\n if $crate::ENABLED {\n\n $crate::instant_thread($name, \"\", $crate::Args::Custom {\n\n value: $value,\n\n });\n\n }\n\n },\n", "file_path": "oni_trace/src/lib.rs", "rank": 54, "score": 116283.72789425998 }, { "content": "#[inline]\n\n#[cfg(not(feature = \"sodium\"))]\n\npub fn seal(m: &mut [u8], ad: Option<&[u8]>, npub: &Nonce, key: &Key) -> Tag {\n\n let ad = ad.unwrap_or(&[]);\n\n let z = &mut [0u8; 64][..];\n\n ChaCha20::new_ietf(key, npub, 0).inplace(z);\n\n ChaCha20::new_ietf(key, npub, 1).inplace(m);\n\n let mut poly1305 = 
Poly1305::with_key(&z[..32]);\n\n poly1305.update_pad(ad);\n\n poly1305.update_pad(m);\n\n poly1305.update_u64(ad.len() as u64);\n\n poly1305.update_u64(m.len() as u64);\n\n poly1305.finish()\n\n}\n\n\n\n/// Performs inplace decryption using ChaCha20Poly1305 IETF.\n", "file_path": "src/crypto/mod.rs", "rank": 55, "score": 116223.07026810429 }, { "content": "// Interleave bits of x and y, so that all of the\n\n// bits of x are in the even positions and y in the odd;\n\n// z gets the resulting Morton Number.\n\nfn naive(x: u16, y: u16) -> u32 {\n\n let (x, y) = (x as u32, y as u32);\n\n let mut z = 0;\n\n for i in 0..16 { // unroll for more speed...\n\n z |= (x & 1 << i) << i | (y & 1 << i) << (i + 1);\n\n }\n\n z\n\n}\n\n\n\nmacro table($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, [ $($num:expr,)+ ]) {\n\n static $a: [u32; 256] = [ $($num << 17,)+ ];\n\n static $b: [u32; 256] = [ $($num << 16,)+ ];\n\n static $c: [u32; 256] = [ $($num << 1,)+ ];\n\n static $d: [u32; 256] = [ $($num,)+ ];\n\n\n\n static $e: [u16; 256] = [ $($num,)+ ];\n\n}\n\n\n\ntable!(TABLE_17, TABLE_16, TABLE_1, TABLE, TABLE_U16, [\n\n 0x0000, 0x0001, 0x0004, 0x0005, 0x0010, 0x0011, 0x0014, 0x0015,\n", "file_path": "examples/testbed/src/morton.rs", "rank": 56, "score": 113899.4270952596 }, { "content": "// Interleave bits of x and y, so that all of the\n\n// bits of x are in the even positions and y in the odd;\n\n// z gets the resulting 32-bit Morton Number.\n\nfn table1(x: u16, y: u16) -> u32 {\n\n let a = TABLE_U16[(y >> 8 ) as usize] as u32;\n\n let b = TABLE_U16[(x >> 8 ) as usize] as u32;\n\n let c = TABLE_U16[(y & 0xFF) as usize] as u32;\n\n let d = TABLE_U16[(x & 0xFF) as usize] as u32;\n\n (a << 17) | (b << 16) | (c << 1) | d\n\n}\n\n\n", "file_path": "examples/testbed/src/morton.rs", "rank": 57, "score": 113899.35986569748 }, { "content": "// Interleave bits of x and y, so that all of the\n\n// bits of x are in the even positions and y in the odd;\n\n// z gets the resulting 32-bit Morton Number.\n\nfn table4(x: u16, y: u16) -> u32 {\n\n TABLE_17[(y >> 8 ) as usize] | TABLE_16[(x >> 8 ) as usize] |\n\n TABLE_1 [(y & 0xFF) as usize] | TABLE [(x & 0xFF) as usize]\n\n}\n\n\n", "file_path": "examples/testbed/src/morton.rs", "rank": 58, "score": 113899.35986569748 }, { "content": "// Interleave lower 16 bits of x and y, so the bits of x\n\n// are in the even positions and bits from y in the odd;\n\n// z gets the resulting 32-bit Morton Number.\n\nfn magic(x: u16, y: u16) -> u32 {\n\n let (x, y) = (x as u32, y as u32);\n\n\n\n const B0: u32 = 0x55555555;\n\n const B1: u32 = 0x33333333;\n\n const B2: u32 = 0x0F0F0F0F;\n\n const B3: u32 = 0x00FF00FF;\n\n\n\n const S0: u32 = 1;\n\n const S1: u32 = 2;\n\n const S2: u32 = 4;\n\n const S3: u32 = 8;\n\n\n\n let x = (x | (x << S3)) & B3;\n\n let x = (x | (x << S2)) & B2;\n\n let x = (x | (x << S1)) & B1;\n\n let x = (x | (x << S0)) & B0;\n\n\n\n let y = (y | (y << S3)) & B3;\n\n let y = (y | (y << S2)) & B2;\n\n let y = (y | (y << S1)) & B1;\n\n let y = (y | (y << S0)) & B0;\n\n\n\n x | (y << 1)\n\n}\n", "file_path": "examples/testbed/src/morton.rs", "rank": 59, "score": 113899.23177051131 }, { "content": "fn world_behavior() -> Behavior<'static, World> {\n\n Selector(&[\n\n Condition(World::can_shine, &Action(World::toggle_sun)),\n\n Condition(World::can_rain, &Action(World::rain)),\n\n ])\n\n}\n\n\n", "file_path": "examples/testbed/src/ai/_btree/tests2.rs", "rank": 60, "score": 113813.39295753499 }, { "content": "fn tree_behavior() -> Behavior<'static, Tree> {\n\n 
Selector(&[\n\n Sequence(&[\n\n Condition(Tree::can_make_energy, &Action(Tree::make_energy)),\n\n Condition(Tree::can_grow, &Action(Tree::grow)),\n\n Condition(Tree::can_emit_oxygen, &Action(Tree::emit_oxygen)),\n\n ]),\n\n Condition(Tree::can_gather_sun, &Action(Tree::gather_sun)),\n\n Condition(Tree::can_gather_water, &Action(Tree::gather_water)),\n\n ])\n\n}\n\n\n", "file_path": "examples/testbed/src/ai/_btree/tests2.rs", "rank": 61, "score": 113813.39295753499 }, { "content": "#[doc(hidden)]\n\npub fn instant_thread(name: &'static str, cat: &'static str, args: Args) {\n\n let ts = precise_time_ns();\n\n LOCAL.with(|profiler| match *profiler.borrow() {\n\n Some(ref profiler) => profiler.instant_thread(ts, name, cat, args),\n\n None => println!(\"ERROR: instant_thread on unregistered thread!\"),\n\n });\n\n}\n\n\n\npub macro async_event {\n\n ($kind:ident, $name:expr, $cat:expr, $id:expr, $cname:expr) => {\n\n if $crate::ENABLED {\n\n let cat: Option<&'static str> = $cat;\n\n $crate::push_async($id, $name, $cat, $crate::Async::$kind, location!(), $cname);\n\n }\n\n }\n\n}\n\n\n\npub macro async_start {\n\n ($name:expr, $cat:expr, $id:expr) => { $crate::async_event!(Start, $name, $cat, $id, None); },\n\n ($name:expr, $cat:expr, $id:expr, $cname:expr) => { $crate::async_event!(Start, $name, $cat, $id, Some($cname)); }\n", "file_path": "oni_trace/src/lib.rs", "rank": 62, "score": 113703.02650505339 }, { "content": "#[inline]\n\n#[cfg(not(feature = \"sodium\"))]\n\npub fn open(c: &mut [u8], ad: Option<&[u8]>, tag: &Tag, npub: &Nonce, key: &Key)\n\n -> Result<(), ()>\n\n{\n\n let ad = ad.unwrap_or(&[]);\n\n let z = &mut [0u8; 64][..];\n\n ChaCha20::new_ietf(key, npub, 0).inplace(z);\n\n let mut poly1305 = Poly1305::with_key(&z[..32]);\n\n poly1305.update_pad(ad);\n\n poly1305.update_pad(c);\n\n poly1305.update_u64(ad.len() as u64);\n\n poly1305.update_u64(c.len() as u64);\n\n if poly1305.finish_verify(tag) {\n\n ChaCha20::new_ietf(key, npub, 1).inplace(c);\n\n Ok(())\n\n } else {\n\n c.iter_mut().for_each(|v| *v = 0);\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/crypto/mod.rs", "rank": 63, "score": 112814.06740274903 }, { "content": "fn do_rain(world: &mut World) -> Status {\n\n let can = world.can_rain();\n\n if can { world.rain(); }\n\n can.into()\n\n}\n\n\n", "file_path": "examples/testbed/src/ai/_btree/tests3.rs", "rank": 64, "score": 110062.04856380245 }, { "content": "fn do_sun(world: &mut World) -> Status {\n\n let can = world.can_shine();\n\n if can { world.toggle_sun(); }\n\n can.into()\n\n}\n\n\n", "file_path": "examples/testbed/src/ai/_btree/tests3.rs", "rank": 65, "score": 110062.04856380245 }, { "content": "#[inline(always)]\n\npub fn read_varint(buf: &[u8]) -> Result<u64, ()> {\n\n if buf.is_empty() { return Err(()); }\n\n let z = read_z(buf[0]);\n\n if buf.len() < z as usize { return Err(()); }\n\n unsafe {\n\n Ok(read_varint64_unchecked(buf.as_ptr(), z))\n\n }\n\n}\n\n\n", "file_path": "src/prefix_varint.rs", "rank": 66, "score": 109734.56759822428 }, { "content": "fn world_behavior(world: Rc<RefCell<World>>) -> impl Generator<Return=Rc<RefCell<World>>> {\n\n move || {\n\n if world.borrow().can_shine() {\n\n while world.borrow_mut().toggle_sun() {\n\n yield;\n\n }\n\n }\n\n\n\n if world.borrow().can_rain() {\n\n while world.borrow_mut().rain() {\n\n yield;\n\n }\n\n }\n\n\n\n return world;\n\n }\n\n}\n\n\n\nmacro action($obj: ident, $fn:ident) {\n\n while $obj.borrow_mut().$fn() {\n\n yield;\n\n }\n\n}\n\n\n", "file_path": "examples/testbed/src/ai/_btree/tests.rs", "rank": 
67, "score": 109719.01614900352 }, { "content": "fn tree_behavior(tree: Rc<RefCell<Tree>>) -> impl Generator<Return=Rc<RefCell<Tree>>> {\n\n move || {\n\n //sequence!\n\n loop {\n\n // photosynthesise\n\n if !tree.borrow().can_make_energy() {\n\n break;\n\n } else {\n\n action!(tree, make_energy);\n\n }\n\n\n\n if !tree.borrow().can_grow() {\n\n break;\n\n } else {\n\n action!(tree, grow);\n\n }\n\n\n\n if !tree.borrow().can_emit_oxygen() {\n\n break;\n\n } else {\n", "file_path": "examples/testbed/src/ai/_btree/tests.rs", "rank": 68, "score": 109719.01614900352 }, { "content": "#[test]\n\nfn original() {\n\n let mut key = [0u8; ChaCha20::KEYBYTES];\n\n let mut nonce = [0u8; ChaCha20::NONCEBYTES];\n\n let mut expect = [0u8; 160];\n\n let mut out = [0u8; 160];\n\n let zero = [0u8; 160];\n\n\n\n let mut vector = 0;\n\n\n\n for test in TESTS.iter() {\n\n hex2bin(test.key, &mut key);\n\n hex2bin(test.nonce, &mut nonce);\n\n hex2bin(VECTORS[vector], &mut expect);\n\n vector += 1;\n\n ChaCha20::stream(&mut out, nonce, &key);\n\n assert_eq!(&out[..], &expect[..]);\n\n for plen in 1..out.len() {\n\n let mut part = vec![0u8; plen];\n\n ChaCha20::stream_xor(part.as_mut_ptr(), out.as_ptr(), plen as u64, nonce, 0, &key);\n\n assert_eq!(part, &zero[..plen], \"Failed with length {}\", plen);\n", "file_path": "tests/chacha20.rs", "rank": 69, "score": 109253.59405377528 }, { "content": "#[test]\n\nfn original() {\n\n const MLEN: usize = 10;\n\n const ADLEN: usize = 10;\n\n const CLEN: usize = MLEN + ABYTES;\n\n\n\n let firstkey: [u8; ABYTES] = [\n\n 0x42, 0x90, 0xbc, 0xb1, 0x54, 0x17, 0x35, 0x31,\n\n 0xf3, 0x14, 0xaf, 0x57, 0xf3, 0xbe, 0x3b, 0x50,\n\n 0x06, 0xda, 0x37, 0x1e, 0xce, 0x27, 0x2a, 0xfa,\n\n 0x1b, 0x5d, 0xbd, 0xd1, 0x10, 0x0a, 0x10, 0x07,\n\n ];\n\n let m = [0x86, 0xd0, 0x99, 0x74, 0x84, 0x0b, 0xde, 0xd2, 0xa5, 0xca];\n\n let nonce = [0xcd, 0x7c, 0xf6, 0x7b, 0xe3, 0x9c, 0x79, 0x4a];\n\n let ad = [0x87, 0xe2, 0x29, 0xd4, 0x50, 0x08, 0x45, 0xa0, 0x79, 0xc0];\n\n\n\n let mut c = vec![0u8; MLEN];\n\n let mac = aead::seal(c.as_mut_ptr(), &m, &ad, nonce, &firstkey);\n\n\n\n assert_eq!(&c[..], &[0xe3,0xe4,0x46,0xf7,0xed,0xe9,0xa1,0x9b,0x62,0xa4]);\n\n assert_eq!(&mac[..], &[0x67,0x7d,0xab,0xf4,0xe3,0xd2,0x4b,0x87,0x6b,0xb2,0x84,0x75,0x38,0x96,0xe1,0xd6]);\n\n\n\n aead::verify(&c, &mac, &ad, nonce, &firstkey).unwrap();\n\n\n\n let mut dst = vec![0u8; MLEN];\n\n aead::open(&mut dst, &c, &mac, &ad, nonce, &firstkey).unwrap();\n\n assert_eq!(&dst, &m);\n\n}\n", "file_path": "tests/aead.rs", "rank": 70, "score": 109253.59405377528 }, { "content": "#[test]\n\nfn ietf() {\n\n let mut key = [0u8; ChaCha20::KEYBYTES];\n\n let mut nonce = [0u8; ChaCha20::IETF_NONCEBYTES];\n\n let mut expect = [0u8; 160];\n\n let zero = [0u8; 160];\n\n\n\n let mut vector = 31;\n\n\n\n for test in TESTS_IETF.iter() {\n\n let ic = test.ic;\n\n hex2bin(test.key, &mut key);\n\n hex2bin(test.nonce, &mut nonce);\n\n hex2bin(VECTORS[vector], &mut expect);\n\n vector += 1;\n\n let mut out = [0u8; 160];\n\n\n\n ChaCha20::stream_ietf_xor(out.as_mut_ptr(), out.as_ptr(), out.len() as u64, &nonce, ic, &key);\n\n assert_eq!(&out[..], &expect[..]);\n\n\n\n for plen in 1..out.len() {\n", "file_path": "tests/chacha20.rs", "rank": 71, "score": 109253.59405377528 }, { "content": "fn decode_payload(buf: &[u8]) -> std::io::Result<Frame> {\n\n use byteorder::{LE, ReadBytesExt};\n\n\n\n use std::num::NonZeroU64;\n\n use std::io::{Error, ErrorKind::{\n\n InvalidData,\n\n }};\n\n\n\n\n\n let mut p = &buf[..];\n\n\n\n let prefix = p.read_u8()?;\n\n\n\n 
match prefix & 0b11000000 {\n\n 0b00000000 => {\n\n let e = (prefix & 0b100000) != 0;\n\n let m = (prefix & 0b010000) != 0;\n\n let o = (prefix & 0b001000) != 0;\n\n let sss = (prefix & 0b111) as usize;\n\n\n", "file_path": "src/frame.rs", "rank": 72, "score": 107264.57077286631 }, { "content": "#[test]\n\nfn vector_xchacha20poly1305() {\n\n use std::fs::File;\n\n use std::io::{BufReader, BufRead};\n\n\n\n const FILENAME: &str = \"tests/xchacha20poly1305.vector\";\n\n\n\n #[derive(Default, Clone)]\n\n struct Vector {\n\n key: Vec<u8>,\n\n ad: Vec<u8>,\n\n nonce: Vec<u8>,\n\n\n\n input: Vec<u8>,\n\n output: Vec<u8>, // with tag\n\n }\n\n\n\n let vectors = {\n\n let mut vectors = Vec::new();\n\n\n\n let file = File::open(FILENAME).unwrap();\n", "file_path": "tests/xchacha20poly1305.rs", "rank": 73, "score": 106275.40917958626 }, { "content": "#[test]\n\nfn poly1305_vectors() {\n\n for v in VECTORS {\n\n assert_eq!(Poly1305::sum(v.text, &v.key), v.tag);\n\n }\n\n}\n\n\n", "file_path": "tests/poly1305.rs", "rank": 74, "score": 106275.40917958626 }, { "content": "#[test]\n\nfn smoke_xchacha20poly1305() {\n\n const MLEN: usize = 114;\n\n const ADLEN: usize = 12;\n\n\n\n static FIRST_KEY: [u8; KEY] = [\n\n 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,\n\n 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,\n\n 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,\n\n 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,\n\n ];\n\n\n\n static MESSAGE: &[u8; MLEN] = b\"Ladies and Gentlemen of the class of '99: \\\n\n If I could offer you only one tip for the future, sunscreen would be it.\";\n\n\n\n static NONCE: [u8; NPUB] = [\n\n 0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43,\n\n 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x4b,\n\n 0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53,\n\n ];\n\n static AD: [u8; ADLEN] = [\n", "file_path": "tests/xchacha20poly1305.rs", "rank": 75, "score": 106275.40917958626 }, { "content": "#[test]\n\nfn multi() {\n\n let mut world = World::new();\n\n let sys = MultiSystem::new(&mut world);\n\n\n\n let room = Room::new();\n\n let room = world.create_entity()\n\n .with(room)\n\n .build();\n\n\n\n let e1 = world.create_entity()\n\n .with(Replica::new(View::Range(20.0, 10.0)))\n\n .with(Position::new([0.0, 0.0]))\n\n .with(Spawned::new(room))\n\n .build();\n\n\n\n let mut dispatcher = DispatcherBuilder::new()\n\n .with(sys, \"replica\", &[])\n\n .build();\n\n\n\n dispatcher.dispatch(&world.res);\n", "file_path": "rooms/tests/system.rs", "rank": 76, "score": 106275.40917958626 }, { "content": "#[test]\n\nfn poly1305_verify() {\n\n assert!(Poly1305::verify(&EXP, &C, &RS));\n\n}\n\n\n", "file_path": "tests/poly1305.rs", "rank": 77, "score": 106275.40917958626 }, { "content": "#[test]\n\nfn base() {\n\n let mut replica = Replica::new(View::Within(10.0));\n\n\n\n {\n\n replica.extend(vec![1, 2, 3]);\n\n assert_eq!(&replica.created(), &[1, 2, 3]);\n\n assert_eq!(&replica.removed(), &[]);\n\n assert_eq!(&replica.nchange(), &[]);\n\n }\n\n\n\n {\n\n replica.extend(vec![4, 2, 3, 4]);\n\n assert_eq!(&replica.created(), &[4]);\n\n assert_eq!(&replica.removed(), &[1]);\n\n assert_eq!(&replica.nchange(), &[2, 3]);\n\n }\n\n\n\n {\n\n replica.extend(vec![4, 2, 3, 4]);\n\n assert_eq!(&replica.created(), &[]);\n", "file_path": "rooms/tests/replica.rs", "rank": 78, "score": 106275.40917958626 }, { "content": "#[test]\n\nfn test_sequence() {\n\n let tests = [\n\n (0x______________00, 1),\n\n (0x______________11, 1),\n\n (0x______________FF, 1),\n\n\n\n (0x____________0100, 2),\n\n (0x____________1122, 
2),\n\n (0x____________FFFF, 2),\n\n\n\n (0x__________010000, 3),\n\n (0x__________112233, 3),\n\n (0x__________FFFFFF, 3),\n\n\n\n (0x________01000000, 4),\n\n (0x________11223344, 4),\n\n (0x________FFFFFFFF, 4),\n\n\n\n (0x______0100000000, 5),\n\n (0x______1122334455, 5),\n", "file_path": "src/protocol.rs", "rank": 79, "score": 106275.40917958626 }, { "content": "#[test]\n\nfn client_server() {\n\n const CONNECT_TOKEN_EXPIRY: u32 = 30;\n\n const CONNECT_TOKEN_TIMEOUT: u32 = 5;\n\n const PROTOCOL_ID: u64 = 0x1122334455667788;\n\n const DELTA_TIME: Duration = Duration::from_millis(1000 / 60);\n\n\n\n println!(\"[client/server]\");\n\n\n\n let (connect_token, mut server) = {\n\n use std::io::Write;\n\n\n\n let private_key = keygen();\n\n let client_id = 1345643;\n\n\n\n let server = Server::simulated(PROTOCOL_ID, private_key);\n\n\n\n let mut server_list = ServerList::new();\n\n server_list.push(server.local_addr()).unwrap();\n\n\n\n let data = server_list.serialize().unwrap();\n", "file_path": "tests/server.rs", "rank": 80, "score": 106275.40917958626 }, { "content": "pub fn seal_inplace(m: &mut [u8], ad: &[u8], npub: [u8; 8], k: &[u8; 32]) -> [u8; 16] {\n\n let mut block0 = [0u8; 64];\n\n ChaCha20::stream(&mut block0, npub, k);\n\n ChaCha20::stream_xor(m.as_mut_ptr(), m.as_ptr(), m.len() as u64, npub, 1, k);\n\n\n\n let mut poly1305 = Poly1305::with_key(&block0[..32]);\n\n poly1305.update(ad);\n\n poly1305.update_u64(ad.len() as u64);\n\n poly1305.update(m);\n\n poly1305.update_u64(m.len() as u64);\n\n poly1305.finish()\n\n}\n\n\n", "file_path": "src/crypto/aead.rs", "rank": 81, "score": 103601.60153511583 }, { "content": "#[get(\"/<file..>\")]\n\nfn files(file: PathBuf) -> Result<NamedFile, NotFound<String>> {\n\n let path = Path::new(\"static/\").join(file);\n\n NamedFile::open(&path).map_err(|_| NotFound(format!(\"Bad path: {:?}\", path)))\n\n}\n\n\n", "file_path": "relay/src/main.rs", "rank": 82, "score": 103598.01113072233 }, { "content": "pub fn verify(c: &[u8], mac: &[u8; 16], ad: &[u8], npub: [u8; 8], k: &[u8; 32]) -> Result<(), ()> {\n\n let mut block0 = [0u8; 64];\n\n ChaCha20::stream(&mut block0, npub, k);\n\n\n\n let mut poly1305 = Poly1305::with_key(&block0[..32]);\n\n poly1305.update(ad);\n\n poly1305.update_u64(ad.len() as u64);\n\n poly1305.update(&c);\n\n poly1305.update_u64(c.len() as u64);\n\n if poly1305.finish_verify(mac) {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/crypto/aead.rs", "rank": 83, "score": 102288.09652978664 }, { "content": "/// Copies the contents of `src` into `dst` (a slice of equal length) if `v == 1`.\n\n/// If `v == 0`, `dst` is left unchanged.\n\n///\n\n/// Its behavior is undefined if v takes any other value.\n\npub fn copy(v: isize, dst: &mut [u8], src: &[u8]) {\n\n assert_eq!(dst.len(), src.len(), \"slices have different lengths\");\n\n let xmask = ( (v - 1)) as u8;\n\n let ymask = (!(v - 1)) as u8;\n\n for i in 0..dst.len() {\n\n dst[i] = dst[i] & xmask | src[i] & ymask;\n\n }\n\n}\n", "file_path": "src/crypto/subtle.rs", "rank": 84, "score": 100695.49725195028 }, { "content": "pub fn seal(c: *mut u8, m: &[u8], ad: &[u8], npub: [u8; 8], k: &[u8; 32]) -> [u8; 16] {\n\n let mut block0 = [0u8; 64];\n\n ChaCha20::stream(&mut block0, npub, k);\n\n ChaCha20::stream_xor(c, m.as_ptr(), m.len() as u64, npub, 1, k);\n\n\n\n let mut poly1305 = Poly1305::with_key(&block0[..32]);\n\n poly1305.update(ad);\n\n poly1305.update_u64(ad.len() as u64);\n\n poly1305.update(unsafe { from_raw_parts(c, m.len()) });\n\n 
poly1305.update_u64(m.len() as u64);\n\n poly1305.finish()\n\n}\n\n\n", "file_path": "src/crypto/aead.rs", "rank": 85, "score": 100344.88552225835 }, { "content": "fn print_hex(out: &[u8]) {\n\n for c in out.iter() {\n\n print!(\"{:02x}\", c);\n\n }\n\n println!();\n\n}\n\n\n", "file_path": "tests/chacha20.rs", "rank": 86, "score": 99567.6703687014 }, { "content": "#[test]\n\nfn print_2() {\n\n let a: u32 = 0;\n\n let seq = Sequence(vec![Action(Inc), Action(Inc)]);\n\n let mut state = State::new(seq);\n\n let a = exec(a, 0.0, &mut state);\n\n assert_eq!(a, 2);\n\n}\n\n\n\n// If you wait the exact amount before to execute an action,\n\n// it will execute. This behavior makes it easy to predict\n\n// when an action will run.\n", "file_path": "examples/testbed/src/ai/btree/tests.rs", "rank": 87, "score": 98521.61572311343 }, { "content": "#[test]\n\nfn run() {\n\n let world = World::new();\n\n let world = Rc::new(RefCell::new(world));\n\n\n\n let tree = Tree::new(world.clone());\n\n let tree = Rc::new(RefCell::new(tree));\n\n\n\n let mut tree_generator = tree_behavior(tree.clone());\n\n let mut world_generator = world_behavior(world.clone());\n\n\n\n loop {\n\n {\n\n match unsafe { tree_generator.resume() } {\n\n GeneratorState::Yielded(_) => (), //println!(\"tree yield\"),\n\n GeneratorState::Complete(tree) => {\n\n tree_generator = tree_behavior(tree);\n\n },\n\n }\n\n }\n\n\n", "file_path": "examples/testbed/src/ai/_btree/tests.rs", "rank": 88, "score": 98521.61572311343 }, { "content": "#[test]\n\nfn when_all_wait() {\n\n let a: u32 = 0;\n\n let all = Sequence(vec![\n\n // Wait in parallel.\n\n WhenAll(vec![Wait(0.5), Wait(1.0)]),\n\n Action(Inc)\n\n ]);\n\n let mut state = State::new(all);\n\n let a = exec(a, 0.5, &mut state);\n\n assert_eq!(a, 0);\n\n let a = exec(a, 0.5, &mut state);\n\n assert_eq!(a, 1);\n\n}\n\n\n\n//#[ignore]\n", "file_path": "examples/testbed/src/ai/btree/tests.rs", "rank": 89, "score": 98521.61572311343 }, { "content": "func BenchmarkConstantTimeEq(b *testing.B) {\n\n var x, y int\n\n for i := 0; i < b.N; i++ {\n\n x, y = ConstantTimeEq(int32(x), int32(y)), x\n\n }\n\n benchmarkGlobal = uint8(x)\n\n}\n", "file_path": "src/crypto/subtle.rs", "rank": 90, "score": 97709.58103763685 }, { "content": "#[test]\n\nfn wait_sec() {\n\n let a: u32 = 0;\n\n let seq = Sequence(vec![Wait(1.0), Action(Inc)]);\n\n let mut state = State::new(seq);\n\n let a = exec(a, 1.0, &mut state);\n\n assert_eq!(a, 1);\n\n}\n\n\n\n// When we execute half the time and then the other half,\n\n// then the action should be executed.\n", "file_path": "examples/testbed/src/ai/btree/tests.rs", "rank": 91, "score": 96267.66547503408 }, { "content": "#[test]\n\nfn while_wait_sequence() {\n\n let mut a: u32 = 0;\n\n let w = While(Box::new(Wait(9.999999)), vec![\n\n Sequence(vec![\n\n Wait(0.5),\n\n Action(Inc),\n\n Wait(0.5),\n\n Action(Inc)\n\n ])\n\n ]);\n\n let mut state = State::new(w);\n\n for _ in 0..100 {\n\n a = exec(a, 0.1, &mut state);\n\n }\n\n // The last increment is never executed, because there is not enough time.\n\n assert_eq!(a, 19);\n\n}\n\n\n", "file_path": "examples/testbed/src/ai/btree/tests.rs", "rank": 92, "score": 96267.66547503408 }, { "content": "pub trait Steering<B: Boid> {\n\n fn steering(&mut self, boid: &B) -> Isometry2<f32>;\n\n}\n\n\n\npub struct AI {\n\n pub path: PathFollowing,\n\n pub wander: Wander,\n\n}\n\n\n\nimpl AI {\n\n pub fn new() -> Self {\n\n let path_radius = 0.2;\n\n Self {\n\n path: PathFollowing::new(vec![\n\n Target::new(-1.0, -1.5, path_radius),\n\n 
Target::new( 3.0, 1.5, path_radius),\n\n Target::new(-2.0, 1.5, path_radius),\n\n ]),\n\n\n\n wander: Wander::new(),\n", "file_path": "examples/testbed/src/ai/mod.rs", "rank": 93, "score": 95971.50224957969 }, { "content": "func BenchmarkConstantTimeLessOrEq(b *testing.B) {\n\n var x, y int\n\n for i := 0; i < b.N; i++ {\n\n x, y = ConstantTimeLessOrEq(x, y), x\n\n }\n\n benchmarkGlobal = uint8(x)\n\n}\n\n*/\n", "file_path": "src/crypto/subtle.rs", "rank": 94, "score": 95685.43742322897 }, { "content": "func BenchmarkConstantTimeByteEq(b *testing.B) {\n\n var x, y uint8\n\n for i := 0; i < b.N; i++ {\n\n x, y = uint8(ConstantTimeByteEq(x, y)), x\n\n }\n\n benchmarkGlobal = x\n\n}\n", "file_path": "src/crypto/subtle.rs", "rank": 95, "score": 95685.43742322897 }, { "content": "pub fn new_client(\n\n dispatcher: DispatcherBuilder<'static, 'static>,\n\n mut socket: oni::Client<Socket>,\n\n server: SocketAddr, is_ai: bool,\n\n) -> Demo\n\n{\n\n socket.connect(server).unwrap();\n\n //socket.send_client(Client::Start, server);\n\n\n\n let mut world = World::new();\n\n world.register::<Actor>();\n\n world.register::<NetMarker>();\n\n world.register::<StateBuffer>();\n\n world.register::<InterpolationMarker>();\n\n\n\n world.add_resource(Sequence::<u16>::default());\n\n\n\n world.add_resource(socket);\n\n world.add_resource(server);\n\n world.add_resource(Reconciliation::new());\n", "file_path": "examples/testbed/src/client/mod.rs", "rank": 96, "score": 94408.1353452742 }, { "content": "#[get(\"/\")]\n\nfn index(private: State<[u8; KEY]>, servers: State<Servers>) -> Html<String> {\n\n let servers: &Vec<SocketAddr> = &servers.read().unwrap();\n\n Html(format!(\"<!doctype html>\n\n<html>\n\n<head>\n\n</head>\n\n<body>\n\nHello, <strong>world!</strong>\n\n<h3>Servers:</h3>\n\n<pre>\n\n{:#?}\n\n</pre>\n\n</body>\n\n</html>\", servers))\n\n}\n\n\n", "file_path": "examples/relay/src/main.rs", "rank": 97, "score": 94255.5012365563 }, { "content": "#[test]\n\nfn wait_half_sec() {\n\n let a: u32 = 0;\n\n let seq = Sequence(vec![Wait(1.0), Action(Inc)]);\n\n let mut state = State::new(seq);\n\n let a = exec(a, 0.5, &mut state);\n\n assert_eq!(a, 0);\n\n let a = exec(a, 0.5, &mut state);\n\n assert_eq!(a, 1);\n\n}\n\n\n\n// A sequence of one event is like a bare event.\n", "file_path": "examples/testbed/src/ai/btree/tests.rs", "rank": 98, "score": 94153.42100267866 }, { "content": "#[test]\n\nfn sequence_of_one_event() {\n\n let a: u32 = 0;\n\n let seq = Sequence(vec![Action(Inc)]);\n\n let mut state = State::new(seq);\n\n let a = exec(a, 1.0, &mut state);\n\n assert_eq!(a, 1);\n\n}\n\n\n\n// A sequence of wait events is the same as one wait event.\n", "file_path": "examples/testbed/src/ai/btree/tests.rs", "rank": 99, "score": 94153.42100267866 } ]
Rust
src/main.rs
arlicle/panda-api-install
4adefe942fe58770f617e8331475eebc38217cea
use std::fs::{self, DirEntry, File, OpenOptions}; use std::io::{self, BufReader, Read, Write, Error}; use std::path::Path; use std::process::Command; use fs_extra::dir::{self, copy}; use fs_extra::{copy_items, remove_items}; #[cfg(windows)] use winapi; #[cfg(windows)] use winreg::enums::*; #[cfg(windows)] use winreg::{self, RegKey}; fn main() { pretty_env_logger::init(); let current_exe = &std::env::current_exe().unwrap(); let current_exe = Path::new(current_exe); let path = current_exe.parent().unwrap(); let current_dir = path.to_str().unwrap(); let split_s = if cfg!(target_os = "windows") { r"\" } else { "/" }; let home_dir = dirs::home_dir().unwrap(); let home_dir = home_dir.to_str().unwrap().trim_end_matches(split_s); let mut panda_dir_string = format!("{1}{0}.panda_api{0}", split_s, home_dir); let panda_dir = Path::new(&panda_dir_string); let panda_dir_string = panda_dir.to_str().unwrap(); if panda_dir.exists() { let mut from_paths = vec![&panda_dir_string]; let _r = remove_items(&from_paths); } match std::fs::create_dir_all(&panda_dir_string) { Ok(_) => (), Err(e) => { println!("create folder failed {} {:?}", &panda_dir_string, e); } } let options = dir::CopyOptions::new(); let install_files = if cfg!(target_os = "windows") { ["panda.exe", "theme"] } else { ["panda", "theme"] }; let mut from_paths: Vec<String> = Vec::new(); for file in &install_files { from_paths.push(format!("{1}{0}Contents{0}{2}", split_s, current_dir, file)); } match copy_items(&from_paths, &panda_dir_string, &options) { Ok(r) => { println!("Copy files done."); } Err(e) => { println!("Copy files failed, install failed"); log::error!("Copy files failed, install failed"); return; } } let success_msg = "Congratulations!\nPanda api install done!\nYou can run pana command in your api docs folder now."; if cfg!(target_os = "windows") { #[cfg(windows)] { let hklm = RegKey::predef(HKEY_CURRENT_USER); let cur_ver = hklm.open_subkey("Environment").unwrap_or_else(|e| match e.kind() { io::ErrorKind::NotFound => panic!("Key doesn't exist"), io::ErrorKind::PermissionDenied => panic!("Access denied"), _ => panic!("{:?}", e), }); let (reg_key, disp) = hklm.create_subkey("Environment").unwrap(); let user_envs: String = if let Ok(p) = cur_ver.get_value("Path") { p } else { "".to_string() }; let mut user_envs = user_envs.trim().trim_end_matches(";"); let panda_dir_string = panda_dir_string.trim_end_matches(split_s); if user_envs.contains(panda_dir_string) { } else { let s = format!("{};{};", user_envs, panda_dir_string); match reg_key.set_value("Path", &s) { Ok(r) => { println!("reg ok"); } Err(e) => { println!("reg failed"); } } } } } else { let output = Command::new("sh") .arg("-c") .arg("echo $SHELL") .output() .expect("failed to execute process"); let shell_name = String::from_utf8(output.stdout).unwrap(); let shell_name = shell_name .trim() .trim_start_matches("/") .trim_start_matches("/"); let mut profile_name = "".to_string(); let shell_name_info: Vec<&str> = shell_name.split("/").collect(); if let Some(shell_name) = shell_name_info.last() { profile_name = format!(".{}rc", shell_name); } let profile_filepath_string = format!("{}/{}", home_dir, profile_name); let profile_filepath = Path::new(&profile_filepath_string); let profile_content = r#"export PATH="$HOME/.panda_api:$PATH""#; let mut has_profile_content = false; if profile_filepath.exists() { let mut content = fs::read_to_string(&profile_filepath_string) .expect(&format!("failed to read file {}", &profile_filepath_string)); if content.contains(profile_content) { 
has_profile_content = true; } } if !has_profile_content { let mut file_options = OpenOptions::new() .read(true) .write(true) .create(true) .append(true) .open(&profile_filepath_string); match file_options { Ok(mut file) => { let new_content = format!("{}\n", profile_content); file.write_all(new_content.as_bytes()).expect(&format!( "failed to write data to file {}", &profile_filepath_string )); } Err(e) => { panic!("{:?}", e); } } } let profile_list = [".zshrc", ".bashrc", ".cshrc"]; for profile_file in &profile_list { let profile_filepath_string = format!("{}/{}", home_dir, profile_file); let profile_filepath = Path::new(&profile_filepath_string); if profile_filepath.exists() { let mut content = fs::read_to_string(&profile_filepath_string) .expect(&format!("failed to read file {}", &profile_filepath_string)); if content.contains(profile_content) { continue; } else { let mut file_options = OpenOptions::new() .read(true) .write(true) .append(true) .open(&profile_filepath_string); match file_options { Ok(mut file) => { let new_content = format!("{}\n", profile_content); file.write_all(new_content.as_bytes()).expect(&format!( "failed to write data to file {}", &profile_filepath_string )); } Err(e) => { println!("不存在 {} {:?}", profile_filepath_string, e); continue; } } } } } } println!("{}", success_msg); } fn fix_filepath(filepath: String) -> String { filepath .replace("(", r"\(") .replace(")", r"\)") .replace(" ", r"\ ") }
use std::fs::{self, DirEntry, File, OpenOptions}; use std::io::{self, BufReader, Read, Write, Error}; use std::path::Path; use std::process::Command; use fs_extra::dir::
; if panda_dir.exists() { let mut from_paths = vec![&panda_dir_string]; let _r = remove_items(&from_paths); } match std::fs::create_dir_all(&panda_dir_string) { Ok(_) => (), Err(e) => { println!("create folder failed {} {:?}", &panda_dir_string, e); } } let options = dir::CopyOptions::new(); let install_files = if cfg!(target_os = "windows") { ["panda.exe", "theme"] } else { ["panda", "theme"] }; let mut from_paths: Vec<String> = Vec::new(); for file in &install_files { from_paths.push(format!("{1}{0}Contents{0}{2}", split_s, current_dir, file)); } match copy_items(&from_paths, &panda_dir_string, &options) { Ok(r) => { println!("Copy files done."); } Err(e) => { println!("Copy files failed, install failed"); log::error!("Copy files failed, install failed"); return; } } let success_msg = "Congratulations!\nPanda api install done!\nYou can run pana command in your api docs folder now."; if cfg!(target_os = "windows") { #[cfg(windows)] { let hklm = RegKey::predef(HKEY_CURRENT_USER); let cur_ver = hklm.open_subkey("Environment").unwrap_or_else(|e| match e.kind() { io::ErrorKind::NotFound => panic!("Key doesn't exist"), io::ErrorKind::PermissionDenied => panic!("Access denied"), _ => panic!("{:?}", e), }); let (reg_key, disp) = hklm.create_subkey("Environment").unwrap(); let user_envs: String = if let Ok(p) = cur_ver.get_value("Path") { p } else { "".to_string() }; let mut user_envs = user_envs.trim().trim_end_matches(";"); let panda_dir_string = panda_dir_string.trim_end_matches(split_s); if user_envs.contains(panda_dir_string) { } else { let s = format!("{};{};", user_envs, panda_dir_string); match reg_key.set_value("Path", &s) { Ok(r) => { println!("reg ok"); } Err(e) => { println!("reg failed"); } } } } } else { let output = Command::new("sh") .arg("-c") .arg("echo $SHELL") .output() .expect("failed to execute process"); let shell_name = String::from_utf8(output.stdout).unwrap(); let shell_name = shell_name .trim() .trim_start_matches("/") .trim_start_matches("/"); let mut profile_name = "".to_string(); let shell_name_info: Vec<&str> = shell_name.split("/").collect(); if let Some(shell_name) = shell_name_info.last() { profile_name = format!(".{}rc", shell_name); } let profile_filepath_string = format!("{}/{}", home_dir, profile_name); let profile_filepath = Path::new(&profile_filepath_string); let profile_content = r#"export PATH="$HOME/.panda_api:$PATH""#; let mut has_profile_content = false; if profile_filepath.exists() { let mut content = fs::read_to_string(&profile_filepath_string) .expect(&format!("failed to read file {}", &profile_filepath_string)); if content.contains(profile_content) { has_profile_content = true; } } if !has_profile_content { let mut file_options = OpenOptions::new() .read(true) .write(true) .create(true) .append(true) .open(&profile_filepath_string); match file_options { Ok(mut file) => { let new_content = format!("{}\n", profile_content); file.write_all(new_content.as_bytes()).expect(&format!( "failed to write data to file {}", &profile_filepath_string )); } Err(e) => { panic!("{:?}", e); } } } let profile_list = [".zshrc", ".bashrc", ".cshrc"]; for profile_file in &profile_list { let profile_filepath_string = format!("{}/{}", home_dir, profile_file); let profile_filepath = Path::new(&profile_filepath_string); if profile_filepath.exists() { let mut content = fs::read_to_string(&profile_filepath_string) .expect(&format!("failed to read file {}", &profile_filepath_string)); if content.contains(profile_content) { continue; } else { let mut file_options = 
OpenOptions::new() .read(true) .write(true) .append(true) .open(&profile_filepath_string); match file_options { Ok(mut file) => { let new_content = format!("{}\n", profile_content); file.write_all(new_content.as_bytes()).expect(&format!( "failed to write data to file {}", &profile_filepath_string )); } Err(e) => { println!("不存在 {} {:?}", profile_filepath_string, e); continue; } } } } } } println!("{}", success_msg); } fn fix_filepath(filepath: String) -> String { filepath .replace("(", r"\(") .replace(")", r"\)") .replace(" ", r"\ ") }
{self, copy}; use fs_extra::{copy_items, remove_items}; #[cfg(windows)] use winapi; #[cfg(windows)] use winreg::enums::*; #[cfg(windows)] use winreg::{self, RegKey}; fn main() { pretty_env_logger::init(); let current_exe = &std::env::current_exe().unwrap(); let current_exe = Path::new(current_exe); let path = current_exe.parent().unwrap(); let current_dir = path.to_str().unwrap(); let split_s = if cfg!(target_os = "windows") { r"\" } else { "/" }; let home_dir = dirs::home_dir().unwrap(); let home_dir = home_dir.to_str().unwrap().trim_end_matches(split_s); let mut panda_dir_string = format!("{1}{0}.panda_api{0}", split_s, home_dir); let panda_dir = Path::new(&panda_dir_string); let panda_dir_string = panda_dir.to_str().unwrap()
random
[]
Rust
rust/src/bin/frontend.rs
emwalker/digraffe
0ea46c938155204739cae2ddc4c668b7d7d85acd
use actix_web::{guard, post, web, App, HttpRequest, HttpResponse, HttpServer}; use async_graphql::extensions; use async_graphql::http::{playground_source, GraphQLPlaygroundConfig}; use async_graphql::EmptySubscription; use async_graphql_actix_web::{GraphQLRequest, GraphQLResponse}; use std::env; use digraph::config::Config; use digraph::db; use digraph::prelude::*; use digraph::schema::{MutationRoot, QueryRoot, Schema, State}; struct AuthHeader(String); impl AuthHeader { fn decode(&self) -> Result<(String, String)> { let encoded = self.0.split(' ').last().unwrap_or_default(); let decoded = base64::decode(&encoded)?; let decoded = String::from_utf8_lossy(&decoded); let parts = decoded .split(':') .map(str::to_string) .collect::<Vec<String>>(); if parts.len() != 2 { return Err(Error::Auth(format!("unexpected message: {}", self.0))); } Ok((parts[0].clone(), parts[1].clone())) } } fn user_id_from_header(req: HttpRequest) -> Option<(String, String)> { match req.headers().get("authorization") { Some(value) => match value.to_str() { Ok(value) => match AuthHeader(value.into()).decode() { Ok((user_id, session_id)) => { log::info!("user and session id found in auth header: {}", user_id); Some((user_id, session_id)) } Err(err) => { log::info!("failed to decode auth header, proceeding as guest: {}", err); None } }, Err(err) => { log::warn!("problem fetching authorization header value: {}", err); None } }, None => { log::warn!("no authorization header, proceeding as guest"); None } } } #[post("/graphql")] async fn index( state: web::Data<State>, req: GraphQLRequest, http_req: HttpRequest, ) -> GraphQLResponse { let user_info = user_id_from_header(http_req); let viewer = state.authenticate(user_info).await; let repo = state.create_repo(viewer); state .schema .execute(req.into_inner().data(repo)) .await .into() } async fn index_playground() -> Result<HttpResponse> { Ok(HttpResponse::Ok() .content_type("text/html; charset=utf-8") .body(playground_source( GraphQLPlaygroundConfig::new("/graphql").subscription_endpoint("/graphql"), ))) } #[actix_web::main] async fn main() -> async_graphql::Result<()> { let config = Config::load()?; env_logger::init(); let pool = db::db_connection(&config).await?; sqlx::migrate!("db/migrations").run(&pool).await?; let schema = Schema::build(QueryRoot, MutationRoot, EmptySubscription) .extension(extensions::Logger) .finish(); let state = State::new(pool, schema, config.digraph_server_secret); let socket = env::var("LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_owned()); println!("Playground: http://localhost:8080"); HttpServer::new(move || { App::new() .app_data(web::Data::new(state.clone())) .service(index) .service( web::resource("/graphql") .guard(guard::Get()) .to(index_playground), ) .service(web::resource("/").guard(guard::Get()).to(index_playground)) }) .bind(socket)? .run() .await?; Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_auth_header_parsing() { let auth = AuthHeader("Bearer NDYxYzg3YzgtZmI4Zi0xMWU4LTljYmMtYWZkZTZjNTRkODgxOmFiM2Q1MTYwYWFlNjMyYTUxNzNjMDVmOGNiMGVmMDg2ODY2ZGFkMTAzNTE3ZGQwMTRmMzhhNWIxY2E2OWI5YWE=".into()); let (user_id, session_id) = auth.decode().unwrap(); assert_eq!(user_id, "461c87c8-fb8f-11e8-9cbc-afde6c54d881"); assert_eq!( session_id, "ab3d5160aae632a5173c05f8cb0ef086866dad103517dd014f38a5b1ca69b9aa" ); } }
use actix_web::{guard, post, web, App, HttpRequest, HttpResponse, HttpServer}; use async_graphql::extensions; use async_graphql::http::{playground_source, GraphQLPlaygroundConfig}; use async_graphql::EmptySubscription; use async_graphql_actix_web::{GraphQLRequest, GraphQLResponse}; use std::env; use digraph::config::Config; use digraph::db; use digraph::prelude::*; use digraph::schema::{MutationRoot, QueryRoot, Schema, State}; struct AuthHeader(String); impl AuthHeader { fn decode(&self) -> Result<(String, String)> { let encoded = self.0.split(' ').last().unwrap_or_default(); let decoded = base64::decode(&encoded)?; let decoded = String::from_utf8_lossy(&decoded); let parts = decoded .split(':') .map(str::to_string) .collect::<Vec<String>>(); if parts.len() != 2 { return Err(Error::Auth(format!("unexpected message: {}", self.0))); } Ok((parts[0].clone(), parts[1].clone())) } } fn user_id_from_header(req: HttpRequest) -> Option<(String, String)> { match req.headers().get("authorization") { Some(value) => match value.to_str() { Ok(value) => match AuthHeader(value.into()).decode() { Ok((user_id, session_id)) => { log::info!("user and session id found in auth header: {}", user_id); Some((user_id, session_id)) } Err(err) => { log::info!("failed to decode auth header, proceeding as guest: {}", err); None } }, Err(err) => { log::warn!("problem fetching authorization header value: {}", err); None } }, None => { log::warn!("no authorization header, proceeding as guest"); None } } } #[post("/graphql")] async fn index( state: web::Data<State>, req: GraphQLRequest, http_req: HttpRequest, ) -> GraphQLResponse { let user_info = user_id_from_header(http_req); let viewer = state.authenticate(user_info).await; let repo = state.create_repo(viewer); state .schema .execute(req.into_inner().data(repo)) .await .into() } async fn index_playground() -> Result<HttpResponse> { Ok(HttpResponse::Ok() .content_type("text/html; charset=utf-8") .body(playground_source( GraphQLPlaygroundConfig::new("/graphql").subscription_endpoint("/graphql"), ))) } #[actix_web::main] async fn main() -> async_graphql::Result<()> { let config = Config::load()?; env_logger::init(); let pool = db::db_connection(&config).await?; sqlx::migrate!("db/migrations").run(&pool).await?; let schema = Schema::build(QueryRoot, MutationRoot, EmptySubscription) .exte
80"); HttpServer::new(move || { App::new() .app_data(web::Data::new(state.clone())) .service(index) .service( web::resource("/graphql") .guard(guard::Get()) .to(index_playground), ) .service(web::resource("/").guard(guard::Get()).to(index_playground)) }) .bind(socket)? .run() .await?; Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_auth_header_parsing() { let auth = AuthHeader("Bearer NDYxYzg3YzgtZmI4Zi0xMWU4LTljYmMtYWZkZTZjNTRkODgxOmFiM2Q1MTYwYWFlNjMyYTUxNzNjMDVmOGNiMGVmMDg2ODY2ZGFkMTAzNTE3ZGQwMTRmMzhhNWIxY2E2OWI5YWE=".into()); let (user_id, session_id) = auth.decode().unwrap(); assert_eq!(user_id, "461c87c8-fb8f-11e8-9cbc-afde6c54d881"); assert_eq!( session_id, "ab3d5160aae632a5173c05f8cb0ef086866dad103517dd014f38a5b1ca69b9aa" ); } }
nsion(extensions::Logger) .finish(); let state = State::new(pool, schema, config.digraph_server_secret); let socket = env::var("LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_owned()); println!("Playground: http://localhost:80
function_block-random_span
[ { "content": "fn sha1_digest(normalized: &[u8]) -> String {\n\n let hash = Sha1::digest(normalized);\n\n format!(\"{:x}\", hash)\n\n}\n\n\n", "file_path": "rust/src/http/repo_url.rs", "rank": 0, "score": 157224.1471751658 }, { "content": "pub fn warning(text: String) -> Alert {\n\n Alert {\n\n text,\n\n alert_type: AlertType::Warn,\n\n id: String::from(\"0\"),\n\n }\n\n}\n", "file_path": "rust/src/schema/alert.rs", "rank": 1, "score": 152778.72739734495 }, { "content": "pub fn success(text: String) -> Alert {\n\n Alert {\n\n text,\n\n alert_type: AlertType::Success,\n\n id: String::from(\"0\"),\n\n }\n\n}\n\n\n", "file_path": "rust/src/schema/alert.rs", "rank": 2, "score": 152778.72739734495 }, { "content": "CREATE INDEX user_link_reviews_user_idx ON public.user_link_reviews USING btree (user_id, reviewed_at);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 3, "score": 149289.2271471636 }, { "content": "fn make_filter<'r>(host: Option<&str>) -> impl Fn(&Pair<'r>) -> bool + '_ {\n\n move |p: &Pair<'r>| match host {\n\n Some(\"abcnews.go.com\") => p.0 == \"id\",\n\n Some(\"khpg.org\") => p.0 == \"id\",\n\n Some(\"news.ycombinator.com\") => p.0 == \"id\",\n\n Some(\"newscenter.sdsu.edu\") => p.0 == \"sid\",\n\n Some(\"scholarworks.umass.edu\") => p.0 == \"article\" || p.0 == \"context\",\n\n Some(\"www.baylor.edu\") => p.0 == \"action\" || p.0 == \"story\",\n\n Some(\"www.c-span.org\") => true,\n\n Some(\"www.dur.ac.uk\") => p.0 == \"itemno\",\n\n Some(\"www.facebook.com\") => p.0 == \"__xts__[0]\" || p.0 == \"v\",\n\n Some(\"www.greenbeltmd.gov\") => p.0 == \"id\",\n\n Some(\"www.koreaherald.com\") => p.0 == \"ud\",\n\n Some(\"www.lenr-forum.com\") => p.0 == \"pageNo\",\n\n Some(\"www.nzherald.co.nz\") => p.0 == \"objectid\",\n\n Some(\"www.sourcewatch.org\") => p.0 == \"title\",\n\n Some(\"www.urbandictionary.com\") => p.0 == \"term\",\n\n Some(\"www.youtube.com\") => p.0 == \"v\" || p.0 == \"t\",\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "rust/src/http/repo_url.rs", "rank": 4, "score": 147912.2339694319 }, { "content": "CREATE INDEX topics_links_child_parent_idx ON public.link_topics USING btree (child_id, parent_id);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 5, "score": 138868.87896180176 }, { "content": "CREATE INDEX topics_topics_child_parent_idx ON public.topic_topics USING btree (child_id, parent_id);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 6, "score": 138868.87896180176 }, { "content": "--\n\n-- Name: sessions sessions_session_id_idx; Type: CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 7, "score": 134699.4841938034 }, { "content": "--\n\n-- Name: sessions sessions_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 8, "score": 125573.76440337137 }, { "content": "--\n\n-- Name: user_links_created_at_index; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 9, "score": 124219.81966284543 }, { "content": "CREATE INDEX user_links_created_at_index ON public.user_links USING btree (created_at);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 10, "score": 121093.25947034999 }, { "content": "let app = require('./server').default\n", "file_path": "javascript/src/index.ts", "rank": 11, "score": 114933.54967192933 }, { "content": "--\n\n-- Name: links_title_idx; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 12, 
"score": 114609.8052745128 }, { "content": "--\n\n-- Name: links_to_tsvector_idx; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 13, "score": 114609.8052745128 }, { "content": "--\n\n-- Name: users_email_key; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 14, "score": 114609.8052745128 }, { "content": "--\n\n-- Name: topics_synonyms1_idx; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 15, "score": 114609.8052745128 }, { "content": "CREATE INDEX links_title_idx ON public.links USING btree (title);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 16, "score": 114586.70371403043 }, { "content": "--\n\n-- Name: links_url_to_trgm_idx; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 17, "score": 113110.69013618882 }, { "content": "CREATE INDEX users_email_key ON public.users USING btree (primary_email);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 18, "score": 112199.33529237387 }, { "content": "--\n\n-- Name: topics_links_child_parent_idx; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 19, "score": 111676.85468417092 }, { "content": "--\n\n-- Name: topics_topics_child_parent_idx; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 20, "score": 111676.85468417092 }, { "content": "--\n\n-- Name: user_link_reviews_user_idx; Type: INDEX; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 21, "score": 111676.85468417092 }, { "content": "--\n\n-- Name: link_transitive_closure id; Type: DEFAULT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 22, "score": 110449.51608923332 }, { "content": "--\n\n-- Name: topic_transitive_closure id; Type: DEFAULT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 23, "score": 110449.51608923332 }, { "content": "CREATE INDEX topics_synonyms1_idx ON public.topics USING gin (synonyms jsonb_path_ops);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 24, "score": 106402.18927732413 }, { "content": "CREATE INDEX links_url_to_trgm_idx ON public.links USING gin (url public.gin_trgm_ops);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 25, "score": 102398.75759695753 }, { "content": "CREATE INDEX links_to_tsvector_idx ON public.links USING gin (to_tsvector('public.linksdict'::regconfig, title));\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 26, "score": 102042.20852041872 }, { "content": "--\n\n-- Name: sessions sessions_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 27, "score": 101780.29165814587 }, { "content": "CREATE FUNCTION public.topic_down_set(topic_id uuid) RETURNS TABLE(parent_id uuid, child_id uuid)\n\n LANGUAGE plpgsql\n\n AS $$\n\nbegin\n\n return query\n\n with recursive\n\n child_topics as (\n\n select topic_id as parent_id, topic_id as child_id\n\n union\n\n select pt.child_id, ct.child_id\n\n from topic_topics ct\n\n inner join child_topics pt on pt.child_id = ct.parent_id\n\n )\n\n select topic_id, ct.child_id from child_topics ct;\n\nend;\n\n$$;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 28, "score": 96330.1455197985 }, { "content": "CREATE FUNCTION public.link_down_set(topic_id uuid) RETURNS TABLE(parent_id 
uuid, child_id uuid)\n\n LANGUAGE plpgsql\n\n AS $$\n\nbegin\n\n return query\n\n select topic_id, lt.child_id\n\n from topic_down_set(topic_id) ct\n\n inner join link_topics lt on lt.parent_id = ct.child_id;\n\nend;\n\n$$;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 29, "score": 96330.1455197985 }, { "content": "--\n\n-- Name: sessions; Type: TABLE; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 30, "score": 95200.25543115987 }, { "content": "CREATE FUNCTION public.topic_upper_set(topic_id uuid) RETURNS TABLE(parent_id uuid, child_id uuid)\n\n LANGUAGE plpgsql\n\n AS $$\n\nbegin\n\n return query\n\n with recursive\n\n parent_topics as (\n\n select topic_id as parent_id, topic_id as child_id\n\n union\n\n select pt.parent_id, ct.child_id\n\n from topic_topics pt\n\n inner join parent_topics ct on pt.child_id = ct.parent_id\n\n )\n\n select pt.parent_id, topic_id from parent_topics pt;\n\nend;\n\n$$;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 31, "score": 94609.56482798058 }, { "content": "CREATE FUNCTION public.add_topic_to_link(topic_id uuid, link_id uuid) RETURNS void\n\n LANGUAGE plpgsql\n\n AS $$\n\nbegin\n\n insert into link_topics (parent_id, child_id)\n\n values (topic_id, link_id)\n\n on conflict do nothing;\n\n insert into link_transitive_closure (parent_id, child_id)\n\n select us.parent_id, link_id\n\n from topic_upper_set(topic_id) us\n\n on conflict do nothing;\n\nend;\n\n$$;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 32, "score": 93995.65300334131 }, { "content": "pub fn conn<N: OutputType>(\n\n after: Option<String>,\n\n before: Option<String>,\n\n first: Option<i32>,\n\n last: Option<i32>,\n\n results: Vec<N>,\n\n) -> Result<Connection<String, N, EmptyFields, EmptyFields, DefaultConnectionName, DefaultEdgeName>>\n\n{\n\n let result = query(\n\n after,\n\n before,\n\n first,\n\n last,\n\n |_after, _before, _first, _last| async move {\n\n let mut connection = Connection::new(false, false);\n\n connection.edges.extend(\n\n results\n\n .into_iter()\n\n .map(|n| Edge::with_additional_fields(String::from(\"0\"), n, EmptyFields)),\n\n );\n\n Ok::<_, Error>(connection)\n\n },\n\n );\n\n executor::block_on(result).map_err(Error::Resolver)\n\n}\n", "file_path": "rust/src/schema/relay.rs", "rank": 33, "score": 93845.30229747132 }, { "content": "--\n\n-- Name: user_link_reviews user_link_reviews_user_id_link_id_key; Type: CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 34, "score": 91168.08702386111 }, { "content": "CREATE FUNCTION public.add_topic_to_topic(initial_parent_id uuid, initial_child_id uuid) RETURNS void\n\n LANGUAGE plpgsql\n\n AS $$\n\nbegin\n\n -- Add the new relationship\n\n insert into topic_topics (parent_id, child_id)\n\n values (initial_parent_id, initial_child_id)\n\n on conflict do nothing;\n\n -- Update the topic upward set of the child topic\n\n insert into topic_transitive_closure (parent_id, child_id)\n\n select us.parent_id, us.child_id\n\n from topic_upper_set(initial_child_id) us\n\n on conflict do nothing;\n\n -- Add the link down set of the child topic to the new parent topic\n\n insert into link_transitive_closure (parent_id, child_id)\n\n select initial_parent_id, ds.child_id\n\n from link_down_set(initial_child_id) ds\n\n on conflict do nothing;\n\nend;\n\n$$;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 35, "score": 90200.70521086031 }, { "content": "CREATE FUNCTION public.upsert_link_down_set(topic_id uuid) 
RETURNS void\n\n LANGUAGE plpgsql\n\n AS $$\n\nbegin\n\n insert into link_transitive_closure (parent_id, child_id)\n\n select * from link_down_set(topic_id)\n\n on conflict do nothing;\n\nend;\n\n$$;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 36, "score": 89266.28759587042 }, { "content": "CREATE FUNCTION public.upsert_topic_down_set(topic_id uuid) RETURNS void\n\n LANGUAGE plpgsql\n\n AS $$\n\nbegin\n\n insert into topic_transitive_closure (parent_id, child_id)\n\n select topic_id, child_id from topic_down_set(topic_id)\n\n on conflict do nothing;\n\nend;\n\n$$;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 37, "score": 89266.28759587042 }, { "content": "const config = {\n\n resolve: {\n\n extensions: ['.mjs', '.js', '.jsx', '.ts', '.tsx', '.css', '.scss'],\n\n\n\n alias: {\n\n components: path.resolve('src/components'),\n\n mutations: path.resolve('src/mutations'),\n\n utils: path.resolve('src/utils'),\n\n },\n\n\n\n },\n\n\n\n module: {\n\n rules: [\n\n {\n\n test: /\\.(gif|jpe?g|png|ico)$/,\n\n loader: 'url-loader?limit=10000'\n\n }\n\n ],\n\n\n\n modules: [\n\n 'node_modules',\n\n path.resolve(__dirname, 'public'),\n\n ]\n\n },\n\n\n\n plugins: [\n\n new CopyWebpackPlugin({\n\n patterns: [\n\n { from: 'javascript/public' }\n\n ],\n\n }),\n\n ],\n", "file_path": "javascript/webpack.config.js", "rank": 38, "score": 88939.00222508062 }, { "content": "--\n\n-- Name: topic_transitive_closure_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 39, "score": 88726.70268935297 }, { "content": "--\n\n-- Name: link_transitive_closure_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 40, "score": 88726.70268935297 }, { "content": "fn parse_url(input: &str) -> Result<url::Url> {\n\n let url = url::Url::parse(input)?;\n\n let host = url.host_str();\n\n let filter = make_filter(host);\n\n let query: Vec<(_, _)> = url.query_pairs().filter(filter).collect();\n\n\n\n let mut url2 = url.clone();\n\n url2.set_query(None);\n\n\n\n for pair in query {\n\n url2.query_pairs_mut()\n\n .append_pair(&pair.0.to_string()[..], &pair.1.to_string()[..]);\n\n }\n\n\n\n match host {\n\n Some(\"mail.google.com\") => {}\n\n _ => url2.set_fragment(None),\n\n }\n\n\n\n Ok(url2)\n", "file_path": "rust/src/http/repo_url.rs", "rank": 41, "score": 87634.38222020915 }, { "content": "--\n\n-- Name: link_transitive_closure_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 42, "score": 84865.45698072629 }, { "content": "--\n\n-- Name: topics topics_timerange_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 43, "score": 84865.45698072629 }, { "content": "--\n\n-- Name: repositories repositories_owner_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 44, "score": 84865.45698072629 }, { "content": "--\n\n-- Name: links links_organization_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 45, "score": 84865.45698072629 }, { "content": "--\n\n-- Name: repositories repositories_organization_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 46, "score": 84865.45698072629 }, { "content": "--\n\n-- Name: topic_transitive_closure_id_seq; Type: SEQUENCE 
OWNED BY; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 47, "score": 84865.45698072629 }, { "content": "--\n\n-- Name: topics topics_organization_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 48, "score": 84865.45698072629 }, { "content": "--\n\n-- Name: user_links user_links_link_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 49, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: user_links user_links_repository_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 50, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: topic_topics topic_topics_child_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 51, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: google_accounts google_accounts_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 52, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: link_topics link_topics_child_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 53, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: github_accounts github_accounts_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 54, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: user_links user_links_organization_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 55, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: organization_members organization_members_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 56, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: topic_topics topics_topics_parent_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 57, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: organization_members organization_members_organization_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 58, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: link_topics link_topics_parent_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 59, "score": 82290.61932716111 }, { "content": "--\n\n-- Name: user_links user_links_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 60, "score": 82290.61932716111 }, { "content": "import passport from 'passport'\n\nimport { Express } from 'express'\n\n\n\nconst registerEndpointsFn = (provider: string) => (app: Express) => {\n\n app.get(`/auth/${provider}`, passport.authenticate(provider))\n\n\n\n app.get(\n\n `/auth/${provider}/callback`,\n\n passport.authenticate(provider, { failureRedirect: '/login' }),\n\n async (req, res) => {\n\n // eslint-disable-next-line no-console\n\n console.log(`Auth with ${provider} succeeded, redirecting to /`)\n\n res.redirect('/')\n\n },\n\n )\n\n\n\n return app\n\n}\n\n\n\nexport default registerEndpointsFn\n", "file_path": 
"javascript/src/server/auth/registerEndpointsFn.ts", "rank": 61, "score": 81785.6498000131 }, { "content": "--\n\n-- Name: user_link_reviews user_link_reviews_link_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 62, "score": 79916.4628811689 }, { "content": "--\n\n-- Name: topic_transitive_closure topic_transitive_closure_child_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 63, "score": 79916.4628811689 }, { "content": "--\n\n-- Name: topic_transitive_closure topic_transitive_closure_parent_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 64, "score": 79916.4628811689 }, { "content": "--\n\n-- Name: user_link_reviews user_link_reviews_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 65, "score": 79916.4628811689 }, { "content": "--\n\n-- Name: user_link_topics user_link_topics_topic_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 66, "score": 79916.4628811689 }, { "content": "--\n\n-- Name: link_transitive_closure link_transitive_closure_parent_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 67, "score": 79916.4628811689 }, { "content": "--\n\n-- Name: link_transitive_closure link_transitive_closure_child_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 68, "score": 79916.4628811689 }, { "content": "--\n\n-- Name: user_link_topics user_link_topics_user_link_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 69, "score": 78797.25704126028 }, { "content": "ALTER TABLE ONLY public.link_transitive_closure ALTER COLUMN id SET DEFAULT nextval('public.link_transitive_closure_id_seq'::regclass);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 70, "score": 77695.20654617975 }, { "content": "ALTER TABLE ONLY public.topic_transitive_closure ALTER COLUMN id SET DEFAULT nextval('public.topic_transitive_closure_id_seq'::regclass);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 71, "score": 77695.20654617975 }, { "content": "use serde::Deserialize;\n\n\n\nuse crate::prelude::*;\n\n\n\n#[derive(Clone, Debug, Deserialize, SimpleObject)]\n\npub struct Session {\n\n pub id: String,\n\n}\n\n\n\n#[derive(Debug, SimpleObject)]\n\npub struct SessionEdge {\n\n pub cursor: String,\n\n pub node: Session,\n\n}\n", "file_path": "rust/src/schema/session.rs", "rank": 72, "score": 76484.39081038347 }, { "content": "const onAuthSuccessFn = (environment: Environment) => async (\n\n _arg0: any, _arg1: any, profile: IProfile, done: Done,\n\n) => {\n\n console.log('GitHub login succeeded, getting viewer id')\n\n\n\n // eslint-disable-next-line @typescript-eslint/naming-convention\n\n const { displayName, emails, username, _json: { avatar_url } } = profile\n\n const email = primaryOrFirstEmail(emails || [])\n\n console.log(`User ${email || '(no email)'} logging in`)\n\n if (!email || !username) return\n\n\n\n const input: Input = {\n\n githubAvatarUrl: avatar_url,\n\n githubUsername: username,\n\n name: displayName || 'Nemo',\n\n primaryEmail: email,\n\n serverSecret: process.env.DIGRAPH_SERVER_SECRET || 'keyboard cat',\n\n }\n\n\n\n 
createGithubSessionMutation(environment, input, {\n\n onCompleted(payload: Response) {\n\n if (!payload.createGithubSession) {\n\n console.log('createGithubSession field missing from response:', payload)\n\n done(null, null)\n\n return\n\n }\n\n\n\n const { createGithubSession } = payload\n\n const userEdge = createGithubSession?.userEdge\n\n const sessionEdge = createGithubSession?.sessionEdge\n\n console.log('User fetched from api, saving to session', userEdge, sessionEdge)\n\n const id = userEdge && userEdge.node && userEdge.node.id\n\n const sessionId = sessionEdge?.node?.id\n\n done(null, { id, sessionId })\n\n },\n\n\n\n onError(error: Error) {\n\n console.log('Something happened:', error)\n\n done(null, null)\n\n },\n\n })\n", "file_path": "javascript/src/server/auth/withGithub.ts", "rank": 73, "score": 76442.65927067916 }, { "content": "class FlashMessages extends Component<Props, State> {\n\n constructor(props: Props) {\n\n super(props)\n\n this.state = {\n\n messages: props.initialAlerts || [],\n\n }\n\n // @ts-ignore\n\n window.flashMessages = this\n\n }\n\n\n\n get alerts() {\n\n return this.state.messages.map((message) => (\n\n <Alert\n\n key={message.id}\n\n message={message}\n\n onClose={() => this.removeMessage(message)}\n\n />\n\n ))\n\n }\n\n\n\n removeMessage = (message: AlertPayload) => {\n\n const index = this.state.messages.indexOf(message)\n\n this.setState((prevState) => ({\n\n messages: update(prevState.messages, { $splice: [[index, 1]] }),\n\n }))\n\n }\n\n\n\n addMessage = (message: AlertPayload) => {\n\n this.setState((prevState) => ({\n\n messages: update(prevState.messages, { $push: [message] }),\n\n }))\n\n }\n\n\n\n render = () => {\n\n const { alerts } = this\n\n\n\n if (alerts.length === 0) return null\n\n\n\n return (\n\n <div className=\"container-lg clearfix my-2 px-3 px-md-6 px-lg-3\">\n\n { alerts }\n\n </div>\n\n )\n\n }\n", "file_path": "javascript/src/components/FlashMessages/index.tsx", "rank": 74, "score": 76235.328552181 }, { "content": " async fetch(request: RequestParameters, variables: Variables) {\n\n const { headers } = this\n\n\n\n const response = await fetch(this.url, {\n\n method: 'POST',\n\n headers,\n\n body: JSON.stringify({ query: request.text, variables }),\n\n credentials: 'include',\n\n })\n\n\n\n return response.json()\n", "file_path": "javascript/src/FetcherBase.ts", "rank": 75, "score": 75367.62861248072 }, { "content": " onCompleted(payload: Response) {\n\n if (!payload.createGithubSession) {\n\n console.log('createGithubSession field missing from response:', payload)\n\n done(null, null)\n\n return\n\n }\n\n\n\n const { createGithubSession } = payload\n\n const userEdge = createGithubSession?.userEdge\n\n const sessionEdge = createGithubSession?.sessionEdge\n\n console.log('User fetched from api, saving to session', userEdge, sessionEdge)\n\n const id = userEdge && userEdge.node && userEdge.node.id\n\n const sessionId = sessionEdge?.node?.id\n\n done(null, { id, sessionId })\n", "file_path": "javascript/src/server/auth/withGithub.ts", "rank": 76, "score": 74779.45916719099 }, { "content": " onError(error: Error) {\n\n console.log('Something happened:', error)\n\n done(null, null)\n", "file_path": "javascript/src/server/auth/withGithub.ts", "rank": 77, "score": 74779.45916719099 }, { "content": " async fetch(request: RequestParameters, variables: Variables) {\n\n console.log('Quering from the node server:', request.name)\n\n const payload = await super.fetch(request, variables)\n\n this.payloads[request.name] = payload\n\n 
return payload\n", "file_path": "javascript/src/server/ServerFetcher.ts", "rank": 78, "score": 74442.82299133364 }, { "content": " async fetch(request: RequestParameters, variables: Variables) {\n\n if (request.name in this.payloads) {\n\n const payload = this.payloads[request.name]\n\n delete this.payloads[request.name]\n\n return payload\n\n }\n\n\n\n return super.fetch(request, variables)\n", "file_path": "javascript/src/client/ClientFetcher.ts", "rank": 79, "score": 74442.82299133364 }, { "content": "const registerEndpointsFn = (provider: string) => (app: Express) => {\n\n app.get(`/auth/${provider}`, passport.authenticate(provider))\n\n\n\n app.get(\n\n `/auth/${provider}/callback`,\n\n passport.authenticate(provider, { failureRedirect: '/login' }),\n\n async (req, res) => {\n\n // eslint-disable-next-line no-console\n\n console.log(`Auth with ${provider} succeeded, redirecting to /`)\n\n res.redirect('/')\n\n },\n\n )\n\n\n\n return app\n", "file_path": "javascript/src/server/auth/registerEndpointsFn.ts", "rank": 80, "score": 73192.02655834572 }, { "content": "CREATE TABLE public.sessions (\n\n session_id bytea DEFAULT public.digest((random())::text, 'sha256'::text) NOT NULL,\n\n user_id uuid NOT NULL,\n\n id uuid DEFAULT public.gen_random_uuid() NOT NULL\n\n);\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 81, "score": 71602.5297922891 }, { "content": "ALTER TABLE ONLY public.sessions\n\n ADD CONSTRAINT sessions_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 82, "score": 71602.5297922891 }, { "content": "let currentApp = app\n", "file_path": "javascript/src/index.ts", "rank": 83, "score": 71018.70602286447 }, { "content": "ALTER TABLE public.sessions OWNER TO postgres;\n\n\n", "file_path": "rust/db/schema.sql", "rank": 84, "score": 67316.77461315444 }, { "content": "--\n\n-- Name: citext; Type: EXTENSION; Schema: -; Owner: -\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 85, "score": 66385.87240013531 }, { "content": "--\n\n-- Name: pgcrypto; Type: EXTENSION; Schema: -; Owner: -\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 86, "score": 66385.87240013531 }, { "content": "--\n\n-- Name: unaccent; Type: EXTENSION; Schema: -; Owner: -\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 87, "score": 66385.87240013531 }, { "content": "--\n\n-- Name: schema_migrations schema_migrations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 88, "score": 65993.04470722892 }, { "content": "--\n\n-- Name: schema_migrations; Type: TABLE; Schema: public; Owner: postgres\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 89, "score": 65840.33246475976 }, { "content": "--\n\n-- Name: pg_trgm; Type: EXTENSION; Schema: -; Owner: -\n\n--\n\n\n", "file_path": "rust/db/schema.sql", "rank": 90, "score": 65599.40737051112 }, { "content": "ALTER SEQUENCE public.link_transitive_closure_id_seq OWNED BY public.link_transitive_closure.id;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 91, "score": 65222.179762653876 }, { "content": "ALTER FUNCTION public.add_topic_to_link(topic_id uuid, link_id uuid) OWNER TO postgres;\n\n\n", "file_path": "rust/db/schema.sql", "rank": 92, "score": 65222.179762653876 }, { "content": "ALTER SEQUENCE public.topic_transitive_closure_id_seq OWNED BY public.topic_transitive_closure.id;\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 93, "score": 65222.179762653876 }, { "content": 
"class Alert extends Component<Props> {\n\n get className(): string {\n\n return classNames(\n\n 'flash fade in mt-3 mb-3',\n\n this.alertClass(this.props.message.type),\n\n )\n\n }\n\n\n\n alertClass = (type: AlertType) => {\n\n const classes = {\n\n ERROR: 'flash-error',\n\n WARN: 'flash-warn',\n\n SUCCESS: 'flash-success',\n\n '%future added value': 'flash-error',\n\n }\n\n return classes[type] || 'flash-success'\n\n }\n\n\n\n render = () => (\n\n <div className={this.className}>\n\n <button\n\n className=\"flash-close\"\n\n onClick={this.props.onClose}\n\n type=\"button\"\n\n >\n\n <GoX />\n\n </button>\n\n { this.props.message.text }\n\n </div>\n\n )\n", "file_path": "javascript/src/components/FlashMessages/Alert/index.tsx", "rank": 94, "score": 65160.84149005237 }, { "content": "--\n\n-- Name: EXTENSION citext; Type: COMMENT; Schema: -; Owner:\n\n--\n\n\n\nCOMMENT ON EXTENSION citext IS 'data type for case-insensitive character strings';\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 95, "score": 63878.34311680256 }, { "content": "--\n\n-- Name: EXTENSION unaccent; Type: COMMENT; Schema: -; Owner:\n\n--\n\n\n\nCOMMENT ON EXTENSION unaccent IS 'text search dictionary that removes accents';\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 96, "score": 63878.34311680256 }, { "content": "--\n\n-- Name: EXTENSION pgcrypto; Type: COMMENT; Schema: -; Owner:\n\n--\n\n\n\nCOMMENT ON EXTENSION pgcrypto IS 'cryptographic functions';\n\n\n\n\n", "file_path": "rust/db/schema.sql", "rank": 97, "score": 63878.34311680256 }, { "content": " pub fn create_repo(&self, viewer: Viewer) -> Repo {\n\n Repo::new(viewer, self.pool.clone(), self.server_secret.clone())\n\n }\n\n\n\n pub async fn authenticate(&self, user_info: Option<(String, String)>) -> Viewer {\n\n match user_info {\n\n Some((user_id, session_id)) => {\n\n let result = sqlx::query_as::<_, (i64,)>(\n\n r#\"select count(*)\n\n from sessions\n\n where user_id = $1::uuid and session_id = decode($2, 'hex')\"#,\n\n )\n\n .bind(&user_id)\n\n .bind(&session_id)\n\n .fetch_one(&self.pool)\n\n .await;\n\n\n\n match result {\n\n Ok((count,)) => {\n\n if count == 0 {\n", "file_path": "rust/src/schema/mod.rs", "rank": 98, "score": 31.36763618804204 }, { "content": " log::warn!(\n\n \"no user session found in database, proceeding as guest: {}, {}\",\n\n user_id,\n\n session_id\n\n );\n\n return Viewer::guest();\n\n }\n\n log::info!(\"found user and session in database: {}\", user_id);\n\n Viewer {\n\n mutation_ids: vec![user_id.clone()],\n\n query_ids: vec![user_id.clone(), GUEST_ID.to_string()],\n\n session_id: Some(session_id),\n\n user_id,\n\n }\n\n }\n\n Err(err) => {\n\n log::warn!(\"failed to fetch session info, proceeding as guest: {}\", err);\n\n Viewer::guest()\n\n }\n\n }\n", "file_path": "rust/src/schema/mod.rs", "rank": 99, "score": 29.939276890514765 } ]
Rust
src/wayland/gamma_control.rs
EdwardBetts/way-cooler
9d0e5d8137dbe2b4e6c7c3c1b899ab3e87cb6da0
use wayland::gamma_control::generated ::server::gamma_control::GammaControl; use wayland::gamma_control::generated ::server::gamma_control_manager::GammaControlManager; use rustwlc::wayland; use rustwlc::handle::{wlc_handle_from_wl_output_resource, WlcOutput}; use rustwlc::render::{wlc_output_set_gamma, wlc_output_get_gamma_size}; use wayland_server::Resource; use wayland_sys::common::{wl_array}; use wayland_sys::server::{WAYLAND_SERVER_HANDLE, wl_client, wl_resource}; use std::os::raw::c_void; use nix::libc::{c_int, c_uint, uint32_t, uint16_t}; static SET_GAMMA_ERROR: &'static str = "The gamma ramps don't have the same size!"; static INVALID_GAMMA_CODE: u32 = 0; static mut GAMMA_CONTROL_MANAGER: GammaControlManagerInterface = GammaControlManagerInterface { destroy: destroy, get_gamma_control: get_gamma_control }; static mut GAMMA_CONTROL: GammaControlInterface = GammaControlInterface { destroy: destroy, set_gamma: set_gamma, reset_gamma: reset_gamma }; mod generated { #![allow(dead_code,non_camel_case_types,unused_unsafe,unused_variables)] #![allow(non_upper_case_globals,non_snake_case,unused_imports)] pub mod interfaces { #[doc(hidden)] pub use wayland_server::protocol_interfaces::{wl_output_interface}; include!(concat!(env!("OUT_DIR"), "/gamma-control_interface.rs")); } pub mod server { #[doc(hidden)] pub use wayland_server::{Resource, Client, Liveness, Implementable, EventLoopHandle, EventResult}; #[doc(hidden)] pub use wayland_server::protocol::{wl_output}; #[doc(hidden)] pub use super::interfaces; include!(concat!(env!("OUT_DIR"), "/gamma-control_api.rs")); } } #[repr(C)] struct GammaControlManagerInterface { destroy: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource), get_gamma_control: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource, id: u32, output: *mut wl_resource) } #[repr(C)] struct GammaControlInterface { destroy: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource), set_gamma: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource, red: *mut wl_array, green: *mut wl_array, blue: *mut wl_array), reset_gamma: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource) } unsafe extern "C" fn set_gamma(_client: *mut wl_client, resource: *mut wl_resource, red: *mut wl_array, green: *mut wl_array, blue: *mut wl_array) { info!("Setting gamma"); if (*red).size != (*green).size || (*red).size != (*blue).size { ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_post_error, resource, INVALID_GAMMA_CODE, SET_GAMMA_ERROR.as_bytes().as_ptr() as *const i8); warn!("Color size error, can't continue"); return } let r = (*red).data as *mut u16; let g = (*green).data as *mut u16; let b = (*blue).data as *mut u16; let user_data = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_get_user_data, resource) as *const _; let output = WlcOutput(wlc_handle_from_wl_output_resource(user_data)); if output.is_null() { warn!("wl_resource didn't correspond to a wlc output"); return; } wlc_output_set_gamma(output.0, ((*red).size / 2) as u16, r, g, b) } unsafe extern "C" fn reset_gamma(_client: *mut wl_client, _resource: *mut wl_resource) { info!("Resetting gamma"); } unsafe extern "C" fn destroy(_client: *mut wl_client, resource: *mut wl_resource) { ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_destroy, resource ); } unsafe extern "C" fn get_gamma_control(client: *mut wl_client, _resource: *mut wl_resource, id: uint32_t, output: *mut wl_resource) { info!("Request received for control of the gamma ramps"); 
let manager_resource = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_create, client, GammaControl::interface_ptr(), GammaControl::supported_version() as i32, id); let wlc_output = WlcOutput(wlc_handle_from_wl_output_resource(output as *const _)); if wlc_output.is_null() { warn!("This is triggering, dis bad?"); return; } info!("Client requested control of the gamma ramps for {:?}", wlc_output); let gamma_control_ptr = &mut GAMMA_CONTROL as *mut _ as *mut c_void; ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_set_implementation, manager_resource, gamma_control_ptr, output as *mut c_void, None ); info!("Request granted for gamma ramp control of {:?}", wlc_output); gamma_control_send_gamma_size(manager_resource, wlc_output_get_gamma_size(wlc_output.0)) } unsafe extern "C" fn bind(client: *mut wl_client, _data: *mut c_void, version: u32, id: u32) { info!("Binding Gamma Control resource"); let cur_version = GammaControlManager::supported_version(); if version > cur_version { warn!("Unsupported gamma control protocol version {}!", version); warn!("We only support version {}", cur_version); return } let resource = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_create, client, GammaControlManager::interface_ptr(), version as c_int, id ); if resource.is_null() { warn!("Out of memory, could not make a new wl_resource \ for gamma control"); ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_client_post_no_memory, client ); } let global_manager_ptr = &mut GAMMA_CONTROL_MANAGER as *mut _ as *mut c_void; ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_set_implementation, resource, global_manager_ptr, ::std::ptr::null_mut(), None ); } unsafe extern "C" fn gamma_control_send_gamma_size(resource: *mut wl_resource, size: uint16_t) { ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_resource_post_event, resource, 0, size as c_uint); } pub fn init() { let w_display = wayland::get_display(); unsafe { info!("Initializing gamma control manager"); ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_global_create, w_display as *mut _, GammaControlManager::interface_ptr(), GammaControlManager::supported_version() as i32, ::std::ptr::null_mut(), bind ); } }
use wayland::gamma_control::generated ::server::gamma_control::GammaControl; use wayland::gamma_control::generated ::server::gamma_control_manager::GammaControlManager; use rustwlc::wayland; use rustwlc::handle::{wlc_handle_from_wl_output_resource, WlcOutput}; use rustwlc::render::{wlc_output_set_gamma, wlc_output_get_gamma_size}; use wayland_server::Resource; use wayland_sys::common::{wl_array}; use wayland_sys::server::{WAYLAND_SERVER_HANDLE, wl_client, wl_resource}; use std::os::raw::c_void; use nix::libc::{c_int, c_uint, uint32_t, uint16_t}; static SET_GAMMA_ERROR: &'static str = "The gamma ramps don't have the same size!"; static INVALID_GAMMA_CODE: u32 = 0; static mut GAMMA_CONTROL_MANAGER: GammaControlManagerInterface = GammaControlManagerInterface { destroy: destroy, get_gamma_control: get_gamma_control }; static mut GAMMA_CONTROL: GammaControlInterface = GammaControlInterface { destroy: destroy, set_gamma: set_gamma, reset_gamma: reset_gamma }; mod generated { #![allow(dead_code,non_camel_case_types,unused_unsafe,unused_variables)] #![allow(non_upper_case_globals,non_snake_case,unused_imports)] pub mod interfaces { #[doc(hidden)] pub use wayland_server::protocol_interfaces::{wl_output_interface}; include!(concat!(env!("OUT_DIR"), "/gamma-control_interface.rs")); } pub mod server { #[doc(hidden)] pub use wayland_server::{Resource, Client, Liveness, Implementable, EventLoopHandle, EventResult}; #[doc(hidden)] pub use wayland_server::protocol::{wl_output}; #[doc(hidden)] pub use super::interfaces; include!(concat!(env!("OUT_DIR"), "/gamma-control_api.rs")); } } #[repr(C)] struct GammaControlManagerInterface { destroy: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource), get_gamma_control: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource, id: u32, output: *mut wl_resource) } #[repr(C)] struct GammaControlInterface { destroy: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource), set_gamma: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource, red: *mut wl_array, green: *mut wl_array, blue: *mut wl_array), reset_gamma: unsafe extern "C" fn (client: *mut wl_client, resource: *mut wl_resource) } unsafe extern "C" fn set_gamma(_client: *mut wl_client, resource: *mut wl_resource, red: *mut wl_array, green: *mut wl_array, blue: *mut wl_array) { info!("Setting gamma"); if (*red).size != (*green).size || (*red).size != (*blue).size { ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_post_error, resource, INVALID_GAMMA_CODE, SET_GAMMA_ERROR.as_bytes().as_ptr() as *const i8); warn!("Color size error, can't continue"); return } let r = (*red).data as *mut u16; let g = (*green).data as *mut u16; let b = (*blue).data as *mut u16; let user_data = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_get_user_data, resource) as *const _; let output = WlcOutput(wlc_handle_from_wl_output_resource(user_data)); if output.is_null() { warn!("wl_resource didn't correspond to a wlc output"); return; } wlc_output_set_gamma(output.0, ((*red).size / 2) as u16, r, g, b) } unsafe extern "C" fn reset_gamma(_client: *mut wl_client, _resource: *mut wl_resource) { info!("Resetting gamma"); } unsafe extern "C" fn destroy(_client: *mut wl_client,
unsafe extern "C" fn get_gamma_control(client: *mut wl_client, _resource: *mut wl_resource, id: uint32_t, output: *mut wl_resource) { info!("Request received for control of the gamma ramps"); let manager_resource = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_create, client, GammaControl::interface_ptr(), GammaControl::supported_version() as i32, id); let wlc_output = WlcOutput(wlc_handle_from_wl_output_resource(output as *const _)); if wlc_output.is_null() { warn!("This is triggering, dis bad?"); return; } info!("Client requested control of the gamma ramps for {:?}", wlc_output); let gamma_control_ptr = &mut GAMMA_CONTROL as *mut _ as *mut c_void; ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_set_implementation, manager_resource, gamma_control_ptr, output as *mut c_void, None ); info!("Request granted for gamma ramp control of {:?}", wlc_output); gamma_control_send_gamma_size(manager_resource, wlc_output_get_gamma_size(wlc_output.0)) } unsafe extern "C" fn bind(client: *mut wl_client, _data: *mut c_void, version: u32, id: u32) { info!("Binding Gamma Control resource"); let cur_version = GammaControlManager::supported_version(); if version > cur_version { warn!("Unsupported gamma control protocol version {}!", version); warn!("We only support version {}", cur_version); return } let resource = ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_create, client, GammaControlManager::interface_ptr(), version as c_int, id ); if resource.is_null() { warn!("Out of memory, could not make a new wl_resource \ for gamma control"); ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_client_post_no_memory, client ); } let global_manager_ptr = &mut GAMMA_CONTROL_MANAGER as *mut _ as *mut c_void; ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_set_implementation, resource, global_manager_ptr, ::std::ptr::null_mut(), None ); } unsafe extern "C" fn gamma_control_send_gamma_size(resource: *mut wl_resource, size: uint16_t) { ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_resource_post_event, resource, 0, size as c_uint); } pub fn init() { let w_display = wayland::get_display(); unsafe { info!("Initializing gamma control manager"); ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_global_create, w_display as *mut _, GammaControlManager::interface_ptr(), GammaControlManager::supported_version() as i32, ::std::ptr::null_mut(), bind ); } }
resource: *mut wl_resource) { ffi_dispatch!( WAYLAND_SERVER_HANDLE, wl_resource_destroy, resource ); }
function_block-function_prefix_line
[ { "content": "pub fn lock_screen(client: *mut wl_client, output: WlcOutput) {\n\n let mut mode = write_current_mode();\n\n {\n\n match *mode {\n\n Modes::LockScreen(ref mut lock_mode) => {\n\n lock_mode.clients.push((client as _, output, None));\n\n return\n\n },\n\n _ => {}\n\n }\n\n }\n\n *mode = Modes::LockScreen(LockScreen::new(client, output));\n\n}\n\n\n", "file_path": "src/modes/lock_screen.rs", "rank": 0, "score": 385368.34766266914 }, { "content": "pub fn unlock_screen(cur_client: *mut wl_client) {\n\n let mode = {\n\n write_current_mode().clone()\n\n };\n\n match mode {\n\n Modes::LockScreen(ref lock_mode) => {\n\n let mut seen = false;\n\n for &(client, _, view) in &lock_mode.clients {\n\n if client == cur_client as _ {\n\n seen = true;\n\n break;\n\n }\n\n view.map(WlcView::close);\n\n }\n\n if !seen {\n\n return\n\n }\n\n },\n\n _ => {}\n\n }\n\n *write_current_mode() = Modes::Default(Default);\n\n}\n", "file_path": "src/modes/lock_screen.rs", "rank": 1, "score": 252014.02766517943 }, { "content": "pub fn scraped_pixels_lock() -> Result<MutexGuard<'static, (Vec<u8>, Option<WlcOutput>)>,\n\n PoisonError<MutexGuard<'static, (Vec<u8>, Option<WlcOutput>)>>> {\n\n trace!(\"Locking scraped pixels lock\");\n\n SCRAPED_PIXELS.lock()\n\n}\n\n\n", "file_path": "src/render/screen_scrape.rs", "rank": 2, "score": 233974.6979149085 }, { "content": "/// Parses a `Direction` from a string, returning `MethodErr::invalid_arg`\n\n/// if the string is invalid.\n\npub fn parse_direction(arg: &'static str, text: &str) -> DBusResult<Direction> {\n\n match text.to_lowercase().as_str() {\n\n \"up\" => Ok(Direction::Up),\n\n \"down\" => Ok(Direction::Down),\n\n \"left\" => Ok(Direction::Left),\n\n \"right\" => Ok(Direction::Right),\n\n _ => Err(MethodErr::invalid_arg(\n\n &format!(\"{}: {} is not a valid direction. \\\n\n May be one of 'up', 'down', 'left', 'right'.\", arg, text)))\n\n }\n\n}\n\n\n", "file_path": "src/ipc/utils.rs", "rank": 3, "score": 231054.1904939004 }, { "content": "pub fn parse_axis(arg: &'static str, text: &str) -> DBusResult<Layout> {\n\n match text.to_lowercase().as_str() {\n\n \"vertical\" => Ok(Layout::Vertical),\n\n \"horizontal\" => Ok(Layout::Horizontal),\n\n \"tabbed\" => Ok(Layout::Tabbed),\n\n \"stacked\" => Ok(Layout::Stacked),\n\n _ => Err(MethodErr::invalid_arg(\n\n &format!(\"{}: {} is not a valid axis direction. 
\\\n\n May be either 'horizontal' or 'vertical'\", arg, text)))\n\n }\n\n}\n\n\n", "file_path": "src/ipc/utils.rs", "rank": 4, "score": 231049.45076613466 }, { "content": "/// Gets a command from the API\n\npub fn get(name: &str) -> Option<CommandFn> {\n\n read_lock().get(name).map(|com| com.clone())\n\n}\n\n\n\n/// Gets a command in the API\n", "file_path": "src/commands/mod.rs", "rank": 5, "score": 225841.0893535567 }, { "content": "/// Parses a uuid from a string, returning `MethodErr::invalid_arg`\n\n/// if the uuid is invalid.\n\npub fn parse_uuid(arg: &'static str, text: &str) -> DBusResult<Option<Uuid>> {\n\n if text == \"\" {\n\n Ok(None)\n\n } else {\n\n match Uuid::parse_str(text) {\n\n Ok(uuid) => Ok(Some(uuid)),\n\n Err(reason) => Err(MethodErr::invalid_arg(\n\n &format!(\"{}: {} is not a valid UUID: {:?}\", arg, text, reason)))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ipc/utils.rs", "rank": 6, "score": 224025.93471905962 }, { "content": "#[cfg(test)]\n\npub fn get_config() -> (bool, Result<(PathBuf, File), &'static str>) {\n\n (false, Err(\"Loading config should be ignored during tests for now\"))\n\n}\n\n\n", "file_path": "src/lua/init_path.rs", "rank": 7, "score": 216640.16527512786 }, { "content": "/// Parses a KeyMod from key names.\n\npub fn keymod_from_names(keys: &[&str]) -> Result<KeyMod, String> {\n\n let mut result = KeyMod::empty();\n\n for key in keys {\n\n match key.to_lowercase().as_str() {\n\n \"shift\" => result = result | MOD_SHIFT,\n\n \"control\" | \"ctrl\" => result = result | MOD_CTRL,\n\n \"alt\" => result = result | MOD_ALT,\n\n \"mod2\" => result = result | MOD_MOD2,\n\n \"mod3\" => result = result | MOD_MOD3,\n\n \"mod4\" | \"super\" | \"logo\" => result = result | MOD_MOD4,\n\n \"mod5\" | \"5mod5me\" => result = result | MOD_MOD5,\n\n err => return Err(format!(\"Invalid modifier: {}\", err))\n\n }\n\n }\n\n return Ok(result)\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 8, "score": 214941.62506161054 }, { "content": "pub fn size_to_json(size: Size) -> Json {\n\n let mut map = BTreeMap::new();\n\n map.insert(\"w\".into(), size.w.to_json());\n\n map.insert(\"h\".into(), size.h.to_json());\n\n map.to_json()\n\n}\n\n\n", "file_path": "src/convert/json.rs", "rank": 9, "score": 213483.88821215188 }, { "content": "/// Convert a modifier to the Lua interpretation\n\npub fn mods_to_lua(lua: &Lua, mut mods: KeyMod) -> rlua::Result<Table> {\n\n let mut mods_list: Vec<String> = Vec::with_capacity(MOD_NAMES.len());\n\n for mod_name in &MOD_NAMES {\n\n if mods == MOD_NONE {\n\n break;\n\n }\n\n if mods.bits() & 1 != 0 {\n\n mods_list.push((*mod_name).into());\n\n }\n\n mods = KeyMod::from_bits_truncate(mods.bits() >> 1);\n\n }\n\n lua.create_table_from(mods_list.into_iter().enumerate())\n\n}\n\n\n", "file_path": "src/lua/utils.rs", "rank": 10, "score": 201727.15759691337 }, { "content": "pub fn setup(f: &mut DBusFactory) -> DBusObjPath {\n\n f.object_path(\"/org/way_cooler/Screen\", ()).introspectable().add(\n\n f.interface(\"org.way_cooler.Screen\", ())\n\n .add_m(\n\n f.method(\"List\", (), |m| {\n\n let tree = lock_tree_dbus()?;\n\n let outputs = tree.outputs().iter()\n\n .map(|id|\n\n MessageItem::Str(format!(\"{}\", id.simple())))\n\n .collect();\n\n Ok(vec![m.msg.method_return()\n\n .append((MessageItem::Array(outputs, \"s\".into())))\n\n ])\n\n }).outarg::<Array<String, Vec<String>>, _>(\"success\")\n\n )\n\n .add_m(\n\n f.method(\"ActiveScreen\", (), |m| {\n\n let tree = lock_tree_dbus()?;\n\n let id = 
tree.lookup_handle(Handle::Output(WlcOutput::focused()))\n\n .map_err(|err| MethodErr::failed(&format!(\"{:?}\", err)))?.to_string();\n", "file_path": "src/ipc/interfaces/screen.rs", "rank": 11, "score": 201030.922426273 }, { "content": "/// Attempts to lock the action mutex. If the Result is Err, then the lock could\n\n/// not be returned at this time, already locked.\n\npub fn try_lock_action() -> Result<MutexGuard<'static, Option<Action>>,\n\n TryLockError<MutexGuard<'static,\n\n Option<Action>>>> {\n\n PREV_ACTION.try_lock()\n\n}\n", "file_path": "src/layout/mod.rs", "rank": 12, "score": 199949.5532653912 }, { "content": "pub fn clients_read<'a>() -> RwLockReadGuard<'a, Clients> {\n\n CLIENTS.read().expect(\"Unable to read client mapping\")\n\n}\n\n\n", "file_path": "src/registry/mod.rs", "rank": 13, "score": 196414.67473760145 }, { "content": "#[allow(dead_code)]\n\npub fn clients_write<'a>() -> RwLockWriteGuard<'a, Clients> {\n\n CLIENTS.write().expect(\"Unable to write client mapping\")\n\n}\n\n\n", "file_path": "src/registry/mod.rs", "rank": 14, "score": 196414.67473760145 }, { "content": "/// Convert a mouse event from Wayland to the representation Lua expcets\n\npub fn mouse_events_to_lua(_: &rlua::Lua, button: u32,\n\n button_state: ButtonState) -> rlua::Result<Vec<bool>> {\n\n let mut event_list = Vec::with_capacity(MOUSE_EVENTS.len());\n\n for mouse_event in &MOUSE_EVENTS[..5] {\n\n let state_pressed = button_state == ButtonState::Pressed;\n\n let is_pressed = button == *mouse_event && state_pressed;\n\n event_list.push(is_pressed);\n\n }\n\n Ok(event_list)\n\n}\n", "file_path": "src/lua/utils.rs", "rank": 15, "score": 196250.70904929267 }, { "content": "/// Initialize the registry and client mapping\n\npub fn init() {\n\n let mut registry = REGISTRY.write()\n\n .expect(\"Could not write to the registry\");\n\n // Construct the layout category\n\n registry.add_category(\"windows\".into())\n\n .expect(\"Could not add windows category\");\n\n // Construct the programs category\n\n registry.add_category(\"programs\".into())\n\n .expect(\"Could not add programs category\");\n\n // Construct the mouse category\n\n registry.add_category(\"mouse\".into())\n\n .expect(\"Could not add mouse category\");\n\n}\n", "file_path": "src/registry/mod.rs", "rank": 16, "score": 183691.58025236352 }, { "content": "pub fn init() {\n\n let (send, recv) = mpsc::channel();\n\n let _join = thread::spawn( move || {\n\n let mut session = DBusSession::create(recv);\n\n\n\n *SENDER.lock().expect(\"Unable to unlock\") = Some(send);\n\n\n\n session.run_thread();\n\n });\n\n}\n", "file_path": "src/ipc/mod.rs", "rank": 17, "score": 183686.59988030646 }, { "content": "#[allow(deprecated)] // keysyms\n\npub fn init() {\n\n use rustwlc::xkb::keysyms;\n\n use commands;\n\n if !is_quit_bound() {\n\n register(KeyPress::new(MOD_ALT | MOD_SHIFT, keysyms::KEY_Escape),\n\n KeyEvent::Command(commands::get(\"way_cooler_quit\")\n\n .expect(\"Error reading commands::way_cooler_quit\")),\n\n false);\n\n }\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 18, "score": 183686.59988030646 }, { "content": "/// Initialize commands API (register default commands)\n\npub fn init() {\n\n defaults::register_defaults();\n\n}\n\n\n", "file_path": "src/commands/mod.rs", "rank": 19, "score": 183686.59988030646 }, { "content": "/// Clears all the keys from Way Cooler's memory.\n\npub fn clear_keys() {\n\n let mut bindings = BINDINGS.write()\n\n .expect(\"Keybindings/clear_keys: unable to lock keybindings\");\n\n 
bindings.drain();\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 21, "score": 179923.62583497522 }, { "content": "/// Gets the current key modifier for mouse control\n\npub fn mouse_modifier() -> KeyMod {\n\n let key_mod = MOUSE_MODIFIER.read()\n\n .expect(\"Keybindings/register_mouse_modifier: unable to lock MOUSE MODIFIER\");\n\n *key_mod\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 22, "score": 179699.22684212684 }, { "content": "pub fn lock_tree() -> Result<TreeGuard, PoisonError<TreeGuard>> {\n\n let tree = try!(TREE.lock());\n\n Ok(tree)\n\n}\n\n\n", "file_path": "src/layout/mod.rs", "rank": 23, "score": 177851.34021471685 }, { "content": "/// Initializes the appropriate handlers for each wayland protocol\n\n/// that Way Cooler supports.\n\npub fn init_wayland_protocols() {\n\n info!(\"Initializing wayland protocols\");\n\n gamma_control::init();\n\n desktop_shell::init();\n\n}\n", "file_path": "src/wayland/mod.rs", "rank": 24, "score": 176383.31297575822 }, { "content": "/// Parses environment variables\n\nfn get_config_file() -> Result<(PathBuf, File), &'static str> {\n\n let home_var = env::var(\"HOME\").expect(\"HOME environment variable not defined!\");\n\n let home = home_var.as_str();\n\n\n\n if let Ok(path_env) = env::var(\"WAY_COOLER_INIT_FILE\") {\n\n info!(\"Found $WAY_COOLER_INIT_FILE to be defined, will look for the init file there.\");\n\n let path = Path::new(&path_env);\n\n if let Ok(file) = read_file(&path) {\n\n info!(\"Reading init file from $WAY_COOLER_INIT_FILE: {}\",\n\n path_env.as_str().replace(home, \"~\"));\n\n // If the parent doesn't exist it's just in the current directory.\n\n let dir = path.parent().map_or(PathBuf::new(), Path::to_path_buf);\n\n return Ok((dir, file))\n\n }\n\n warn!(\"Did not find an init file at $WAY_COOLER_INIT_FILE! It points to {}\",\n\n path_env.as_str().replace(home, \"~\"));\n\n }\n\n\n\n if let Ok(xdg) = env::var(\"XDG_CONFIG_HOME\") {\n\n let dir = Path::new(&xdg).join(\"way-cooler\");\n", "file_path": "src/lua/init_path.rs", "rank": 25, "score": 172732.36370424868 }, { "content": "/// Determine if the way_cooler_quit command is already bound\n\npub fn is_quit_bound() -> bool {\n\n use commands;\n\n\n\n let bindings = BINDINGS.read()\n\n .expect(\"Keybindings/get: unable to lock keybindings\");\n\n let quit = commands::get(\"way_cooler_quit\")\n\n .expect(\"Error reading commands::way_cooler_quit\");\n\n\n\n for value in bindings.values() {\n\n let value = &value.event;\n\n if let &KeyEvent::Command(ref cmd) = value {\n\n if (&*cmd as *const _) == (&quit as *const _) {\n\n return true;\n\n }\n\n }\n\n };\n\n false\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/keys/mod.rs", "rank": 26, "score": 171928.5026550966 }, { "content": "pub fn parse_edge(dir: &str) -> DBusResult<ResizeEdge> {\n\n let result = Ok(match dir.to_lowercase().as_str() {\n\n \"up\" => RESIZE_TOP,\n\n \"down\" => RESIZE_BOTTOM,\n\n \"left\" => RESIZE_LEFT,\n\n \"right\" => RESIZE_RIGHT,\n\n _ => return Err(MethodErr::invalid_arg(\n\n &format!(\"{} is not a valid direction. 
\\\n\n May be one of 'up', 'down', 'left', 'right'.\", dir)))\n\n });\n\n result\n\n}\n\n\n", "file_path": "src/ipc/utils.rs", "rank": 27, "score": 170716.21989367704 }, { "content": "/// Registers a modifier to be used with mouse commands\n\npub fn register_mouse_modifier(modifier: KeyMod) {\n\n let mut key_mod = MOUSE_MODIFIER.write()\n\n .expect(\"Keybindings/register_mouse_modifier: unable to lock MOUSE MODIFIER\");\n\n *key_mod = modifier;\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 28, "score": 170041.5630417196 }, { "content": "/// Can be used for implementing UserData for Lua objects. This provides some default metafunctions.\n\npub fn default_add_methods<S>(methods: &mut UserDataMethods<S>)\n\n where S: UserData {\n\n methods.add_meta_function(MetaMethod::Index, default_index);\n\n methods.add_meta_function(MetaMethod::NewIndex, default_newindex);\n\n methods.add_meta_function(MetaMethod::ToString, default_tostring);\n\n}\n\n\n", "file_path": "src/awesome/object.rs", "rank": 30, "score": 164250.6460366325 }, { "content": "pub fn dummy<'lua>(_: &'lua Lua, _: rlua::Value) -> rlua::Result<()> { Ok(()) }\n", "file_path": "src/awesome/mod.rs", "rank": 32, "score": 160626.59601226577 }, { "content": "/// Convert a modifier to the Rust interpretation, from the Lua interpretation\n\npub fn mods_to_rust(mods_table: Table) -> rlua::Result<KeyMod> {\n\n let mut mods = KeyMod::empty();\n\n for modifier in mods_table.pairs::<Value, String>() {\n\n match &*modifier?.1 {\n\n \"Shift\" => mods.insert(MOD_SHIFT),\n\n \"Caps\"|\"Lock\" => mods.insert(MOD_CAPS),\n\n \"Control\"|\"Ctrl\" => mods.insert(MOD_CTRL),\n\n \"Alt\"|\"Mod1\" => mods.insert(MOD_ALT),\n\n \"Mod2\" => mods.insert(MOD_MOD2),\n\n \"Mod3\" => mods.insert(MOD_MOD3),\n\n \"Mod4\" => mods.insert(MOD_MOD4),\n\n \"Mod5\" => mods.insert(MOD_MOD5),\n\n string => {\n\n use rlua::Error::RuntimeError;\n\n Err(RuntimeError(format!(\"{} is an invalid modifier\", string)))?\n\n }\n\n }\n\n }\n\n Ok(mods)\n\n}\n\n\n", "file_path": "src/lua/utils.rs", "rank": 33, "score": 159380.7552173312 }, { "content": "pub fn mousegrabber_handle(x: i32, y: i32, button: Option<(u32, ButtonState)>)\n\n -> rlua::Result<()> {\n\n run_with_lua(move |lua| {\n\n let button_events = button.map(|(button, button_state)|\n\n ::lua::mouse_events_to_lua(lua, button, button_state))\n\n .unwrap_or_else(|| Ok(vec![false, false, false, false, false]))?;\n\n call_mousegrabber(lua, (x, y, button_events))\n\n })\n\n}\n\n\n", "file_path": "src/awesome/mousegrabber.rs", "rank": 34, "score": 156830.809525137 }, { "content": "#[allow(dead_code)] // Used in tests\n\npub fn set(name: String, val: CommandFn) -> Option<CommandFn> {\n\n write_lock().insert(name, val)\n\n}\n", "file_path": "src/commands/mod.rs", "rank": 35, "score": 156526.4084030137 }, { "content": "pub fn init(lua: &Lua) -> rlua::Result<()> {\n\n setup_awesome_path(lua)?;\n\n setup_global_signals(lua)?;\n\n setup_xcb_connection(lua)?;\n\n button::init(lua)?;\n\n awesome::init(lua)?;\n\n key::init(lua)?;\n\n client::init(lua)?;\n\n screen::init(lua)?;\n\n keygrabber::init(lua)?;\n\n root::init(lua)?;\n\n mouse::init(lua)?;\n\n tag::init(lua)?;\n\n drawin::init(lua)?;\n\n drawable::init(lua)?;\n\n mousegrabber::init(lua)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/awesome/mod.rs", "rank": 36, "score": 155311.32706725417 }, { "content": "fn size_to_lua(size: Size) -> String {\n\n format!(\"{}\", size_to_json(size)).replace(\":\", \"=\").replace(\"\\\"\", \"\")\n\n}\n\n\n", "file_path": "src/modes/custom_lua.rs", 
"rank": 37, "score": 153741.94000239193 }, { "content": "/// Using a Pixbuf buffer, loads the data into a Cairo surface.\n\npub fn load_surface_from_pixbuf(pixbuf: Pixbuf) -> ImageSurface {\n\n let width = pixbuf.get_width();\n\n let height = pixbuf.get_height();\n\n let channels = pixbuf.get_n_channels();\n\n let pix_stride = pixbuf.get_rowstride() as usize;\n\n // NOTE This is safe because we aren't modifying the bytes, but there's no immutable view\n\n let pixels = unsafe { pixbuf.get_pixels() };\n\n let format = if channels == 3 {cairo::Format::Rgb24} else { cairo::Format::ARgb32};\n\n let mut surface = ImageSurface::create(format, width, height)\n\n .expect(\"Could not create image of that size\");\n\n let cairo_stride = surface.get_stride() as usize;\n\n {\n\n let mut cairo_data = surface.get_data().unwrap();\n\n let mut pix_pixels_index = 0;\n\n let mut cairo_pixels_index = 0;\n\n for _ in 0..height {\n\n let mut pix_pixels_index2 = pix_pixels_index;\n\n let mut cairo_pixels_index2 = cairo_pixels_index;\n\n for _ in 0..width {\n\n if channels == 3 {\n", "file_path": "src/render/mod.rs", "rank": 38, "score": 152456.97441448458 }, { "content": "/// Unregisters a key mapping\n\npub fn unregister(key: &KeyPress) -> Option<Action> {\n\n let mut bindings = BINDINGS.write()\n\n .expect(\"Keybindings/unregister: unable to lock keybindings\");\n\n bindings.remove(key)\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 39, "score": 152332.11552954293 }, { "content": "/// Get a key mapping from the list.\n\npub fn get(key: &KeyPress) -> Option<Action> {\n\n let bindings = BINDINGS.read()\n\n .expect(\"Keybindings/get: unable to lock keybindings\");\n\n bindings.get(key).map(Action::clone)\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 40, "score": 152332.11552954293 }, { "content": "pub fn init(lua: &Lua) -> rlua::Result<Class> {\n\n method_setup(lua, Class::builder(lua, \"client\", None)?)?\n\n .save_class(\"client\")?\n\n .build()\n\n}\n\n\n", "file_path": "src/awesome/client.rs", "rank": 41, "score": 149980.0592614889 }, { "content": "/// Attempts to lock the tree. 
If the Result is Err, then the lock could\n\n/// not be returned at this time, already locked.\n\npub fn try_lock_tree() -> Result<TreeGuard, TreeErr> {\n\n let tree = try!(TREE.try_lock());\n\n Ok(tree)\n\n}\n\n\n", "file_path": "src/layout/mod.rs", "rank": 42, "score": 149780.8361036983 }, { "content": "/// Parses a keypress from a string\n\nfn keypress_from_string(mods: &str) -> Result<KeyPress, String> {\n\n let parts: Vec<&str> = mods.split(',').collect();\n\n if let Some((ref key, mods)) = parts.split_last() {\n\n KeyPress::from_key_names(mods, &key)\n\n }\n\n else {\n\n Err(format!(\"Invalid key '{}'\", mods))\n\n }\n\n}\n\n\n", "file_path": "src/lua/rust_interop.rs", "rank": 43, "score": 148813.25708985253 }, { "content": "pub fn read_current_mode<'a>() -> RwLockReadGuard<'a, Modes> {\n\n CURRENT_MODE.try_read()\n\n .expect(\"Unable to read current mode\")\n\n}\n\n\n\nimpl Deref for Modes {\n\n type Target = Mode;\n\n\n\n fn deref(&self) -> &(Mode + 'static) {\n\n match *self {\n\n Modes::Default(ref mode) => mode as &Mode,\n\n Modes::CustomLua(ref mode) => mode as &Mode,\n\n Modes::LockScreen(ref mode) => mode as &Mode\n\n }\n\n }\n\n}\n\n\n\nimpl DerefMut for Modes {\n\n fn deref_mut(&mut self) -> &mut (Mode + 'static) {\n\n match *self {\n\n Modes::Default(ref mut mode) => mode as &mut Mode,\n\n Modes::CustomLua(ref mut mode) => mode as &mut Mode,\n\n Modes::LockScreen(ref mut mode) => mode as &mut Mode\n\n }\n\n }\n\n}\n", "file_path": "src/modes/mod.rs", "rank": 44, "score": 146834.39526923042 }, { "content": "/// Acquires a write lock on the commands map.\n\npub fn write_lock<'a>() -> RwLockWriteGuard<'a, ComMap> {\n\n COMMANDS.write().expect(\"Unable to write to commands!\")\n\n}\n\n\n", "file_path": "src/commands/mod.rs", "rank": 45, "score": 146834.39526923042 }, { "content": "pub fn write_current_mode<'a>() -> RwLockWriteGuard<'a, Modes> {\n\n CURRENT_MODE.try_write()\n\n .expect(\"Unable to write current mode\")\n\n}\n\n\n", "file_path": "src/modes/mod.rs", "rank": 46, "score": 146834.39526923042 }, { "content": "/// Acquires a read lock on the commands map.\n\npub fn read_lock<'a>() -> RwLockReadGuard<'a, ComMap> {\n\n COMMANDS.read().expect(\"Unable to read from commands!\")\n\n}\n\n\n", "file_path": "src/commands/mod.rs", "rank": 47, "score": 146834.39526923042 }, { "content": "pub fn init() {\n\n use rustwlc::callback;\n\n\n\n callback::output_created(output_created);\n\n callback::output_destroyed(output_destroyed);\n\n callback::output_focus(output_focus);\n\n callback::output_resolution(output_resolution);\n\n callback::output_render_post(post_render);\n\n callback::view_created(view_created);\n\n callback::view_destroyed(view_destroyed);\n\n callback::view_focus(view_focus);\n\n callback::view_move_to_output(view_move_to_output);\n\n callback::view_request_geometry(view_request_geometry);\n\n callback::view_request_state(view_request_state);\n\n callback::view_request_move(view_request_move);\n\n callback::view_request_resize(view_request_resize);\n\n callback::view_properties_changed(view_props_changed);\n\n callback::keyboard_key(keyboard_key);\n\n callback::pointer_button(pointer_button);\n\n callback::pointer_scroll(pointer_scroll);\n\n callback::pointer_motion_v2(pointer_motion);\n\n callback::compositor_ready(compositor_ready);\n\n callback::compositor_terminate(compositor_terminating);\n\n callback::view_render_pre(view_pre_render);\n\n trace!(\"Registered wlc callbacks\");\n\n}\n", "file_path": "src/callbacks.rs", "rank": 48, "score": 144689.52582169353 
}, { "content": "pub fn class_setup<'lua>(lua: &'lua Lua, name: &str) -> rlua::Result<Class<'lua>> {\n\n let class = lua.globals().get::<_, AnyUserData>(name)\n\n .expect(\"Class was not set! Did you call init?\");\n\n assert!(class.is::<ClassState>()?,\n\n \"This user data was not a class!\");\n\n Ok(Class { class })\n\n}\n\n\n\n\n", "file_path": "src/awesome/class.rs", "rank": 49, "score": 144651.70860448212 }, { "content": "/// Default new indexing (assignment) of an Awesome object.\n\n///\n\n/// Automatically looks up contents in meta table, so instead of overriding this\n\n/// it's easier to just add the required data in the meta table.\n\npub fn default_newindex<'lua>(_: &'lua Lua,\n\n (obj, index, val):\n\n (AnyUserData<'lua>, String, Value<'lua>))\n\n -> rlua::Result<Value<'lua>> {\n\n // Look up in metatable first\n\n let obj: Object<'lua> = obj.into();\n\n let obj_table = obj.table()?;\n\n if let Some(meta) = obj_table.get_metatable() {\n\n if let Ok(val) = meta.raw_get::<_, Value>(index.clone()) {\n\n match val {\n\n Value::Nil => {},\n\n val => return Ok(val)\n\n }\n\n }\n\n let class = meta.get::<_, AnyUserData>(\"__class\")?;\n\n let class_table = class.get_user_value::<Table>()?;\n\n let props = class_table.get::<_, Vec<Property>>(\"properties\")?;\n\n for prop in props {\n\n if prop.name.as_str() == index {\n\n // Property exists and has a newindex callback\n", "file_path": "src/awesome/object.rs", "rank": 50, "score": 144054.82861899392 }, { "content": "pub fn default_tostring<'lua>(_: &'lua Lua,\n\n obj: AnyUserData<'lua>)\n\n -> rlua::Result<String> {\n\n let obj: Object<'lua> = obj.into();\n\n let obj_table = obj.table()?;\n\n if let Some(meta) = obj_table.get_metatable() {\n\n let class = meta.get::<_, AnyUserData>(\"__class\")?;\n\n let class_table = class.get_user_value::<Table>()?;\n\n let name = class_table.get::<_, String>(\"name\")?;\n\n return Ok(format!(\"{}: {:p}\", name, &obj.object as *const _));\n\n }\n\n Err(rlua::Error::UserDataTypeMismatch)\n\n}\n\n\n", "file_path": "src/awesome/object.rs", "rank": 51, "score": 144054.82861899392 }, { "content": "fn set_preferred_icon_size(lua: &Lua, val: u32) -> rlua::Result<()> {\n\n let mut awesome_state = lua.globals().get::<_, AwesomeState>(\"awesome\")?;\n\n awesome_state.preferred_icon_size = val;\n\n lua.globals().set(\"awesome\", awesome_state.to_lua(lua)?)\n\n\n\n}\n\n\n", "file_path": "src/awesome/awesome.rs", "rank": 52, "score": 143995.9875501396 }, { "content": "/// Initialize the Lua thread.\n\npub fn init() {\n\n info!(\"Starting Lua thread...\");\n\n let _lua_handle = thread::Builder::new()\n\n .name(\"Lua thread\".to_string())\n\n .spawn(|| main_loop());\n\n // Immediately update all the values that the init file set\n\n send(LuaQuery::UpdateRegistryFromCache)\n\n .expect(\"Could not update registry from cache\");\n\n\n\n // Re-tile the layout tree, to make any changes appear immediantly.\n\n if let Ok(mut tree) = lock_tree() {\n\n tree.layout_active_of(ContainerType::Root)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Lua thread could not re-tile the layout tree\");\n\n });\n\n // Yeah this is silly, it's so the active border can be updated properly.\n\n if let Some(active_id) = tree.active_id() {\n\n tree.focus(active_id)\n\n .expect(\"Could not focus on the focused id\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lua/thread.rs", "rank": 53, "score": 142034.5423428177 }, { "content": "/// Initializes the logging system.\n\npub fn init_logs() {\n\n let mut builder = env_logger::LogBuilder::new();\n\n 
builder.format(log_format);\n\n builder.filter(None, log::LogLevelFilter::Trace);\n\n if env::var(\"WAY_COOLER_LOG\").is_ok() {\n\n builder.parse(&env::var(\"WAY_COOLER_LOG\")\n\n .expect(\"WAY_COOLER_LOG not defined\"));\n\n }\n\n builder.init().expect(\"Unable to initialize logging!\");\n\n info!(\"Logger initialized, setting wlc handlers.\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 54, "score": 142034.5423428177 }, { "content": "pub fn focus_up() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.move_focus(Direction::Up)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Could not focus up\");\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 55, "score": 142034.5423428177 }, { "content": "#[test]\n\npub fn command() {\n\n println!(\"command being run!\");\n\n}\n\n\n", "file_path": "src/commands/tests.rs", "rank": 56, "score": 142034.5423428177 }, { "content": "pub fn focus_down() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.move_focus(Direction::Down)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Could not focus down\");\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 57, "score": 142034.5423428177 }, { "content": "fn load_config(mut lua: &mut rlua::Lua) {\n\n info!(\"Loading way-cooler libraries...\");\n\n\n\n let (use_config, maybe_init_file) = init_path::get_config();\n\n if use_config {\n\n match maybe_init_file {\n\n Ok((init_dir, mut init_file)) => {\n\n if init_dir.components().next().is_some() {\n\n // Add the config directory to the package path.\n\n let globals = lua.globals();\n\n let package: rlua::Table = globals.get(\"package\")\n\n .expect(\"package not defined in Lua\");\n\n let paths: String = package.get(\"path\")\n\n .expect(\"package.path not defined in Lua\");\n\n package.set(\"path\", paths + \";\" + init_dir.join(\"?.lua\").to_str()\n\n .expect(\"init_dir not a valid UTF-8 string\"))\n\n .expect(\"Failed to set package.path\");\n\n }\n\n let mut init_contents = String::new();\n\n init_file.read_to_string(&mut init_contents)\n", "file_path": "src/lua/thread.rs", "rank": 58, "score": 140007.90866971627 }, { "content": "/// Sets up Way Cooler to announce the desktop-shell interface.\n\npub fn init() {\n\n let w_display = get_display();\n\n unsafe {\n\n info!(\"Initializing desktop shell\");\n\n ffi_dispatch!(WAYLAND_SERVER_HANDLE,\n\n wl_global_create,\n\n w_display as *mut _,\n\n DesktopShell::interface_ptr(),\n\n DesktopShell::supported_version() as i32,\n\n ::std::ptr::null_mut(),\n\n bind);\n\n }\n\n}\n", "file_path": "src/wayland/desktop_shell.rs", "rank": 59, "score": 139546.35151995328 }, { "content": "pub fn tile_tabbed() {\n\n debug!(\"Layout.SplitTabbed()\");\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.0.set_active_layout(Layout::Tabbed).unwrap_or_else(|err| {\n\n warn!(\"Could not tile as tabbed: {:?}\", err);\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 60, "score": 139541.44810045866 }, { "content": "pub fn focus_left() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.move_focus(Direction::Left)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Could not focus left\");\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 61, "score": 139541.44810045866 }, { "content": "pub fn on_compositor_ready() {\n\n info!(\"Running lua on_init()\");\n\n // Call the special init hook function that we read from the init file\n\n init();\n\n send(LuaQuery::Execute(INIT_LUA_FUNC.to_owned())).err()\n\n .map(|error| warn!(\"Lua init callback returned an error: {:?}\", 
error));\n\n}\n\n\n", "file_path": "src/lua/thread.rs", "rank": 62, "score": 139541.44810045866 }, { "content": "pub fn tile_switch() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n if let Some(id) = tree.active_id() {\n\n tree.toggle_cardinal_tiling(id).unwrap_or_else(|err| {\n\n warn!(\"Could not toggle cardinal tiling: {:#?}\", err);\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 63, "score": 139541.44810045866 }, { "content": "pub fn focus_right() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.move_focus(Direction::Right)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Could not focus right\");\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 64, "score": 139541.44810045866 }, { "content": "pub fn split_vertical() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n debug!(\"Layout.SplitVertical()\");\n\n tree.0.toggle_active_layout(Layout::Vertical).ok();\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 65, "score": 139541.44810045866 }, { "content": "pub fn fullscreen_toggle() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n if let Some(id) = tree.active_id() {\n\n let toggle = !tree.is_fullscreen(id)\n\n .expect(\"Active ID was invalid!\");\n\n tree.set_fullscreen(id, toggle)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Could not set {:?} to fullscreen flag to be {:?}\",\n\n id, toggle);\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 66, "score": 139541.44810045866 }, { "content": "pub fn tile_stacked() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n debug!(\"Layout.SplitStacked()\");\n\n tree.0.set_active_layout(Layout::Stacked).unwrap_or_else(|err| {\n\n warn!(\"Could not tile as stacked: {:?}\", err);\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 67, "score": 139541.44810045866 }, { "content": "/// Register the default commands in the API.\n\n///\n\n/// Some of this code will be moved to be called after the config,\n\n/// and will be registered dynamically.\n\npub fn register_defaults() {\n\n let mut coms = commands::write_lock();\n\n\n\n let mut register = |name: &'static str, val: CommandFn| {\n\n coms.insert(name.to_string(), val);\n\n };\n\n\n\n register(\"way_cooler_quit\", way_cooler_quit);\n\n register(\"print_pointer\", print_pointer);\n\n\n\n register(\"dmenu_eval\", dmenu_eval);\n\n register(\"way_cooler_restart\", way_cooler_restart);\n\n register(\"dmenu_lua_dofile\", dmenu_lua_dofile);\n\n\n\n register(\"noop\", noop);\n\n\n\n /// Generate switch_workspace methods and register them\n\n macro_rules! 
gen_switch_workspace {\n\n ( $($b:ident, $n:expr);+ ) => {\n\n $(fn $b() {\n", "file_path": "src/commands/defaults.rs", "rank": 68, "score": 139541.44810045866 }, { "content": "pub fn split_horizontal() {\n\n debug!(\"Layout.SplitHorizontal()\");\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.0.toggle_active_layout(Layout::Horizontal).ok();\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 69, "score": 139541.44810045866 }, { "content": "pub fn move_active_down() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.move_active(None, Direction::Down)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Could not focus down\");\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 70, "score": 139541.44810045866 }, { "content": "pub fn panic_command() {\n\n panic!(\"panic_command panic\")\n\n}\n\n\n", "file_path": "src/commands/tests.rs", "rank": 71, "score": 139541.44810045866 }, { "content": "pub fn remove_active() {\n\n let mut handle_to_remove = None;\n\n if let Ok(mut tree) = try_lock_tree() {\n\n if let Some(container) = tree.0.get_active_container_mut() {\n\n match *container {\n\n Container::View { handle, .. } => {\n\n handle_to_remove = Some(handle);\n\n // Views shouldn't be removed from tree, that's handled by\n\n // view_destroyed callback\n\n },\n\n _ => {}\n\n }\n\n }\n\n // views have it removed in view_destroyed callback\n\n // container should be removed here though.\n\n if handle_to_remove.is_none() {\n\n if let Err(err) = tree.0.remove_active() {\n\n warn!(\"Could not remove the active container! {:?}\\n{:?}\\n{:?}\",\n\n tree.0.get_active_container(), err, tree.0);\n\n };\n\n }\n\n }\n\n if let Some(handle) = handle_to_remove {\n\n handle.close();\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 72, "score": 139541.44810045866 }, { "content": "pub fn toggle_float() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.toggle_float().ok();\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 73, "score": 139541.44810045866 }, { "content": "pub fn move_active_up() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.move_active(None, Direction::Up)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Could not focus up\");\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 74, "score": 139541.44810045866 }, { "content": "/// Register a new set of key mappings\n\npub fn register(key: KeyPress, event: KeyEvent, passthrough: bool)\n\n -> Option<Action> {\n\n let mut bindings = BINDINGS.write()\n\n .expect(\"Keybindings/register: unable to lock keybindings\");\n\n let action = Action { event, passthrough };\n\n trace!(\"Registering {} for {:?}\", key, action);\n\n bindings.insert(key, action)\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 75, "score": 139361.99866815898 }, { "content": "/// Query & set information about the systray\n\nfn systray<'lua>(_: &'lua Lua, _: ()) -> rlua::Result<(u32, Value)> {\n\n Ok((0, Value::Nil))\n\n}\n\n\n", "file_path": "src/awesome/awesome.rs", "rank": 76, "score": 137932.15817427068 }, { "content": "/// Ensure that the thread that's waiting is synced up with the main thread before\n\n/// continuing either.\n\npub fn sync_scrape() {\n\n SCRAPE_BARRIER.wait();\n\n trace!(\"Screen scraped barrier synced!\")\n\n}\n", "file_path": "src/render/screen_scrape.rs", "rank": 77, "score": 137200.73687212833 }, { "content": "pub fn move_active_right() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.move_active(None, Direction::Right)\n\n .unwrap_or_else(|_| {\n\n 
warn!(\"Could not focus right\");\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 78, "score": 137195.87422746996 }, { "content": "/// Sets the mode to the Custom Lua mode (execute any custom Lua code that\n\n/// the user has defined).\n\npub fn set_custom_mode() {\n\n *write_current_mode() = Modes::CustomLua(CustomLua)\n\n}\n", "file_path": "src/modes/commands.rs", "rank": 79, "score": 137195.87422746996 }, { "content": "pub fn toggle_float_focus() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n if let Err(err) = tree.toggle_floating_focus() {\n\n warn!(\"Could not float focus: {:#?}\", err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 80, "score": 137195.87422746996 }, { "content": "pub fn move_active_left() {\n\n if let Ok(mut tree) = try_lock_tree() {\n\n tree.move_active(None, Direction::Left)\n\n .unwrap_or_else(|_| {\n\n warn!(\"Could not focus left\");\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 81, "score": 137195.87422746996 }, { "content": "/// Sets the mode to the default (don't execute custom Lua code).\n\npub fn set_default_mode() {\n\n *write_current_mode() = Modes::Default(Default)\n\n}\n\n\n", "file_path": "src/modes/commands.rs", "rank": 82, "score": 137195.87422746996 }, { "content": "/// Given the current input, handle calling the Lua defined callback if it is\n\n/// defined with the input.\n\npub fn keygrabber_handle(mods: KeyboardModifiers, sym: Keysym, state: KeyState)\n\n -> rlua::Result<()> {\n\n run_with_lua(move |lua| {\n\n let lua_state = if state == KeyState::Pressed {\n\n \"press\"\n\n } else {\n\n \"release\"\n\n }.into();\n\n let lua_sym = sym.get_name()\n\n .ok_or_else(|| rlua::Error::RuntimeError(\n\n format!(\"Symbol did not have a name: {:#?}\", sym)))?;\n\n let lua_mods = ::lua::mods_to_lua(lua, mods.mods)?;\n\n let res = call_keygrabber(lua, (lua_mods, lua_sym, lua_state));\n\n match res {\n\n Ok(_) | Err(rlua::Error::FromLuaConversionError { .. 
}) => {Ok(())},\n\n err => {\n\n err\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/awesome/keygrabber.rs", "rank": 83, "score": 136944.930208063 }, { "content": "/// Determines if we should build with debug symbols.\n\npub fn debug_enabled() -> bool {\n\n cfg!(not(disable_debug))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 84, "score": 135086.63777979705 }, { "content": "// Reexported in lua/mod.rs:11\n\n/// Attemps to send a LuaQuery to the Lua thread.\n\npub fn send(query: LuaQuery) -> Result<Receiver<LuaResponse>, LuaSendError> {\n\n // Create a response channel\n\n let (response_tx, response_rx) = channel();\n\n match CHANNEL.sender.lock() {\n\n Err(_) => Err(LuaSendError::Sender(query)),\n\n Ok(sender) => {\n\n let message = LuaMessage { reply: response_tx, query: query };\n\n sender.send(message)\n\n .map_err(|e| LuaSendError::Sender(e.0.query))\n\n }\n\n }?;\n\n idle_add_once(|| {\n\n let receiver = CHANNEL.receiver.lock().unwrap();\n\n if let Some(ref receiver) = *receiver {\n\n LUA.with(|lua| {\n\n let lua = &mut *lua.borrow_mut();\n\n for message in receiver.try_iter() {\n\n trace!(\"Handling a request\");\n\n if !handle_message(message, lua) {\n\n MAIN_LOOP.with(|main_loop| main_loop.borrow().quit())\n\n }\n\n }\n\n emit_refresh(lua);\n\n });\n\n }\n\n });\n\n Ok(response_rx)\n\n}\n\n\n", "file_path": "src/lua/thread.rs", "rank": 85, "score": 134913.70673394413 }, { "content": "fn init_workspaces(_: &rlua::Lua, _: rlua::Value) -> Result<(), rlua::Error> {\n\n warn!(\"Attempting to call `init_workspaces`, this is not implemented\");\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lua/rust_interop.rs", "rank": 86, "score": 134420.55760541552 }, { "content": "pub fn tree_as_json() -> Json {\n\n if let Ok(tree) = lock_tree() {\n\n tree.0.to_json()\n\n } else {\n\n Json::Null\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 87, "score": 132741.06390680835 }, { "content": "pub fn command_map() -> ComMap {\n\n let mut map: ComMap = HashMap::new();\n\n map.insert(\"command\".to_string(), command);\n\n map.insert(\"panic_command\".to_string(), panic_command);\n\n map\n\n}\n\n\n", "file_path": "src/commands/tests.rs", "rank": 88, "score": 130530.29312996846 }, { "content": "fn rust_lua_fn(lua: &Lua) -> rlua::Value<'static> {\n\n {\n\n let globals = lua.globals();\n\n let foo = lua.create_table().unwrap();\n\n foo.set(\"bar\", 12.0).unwrap();\n\n globals.set::<String, rlua::Table>(\"foo\".into(), foo).unwrap();\n\n }\n\n let globals = lua.globals();\n\n let maybe_foo = globals.get::<String, rlua::Table>(\"foo\".into());\n\n assert!(maybe_foo.is_ok());\n\n let foo = maybe_foo\n\n .expect(\"asserted maybe_foo.is_some()\");\n\n assert!(foo.get::<String, f64>(\"bar\".into()).is_ok());\n\n rlua::Value::Boolean(true)\n\n}\n", "file_path": "src/lua/tests.rs", "rank": 89, "score": 127698.54829424436 }, { "content": "/// Returns a copy of the action behind the lock.\n\npub fn performing_action() -> Option<Action> {\n\n if let Ok(action) = try_lock_action() {\n\n *action\n\n } else {\n\n warn!(\"Could not lock action mutex!\");\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/layout/commands.rs", "rank": 90, "score": 126693.75880963984 }, { "content": "/// Makes a very basic tree. 
This is sufficient for basic tests.\n\n/// There is only one output,\n\n/// Two workspaces,\n\n/// First workspace has a single view in the root container,\n\n/// second workspace has a container with two views in it\n\n/// (the container is a child of the root container).\n\n///\n\n/// The active container is the only view in the first workspace\n\npub fn basic_tree() -> UnitTestTree {\n\n UnitTestTree::new()\n\n .add_new_output().unwrap()\n\n .add_workspace(\"1\").unwrap()\n\n .add_new_view().unwrap()\n\n .add_workspace(\"2\").unwrap()\n\n .add_new_view().unwrap()\n\n .set_layout(Layout::Horizontal).unwrap()\n\n .add_new_view().unwrap()\n\n .add_workspace(\"1\").unwrap()\n\n}\n\n\n", "file_path": "src/layout/unit_tests/util.rs", "rank": 91, "score": 124603.71914946276 }, { "content": "/// Appends this combination of category and key to the registry queue.\n\npub fn update_registry_value(category: String) {\n\n let mut queue = REGISTRY_QUEUE.write().expect(ERR_LOCK_QUEUE);\n\n queue.push(category);\n\n}\n\n\n", "file_path": "src/lua/thread.rs", "rank": 92, "score": 124601.52545695787 }, { "content": "/// Callback to route wlc logs into env_logger\n\nfn log_handler(level: LogType, message: &str) {\n\n match level {\n\n LogType::Info => info!(\"wlc: {}\", message),\n\n LogType::Warn => warn!(\"wlc: {}\", message),\n\n LogType::Error => error!(\"wlc: {}\", message),\n\n LogType::Wayland => info!(\"wayland: {}\", message)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 93, "score": 123375.48497090644 }, { "content": "pub fn point_to_json(point: Point) -> Json {\n\n let mut map = BTreeMap::new();\n\n map.insert(\"x\".into(), point.x.to_json());\n\n map.insert(\"y\".into(), point.y.to_json());\n\n map.to_json()\n\n}\n\n\n", "file_path": "src/convert/json.rs", "rank": 94, "score": 121254.83181384068 }, { "content": "pub fn geometry_to_json(geometry: Geometry) -> Json {\n\n let mut map = BTreeMap::new();\n\n let origin = point_to_json(geometry.origin);\n\n let size = size_to_json(geometry.size);\n\n map.insert(\"origin\".into(), origin);\n\n map.insert(\"size\".into(), size);\n\n map.to_json()\n\n}\n", "file_path": "src/convert/json.rs", "rank": 95, "score": 121254.83181384068 }, { "content": "/// Sets the value behind the lock to the provided value.\n\n///\n\n/// Note that this method blocks until the lock is released\n\n///\n\n/// None means an action is done being performed.\n\npub fn set_performing_action(val: Option<Action>) {\n\n if let Ok(mut action) = try_lock_action() {\n\n *action = val;\n\n } else {\n\n error!(\"Action mutex was poisoned\");\n\n panic!(\"Action mutex was poisoned\");\n\n }\n\n}\n\n\n\n// TODO Remove all instances of self.0.tree, that should be abstracted in LayoutTree.\n\n\n\n/* These commands are the interface that the rest of Way Cooler has to the\n\n * tree. Any action done, whether through a callback, or from the IPC/Lua thread\n\n * it will have to go through one of these methods.\n\n */\n\n\n\n/// These commands are the interface that the rest of Way Cooler has to the\n\n/// tree. 
Any action done, whether through a callback, or from the IPC/Lua thread\n\n/// it will have to go through one of these methods.\n\n#[allow(dead_code)]\n", "file_path": "src/layout/commands.rs", "rank": 96, "score": 119281.00604678475 }, { "content": "pub fn lock_tree_dbus() -> DBusResult<TreeGuard> {\n\n match lock_tree() {\n\n Ok(tree) => Ok(tree),\n\n Err(err) => Err(MethodErr::failed(&format!(\"{:?}\", err)))\n\n }\n\n}\n\n\n", "file_path": "src/ipc/utils.rs", "rank": 97, "score": 118985.27894451004 }, { "content": "fn set_wallpaper<'lua>(_: &'lua Lua, _pattern: *mut cairo_pattern_t) -> rlua::Result<bool> {\n\n warn!(\"Fake setting the wallpaper\");\n\n Ok(true)\n\n}\n\n\n", "file_path": "src/awesome/root.rs", "rank": 98, "score": 118679.24734329765 } ]
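Several of the context snippets above (notably `send(query: LuaQuery)` from src/lua/thread.rs) rely on the same pattern: every request carries its own one-shot reply channel, so the caller gets back a `Receiver` to wait on without sharing any further state with the worker thread. The sketch below shows that request/reply pattern in isolation using only `std::sync::mpsc`; the `Request` and `spawn_worker` names and the echo behaviour are illustrative assumptions, not Way Cooler's actual types or API.

```
use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread;

// Illustrative request type: each request carries the Sender on which the
// worker should post its reply (the role of the `reply` field in the snippet above).
struct Request {
    query: String,
    reply: Sender<String>,
}

// Spawn a worker that answers each incoming request on its private reply channel.
fn spawn_worker() -> Sender<Request> {
    let (tx, rx): (Sender<Request>, Receiver<Request>) = channel();
    thread::spawn(move || {
        for req in rx {
            // A real worker would evaluate the query (e.g. run Lua code);
            // this sketch just echoes it back.
            let _ = req.reply.send(format!("handled: {}", req.query));
        }
    });
    tx
}

// Mirrors the shape of the `send` helper above: build a one-shot reply channel,
// attach the sending half to the request, and hand the receiver back to the caller.
fn send(worker: &Sender<Request>, query: String) -> Result<Receiver<String>, String> {
    let (reply_tx, reply_rx) = channel();
    worker
        .send(Request { query, reply: reply_tx })
        .map_err(|e| format!("worker is gone: {}", e))?;
    Ok(reply_rx)
}

fn main() {
    let worker = spawn_worker();
    let rx = send(&worker, "ping".to_string()).unwrap();
    println!("{}", rx.recv().unwrap());
}
```

The pattern keeps the worker single-threaded while still letting many callers issue queries concurrently, which appears to be why the snippet above funnels every `LuaQuery` through one global sender.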
Rust
src/lib.rs
LaBatata101/grep_bin
89d41dfbcd4dc9b8d7e174bfe802971ab24eaebf
use clap::{values_t, App, AppSettings, Arg, ArgMatches}; use std::process; use std::{env, path::PathBuf}; use ansi_term::Colour; mod utils; use utils::{file, search}; use crate::utils::{print_hexdump_output, PatternType}; pub fn setup_args<'a>() -> ArgMatches<'a> { let integer_validator = |value: String| match value.parse::<usize>() { Ok(_) => Ok(()), Err(_) => Err(String::from("the value needs to be a valid integer")), }; App::new("grep_bin") .version(clap::crate_version!()) .author(clap::crate_authors!()) .long_about(clap::crate_description!()) .arg( Arg::with_name("FILE") .index(1) .required(true) .multiple(true) .empty_values(false) .help("The file path"), ) .arg( Arg::with_name("PATTERN") .index(2) .required(true) .empty_values(false) .long_help( "Can be a ascii string or a byte sequence. Ascii strings should be passed inside quotes like so '\"This is a string\"' Escaping quotes '\"This is a \\\"quote\\\"\"' All of these byte sequence are valid: f9b4ca, F9B4CA and f9B4Ca", ), ) .arg( Arg::with_name("filetype") .short("f") .multiple(true) .takes_value(true) .empty_values(false) .long_help( "Filter the search by the file extensions. Examples of input: jpg, mp3, exe", ), ) .arg( Arg::with_name("context_bytes_size") .short("c") .default_value("16") .validator(integer_validator) .long_help("Defines the number of bytes that will be printed in each line."), ) .arg( Arg::with_name("print_only") .short("p") .long("print-only") .help("Prints only the file name that contais the match."), ) .arg( Arg::with_name("skip_bytes") .short("s") .long("skip-bytes") .default_value("0") .takes_value(true) .validator(integer_validator) .help("Skip n bytes before searching."), ) .settings(&[AppSettings::ArgRequiredElseHelp, AppSettings::ColoredHelp]) .get_matches() } pub fn run(args: ArgMatches) { let filetypes: Vec<&str> = args.values_of("filetype").unwrap_or_default().collect(); let filepaths = values_t!(args, "FILE", PathBuf).unwrap(); let files: Vec<PathBuf> = if filetypes.is_empty() { file::get_all_files_from_paths(filepaths) } else { file::filter_filetypes(file::get_all_files_from_paths(filepaths), &filetypes) }; let pattern: Vec<u8> = match PatternType::from(args.value_of("PATTERN").unwrap()) { PatternType::Str(pattern) => pattern.into_bytes(), PatternType::HexStr(pattern) => hex::decode(pattern).unwrap_or_else(|error| { eprintln!("Error: {} in byte sequence!", error); process::exit(1); }), }; let context_bytes_size: usize = args .value_of("context_bytes_size") .unwrap() .parse() .unwrap(); let skip_bytes: u64 = args.value_of("skip_bytes").unwrap().parse().unwrap(); for filename in files { let mut searcher = search::Searcher::new(&pattern, context_bytes_size, skip_bytes); let filename = filename.to_str().unwrap(); searcher.search_in_file(filename).unwrap_or_else(|error| { eprintln!("{}: {}", filename, error); process::exit(1); }); let result = searcher.result(); if !result.is_empty() { println!("{}", Colour::Purple.paint(filename)); } if !args.is_present("print_only") { print_hexdump_output(result, searcher.context_bytes_size()); } } }
use clap::{values_t, App, AppSettings, Arg, ArgMatches}; use std::process; use std::{env, path::PathBuf}; use ansi_term::Colour; mod utils; use utils::{file, search}; use crate::utils::{print_hexdump_output, PatternType}; pub fn setup_args<'a>() -> ArgMatches<'a> { let integer_validator = |value: String| match value.parse::<usize>() { Ok(_) => Ok(()), Err(_) => Err(String::from("the value needs to be a valid integer")), }; App::new("grep_bin") .version(clap::crate_version!()) .author(clap::crate_authors!()) .long_about(clap::crate_description!()) .arg( Arg::with_name("FILE") .index(1) .required(true) .multiple(true)
ne."), ) .arg( Arg::with_name("print_only") .short("p") .long("print-only") .help("Prints only the file name that contais the match."), ) .arg( Arg::with_name("skip_bytes") .short("s") .long("skip-bytes") .default_value("0") .takes_value(true) .validator(integer_validator) .help("Skip n bytes before searching."), ) .settings(&[AppSettings::ArgRequiredElseHelp, AppSettings::ColoredHelp]) .get_matches() } pub fn run(args: ArgMatches) { let filetypes: Vec<&str> = args.values_of("filetype").unwrap_or_default().collect(); let filepaths = values_t!(args, "FILE", PathBuf).unwrap(); let files: Vec<PathBuf> = if filetypes.is_empty() { file::get_all_files_from_paths(filepaths) } else { file::filter_filetypes(file::get_all_files_from_paths(filepaths), &filetypes) }; let pattern: Vec<u8> = match PatternType::from(args.value_of("PATTERN").unwrap()) { PatternType::Str(pattern) => pattern.into_bytes(), PatternType::HexStr(pattern) => hex::decode(pattern).unwrap_or_else(|error| { eprintln!("Error: {} in byte sequence!", error); process::exit(1); }), }; let context_bytes_size: usize = args .value_of("context_bytes_size") .unwrap() .parse() .unwrap(); let skip_bytes: u64 = args.value_of("skip_bytes").unwrap().parse().unwrap(); for filename in files { let mut searcher = search::Searcher::new(&pattern, context_bytes_size, skip_bytes); let filename = filename.to_str().unwrap(); searcher.search_in_file(filename).unwrap_or_else(|error| { eprintln!("{}: {}", filename, error); process::exit(1); }); let result = searcher.result(); if !result.is_empty() { println!("{}", Colour::Purple.paint(filename)); } if !args.is_present("print_only") { print_hexdump_output(result, searcher.context_bytes_size()); } } }
.empty_values(false) .help("The file path"), ) .arg( Arg::with_name("PATTERN") .index(2) .required(true) .empty_values(false) .long_help( "Can be a ascii string or a byte sequence. Ascii strings should be passed inside quotes like so '\"This is a string\"' Escaping quotes '\"This is a \\\"quote\\\"\"' All of these byte sequence are valid: f9b4ca, F9B4CA and f9B4Ca", ), ) .arg( Arg::with_name("filetype") .short("f") .multiple(true) .takes_value(true) .empty_values(false) .long_help( "Filter the search by the file extensions. Examples of input: jpg, mp3, exe", ), ) .arg( Arg::with_name("context_bytes_size") .short("c") .default_value("16") .validator(integer_validator) .long_help("Defines the number of bytes that will be printed in each li
random
[ { "content": "pub fn print_hexdump_output(matches: &Matches, bytes_per_line: usize) {\n\n let mut ascii_repr = Vec::new();\n\n\n\n for range in matches.context_bytes_indexes() {\n\n let offset = range.range().start;\n\n print!(\n\n \"{}: \",\n\n Colour::Green.paint(format!(\"{:08X}\", offset - (offset % bytes_per_line)))\n\n );\n\n for i in range.range() {\n\n let byte = matches.get_data(i % matches.data_len());\n\n\n\n if matches.indexes().contains(&i) {\n\n print!(\"{} \", Colour::Red.bold().paint(format!(\"{:02X}\", byte)));\n\n ascii_repr.push(format!(\n\n \"{}\",\n\n Colour::Red.bold().paint(to_ascii_repr(byte).to_string())\n\n ));\n\n } else {\n\n print!(\"{:02X} \", byte);\n", "file_path": "src/utils.rs", "rank": 2, "score": 65408.84756942054 }, { "content": "fn print_ascii_repr(ascii_repr: &[String]) {\n\n print!(\"|\");\n\n for ascii in ascii_repr {\n\n print!(\"{}\", ascii);\n\n }\n\n println!(\"|\");\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 3, "score": 58782.179688217424 }, { "content": "pub fn get_all_files_from_paths(paths: Vec<PathBuf>) -> Vec<PathBuf> {\n\n let mut files = Vec::new();\n\n\n\n for path in paths {\n\n if path.is_dir() {\n\n files.extend(get_all_files_from_dir(path));\n\n } else {\n\n files.push(path);\n\n }\n\n }\n\n\n\n files\n\n}\n\n\n", "file_path": "src/utils/file.rs", "rank": 4, "score": 43779.29890923811 }, { "content": "pub fn filter_filetypes(files: Vec<PathBuf>, filetypes: &[&str]) -> Vec<PathBuf> {\n\n files\n\n .into_iter()\n\n .filter(|path| {\n\n filetypes.contains(\n\n &path\n\n .extension()\n\n .unwrap_or_default()\n\n .to_str()\n\n .unwrap_or_default(),\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/utils/file.rs", "rank": 5, "score": 41309.60491568724 }, { "content": "}\n\n\n\n/* #[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[test]\n\n fn tests_search() {\n\n let text = vec![0x00, 0x01, 0x00, 0xFF, 0xFE, 0x00, 0xA4, 0x00];\n\n // assert_eq!(vec![3..6], search_subslice(&text, &[0xFF, 0xFE, 0x00]));\n\n }\n\n\n\n #[test]\n\n fn test_string_search() {\n\n // assert_eq!(\n\n // vec![0..4, 9..13, 12..16],\n\n // search_subslice(\n\n // &[\n\n // b'A', b'A', b'B', b'A', b'A', b'C', b'A', b'A', b'D', b'A', b'A', b'B', b'A',\n\n // b'A', b'B', b'A'\n\n // ],\n\n // &[b'A', b'A', b'B', b'A']\n\n // )\n\n // )\n\n }\n\n} */\n", "file_path": "src/utils/search.rs", "rank": 6, "score": 31347.942246565726 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Uses the KMP algorithm to search\n\n /// Returns a vector of indexes where the slice pattern starts\n\n pub fn search_slice(src: &[u8], slice: &[u8]) -> Vec<usize> {\n\n let mut match_indexes: Vec<usize> = Vec::new();\n\n\n\n let mut curr_pos_pattern: usize = 0;\n\n let table_of_ocurrencies = Self::compute_toc(slice);\n\n\n\n for (i, &ch) in src.iter().enumerate() {\n\n while curr_pos_pattern > 0 && slice[curr_pos_pattern] != ch {\n\n curr_pos_pattern = table_of_ocurrencies[curr_pos_pattern - 1];\n\n }\n\n\n\n if slice[curr_pos_pattern] == ch {\n\n if curr_pos_pattern == slice.len() - 1 {\n", "file_path": "src/utils/search.rs", "rank": 7, "score": 31347.156268101084 }, { "content": " // The actual bytes for context + the matching bytes\n\n // needed for printing the result\n\n self.data.extend_from_slice(bytes);\n\n }\n\n }\n\n\n\n self.indexes.extend_from_slice(indexes);\n\n }\n\n}\n\n\n\npub struct Searcher<'a> {\n\n pattern: &'a [u8],\n\n matches: Matches,\n\n context_bytes_size: usize,\n\n skip_bytes: u64,\n\n}\n\n\n\nimpl<'a> Searcher<'a> {\n\n const BUFFER_SIZE: usize = 
8192;\n\n\n", "file_path": "src/utils/search.rs", "rank": 8, "score": 31346.910303292803 }, { "content": "use std::collections::BTreeSet;\n\nuse std::fs::File;\n\nuse std::io::{BufReader, Read, Seek, SeekFrom};\n\n\n\nuse super::CustomRange;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Matches {\n\n indexes: Vec<usize>,\n\n context_bytes_indexes: BTreeSet<CustomRange>,\n\n data: Vec<u8>,\n\n context_bytes_size: usize,\n\n}\n\n\n\nimpl Matches {\n\n pub fn new(context_bytes_size: usize) -> Self {\n\n Self {\n\n context_bytes_size,\n\n indexes: Vec::new(),\n\n context_bytes_indexes: BTreeSet::new(),\n", "file_path": "src/utils/search.rs", "rank": 9, "score": 31346.860297451152 }, { "content": " data: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn context_bytes_indexes(&self) -> &BTreeSet<CustomRange> {\n\n &self.context_bytes_indexes\n\n }\n\n\n\n /// Get a reference to the match's index.\n\n pub fn indexes(&self) -> &[usize] {\n\n &self.indexes\n\n }\n\n\n\n /// Get a reference to the match's data.\n\n pub fn get_data(&self, index: usize) -> u8 {\n\n *self.data.get(index).unwrap()\n\n }\n\n\n\n pub fn data_len(&self) -> usize {\n\n self.data.len()\n", "file_path": "src/utils/search.rs", "rank": 10, "score": 31346.02699744445 }, { "content": " }\n\n\n\n if pattern[pos] == pattern[i] {\n\n pos += 1;\n\n table_of_ocurrencies[i] = pos;\n\n }\n\n }\n\n\n\n table_of_ocurrencies\n\n }\n\n\n\n /// Get a reference to the searcher's result.\n\n pub fn result(&self) -> &Matches {\n\n &self.matches\n\n }\n\n\n\n /// Return the context bytes size.\n\n pub fn context_bytes_size(&self) -> usize {\n\n self.context_bytes_size\n\n }\n", "file_path": "src/utils/search.rs", "rank": 11, "score": 31345.97583159571 }, { "content": " pub fn new(pattern: &'a [u8], context_bytes_size: usize, skip_bytes: u64) -> Self {\n\n Self {\n\n pattern,\n\n matches: Matches::new(context_bytes_size),\n\n context_bytes_size,\n\n skip_bytes,\n\n }\n\n }\n\n\n\n pub fn search_in_file(&mut self, filepath: &str) -> std::io::Result<()> {\n\n let mut file = File::open(filepath)?;\n\n let file_size = file.metadata().unwrap().len() as usize;\n\n\n\n let _pos_in_file = file.seek(SeekFrom::Start(self.skip_bytes)).unwrap_or(0) as usize;\n\n let mut reader = BufReader::new(file);\n\n\n\n if file_size < self.context_bytes_size {\n\n self.context_bytes_size = file_size;\n\n }\n\n\n", "file_path": "src/utils/search.rs", "rank": 12, "score": 31345.714079284113 }, { "content": " if file_size <= Self::BUFFER_SIZE {\n\n let mut buffer = Vec::with_capacity(Self::BUFFER_SIZE);\n\n reader.read_to_end(&mut buffer)?;\n\n\n\n let result = Self::search_slice(&buffer, self.pattern);\n\n self.matches.populate_matches(&result, &buffer);\n\n } else {\n\n let mut buffer = [0; Self::BUFFER_SIZE];\n\n loop {\n\n let n = reader.read(&mut buffer).unwrap();\n\n\n\n if n == 0 {\n\n break;\n\n }\n\n\n\n let result = Self::search_slice(&buffer, self.pattern);\n\n self.matches.populate_matches(&result, &buffer);\n\n\n\n // pos_in_file += Self::BUFFER_SIZE;\n\n }\n", "file_path": "src/utils/search.rs", "rank": 13, "score": 31345.631317411233 }, { "content": " }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.data.is_empty() && self.indexes.is_empty() && self.indexes.is_empty()\n\n }\n\n\n\n fn populate_matches(&mut self, indexes: &[usize], buffer: &[u8]) {\n\n for index in indexes {\n\n // index where we should start collecting bytes for context\n\n let offset = index - (index % self.context_bytes_size);\n\n\n\n // Creates the index range for the context bytes.\n\n let 
context_bytes_indexes = if offset + self.context_bytes_size <= buffer.len() {\n\n CustomRange::new(offset..offset + self.context_bytes_size)\n\n } else {\n\n CustomRange::new(offset..buffer.len())\n\n };\n\n\n\n let bytes = &buffer[context_bytes_indexes.range.start..context_bytes_indexes.range.end];\n\n if self.context_bytes_indexes.insert(context_bytes_indexes) {\n", "file_path": "src/utils/search.rs", "rank": 14, "score": 31344.069053216685 }, { "content": " let pos = i - curr_pos_pattern;\n\n match_indexes\n\n .extend_from_slice(&(pos..pos + slice.len()).collect::<Vec<usize>>());\n\n curr_pos_pattern = table_of_ocurrencies[curr_pos_pattern];\n\n } else {\n\n curr_pos_pattern += 1;\n\n }\n\n }\n\n }\n\n\n\n match_indexes\n\n }\n\n\n\n fn compute_toc(pattern: &[u8]) -> Vec<usize> {\n\n let mut table_of_ocurrencies: Vec<usize> = vec![0; pattern.len()];\n\n let mut pos = 0;\n\n\n\n for i in 1..pattern.len() {\n\n while pos > 0 && pattern[i] != pattern[pos] {\n\n pos = table_of_ocurrencies[pos - 1];\n", "file_path": "src/utils/search.rs", "rank": 15, "score": 31343.879958039135 }, { "content": "fn to_ascii_repr(byte: u8) -> char {\n\n let ch = byte as char;\n\n\n\n if ch.is_ascii() && !ch.is_ascii_control() {\n\n ch\n\n } else {\n\n '.'\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_trim_backslash() {\n\n assert_eq!(\n\n PatternType::from(\"\\\"backslash\\\"\"),\n\n PatternType::Str(\"backslash\".to_string())\n\n )\n", "file_path": "src/utils.rs", "rank": 16, "score": 30686.868574433516 }, { "content": "fn strip(src: &str, p: char) -> &str {\n\n if let Some(prefix_striped) = src.strip_prefix(p) {\n\n if let Some(suffix_striped) = prefix_striped.strip_suffix(p) {\n\n suffix_striped\n\n } else {\n\n prefix_striped\n\n }\n\n } else {\n\n src\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 17, "score": 29998.887170925147 }, { "content": "fn get_all_files_from_dir(dir: PathBuf) -> Vec<PathBuf> {\n\n let mut filepaths: Vec<PathBuf> = Vec::new();\n\n\n\n visit_dirs(dir, &mut |file| filepaths.push(file)).unwrap();\n\n\n\n filepaths\n\n}\n\n\n", "file_path": "src/utils/file.rs", "rank": 18, "score": 25461.485221023286 }, { "content": "fn visit_dirs(dir: PathBuf, cb: &mut dyn FnMut(PathBuf)) -> std::io::Result<()> {\n\n if dir.is_dir() {\n\n for entry in std::fs::read_dir(dir)? 
{\n\n let entry = entry?;\n\n let path = entry.path();\n\n if path.is_dir() {\n\n visit_dirs(path, cb)?;\n\n } else {\n\n cb(entry.path());\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/utils/file.rs", "rank": 19, "score": 25357.360137046056 }, { "content": "fn main() {\n\n let args = setup_args();\n\n run(args);\n\n}\n", "file_path": "src/main.rs", "rank": 20, "score": 24668.665702920083 }, { "content": "use ansi_term::Colour;\n\nuse std::{io::Write, ops::Range};\n\n\n\nuse self::search::Matches;\n\n\n\npub mod file;\n\npub mod search;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct CustomRange {\n\n range: Range<usize>,\n\n}\n\n\n\nimpl CustomRange {\n\n pub fn new(range: Range<usize>) -> Self {\n\n Self { range }\n\n }\n\n\n\n pub fn range(&self) -> Range<usize> {\n\n self.range.clone()\n", "file_path": "src/utils.rs", "rank": 21, "score": 14328.470228325068 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum PatternType {\n\n Str(String),\n\n HexStr(String),\n\n}\n\n\n\nimpl<'a> From<&'a str> for PatternType {\n\n fn from(pattern: &'a str) -> Self {\n\n if pattern.starts_with('\"') && pattern.ends_with('\"') {\n\n let quote_striped = strip(pattern, '\"');\n\n\n\n if quote_striped.starts_with(\"\\\\\\\"\") && quote_striped.ends_with(\"\\\\\\\"\") {\n\n return PatternType::Str(quote_striped.chars().filter(|&c| c != '\\\\').collect());\n\n }\n\n PatternType::Str(quote_striped.to_string())\n\n } else {\n\n PatternType::HexStr(pattern.to_string())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 22, "score": 14323.359826359623 }, { "content": " }\n\n\n\n #[test]\n\n fn test_trim_backslash_with_quotes() {\n\n assert_eq!(\n\n PatternType::from(\"\\\"\\\\\\\"backslash with quote\\\\\\\"\\\"\"),\n\n PatternType::Str(\"\\\"backslash with quote\\\"\".to_string())\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_strip() {\n\n assert_eq!(\n\n strip(\"\\\"\\\"remove only one quote\\\"\\\"\", '\"'),\n\n \"\\\"remove only one quote\\\"\"\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_is_hex() {\n\n assert_eq!(\n\n PatternType::from(\"eeffgg\"),\n\n PatternType::HexStr(\"eeffgg\".to_string())\n\n )\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 23, "score": 14321.997041406627 }, { "content": " ascii_repr.push(to_ascii_repr(byte).to_string());\n\n }\n\n\n\n if bytes_per_line >= 8 && (i + 1) % 8 == 0 {\n\n print!(\" \");\n\n }\n\n\n\n if (i + 1) % bytes_per_line == 0 {\n\n print_ascii_repr(&ascii_repr);\n\n ascii_repr.clear();\n\n }\n\n }\n\n }\n\n\n\n // fix ascii column alignment\n\n if !ascii_repr.is_empty() {\n\n let total_chars_in_line = bytes_per_line * 3 + 2;\n\n let total_chars_bytes_printed = if ascii_repr.len() > 8 {\n\n ascii_repr.len() * 3 + 1\n\n } else {\n", "file_path": "src/utils.rs", "rank": 24, "score": 14321.473933082943 }, { "content": " ascii_repr.len() * 3\n\n };\n\n let total_spaces_to_print = total_chars_in_line - total_chars_bytes_printed;\n\n\n\n for _ in 0..total_spaces_to_print {\n\n print!(\" \");\n\n }\n\n print_ascii_repr(&ascii_repr);\n\n }\n\n\n\n std::io::stdout().flush().unwrap();\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 25, "score": 14320.38342163913 }, { "content": " }\n\n}\n\n\n\nimpl Eq for CustomRange {}\n\n\n\nimpl PartialEq for CustomRange {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.range.start == other.range.start && self.range.end == other.range.end\n\n }\n\n}\n\n\n\nimpl PartialOrd for CustomRange {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n Some(self.cmp(other))\n\n 
}\n\n}\n\n\n\nimpl Ord for CustomRange {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n self.range.start.cmp(&other.range.start)\n", "file_path": "src/utils.rs", "rank": 26, "score": 14320.38342163913 }, { "content": "use std::path::PathBuf;\n\n\n", "file_path": "src/utils/file.rs", "rank": 27, "score": 13475.905728502134 }, { "content": "use grep_bin::{run, setup_args};\n\n\n", "file_path": "src/main.rs", "rank": 31, "score": 4.037558357087617 }, { "content": "### Help\n\n```\n\n$ grep_bin -h\n\n\n\nLaBatata101 <labatata101@linuxmail.org>\n\nSearches recursively a directory or multiple files for a sequence of bytes or ASCII string.\n\n\n\nUSAGE:\n\n grep_bin [FLAGS] [OPTIONS] <FILE>... <PATTERN>\n\n\n\nFLAGS:\n\n -h, --help \n\n Prints help information\n\n\n\n -p, --print-only \n\n Prints only the file name that contais the match.\n\n\n\n -V, --version \n\n Prints version information\n\n\n\n\n\nOPTIONS:\n\n -c <context_bytes_size> \n\n Defines the number of bytes that will be printed in each line. [default: 16]\n\n\n\n -f <filetype>... \n\n Filter the search by the file extensions.\n\n Examples of input: jpg, mp3, exe\n\n -s, --skip-bytes <skip_bytes> \n\n Skip n bytes before searching. [default: 0]\n\n\n\n\n\nARGS:\n\n <FILE>... \n\n The file path\n\n\n\n <PATTERN> \n\n Can be a ascii string or a byte sequence.\n\n Ascii strings should be passed inside quotes like so '\"This is a string\"'\n\n Escaping quotes '\"This is a \\\"quote\\\"\"'\n\n All of these byte sequence are valid: f9b4ca, F9B4CA and f9B4Ca\n\n```\n\n\n\n# Building Manually\n\n## Dependencies\n\n- rustc(latest version)\n\n- cargo\n\n\n\n`$ git clone https://github.com/LaBatata101/grep_bin`\n\n\n\n`$ cd grep_bin/`\n\n\n\n`$ cargo build --release`\n\n\n\nThe final binary will be in `target/release/`\n\n\n\n# Installing with Cargo\n\n`cargo install grep_bin`\n", "file_path": "README.md", "rank": 32, "score": 4.032438810059784 }, { "content": "# grep_bin\n\n`grep_bin` can search recursively a directory or multiple files for a sequence of bytes or ascii string.\n\n\n\n## Usage\n\n### Searching for a byte sequence in a file\n\n`$ grep_bin test.bin fffe`\n\n\n\n### Searching recursively a directory for a byte sequence\n\n`$ grep_bin ~/Downloads FFFE`\n\n\n\n### Filtering the filetypes\n\n`$ grep_bin ~/Downloads FFfe0000 -f mp3 `\n\n\n\n### Search for an ASCII string inside the binary\n\n`$ grep_bin test.bin '\"Hello World\"'`\n\n\n\nSearch for an ASCII string with quotes included: `$ grep_bin test.bin '\"This is a \\\"quote\\\"\"'`\n\n\n\n### Search a byte sequence in multiple files\n\n`$ grep_bin test1.bin test2.bin fFFe`\n\n\n\n### Specify the number of bytes per line in the output\n\n`$ grep_bin README.md \"information\" -c 32`\n\n\n\nOutput:\n\n<pre>\n\nREADME.md\n\n00000320: 73 20 68 65 6C 70 20 <b>69 6E 66 6F 72 6D 61 74 69 6F 6E</b> 0A 0A 20 20 20 20 2D 56 2C 20 2D 2D 76 65 |s help <b>information</b>.. -V, --ve|\n\n00000360: 73 69 6F 6E 20 <b>69 6E 66 6F 72 6D 61 74 69 6F 6E</b> 0A 0A 0A 4F 50 54 49 4F 4E 53 3A 0A 20 20 20 20 |sion <b>information</b>...OPTIONS:. |\n\n</pre>\n\n* the characters in bold represent the colored match\n", "file_path": "README.md", "rank": 34, "score": 3.311337227577165 } ]
Rust
examples/demo/main.rs
cohaereo/egui_glfw_gl
c2d244eaecffc06d6010d6aa6193c24757e31a23
use egui_glfw_gl as egui_backend; use std::time::Instant; use egui_backend::egui::{vec2, Color32, Image, Pos2, Rect}; use egui_glfw_gl::glfw::{Context, Key}; use glfw::Action; const SCREEN_WIDTH: u32 = 800; const SCREEN_HEIGHT: u32 = 600; const PIC_WIDTH: i32 = 320; const PIC_HEIGHT: i32 = 192; mod triangle; fn main() { let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap(); glfw.window_hint(glfw::WindowHint::ContextVersion(3, 2)); glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core)); glfw.window_hint(glfw::WindowHint::DoubleBuffer(true)); glfw.window_hint(glfw::WindowHint::Resizable(false)); let (mut window, events) = glfw.create_window(SCREEN_WIDTH, SCREEN_HEIGHT, "Egui in GLFW!", glfw::WindowMode::Windowed) .expect("Failed to create GLFW window."); window.set_char_polling(true); window.set_cursor_pos_polling(true); window.set_key_polling(true); window.set_mouse_button_polling(true); window.make_current(); glfw.set_swap_interval(glfw::SwapInterval::Sync(1)); gl::load_with(|symbol| window.get_proc_address(symbol) as *const _); let mut painter = egui_backend::Painter::new(&mut window, SCREEN_WIDTH, SCREEN_HEIGHT); let mut egui_ctx = egui::CtxRef::default(); let (width, height) = window.get_framebuffer_size(); let native_pixels_per_point = window.get_content_scale().0; let mut egui_input_state = egui_backend::EguiInputState::new(egui::RawInput { screen_rect: Some(Rect::from_min_size( Pos2::new(0f32, 0f32), vec2(width as f32, height as f32) / native_pixels_per_point, )), pixels_per_point: Some(native_pixels_per_point), ..Default::default() }); let start_time = Instant::now(); let mut srgba: Vec<Color32> = Vec::new(); for _ in 0..PIC_HEIGHT { for _ in 0..PIC_WIDTH { srgba.push(Color32::BLACK); } } let plot_tex_id = painter.new_user_texture((PIC_WIDTH as usize, PIC_HEIGHT as usize), &srgba, false); let mut sine_shift = 0f32; let mut amplitude: f32 = 50f32; let mut test_str: String = "A text box to write in. 
Cut, copy, paste commands are available.".to_owned(); let triangle = triangle::Triangle::new(); let mut quit = false; while !window.should_close() { egui_input_state.input.time = Some(start_time.elapsed().as_secs_f64()); egui_ctx.begin_frame(egui_input_state.input.take()); egui_input_state.input.pixels_per_point = Some(native_pixels_per_point); unsafe { gl::ClearColor(0.455, 0.302, 0.663, 1.0); gl::Clear(gl::COLOR_BUFFER_BIT); } triangle.draw(); let mut srgba: Vec<Color32> = Vec::new(); let mut angle = 0f32; for y in 0..PIC_HEIGHT { for x in 0..PIC_WIDTH { srgba.push(Color32::BLACK); if y == PIC_HEIGHT - 1 { let y = amplitude * (angle * 3.142f32 / 180f32 + sine_shift).sin(); let y = PIC_HEIGHT as f32 / 2f32 - y; srgba[(y as i32 * PIC_WIDTH + x) as usize] = Color32::YELLOW; angle += 360f32 / PIC_WIDTH as f32; } } } sine_shift += 0.1f32; painter.update_user_texture_data(plot_tex_id, &srgba); egui::Window::new("Egui with GLFW").show(&egui_ctx, |ui| { ui.add(Image::new(plot_tex_id, vec2(PIC_WIDTH as f32, PIC_HEIGHT as f32))); ui.separator(); ui.label("A simple sine wave plotted onto a GL texture then blitted to an egui managed Image."); ui.label(" "); ui.text_edit_multiline(&mut test_str); ui.label(" "); ui.add(egui::Slider::new(&mut amplitude, 0.0..=50.0).text("Amplitude")); ui.label(" "); if ui.button("Quit").clicked() { quit = true; } }); let (egui_output, paint_cmds) = egui_ctx.end_frame(); if !egui_output.copied_text.is_empty() { egui_backend::copy_to_clipboard(&mut egui_input_state, egui_output.copied_text); } let paint_jobs = egui_ctx.tessellate(paint_cmds); painter.paint_jobs( None, paint_jobs, &egui_ctx.texture(), native_pixels_per_point, ); for (_, event) in glfw::flush_messages(&events) { match event { glfw::WindowEvent::Close => window.set_should_close(true), _ => { egui_backend::handle_event(event, &mut egui_input_state); } } } window.swap_buffers(); glfw.poll_events(); if quit { break; } } }
use egui_glfw_gl as egui_backend; use std::time::Instant; use egui_backend::egui::{vec2, Color32, Image, Pos2, Rect}; use egui_glfw_gl::glfw::{Context, Key}; use glfw::Action; const SCREEN_WIDTH: u32 = 800; const SCREEN_HEIGHT: u32 = 600; const PIC_WIDTH: i32 = 320; const PIC_HEIGHT: i32 = 192; mod triangle; fn main() { let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap(); glfw.window_hint(glfw::WindowHint::ContextVersion(3, 2)); glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core)); glfw.window_hint(glfw::WindowHint::DoubleBuffer(true)); glfw.window_hint(glfw::WindowHint::Resizable(false)); let (mut window, events) = glfw.create_window(SCREEN_WIDTH, SCREEN_HEIGHT, "Egui in GLFW!", glfw::WindowMode::Windowed) .expect("Failed to create GLFW window."); window.set_char_polling(true); window.set_cursor_pos_polling(true); window.set_key_polling(true); window.set_mouse_button_polling(true); window.make_current(); glfw.set_swap_interval(glfw::SwapInterval::Sync(1)); gl::load_with(|symbol| window.get_proc_address(symbol) as *const _); let mut painter = egui_backend::Painter::new(&mut window, SCREEN_WIDTH, SCREEN_HEIGHT); let mut egui_ctx = egui::CtxRef::default(); let (width, height) = window.get_framebuffer_size(); let native_pixels_per_point = window.get_content_scale().0; let mut egui_input_state = egui_backend::EguiInputState::new(egui::RawInput { screen_rect:
, pixels_per_point: Some(native_pixels_per_point), ..Default::default() }); let start_time = Instant::now(); let mut srgba: Vec<Color32> = Vec::new(); for _ in 0..PIC_HEIGHT { for _ in 0..PIC_WIDTH { srgba.push(Color32::BLACK); } } let plot_tex_id = painter.new_user_texture((PIC_WIDTH as usize, PIC_HEIGHT as usize), &srgba, false); let mut sine_shift = 0f32; let mut amplitude: f32 = 50f32; let mut test_str: String = "A text box to write in. Cut, copy, paste commands are available.".to_owned(); let triangle = triangle::Triangle::new(); let mut quit = false; while !window.should_close() { egui_input_state.input.time = Some(start_time.elapsed().as_secs_f64()); egui_ctx.begin_frame(egui_input_state.input.take()); egui_input_state.input.pixels_per_point = Some(native_pixels_per_point); unsafe { gl::ClearColor(0.455, 0.302, 0.663, 1.0); gl::Clear(gl::COLOR_BUFFER_BIT); } triangle.draw(); let mut srgba: Vec<Color32> = Vec::new(); let mut angle = 0f32; for y in 0..PIC_HEIGHT { for x in 0..PIC_WIDTH { srgba.push(Color32::BLACK); if y == PIC_HEIGHT - 1 { let y = amplitude * (angle * 3.142f32 / 180f32 + sine_shift).sin(); let y = PIC_HEIGHT as f32 / 2f32 - y; srgba[(y as i32 * PIC_WIDTH + x) as usize] = Color32::YELLOW; angle += 360f32 / PIC_WIDTH as f32; } } } sine_shift += 0.1f32; painter.update_user_texture_data(plot_tex_id, &srgba); egui::Window::new("Egui with GLFW").show(&egui_ctx, |ui| { ui.add(Image::new(plot_tex_id, vec2(PIC_WIDTH as f32, PIC_HEIGHT as f32))); ui.separator(); ui.label("A simple sine wave plotted onto a GL texture then blitted to an egui managed Image."); ui.label(" "); ui.text_edit_multiline(&mut test_str); ui.label(" "); ui.add(egui::Slider::new(&mut amplitude, 0.0..=50.0).text("Amplitude")); ui.label(" "); if ui.button("Quit").clicked() { quit = true; } }); let (egui_output, paint_cmds) = egui_ctx.end_frame(); if !egui_output.copied_text.is_empty() { egui_backend::copy_to_clipboard(&mut egui_input_state, egui_output.copied_text); } let paint_jobs = egui_ctx.tessellate(paint_cmds); painter.paint_jobs( None, paint_jobs, &egui_ctx.texture(), native_pixels_per_point, ); for (_, event) in glfw::flush_messages(&events) { match event { glfw::WindowEvent::Close => window.set_should_close(true), _ => { egui_backend::handle_event(event, &mut egui_input_state); } } } window.swap_buffers(); glfw.poll_events(); if quit { break; } } }
Some(Rect::from_min_size( Pos2::new(0f32, 0f32), vec2(width as f32, height as f32) / native_pixels_per_point, ))
call_expression
[ { "content": "pub fn handle_event(event: glfw::WindowEvent, state: &mut EguiInputState) {\n\n use glfw::WindowEvent::*;\n\n\n\n match event {\n\n FramebufferSize(width, height) => {\n\n state.input.screen_rect = Some(Rect::from_min_size(\n\n Pos2::new(0f32, 0f32),\n\n egui::vec2(width as f32, height as f32) / state.input.pixels_per_point.unwrap(),\n\n ))\n\n }\n\n\n\n MouseButton (mouse_btn, glfw::Action::Press, _) => state.input.events.push(egui::Event::PointerButton {\n\n pos: state.pointer_pos,\n\n button: match mouse_btn {\n\n glfw::MouseButtonLeft => egui::PointerButton::Primary,\n\n glfw::MouseButtonRight => egui::PointerButton::Secondary,\n\n glfw::MouseButtonMiddle => egui::PointerButton::Middle,\n\n _ => unreachable!(),\n\n },\n\n pressed: true,\n", "file_path": "src/lib.rs", "rank": 0, "score": 116156.58114819843 }, { "content": "pub fn translate_virtual_key_code(key: glfw::Key) -> Option<egui::Key> {\n\n use glfw::Key::*;\n\n\n\n Some(match key {\n\n Left => Key::ArrowLeft,\n\n Up => Key::ArrowUp,\n\n Right => Key::ArrowRight,\n\n Down => Key::ArrowDown,\n\n\n\n Escape => Key::Escape,\n\n Tab => Key::Tab,\n\n Backspace => Key::Backspace,\n\n Space => Key::Space,\n\n\n\n Enter => Key::Enter,\n\n\n\n Insert => Key::Insert,\n\n Home => Key::Home,\n\n Delete => Key::Delete,\n\n End => Key::End,\n", "file_path": "src/lib.rs", "rank": 1, "score": 104497.68137386147 }, { "content": "pub fn translate_cursor(cursor_icon: egui::CursorIcon) -> glfw::StandardCursor {\n\n match cursor_icon {\n\n CursorIcon::Default => glfw::StandardCursor::Arrow,\n\n CursorIcon::PointingHand => glfw::StandardCursor::Hand,\n\n CursorIcon::ResizeHorizontal => glfw::StandardCursor::HResize,\n\n CursorIcon::ResizeVertical => glfw::StandardCursor::VResize,\n\n // TODO: GLFW doesnt have these specific resize cursors, so we'll just use the HResize and VResize ones instead\n\n CursorIcon::ResizeNeSw => glfw::StandardCursor::HResize,\n\n CursorIcon::ResizeNwSe => glfw::StandardCursor::VResize,\n\n CursorIcon::Text => glfw::StandardCursor::IBeam,\n\n CursorIcon::Crosshair => glfw::StandardCursor::Crosshair,\n\n // TODO: Same for these\n\n CursorIcon::NotAllowed | CursorIcon::NoDrop => glfw::StandardCursor::Arrow,\n\n CursorIcon::Wait => glfw::StandardCursor::Arrow,\n\n CursorIcon::Grab | CursorIcon::Grabbing => glfw::StandardCursor::Hand,\n\n\n\n _ => glfw::StandardCursor::Arrow,\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 62702.49822667513 }, { "content": "pub fn copy_to_clipboard(egui_state: &mut EguiInputState, copy_text: String) {\n\n if let Some(clipboard) = egui_state.clipboard.as_mut() {\n\n let result = clipboard.set_contents(copy_text);\n\n if result.is_err() {\n\n dbg!(\"Unable to set clipboard content.\");\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 4, "score": 58093.17643941076 }, { "content": "pub fn compile_shader(src: &str, ty: GLenum) -> GLuint {\n\n let shader;\n\n unsafe {\n\n shader = gl::CreateShader(ty);\n\n // Attempt to compile the shader\n\n let c_str = CString::new(src.as_bytes()).unwrap();\n\n gl::ShaderSource(shader, 1, &c_str.as_ptr(), ptr::null());\n\n gl::CompileShader(shader);\n\n\n\n // Get the compile status\n\n let mut status = gl::FALSE as GLint;\n\n gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);\n\n\n\n // Fail on error\n\n if status != (gl::TRUE as GLint) {\n\n let mut len = 0;\n\n gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buf = Vec::with_capacity(len as usize);\n\n buf.set_len((len as usize) - 1); // subtract 
1 to skip the trailing null character\n\n gl::GetShaderInfoLog(\n", "file_path": "examples/demo/triangle.rs", "rank": 5, "score": 30312.63371070995 }, { "content": "pub fn compile_shader(src: &str, ty: GLenum) -> GLuint {\n\n let shader;\n\n unsafe {\n\n shader = gl::CreateShader(ty);\n\n // Attempt to compile the shader\n\n let c_str = CString::new(src.as_bytes()).unwrap();\n\n gl::ShaderSource(shader, 1, &c_str.as_ptr(), ptr::null());\n\n gl::CompileShader(shader);\n\n\n\n // Get the compile status\n\n let mut status = gl::FALSE as GLint;\n\n gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);\n\n\n\n // Fail on error\n\n if status != (gl::TRUE as GLint) {\n\n let mut len = 0;\n\n gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buf = Vec::with_capacity(len as usize);\n\n buf.set_len((len as usize) - 1); // subtract 1 to skip the trailing null character\n\n gl::GetShaderInfoLog(\n", "file_path": "src/painter.rs", "rank": 6, "score": 29907.553697714487 }, { "content": "pub fn link_program(vs: GLuint, fs: GLuint) -> GLuint {\n\n unsafe {\n\n let program = gl::CreateProgram();\n\n gl::AttachShader(program, vs);\n\n gl::AttachShader(program, fs);\n\n gl::LinkProgram(program);\n\n // Get the link status\n\n let mut status = gl::FALSE as GLint;\n\n gl::GetProgramiv(program, gl::LINK_STATUS, &mut status);\n\n\n\n // Fail on error\n\n if status != (gl::TRUE as GLint) {\n\n let mut len: GLint = 0;\n\n gl::GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buf = Vec::with_capacity(len as usize);\n\n buf.set_len((len as usize) - 1); // subtract 1 to skip the trailing null character\n\n gl::GetProgramInfoLog(\n\n program,\n\n len,\n\n ptr::null_mut(),\n", "file_path": "examples/demo/triangle.rs", "rank": 7, "score": 29365.686117957448 }, { "content": "pub fn link_program(vs: GLuint, fs: GLuint) -> GLuint {\n\n unsafe {\n\n let program = gl::CreateProgram();\n\n gl::AttachShader(program, vs);\n\n gl::AttachShader(program, fs);\n\n gl::LinkProgram(program);\n\n // Get the link status\n\n let mut status = gl::FALSE as GLint;\n\n gl::GetProgramiv(program, gl::LINK_STATUS, &mut status);\n\n\n\n // Fail on error\n\n if status != (gl::TRUE as GLint) {\n\n let mut len: GLint = 0;\n\n gl::GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buf = Vec::with_capacity(len as usize);\n\n buf.set_len((len as usize) - 1); // subtract 1 to skip the trailing null character\n\n gl::GetProgramInfoLog(\n\n program,\n\n len,\n\n ptr::null_mut(),\n", "file_path": "src/painter.rs", "rank": 8, "score": 28938.2995681929 }, { "content": "# egui_glfw_gl\n\n[![Latest version](https://img.shields.io/crates/v/egui_glfw_gl.svg)](https://crates.io/crates/egui_glfw_gl)\n\n![MIT](https://img.shields.io/badge/license-MIT-blue.svg)\n\n\n\n![Example screenshot](/media/screenshot.png)\n\n\n\nThis is a backend implementation for [Egui](https://github.com/emilk/egui) that can be used with Rust bindings for [GLFW](https://github.com/PistonDevelopers/glfw-rs) and [OpenGL](https://github.com/brendanzab/gl-rs).\n\n\n\n## Example\n\nI have made an example to demonstrate the usage of egui_glfw_gl. 
To run the example, run the following:\n\n```\n\ncargo run --example demo\n\n```\n\n\n\n## Credits\n", "file_path": "README.md", "rank": 9, "score": 23477.73219079344 }, { "content": "pub fn init_clipboard() -> Option<ClipboardContext> {\n\n match ClipboardContext::new() {\n\n Ok(clipboard) => Some(clipboard),\n\n Err(err) => {\n\n eprintln!(\"Failed to initialize clipboard: {}\", err);\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 22838.4669529257 }, { "content": "// Draws a simple white triangle\n\n// based on the example from:\n\n// https://github.com/brendanzab/gl-rs/blob/master/gl/examples/triangle.rs\n\n\n\nuse egui_glfw_gl::gl;\n\nuse egui_glfw_gl::gl::types::*;\n\nuse std::ffi::CString;\n\nuse std::mem;\n\nuse std::ptr;\n\nuse std::str;\n\n\n\nconst VS_SRC: &'static str = \"\n\n#version 150\n\nin vec2 position;\n\n\n\nvoid main() {\n\n gl_Position = vec4(position, 0.0, 1.0);\n\n}\";\n\n\n\nconst FS_SRC: &'static str = \"\n", "file_path": "examples/demo/triangle.rs", "rank": 18, "score": 20406.552545008548 }, { "content": " gl::GenVertexArrays(1, &mut vao);\n\n gl::GenBuffers(1, &mut vbo);\n\n }\n\n Triangle {\n\n // Create GLSL shaders\n\n vs,\n\n fs,\n\n program,\n\n vao,\n\n vbo,\n\n }\n\n }\n\n pub fn draw(&self) {\n\n unsafe {\n\n gl::BindVertexArray(self.vao);\n\n\n\n // Create a Vertex Buffer Object and copy the vertex data to it\n\n\n\n gl::BindBuffer(gl::ARRAY_BUFFER, self.vbo);\n\n gl::BufferData(\n", "file_path": "examples/demo/triangle.rs", "rank": 20, "score": 20402.49276314635 }, { "content": " buf.as_mut_ptr() as *mut GLchar,\n\n );\n\n panic!(\n\n \"{}\",\n\n str::from_utf8(&buf).expect(\"ProgramInfoLog not valid utf8\")\n\n );\n\n }\n\n program\n\n }\n\n}\n\n\n\nimpl Triangle {\n\n pub fn new() -> Self {\n\n // Create Vertex Array Object\n\n let mut vao = 0;\n\n let mut vbo = 0;\n\n let vs = compile_shader(VS_SRC, gl::VERTEX_SHADER);\n\n let fs = compile_shader(FS_SRC, gl::FRAGMENT_SHADER);\n\n let program = link_program(vs, fs);\n\n unsafe {\n", "file_path": "examples/demo/triangle.rs", "rank": 21, "score": 20402.063899536057 }, { "content": " 0,\n\n ptr::null(),\n\n );\n\n\n\n // Draw a triangle from the 3 vertices\n\n gl::DrawArrays(gl::TRIANGLES, 0, 3);\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Triangle {\n\n fn drop(&mut self) {\n\n unsafe {\n\n gl::DeleteProgram(self.program);\n\n gl::DeleteShader(self.fs);\n\n gl::DeleteShader(self.vs);\n\n gl::DeleteBuffers(1, &self.vbo);\n\n gl::DeleteVertexArrays(1, &self.vao);\n\n }\n\n }\n\n}\n", "file_path": "examples/demo/triangle.rs", "rank": 22, "score": 20400.989582339633 }, { "content": "#version 150\n\nout vec4 out_color;\n\n\n\nvoid main() {\n\n out_color = vec4(1.0, 1.0, 1.0, 1.0);\n\n}\";\n\n\n\nstatic VERTEX_DATA: [GLfloat; 6] = [0.0, 0.5, 0.5, -0.5, -0.5, -0.5];\n\n\n\npub struct Triangle {\n\n pub vs: GLuint,\n\n pub fs: GLuint,\n\n pub program: GLuint,\n\n pub vao: GLuint,\n\n pub vbo: GLuint,\n\n}\n\n\n", "file_path": "examples/demo/triangle.rs", "rank": 23, "score": 20400.28290670714 }, { "content": " shader,\n\n len,\n\n ptr::null_mut(),\n\n buf.as_mut_ptr() as *mut GLchar,\n\n );\n\n panic!(\n\n \"{}\",\n\n str::from_utf8(&buf).expect(\"ShaderInfoLog not valid utf8\")\n\n );\n\n }\n\n }\n\n shader\n\n}\n\n\n", "file_path": "examples/demo/triangle.rs", "rank": 24, "score": 20399.554575559505 }, { "content": " gl::ARRAY_BUFFER,\n\n (VERTEX_DATA.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,\n\n mem::transmute(&VERTEX_DATA[0]),\n\n gl::STATIC_DRAW,\n\n );\n\n\n\n // Use 
shader program\n\n gl::UseProgram(self.program);\n\n let c_out_color = CString::new(\"out_color\").unwrap();\n\n gl::BindFragDataLocation(self.program, 0, c_out_color.as_ptr());\n\n\n\n // Specify the layout of the vertex data\n\n let c_position = CString::new(\"position\").unwrap();\n\n let pos_attr = gl::GetAttribLocation(self.program, c_position.as_ptr());\n\n gl::EnableVertexAttribArray(pos_attr as GLuint);\n\n gl::VertexAttribPointer(\n\n pos_attr as GLuint,\n\n 2,\n\n gl::FLOAT,\n\n gl::FALSE as GLboolean,\n", "file_path": "examples/demo/triangle.rs", "rank": 25, "score": 20398.49573591609 }, { "content": " buf.as_mut_ptr() as *mut GLchar,\n\n );\n\n panic!(\n\n \"{}\",\n\n str::from_utf8(&buf).expect(\"ProgramInfoLog not valid utf8\")\n\n );\n\n }\n\n program\n\n }\n\n}\n\n\n\nimpl Painter {\n\n pub fn new(\n\n window: &mut glfw::Window,\n\n canvas_width: u32,\n\n canvas_height: u32,\n\n ) -> Painter {\n\n unsafe {\n\n let mut egui_texture = 0;\n\n gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);\n", "file_path": "src/painter.rs", "rank": 26, "score": 19434.9311281686 }, { "content": " gl::GenBuffers(1, &mut tc_buffer);\n\n gl::GenBuffers(1, &mut color_buffer);\n\n\n\n Painter {\n\n vertex_array,\n\n program,\n\n canvas_width,\n\n canvas_height,\n\n index_buffer,\n\n pos_buffer,\n\n tc_buffer,\n\n color_buffer,\n\n egui_texture,\n\n vert_shader,\n\n frag_shader,\n\n egui_texture_version: None,\n\n user_textures: Default::default(),\n\n }\n\n }\n\n }\n", "file_path": "src/painter.rs", "rank": 27, "score": 19428.214535701107 }, { "content": "\n\n void main() {\n\n // Need to convert from SRGBA to linear.\n\n vec4 texture_rgba = linear_from_srgba(texture(u_sampler, v_tc) * 255.0);\n\n f_color = v_rgba * texture_rgba;\n\n }\n\n\"#;\n\n\n\npub struct Painter {\n\n vertex_array: GLuint,\n\n program: GLuint,\n\n index_buffer: GLuint,\n\n pos_buffer: GLuint,\n\n tc_buffer: GLuint,\n\n color_buffer: GLuint,\n\n canvas_width: u32,\n\n canvas_height: u32,\n\n egui_texture: GLuint,\n\n egui_texture_version: Option<u64>,\n\n vert_shader: GLuint,\n\n frag_shader: GLuint,\n\n user_textures: Vec<UserTexture>,\n\n}\n\n\n", "file_path": "src/painter.rs", "rank": 28, "score": 19427.658604989527 }, { "content": "extern crate gl;\n\nextern crate glfw;\n\nuse gl::types::*;\n\nuse std::ffi::CString;\n\nuse std::mem;\n\nuse std::os::raw::c_void;\n\nuse std::ptr;\n\nuse std::str;\n\n\n\nuse egui::{\n\n paint::{Color32, Mesh, Texture},\n\n vec2, ClippedMesh,\n\n};\n\n\n\n#[derive(Default)]\n", "file_path": "src/painter.rs", "rank": 29, "score": 19427.507605924435 }, { "content": " }\n\n\n\n unsafe {\n\n gl::BindTexture(gl::TEXTURE_2D, self.egui_texture);\n\n\n\n let level = 0;\n\n let internal_format = gl::RGBA;\n\n let border = 0;\n\n let src_format = gl::RGBA;\n\n let src_type = gl::UNSIGNED_BYTE;\n\n gl::TexImage2D(\n\n gl::TEXTURE_2D,\n\n level,\n\n internal_format as i32,\n\n texture.width as i32,\n\n texture.height as i32,\n\n border,\n\n src_format,\n\n src_type,\n\n pixels.as_ptr() as *const c_void,\n", "file_path": "src/painter.rs", "rank": 30, "score": 19426.1954073295 }, { "content": " gl::Uniform2f(\n\n u_screen_size_loc,\n\n screen_size_points.x,\n\n screen_size_points.y,\n\n );\n\n let u_sampler = CString::new(\"u_sampler\").unwrap();\n\n let u_sampler_ptr = u_sampler.as_ptr();\n\n let u_sampler_loc = gl::GetUniformLocation(self.program, u_sampler_ptr);\n\n gl::Uniform1i(u_sampler_loc, 0);\n\n gl::Viewport(0, 0, self.canvas_width as i32, self.canvas_height as i32);\n\n\n\n 
for ClippedMesh(clip_rect, mesh) in meshes {\n\n gl::BindTexture(gl::TEXTURE_2D, self.get_texture(mesh.texture_id));\n\n\n\n let clip_min_x = pixels_per_point * clip_rect.min.x;\n\n let clip_min_y = pixels_per_point * clip_rect.min.y;\n\n let clip_max_x = pixels_per_point * clip_rect.max.x;\n\n let clip_max_y = pixels_per_point * clip_rect.max.y;\n\n let clip_min_x = clip_min_x.clamp(0.0, screen_size_pixels.x);\n\n let clip_min_y = clip_min_y.clamp(0.0, screen_size_pixels.y);\n", "file_path": "src/painter.rs", "rank": 31, "score": 19425.20644338761 }, { "content": " gl::GenTextures(1, &mut egui_texture);\n\n gl::BindTexture(gl::TEXTURE_2D, egui_texture);\n\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as i32);\n\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as i32);\n\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as i32);\n\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32);\n\n\n\n let vert_shader = compile_shader(VS_SRC, gl::VERTEX_SHADER);\n\n let frag_shader = compile_shader(FS_SRC, gl::FRAGMENT_SHADER);\n\n\n\n let program = link_program(vert_shader, frag_shader);\n\n let mut vertex_array = 0;\n\n let mut index_buffer = 0;\n\n let mut pos_buffer = 0;\n\n let mut tc_buffer = 0;\n\n let mut color_buffer = 0;\n\n gl::GenVertexArrays(1, &mut vertex_array);\n\n gl::BindVertexArray(vertex_array);\n\n gl::GenBuffers(1, &mut index_buffer);\n\n gl::GenBuffers(1, &mut pos_buffer);\n", "file_path": "src/painter.rs", "rank": 32, "score": 19425.092021002947 }, { "content": " }\n\n }\n\n\n\n pub fn paint_jobs(\n\n &mut self,\n\n bg_color: Option<Color32>,\n\n meshes: Vec<ClippedMesh>,\n\n egui_texture: &Texture,\n\n pixels_per_point: f32,\n\n ) {\n\n self.upload_egui_texture(egui_texture);\n\n self.upload_user_textures();\n\n\n\n unsafe {\n\n if let Some(color) = bg_color {\n\n gl::ClearColor(\n\n color[0] as f32 / 255.0,\n\n color[1] as f32 / 255.0,\n\n color[2] as f32 / 255.0,\n\n color[3] as f32 / 255.0,\n", "file_path": "src/painter.rs", "rank": 33, "score": 19424.919402488496 }, { "content": " pixels,\n\n texture: None,\n\n filtering,\n\n dirty: true,\n\n });\n\n id\n\n }\n\n\n\n fn upload_egui_texture(&mut self, texture: &Texture) {\n\n if self.egui_texture_version == Some(texture.version) {\n\n return; // No change\n\n }\n\n\n\n let mut pixels: Vec<u8> = Vec::with_capacity(texture.pixels.len() * 4);\n\n for &alpha in &texture.pixels {\n\n let srgba = Color32::from_white_alpha(alpha);\n\n pixels.push(srgba.r());\n\n pixels.push(srgba.g());\n\n pixels.push(srgba.b());\n\n pixels.push(srgba.a());\n", "file_path": "src/painter.rs", "rank": 34, "score": 19424.902394678807 }, { "content": " );\n\n\n\n self.egui_texture_version = Some(texture.version);\n\n }\n\n }\n\n\n\n fn upload_user_textures(&mut self) {\n\n unsafe {\n\n for user_texture in &mut self.user_textures {\n\n if !user_texture.texture.is_none() && !user_texture.dirty {\n\n continue;\n\n }\n\n let pixels = std::mem::take(&mut user_texture.pixels);\n\n\n\n if user_texture.texture.is_none() {\n\n let mut gl_texture = 0;\n\n gl::GenTextures(1, &mut gl_texture);\n\n gl::BindTexture(gl::TEXTURE_2D, gl_texture);\n\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::CLAMP_TO_EDGE as i32);\n\n gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::CLAMP_TO_EDGE as i32);\n", "file_path": "src/painter.rs", "rank": 35, "score": 19424.607454901732 }, { "content": "\n\n pub fn new_user_texture(\n\n &mut self,\n\n size: (usize, 
usize),\n\n srgba_pixels: &[Color32],\n\n filtering: bool,\n\n ) -> egui::TextureId {\n\n assert_eq!(size.0 * size.1, srgba_pixels.len());\n\n\n\n let mut pixels: Vec<u8> = Vec::with_capacity(srgba_pixels.len() * 4);\n\n for srgba in srgba_pixels {\n\n pixels.push(srgba.r());\n\n pixels.push(srgba.g());\n\n pixels.push(srgba.b());\n\n pixels.push(srgba.a());\n\n }\n\n\n\n let id = egui::TextureId::User(self.user_textures.len() as u64);\n\n self.user_textures.push(UserTexture {\n\n size,\n", "file_path": "src/painter.rs", "rank": 36, "score": 19424.336442831172 }, { "content": " gl::UNSIGNED_SHORT,\n\n ptr::null(),\n\n );\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Painter {\n\n fn drop(&mut self) {\n\n self.cleanup();\n\n }\n\n}\n", "file_path": "src/painter.rs", "rank": 37, "score": 19424.29318676939 }, { "content": " }\n\n }\n\n }\n\n\n\n pub fn update_user_texture_data(&mut self, texture_id: egui::TextureId, pixels: &[Color32]) {\n\n match texture_id {\n\n egui::TextureId::Egui => {}\n\n egui::TextureId::User(id) => {\n\n let id = id as usize;\n\n assert!(id < self.user_textures.len());\n\n self.user_textures[id].pixels = Vec::with_capacity(pixels.len() * 4);\n\n\n\n for p in pixels {\n\n self.user_textures[id].pixels.push(p.r());\n\n self.user_textures[id].pixels.push(p.g());\n\n self.user_textures[id].pixels.push(p.b());\n\n self.user_textures[id].pixels.push(p.a());\n\n }\n\n self.user_textures[id].dirty = true;\n\n }\n", "file_path": "src/painter.rs", "rank": 38, "score": 19424.1796121667 }, { "content": " let a_srgba_loc = gl::GetAttribLocation(self.program, a_srgba_ptr);\n\n assert!(a_srgba_loc >= 0);\n\n let a_srgba_loc = a_srgba_loc as u32;\n\n\n\n let stride = 0;\n\n gl::VertexAttribPointer(\n\n a_srgba_loc,\n\n 4,\n\n gl::UNSIGNED_BYTE,\n\n gl::FALSE,\n\n stride,\n\n ptr::null(),\n\n );\n\n gl::EnableVertexAttribArray(a_srgba_loc);\n\n\n\n // --------------------------------------------------------------------\n\n\n\n gl::DrawElements(\n\n gl::TRIANGLES,\n\n indices.len() as i32,\n", "file_path": "src/painter.rs", "rank": 39, "score": 19424.050985688016 }, { "content": " user_texture.size.1 as i32,\n\n border,\n\n src_format,\n\n src_type,\n\n pixels.as_ptr() as *const c_void,\n\n );\n\n\n\n user_texture.dirty = false;\n\n }\n\n }\n\n }\n\n\n\n fn get_texture(&self, texture_id: egui::TextureId) -> GLuint {\n\n match texture_id {\n\n egui::TextureId::Egui => self.egui_texture,\n\n egui::TextureId::User(id) => {\n\n let id = id as usize;\n\n assert!(id < self.user_textures.len());\n\n let texture = self.user_textures[id].texture;\n\n texture.expect(\"Should have been uploaded\")\n", "file_path": "src/painter.rs", "rank": 40, "score": 19423.532065732357 }, { "content": " );\n\n\n\n gl::Clear(gl::COLOR_BUFFER_BIT);\n\n }\n\n //Let OpenGL know we are dealing with SRGB colors so that it\n\n //can do the blending correctly. 
Not setting the framebuffer\n\n //leads to darkened, oversaturated colors.\n\n gl::Enable(gl::FRAMEBUFFER_SRGB);\n\n\n\n gl::Enable(gl::SCISSOR_TEST);\n\n gl::Enable(gl::BLEND);\n\n gl::BlendFunc(gl::ONE, gl::ONE_MINUS_SRC_ALPHA); // premultiplied alpha\n\n gl::UseProgram(self.program);\n\n gl::ActiveTexture(gl::TEXTURE0);\n\n\n\n let u_screen_size = CString::new(\"u_screen_size\").unwrap();\n\n let u_screen_size_ptr = u_screen_size.as_ptr();\n\n let u_screen_size_loc = gl::GetUniformLocation(self.program, u_screen_size_ptr);\n\n let screen_size_pixels = vec2(self.canvas_width as f32, self.canvas_height as f32);\n\n let screen_size_points = screen_size_pixels / pixels_per_point;\n", "file_path": "src/painter.rs", "rank": 41, "score": 19423.338707099465 }, { "content": " let clip_max_x = clip_max_x.clamp(clip_min_x, screen_size_pixels.x);\n\n let clip_max_y = clip_max_y.clamp(clip_min_y, screen_size_pixels.y);\n\n let clip_min_x = clip_min_x.round() as i32;\n\n let clip_min_y = clip_min_y.round() as i32;\n\n let clip_max_x = clip_max_x.round() as i32;\n\n let clip_max_y = clip_max_y.round() as i32;\n\n\n\n //scissor Y coordinate is from the bottom\n\n gl::Scissor(\n\n clip_min_x,\n\n self.canvas_height as i32 - clip_max_y,\n\n clip_max_x - clip_min_x,\n\n clip_max_y - clip_min_y,\n\n );\n\n\n\n self.paint_mesh(&mesh);\n\n gl::Disable(gl::SCISSOR_TEST);\n\n }\n\n gl::Disable(gl::FRAMEBUFFER_SRGB);\n\n }\n", "file_path": "src/painter.rs", "rank": 42, "score": 19423.306583264155 }, { "content": " shader,\n\n len,\n\n ptr::null_mut(),\n\n buf.as_mut_ptr() as *mut GLchar,\n\n );\n\n panic!(\n\n \"{}\",\n\n str::from_utf8(&buf).expect(\"ShaderInfoLog not valid utf8\")\n\n );\n\n }\n\n }\n\n shader\n\n}\n\n\n", "file_path": "src/painter.rs", "rank": 43, "score": 19422.39561207844 }, { "content": "\n\n if user_texture.filtering {\n\n gl::TexParameteri(\n\n gl::TEXTURE_2D,\n\n gl::TEXTURE_MIN_FILTER,\n\n gl::LINEAR as i32,\n\n );\n\n gl::TexParameteri(\n\n gl::TEXTURE_2D,\n\n gl::TEXTURE_MAG_FILTER,\n\n gl::LINEAR as i32,\n\n );\n\n } else {\n\n gl::TexParameteri(\n\n gl::TEXTURE_2D,\n\n gl::TEXTURE_MIN_FILTER,\n\n gl::NEAREST as i32,\n\n );\n\n gl::TexParameteri(\n\n gl::TEXTURE_2D,\n", "file_path": "src/painter.rs", "rank": 44, "score": 19422.164150623652 }, { "content": " gl::TEXTURE_MAG_FILTER,\n\n gl::NEAREST as i32,\n\n );\n\n }\n\n user_texture.texture = Some(gl_texture);\n\n } else {\n\n gl::BindTexture(gl::TEXTURE_2D, user_texture.texture.unwrap());\n\n }\n\n\n\n let level = 0;\n\n let internal_format = gl::RGBA;\n\n let border = 0;\n\n let src_format = gl::RGBA;\n\n let src_type = gl::UNSIGNED_BYTE;\n\n\n\n gl::TexImage2D(\n\n gl::TEXTURE_2D,\n\n level,\n\n internal_format as i32,\n\n user_texture.size.0 as i32,\n", "file_path": "src/painter.rs", "rank": 45, "score": 19422.104205617918 }, { "content": " void main() {\n\n gl_Position = vec4(\n\n 2.0 * a_pos.x / u_screen_size.x - 1.0,\n\n 1.0 - 2.0 * a_pos.y / u_screen_size.y,\n\n 0.0,\n\n 1.0);\n\n v_rgba = linear_from_srgba(a_srgba);\n\n v_tc = a_tc;\n\n }\n\n\"#;\n\n\n\nconst FS_SRC: &str = r#\"\n\n #version 150\n\n uniform sampler2D u_sampler;\n\n in vec4 v_rgba;\n\n in vec2 v_tc;\n\n out vec4 f_color;\n\n\n\n // 0-255 sRGB from 0-1 linear\n\n vec3 srgb_from_linear(vec3 rgb) {\n", "file_path": "src/painter.rs", "rank": 46, "score": 19421.36269675989 }, { "content": " let mut tex_coords: Vec<f32> = Vec::with_capacity(2 * mesh.vertices.len());\n\n for v in &mesh.vertices {\n\n positions.push(v.pos.x);\n\n positions.push(v.pos.y);\n\n 
tex_coords.push(v.uv.x);\n\n tex_coords.push(v.uv.y);\n\n }\n\n\n\n let mut colors: Vec<u8> = Vec::with_capacity(4 * mesh.vertices.len());\n\n for v in &mesh.vertices {\n\n colors.push(v.color[0]);\n\n colors.push(v.color[1]);\n\n colors.push(v.color[2]);\n\n colors.push(v.color[3]);\n\n }\n\n\n\n unsafe {\n\n gl::BindVertexArray(self.vertex_array);\n\n gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.index_buffer);\n\n gl::BufferData(\n", "file_path": "src/painter.rs", "rank": 47, "score": 19421.35235011775 }, { "content": " assert!(a_pos_loc >= 0);\n\n let a_pos_loc = a_pos_loc as u32;\n\n\n\n let stride = 0;\n\n gl::VertexAttribPointer(a_pos_loc, 2, gl::FLOAT, gl::FALSE, stride, ptr::null());\n\n gl::EnableVertexAttribArray(a_pos_loc);\n\n\n\n // --------------------------------------------------------------------\n\n\n\n gl::BindBuffer(gl::ARRAY_BUFFER, self.tc_buffer);\n\n gl::BufferData(\n\n gl::ARRAY_BUFFER,\n\n (tex_coords.len() * mem::size_of::<f32>()) as GLsizeiptr,\n\n //mem::transmute(&tex_coords.as_ptr()),\n\n tex_coords.as_ptr() as *const gl::types::GLvoid,\n\n gl::STREAM_DRAW,\n\n );\n\n\n\n let a_tc = CString::new(\"a_tc\").unwrap();\n\n let a_tc_ptr = a_tc.as_ptr();\n", "file_path": "src/painter.rs", "rank": 48, "score": 19420.839868112733 }, { "content": " let a_tc_loc = gl::GetAttribLocation(self.program, a_tc_ptr);\n\n assert!(a_tc_loc >= 0);\n\n let a_tc_loc = a_tc_loc as u32;\n\n\n\n let stride = 0;\n\n gl::VertexAttribPointer(a_tc_loc, 2, gl::FLOAT, gl::FALSE, stride, ptr::null());\n\n gl::EnableVertexAttribArray(a_tc_loc);\n\n\n\n // --------------------------------------------------------------------\n\n gl::BindBuffer(gl::ARRAY_BUFFER, self.color_buffer);\n\n gl::BufferData(\n\n gl::ARRAY_BUFFER,\n\n (colors.len() * mem::size_of::<u8>()) as GLsizeiptr,\n\n //mem::transmute(&colors.as_ptr()),\n\n colors.as_ptr() as *const gl::types::GLvoid,\n\n gl::STREAM_DRAW,\n\n );\n\n\n\n let a_srgba = CString::new(\"a_srgba\").unwrap();\n\n let a_srgba_ptr = a_srgba.as_ptr();\n", "file_path": "src/painter.rs", "rank": 49, "score": 19420.785976897878 }, { "content": " }\n\n\n\n pub fn cleanup(&self) {\n\n unsafe {\n\n gl::DeleteProgram(self.program);\n\n gl::DeleteShader(self.vert_shader);\n\n gl::DeleteShader(self.frag_shader);\n\n gl::DeleteBuffers(1, &self.pos_buffer);\n\n gl::DeleteBuffers(1, &self.tc_buffer);\n\n gl::DeleteBuffers(1, &self.color_buffer);\n\n gl::DeleteBuffers(1, &self.index_buffer);\n\n gl::DeleteVertexArrays(1, &self.vertex_array);\n\n }\n\n }\n\n\n\n fn paint_mesh(&self, mesh: &Mesh) {\n\n debug_assert!(mesh.is_valid());\n\n let indices: Vec<u16> = mesh.indices.iter().map(|idx| *idx as u16).collect();\n\n\n\n let mut positions: Vec<f32> = Vec::with_capacity(2 * mesh.vertices.len());\n", "file_path": "src/painter.rs", "rank": 50, "score": 19420.75398860573 }, { "content": " bvec3 cutoff = lessThan(rgb, vec3(0.0031308));\n\n vec3 lower = rgb * vec3(3294.6);\n\n vec3 higher = vec3(269.025) * pow(rgb, vec3(1.0 / 2.4)) - vec3(14.025);\n\n return mix(higher, lower, vec3(cutoff));\n\n }\n\n\n\n vec4 srgba_from_linear(vec4 rgba) {\n\n return vec4(srgb_from_linear(rgba.rgb), 255.0 * rgba.a);\n\n }\n\n \n\n vec3 linear_from_srgb(vec3 srgb) {\n\n bvec3 cutoff = lessThan(srgb, vec3(10.31475));\n\n vec3 lower = srgb / vec3(3294.6);\n\n vec3 higher = pow((srgb + vec3(14.025)) / vec3(269.025), vec3(2.4));\n\n return mix(higher, lower, vec3(cutoff));\n\n }\n\n\n\n vec4 linear_from_srgba(vec4 srgba) {\n\n return vec4(linear_from_srgb(srgba.rgb), srgba.a / 255.0);\n\n }\n", 
"file_path": "src/painter.rs", "rank": 51, "score": 19419.7190253897 }, { "content": " #version 150\n\n uniform vec2 u_screen_size;\n\n in vec2 a_pos;\n\n in vec4 a_srgba; // 0-255 sRGB\n\n in vec2 a_tc;\n\n out vec4 v_rgba;\n\n out vec2 v_tc;\n\n\n\n // 0-1 linear from 0-255 sRGB\n\n vec3 linear_from_srgb(vec3 srgb) {\n\n bvec3 cutoff = lessThan(srgb, vec3(10.31475));\n\n vec3 lower = srgb / vec3(3294.6);\n\n vec3 higher = pow((srgb + vec3(14.025)) / vec3(269.025), vec3(2.4));\n\n return mix(higher, lower, cutoff);\n\n }\n\n\n\n vec4 linear_from_srgba(vec4 srgba) {\n\n return vec4(linear_from_srgb(srgba.rgb), srgba.a / 255.0);\n\n }\n\n\n", "file_path": "src/painter.rs", "rank": 52, "score": 19419.7190253897 }, { "content": " gl::ELEMENT_ARRAY_BUFFER,\n\n (indices.len() * mem::size_of::<u16>()) as GLsizeiptr,\n\n //mem::transmute(&indices.as_ptr()),\n\n indices.as_ptr() as *const gl::types::GLvoid,\n\n gl::STREAM_DRAW,\n\n );\n\n\n\n // --------------------------------------------------------------------\n\n gl::BindBuffer(gl::ARRAY_BUFFER, self.pos_buffer);\n\n gl::BufferData(\n\n gl::ARRAY_BUFFER,\n\n (positions.len() * mem::size_of::<f32>()) as GLsizeiptr,\n\n //mem::transmute(&positions.as_ptr()),\n\n positions.as_ptr() as *const gl::types::GLvoid,\n\n gl::STREAM_DRAW,\n\n );\n\n\n\n let a_pos = CString::new(\"a_pos\").unwrap();\n\n let a_pos_ptr = a_pos.as_ptr();\n\n let a_pos_loc = gl::GetAttribLocation(self.program, a_pos_ptr);\n", "file_path": "src/painter.rs", "rank": 53, "score": 19419.7190253897 }, { "content": "#[derive(Default)]\n\nstruct UserTexture {\n\n size: (usize, usize),\n\n\n\n /// Pending upload (will be emptied later).\n\n pixels: Vec<u8>,\n\n\n\n /// Lazily uploaded\n\n texture: Option<GLuint>,\n\n\n\n /// For user textures there is a choice between\n\n /// Linear (default) and Nearest.\n\n filtering: bool,\n\n\n\n /// User textures can be modified and this flag\n\n /// is used to indicate if pixel data for the\n\n /// texture has been updated.\n\n dirty: bool,\n\n}\n\n\n\nconst VS_SRC: &str = r#\"\n", "file_path": "src/painter.rs", "rank": 54, "score": 16879.844701062622 }, { "content": " state\n\n .input\n\n .events\n\n .push(egui::Event::PointerMoved(state.pointer_pos))\n\n }\n\n\n\n Key(keycode, _scancode, glfw::Action::Release, keymod) => {\n\n use glfw::Modifiers as Mod;\n\n if let Some(key) = translate_virtual_key_code(keycode) {\n\n state.modifiers = Modifiers {\n\n alt: (keymod & Mod::Alt == Mod::Alt),\n\n ctrl: (keymod & Mod::Control == Mod::Control),\n\n shift: (keymod & Mod::Shift == Mod::Shift),\n\n\n\n // TODO: GLFW doesn't seem to support the mac command key\n\n // mac_cmd: keymod & Mod::LGUIMOD == Mod::LGUIMOD,\n\n command: (keymod & Mod::Control == Mod::Control),\n\n\n\n ..Default::default()\n\n };\n", "file_path": "src/lib.rs", "rank": 55, "score": 11.731081376567637 }, { "content": "#![warn(clippy::all)]\n\n#![allow(clippy::single_match)]\n\n\n\n// Re-export dependencies.\n\npub use egui;\n\npub use gl;\n\npub use glfw;\n\n\n\nmod painter;\n\n\n\npub use painter::Painter;\n\n\n\nuse egui::*;\n\n\n\n#[cfg(not(feature = \"clipboard\"))]\n\nmod clipboard;\n\n\n\nuse clipboard::{\n\n ClipboardContext, // TODO: remove\n\n ClipboardProvider,\n", "file_path": "src/lib.rs", "rank": 56, "score": 11.672239218107407 }, { "content": "\n\n state.input.events.push(Event::Key {\n\n key,\n\n pressed: false,\n\n modifiers: state.modifiers,\n\n });\n\n }\n\n }\n\n\n\n Key(keycode, _scancode, glfw::Action::Press | glfw::Action::Repeat, keymod) => {\n\n use 
glfw::Modifiers as Mod;\n\n if let Some(key) = translate_virtual_key_code(keycode) {\n\n state.modifiers = Modifiers {\n\n alt: (keymod & Mod::Alt == Mod::Alt),\n\n ctrl: (keymod & Mod::Control == Mod::Control),\n\n shift: (keymod & Mod::Shift == Mod::Shift),\n\n\n\n // TODO: GLFW doesn't seem to support the mac command key\n\n // mac_cmd: keymod & Mod::LGUIMOD == Mod::LGUIMOD,\n\n command: (keymod & Mod::Control == Mod::Control),\n", "file_path": "src/lib.rs", "rank": 57, "score": 11.246046437176949 }, { "content": "\n\n ..Default::default()\n\n };\n\n\n\n if state.modifiers.command && key == egui::Key::X {\n\n state.input.events.push(egui::Event::Cut);\n\n } else if state.modifiers.command && key == egui::Key::C {\n\n state.input.events.push(egui::Event::Copy);\n\n } else if state.modifiers.command && key == egui::Key::V {\n\n if let Some(clipboard_ctx) = state.clipboard.as_mut() {\n\n state.input.events.push(egui::Event::Text(clipboard_ctx.get_contents().unwrap_or(\"\".to_string())));\n\n }\n\n } else {\n\n state.input.events.push(Event::Key {\n\n key,\n\n pressed: true,\n\n modifiers: state.modifiers,\n\n });\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 58, "score": 9.712859816114559 }, { "content": " modifiers: state.modifiers,\n\n }),\n\n\n\n MouseButton (mouse_btn, glfw::Action::Release, _) => state.input.events.push(egui::Event::PointerButton {\n\n pos: state.pointer_pos,\n\n button: match mouse_btn {\n\n glfw::MouseButtonLeft => egui::PointerButton::Primary,\n\n glfw::MouseButtonRight => egui::PointerButton::Secondary,\n\n glfw::MouseButtonMiddle => egui::PointerButton::Middle,\n\n _ => unreachable!(),\n\n },\n\n pressed: false,\n\n modifiers: state.modifiers,\n\n }),\n\n\n\n CursorPos(x, y) => {\n\n state.pointer_pos = pos2(\n\n x as f32 / state.input.pixels_per_point.unwrap(),\n\n y as f32 / state.input.pixels_per_point.unwrap(),\n\n );\n", "file_path": "src/lib.rs", "rank": 59, "score": 7.299963337378204 }, { "content": "};\n\n\n\npub struct EguiInputState {\n\n pub pointer_pos: Pos2,\n\n pub clipboard: Option<ClipboardContext>,\n\n pub input: RawInput,\n\n pub modifiers: Modifiers,\n\n}\n\n\n\nimpl EguiInputState {\n\n pub fn new(input: RawInput) -> Self {\n\n EguiInputState {\n\n pointer_pos: Pos2::new(0f32, 0f32),\n\n clipboard: init_clipboard(),\n\n input,\n\n modifiers: Modifiers::default(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 60, "score": 4.406398806121524 }, { "content": " PageDown => Key::PageDown,\n\n PageUp => Key::PageUp,\n\n\n\n\n\n A => Key::A,\n\n B => Key::B,\n\n C => Key::C,\n\n D => Key::D,\n\n E => Key::E,\n\n F => Key::F,\n\n G => Key::G,\n\n H => Key::H,\n\n I => Key::I,\n\n J => Key::J,\n\n K => Key::K,\n\n L => Key::L,\n\n M => Key::M,\n\n N => Key::N,\n\n O => Key::O,\n\n P => Key::P,\n", "file_path": "src/lib.rs", "rank": 61, "score": 3.5962214868103386 }, { "content": " Q => Key::Q,\n\n R => Key::R,\n\n S => Key::S,\n\n T => Key::T,\n\n U => Key::U,\n\n V => Key::V,\n\n W => Key::W,\n\n X => Key::X,\n\n Y => Key::Y,\n\n Z => Key::Z,\n\n\n\n _ => {\n\n return None;\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 62, "score": 3.5228269403822203 }, { "content": " }\n\n\n\n Char(c) => {\n\n state.input.events.push(Event::Text(c.to_string()));\n\n }\n\n\n\n Scroll (x, y) => {\n\n state.input.scroll_delta = vec2(x as f32, y as f32);\n\n }\n\n\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 63, "score": 2.8346897071267616 }, { "content": "#[derive(Clone, Copy, Debug)]\n\npub struct Error;\n\n\n\nimpl 
core::fmt::Display for Error {\n\n fn fmt(&self, _f: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n Ok(())\n\n }\n\n}\n\n\n\npub type Result<T> = core::result::Result<T, Error>;\n\n\n", "file_path": "src/clipboard.rs", "rank": 64, "score": 1.6548788666933336 } ]
Rust
day22/src/main.rs
obi1kenobi/advent-of-code-2021
f4df532edc67efcfaae7332e60483032afb3801a
#![feature(map_first_last)]

use std::{
    collections::{BTreeMap, BTreeSet},
    env, fs,
    fmt::Display,
    ops::RangeInclusive,
};

#[allow(unused_imports)]
use itertools::Itertools;

fn parse_range(range: &str) -> (i64, i64) {
    let (low, high) = range.split_once("..").unwrap();
    (low.parse().unwrap(), high.parse().unwrap())
}

fn main() {
    let args: Vec<String> = env::args().collect();
    let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();

    reversed_args
        .pop()
        .expect("Expected the executable name to be the first argument, but was missing");

    let part = reversed_args.pop().expect("part number");
    let input_file = reversed_args.pop().expect("input file");
    let content = fs::read_to_string(input_file).unwrap();

    let input_data: Vec<RebootStep> = content
        .trim_end()
        .split('\n')
        .map(|x| {
            let (direction, all_coords) = x.split_once(' ').unwrap();
            let switch_on = match direction {
                "on" => true,
                "off" => false,
                _ => unreachable!("{}", direction),
            };
            let (x_range, y_range, z_range) = {
                let (x_comp, (y_comp, z_comp)) = all_coords
                    .split_once(',')
                    .map(|(left, right)| (left, right.split_once(',').unwrap()))
                    .unwrap();
                (
                    parse_range(x_comp.split_once('=').unwrap().1),
                    parse_range(y_comp.split_once('=').unwrap().1),
                    parse_range(z_comp.split_once('=').unwrap().1),
                )
            };
            RebootStep {
                switch_on,
                range: [x_range, y_range, z_range],
            }
        })
        .collect();

    match part {
        "1" => {
            let result = solve_part1(&input_data);
            println!("{}", result);
        }
        "2" => {
            let result = solve_part2(&input_data);
            println!("{}", result);
        }
        "minify" => {
            minify(&input_data);
        }
        _ => unreachable!("{}", part),
    }
}

#[derive(Clone, Debug)]
struct RebootStep {
    switch_on: bool,
    range: [(i64, i64); 3],
}

impl Display for RebootStep {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let state = if self.switch_on { "on" } else { "off" };
        write!(
            f,
            "{} x={}..{},y={}..{},z={}..{}",
            state,
            self.range[0].0,
            self.range[0].1,
            self.range[1].0,
            self.range[1].1,
            self.range[2].0,
            self.range[2].1,
        )
    }
}

struct RebootStepPrinter<'a>(&'a [RebootStep]);

impl<'a> Display for RebootStepPrinter<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for step in self.0.iter() {
            writeln!(f, "{}", step)?;
        }
        Ok(())
    }
}

fn minify(data: &[RebootStep]) {
    let part1 = solve_part1(data);
    let part2 = solve_part2(data);
    assert_ne!(part1, part2);

    let mut minified = data;
    loop {
        let next_minified = &minified[1..];
        if next_minified.is_empty() {
            break;
        }
        let part1 = solve_part1(next_minified);
        let part2 = solve_part2(next_minified);
        if part1 != part2 {
            minified = next_minified;
            println!("eliminated prefix: {}", minified.len());
        } else {
            break;
        }
    }
    loop {
        let next_minified = &minified[..(minified.len() - 1)];
        if next_minified.is_empty() {
            break;
        }
        let part1 = solve_part1(next_minified);
        let part2 = solve_part2(next_minified);
        if part1 != part2 {
            minified = next_minified;
            println!("eliminated suffix: {}", minified.len());
        } else {
            break;
        }
    }

    println!("{}", RebootStepPrinter(minified));
}

fn to_range(range: (i64, i64)) -> RangeInclusive<i64> {
    range.0..=range.1
}

fn solve_part1(data: &[RebootStep]) -> usize {
    let target_range = -50i64..=50;

    target_range
        .clone()
        .cartesian_product(target_range.clone())
        .cartesian_product(target_range)
        .filter(|((x, y), z)| {
            for step in data.iter().rev() {
                let (x_range, y_range, z_range) = step
                    .range
                    .iter()
                    .copied()
                    .map(to_range)
                    .collect_tuple()
                    .unwrap();
                if x_range.contains(x) && y_range.contains(y) && z_range.contains(z) {
                    return step.switch_on;
                }
            }
            false
        })
        .count()
}

fn solve_part2(data: &[RebootStep]) -> usize {
    let notable_coordinates: Vec<BTreeSet<i64>> = (0..3)
        .map(|idx| {
            data.iter()
                .flat_map(|step| [step.range[idx].0, step.range[idx].1 + 1])
                .collect()
        })
        .collect_vec();

    let splits = notable_coordinates
        .iter()
        .map(|coords| coords.iter().copied().collect_vec())
        .collect_vec();
    let offsets: Vec<BTreeMap<i64, usize>> = splits
        .iter()
        .map(|axis_splits| {
            axis_splits
                .iter()
                .enumerate()
                .map(|(idx, val)| (*val, idx))
                .collect()
        })
        .collect();

    let mut cell_is_on = vec![
        vec![vec![false; notable_coordinates[2].len()]; notable_coordinates[1].len()];
        notable_coordinates[0].len()
    ];
    let mut cell_volumes = vec![
        vec![vec![0usize; notable_coordinates[2].len()]; notable_coordinates[1].len()];
        notable_coordinates[0].len()
    ];

    for (x_idx, (x_start, x_end)) in splits[0].iter().tuple_windows().enumerate() {
        for (y_idx, (y_start, y_end)) in splits[1].iter().tuple_windows().enumerate() {
            for (z_idx, (z_start, z_end)) in splits[2].iter().tuple_windows().enumerate() {
                let x_width = (*x_end - *x_start) as usize;
                let y_width = (*y_end - *y_start) as usize;
                let z_width = (*z_end - *z_start) as usize;
                cell_volumes[x_idx + 1][y_idx + 1][z_idx + 1] = x_width * y_width * z_width;
            }
        }
    }

    for step in data {
        let (
            (x_start_cell, x_end_cell),
            (y_start_cell, y_end_cell),
            (z_start_cell, z_end_cell),
        ) = step
            .range
            .iter()
            .zip(offsets.iter())
            .map(|((start, end_incl), offset)| {
                let end = end_incl + 1;
                (offset[start], offset[&end])
            })
            .collect_tuple()
            .unwrap();

        #[allow(clippy::needless_range_loop)]
        for x in x_start_cell..x_end_cell {
            for y in y_start_cell..y_end_cell {
                for z in z_start_cell..z_end_cell {
                    cell_is_on[x][y][z] = step.switch_on;
                }
            }
        }
    }

    let cell_volumes_ref = &cell_volumes;
    cell_is_on
        .iter()
        .enumerate()
        .flat_map(move |(x_idx, y)| {
            y.iter().enumerate().map::<usize, _>(move |(y_idx, z)| {
                z.iter()
                    .enumerate()
                    .filter_map(|(z_idx, is_on)| {
                        if *is_on {
                            Some(cell_volumes_ref[x_idx + 1][y_idx + 1][z_idx + 1])
                        } else {
                            None
                        }
                    })
                    .sum()
            })
        })
        .sum()
}
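A minimal stand-alone sketch follows. It is not part of day22/src/main.rs, and the step data in its main function is invented purely for illustration. It shows the one-dimensional form of the coordinate-compression idea that solve_part2 above applies independently along each of the three axes: collect every range start and every end + 1 as split points, replay the on/off steps over the resulting segments, and sum the widths (in 3-D, the products of the per-axis widths) of the segments that are left on.

use std::collections::BTreeSet;

// Count how many integer points are "on" after applying inclusive 1-D
// on/off range steps, using the same split-point compression as solve_part2.
fn count_on(steps: &[(bool, (i64, i64))]) -> usize {
    // Notable coordinates: every start and every end + 1 (ranges are inclusive).
    let notable: BTreeSet<i64> = steps
        .iter()
        .flat_map(|&(_, (start, end))| [start, end + 1])
        .collect();
    let splits: Vec<i64> = notable.into_iter().collect();

    // One flag per indivisible segment [splits[i], splits[i + 1]).
    let mut segment_on = vec![false; splits.len().saturating_sub(1)];

    for &(switch_on, (start, end)) in steps {
        let lo = splits.binary_search(&start).unwrap();
        let hi = splits.binary_search(&(end + 1)).unwrap();
        for flag in &mut segment_on[lo..hi] {
            *flag = switch_on;
        }
    }

    // Sum the widths of the segments that ended up on.
    segment_on
        .iter()
        .zip(splits.windows(2))
        .filter(|(on, _)| **on)
        .map(|(_, w)| (w[1] - w[0]) as usize)
        .sum()
}

fn main() {
    // on 10..=12, then on 11..=13, then off 12..=12 leaves {10, 11, 13} on.
    let steps = [(true, (10, 12)), (true, (11, 13)), (false, (12, 12))];
    assert_eq!(count_on(&steps), 3);
    println!("{}", count_on(&steps));
}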
function_block-function_prefixed
[ { "content": "fn neighbors(data: &[Vec<i64>], x: i64, y: i64) -> impl Iterator<Item = (i64, i64)> + '_ {\n\n assert!(get_height(data, x, y).is_some());\n\n\n\n NEIGHBOR_OFFSETS\n\n .iter()\n\n .copied()\n\n .filter_map(move |(dx, dy)| {\n\n let new_x = x + dx;\n\n let new_y = y + dy;\n\n\n\n get_height(data, new_x, new_y).map(|_| (new_x, new_y))\n\n })\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 0, "score": 314591.1977190918 }, { "content": "fn find_lowest_points(data: &[Vec<i64>]) -> impl Iterator<Item = (i64, i64)> + '_ {\n\n data.iter().enumerate().flat_map(move |(i, row)| {\n\n row.iter().enumerate().filter_map(move |(j, height)| {\n\n let x = i as i64;\n\n let y = j as i64;\n\n\n\n let lower_neighbor =\n\n neighbors(data, x, y).find(|(nx, ny)| *height >= data[*nx as usize][*ny as usize]);\n\n\n\n if lower_neighbor.is_none() {\n\n Some((x, y))\n\n } else {\n\n None\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 1, "score": 304144.1653925681 }, { "content": "fn solve_part1(data: &[(Vec<&str>, Vec<&str>)]) -> usize {\n\n data.iter().map(|(_, output)| {\n\n output.iter().filter(|item| {\n\n let length = item.len();\n\n length == 2 || length == 3 || length == 4 || length == 7\n\n }).count()\n\n }).sum()\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 2, "score": 301168.64019470586 }, { "content": "fn solve_part2(data: &[Vec<i64>]) -> usize {\n\n let mut belongs_to: HashMap<(i64, i64), (i64, i64)> = Default::default();\n\n\n\n for basin in find_lowest_points(data) {\n\n let prior_basin = belongs_to.insert(basin, basin);\n\n assert!(prior_basin.is_none());\n\n\n\n flood_fill(data, &mut belongs_to, basin, basin);\n\n }\n\n\n\n let mut basin_sizes: HashMap<(i64, i64), usize> = Default::default();\n\n for (_, basin) in belongs_to {\n\n basin_sizes.entry(basin).and_modify(|x| *x += 1).or_insert(1);\n\n }\n\n\n\n let mut all_basin_sizes: Vec<_> = basin_sizes.values().collect();\n\n let basins_count = all_basin_sizes.len();\n\n all_basin_sizes.partition_by_kth(basins_count - 3);\n\n\n\n all_basin_sizes[(basins_count - 3)..].iter().copied().copied().product()\n\n}\n", "file_path": "day9/src/main.rs", "rank": 3, "score": 286157.0951083726 }, { "content": "fn solve_part1(data: &[Vec<i64>]) -> i64 {\n\n find_lowest_points(data)\n\n .map(|(x, y)| 1 + data[x as usize][y as usize])\n\n .sum()\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 4, "score": 275966.6629454094 }, { "content": "// Solve \"left * right = result\" for left,\n\n// returning either the solved number or the original left, whichever is farther from zero\n\nfn solve_mul_away_from_zero(left: i64, right: i64, result: i64) -> i64 {\n\n let estimate_left = result.saturating_div(right);\n\n\n\n // Return either our found value or the original left value,\n\n // whichever is farther from zero.\n\n match left.signum() {\n\n 1 => {\n\n assert!(estimate_left >= 0);\n\n std::cmp::max(left, estimate_left)\n\n }\n\n -1 => {\n\n assert!(estimate_left <= 0);\n\n std::cmp::min(left, estimate_left)\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "day24/src/analysis/range_analysis.rs", "rank": 5, "score": 275378.0860547286 }, { "content": "fn solve_mul_toward_zero(left: i64, right: i64, result: i64) -> i64 {\n\n let estimate_left = result.saturating_div(right);\n\n\n\n // Return either our found value or the original left value,\n\n // whichever is closer to zero.\n\n match left.signum() {\n\n 1 => {\n\n assert!(estimate_left >= 0);\n\n std::cmp::min(left, estimate_left)\n\n }\n\n -1 => 
{\n\n assert!(estimate_left <= 0);\n\n std::cmp::max(left, estimate_left)\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "day24/src/analysis/range_analysis.rs", "rank": 6, "score": 275360.8481073668 }, { "content": "fn solve_part2(data: &[(Vec<&str>, Vec<&str>)]) -> u64 {\n\n data.iter().map(|(clue, challenge)| solve_display(clue, challenge)).sum()\n\n}\n", "file_path": "day8/src/main.rs", "rank": 7, "score": 273951.6175740044 }, { "content": "fn get_height(data: &[Vec<i64>], x: i64, y: i64) -> Option<i64> {\n\n let x_limit = data.len() as i64;\n\n let y_limit = data[0].len() as i64;\n\n\n\n if x >= 0 && x < x_limit && y >= 0 && y < y_limit {\n\n Some(data[x as usize][y as usize])\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 8, "score": 248660.6395576478 }, { "content": "#[allow(clippy::type_complexity)]\n\nfn check_content(content: &str) -> Result<Option<Vec<char>>, (Vec<char>, char, Option<char>)> {\n\n let mut stack: Vec<char> = vec![];\n\n for c in content.chars() {\n\n match c {\n\n '(' | '[' | '{' | '<' => stack.push(c),\n\n ')' | ']' | '}' | '>' => {\n\n let opening_char = matching_opening_char(c);\n\n match stack.pop() {\n\n Some(open) if open == opening_char => {}\n\n Some(_) => {\n\n return Err((stack, c, Some(opening_char)));\n\n }\n\n None => {\n\n return Err((stack, c, None));\n\n }\n\n }\n\n }\n\n _ => unreachable!(\"{}\", c),\n\n }\n\n }\n\n\n\n if stack.is_empty() {\n\n Ok(None)\n\n } else {\n\n Ok(Some(stack))\n\n }\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 9, "score": 244861.3355086725 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part2(data: Vec<Vec<char>>) -> usize {\n\n todo!()\n\n}\n", "file_path": "day25/src/main.rs", "rank": 10, "score": 241951.26679287848 }, { "content": "fn bias_rows(data: &[Vec<u64>], bias: u64) -> impl Iterator<Item = Vec<u64>> + '_ {\n\n data.iter().map(move |row| {\n\n bias_line(row, bias).collect()\n\n })\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 13, "score": 237761.3771255791 }, { "content": "fn bias_copy_map(data: &[Vec<u64>], copies: usize) -> Vec<Vec<u64>> {\n\n let duplicated_top: Vec<Vec<u64>> = data.iter().map(|row| {\n\n (0..copies).flat_map(|bias| bias_line(row, bias as u64)).collect()\n\n }).collect();\n\n\n\n let mut result = duplicated_top.clone();\n\n\n\n for bias in 1..copies {\n\n result.extend(bias_rows(&duplicated_top, bias as u64));\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 14, "score": 235368.27208960024 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part1(mut start: Vec<Vec<char>>) -> usize {\n\n let mut next = start.clone();\n\n\n\n let mut from = &mut start;\n\n let mut to = &mut next;\n\n\n\n let mut count = 0usize;\n\n loop {\n\n swap(&mut from, &mut to);\n\n\n\n advance(from, to);\n\n count += 1;\n\n\n\n if from == to {\n\n break count;\n\n }\n\n }\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 15, "score": 234851.91770359338 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part2(data: &[Vec<u64>]) -> usize {\n\n let mut data: Vec<Vec<u64>> = data.iter().map(Vec::clone).collect();\n\n let max_x = data.len();\n\n let max_y = data[0].len();\n\n let grid_count = max_x * max_y;\n\n\n\n let mut flashes: HashSet<(usize, usize)> = HashSet::with_capacity(grid_count);\n\n let mut step_count = 0usize;\n\n\n\n loop {\n\n step_count += 1;\n\n\n\n // increment all by one\n\n data.iter_mut().for_each(|v| v.iter_mut().for_each(|octopus| *octopus += 1));\n\n\n\n // process all flashes\n\n 
for i in 0..max_x {\n\n for j in 0..max_y {\n\n evaluate_point(&mut data, &mut flashes, (i, j));\n\n }\n", "file_path": "day11/src/main.rs", "rank": 17, "score": 232423.409213781 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part1(data: &[Vec<u64>]) -> usize {\n\n let mut data: Vec<Vec<u64>> = data.iter().map(Vec::clone).collect();\n\n let max_x = data.len();\n\n let max_y = data[0].len();\n\n\n\n let mut flashes: HashSet<(usize, usize)> = HashSet::with_capacity(max_x * max_y);\n\n let mut total_flashes = 0usize;\n\n\n\n for _ in 1..=100 {\n\n // increment all by one\n\n data.iter_mut().for_each(|v| v.iter_mut().for_each(|octopus| *octopus += 1));\n\n\n\n // process all flashes\n\n for i in 0..max_x {\n\n for j in 0..max_y {\n\n evaluate_point(&mut data, &mut flashes, (i, j));\n\n }\n\n }\n\n total_flashes += flashes.len();\n\n\n\n // set all flashed points to zero\n\n for flash_point in flashes.drain() {\n\n let (flash_x, flash_y) = flash_point;\n\n data[flash_x][flash_y] = 0;\n\n }\n\n }\n\n\n\n total_flashes\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 18, "score": 232411.27117901383 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part2(data: &[Scanner]) -> i64 {\n\n let (scanners, _) = locate_everything(data);\n\n\n\n scanners\n\n .values()\n\n .tuple_combinations()\n\n .map(|(a, b)| {\n\n let distance = a.location - b.location;\n\n distance.x.abs() + distance.y.abs() + distance.z.abs()\n\n })\n\n .max()\n\n .unwrap()\n\n}\n", "file_path": "day19/src/main.rs", "rank": 20, "score": 213131.7672684444 }, { "content": "fn solve_part2(data: &[&str]) -> u64 {\n\n let mut scores: Vec<_> = data.iter().filter_map(|content| match check_content(content) {\n\n Err(_) | Ok(None) => None,\n\n Ok(Some(remaining_stack)) => {\n\n let value = remaining_stack\n\n .iter()\n\n .rev()\n\n .copied()\n\n .map(|c| match matching_closing_char(c) {\n\n ')' => 1,\n\n ']' => 2,\n\n '}' => 3,\n\n '>' => 4,\n\n _ => unreachable!(\"{}\", c),\n\n })\n\n .fold(0, |acc, x| acc * 5 + x);\n\n Some(value)\n\n }\n\n }).collect();\n\n\n\n let median_index = scores.len() / 2;\n\n scores.partition_by_kth(median_index);\n\n scores[median_index]\n\n}\n", "file_path": "day10/src/main.rs", "rank": 21, "score": 212670.22592325677 }, { "content": "fn solve_part2(data: &[&str]) -> u64 {\n\n let binary_data: Vec<_> = data.iter().map(|s| {\n\n let mut value = 0u64;\n\n for c in s.chars() {\n\n value <<= 1;\n\n match c {\n\n '1' => value += 1,\n\n '0' => {},\n\n _ => unreachable!(\"{}\", c),\n\n }\n\n }\n\n value\n\n }).collect();\n\n\n\n let num_positions = data[0].len();\n\n\n\n let mut oxygen = 0u64;\n\n let mut co2 = 0u64;\n\n\n\n let mut inv_oxygen_mask = 0u64;\n", "file_path": "day3/src/main.rs", "rank": 22, "score": 212670.22592325677 }, { "content": "fn solve_part1(data: &[&str]) -> u64 {\n\n let mut gamma = 0;\n\n let mut epsilon = 0;\n\n\n\n let mut ones_counter = Vec::new();\n\n ones_counter.resize(data[0].len(), 0usize);\n\n let mut zeros_counter = Vec::new();\n\n zeros_counter.resize(data[0].len(), 0usize);\n\n\n\n for entry in data.iter().copied() {\n\n for (index, digit) in entry.chars().rev().enumerate() {\n\n match digit {\n\n '1' => ones_counter[index] += 1,\n\n '0' => zeros_counter[index] += 1,\n\n _ => unreachable!(\"{}\", digit),\n\n }\n\n }\n\n }\n\n\n\n for (index, (ones, zeros)) in ones_counter.iter().zip(zeros_counter.iter()).enumerate() {\n\n if ones > zeros {\n\n gamma += 1 << index;\n\n } else {\n\n epsilon += 1 << index;\n\n }\n\n }\n\n\n\n gamma * epsilon\n\n}\n\n\n", 
"file_path": "day3/src/main.rs", "rank": 23, "score": 212657.66910634816 }, { "content": "fn solve_part1(data: &[&str]) -> u64 {\n\n data.iter()\n\n .map(|content| match check_content(content) {\n\n Err((_, error_char, _)) => match error_char {\n\n ')' => 3,\n\n ']' => 57,\n\n '}' => 1197,\n\n '>' => 25137,\n\n _ => unreachable!(\"{}\", error_char),\n\n },\n\n Ok(_) => 0,\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 24, "score": 212657.66910634816 }, { "content": "fn text_signed_int(input: &str) -> IResult<&str, i64> {\n\n map_res(recognize(tuple((opt(char('-')), digit1))), |value: &str| {\n\n value.parse()\n\n })(input)\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 25, "score": 210889.87905453227 }, { "content": "fn scanner_list(input: &str) -> IResult<&str, Vec<Scanner>> {\n\n many1(scanner)(input)\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 26, "score": 206960.84415647492 }, { "content": "fn point_list(input: &str) -> IResult<&str, Vec<Point>> {\n\n many1(point)(input)\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 27, "score": 206960.8441564749 }, { "content": "fn solve_part2(data: &[(&str, u64)]) -> u64 {\n\n let mut position = 0;\n\n let mut depth = 0;\n\n let mut aim = 0;\n\n\n\n for (command, distance) in data.iter() {\n\n match *command {\n\n \"forward\" => {\n\n position += distance;\n\n depth += aim * distance;\n\n }\n\n \"up\" => aim -= distance,\n\n \"down\" => aim += distance,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n position * depth\n\n}\n", "file_path": "day2/src/main.rs", "rank": 28, "score": 203419.62462901758 }, { "content": "fn solve_part1(data: &[(&str, u64)]) -> u64 {\n\n let mut position = 0;\n\n let mut depth = 0;\n\n\n\n for (command, distance) in data.iter() {\n\n match *command {\n\n \"forward\" => position += distance,\n\n \"up\" => depth -= distance,\n\n \"down\" => depth += distance,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n position * depth\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 29, "score": 203407.48659425043 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part2(data: &[Vec<char>]) -> u64 {\n\n assert_eq!(data.len(), 5);\n\n let amended_data = vec![\n\n data[0].clone(),\n\n data[1].clone(),\n\n data[2].clone(),\n\n \" #D#C#B#A# \".chars().collect_vec(),\n\n \" #D#B#A#C# \".chars().collect_vec(),\n\n data[3].clone(),\n\n data[4].clone(),\n\n ];\n\n solve::<4>(&amended_data)\n\n}\n", "file_path": "day23/src/main.rs", "rank": 30, "score": 203378.81983930318 }, { "content": "fn solve_part1(data: &[Vec<char>]) -> u64 {\n\n solve::<2>(data)\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 31, "score": 203366.68180453603 }, { "content": "fn snailfish_number(input: &str) -> IResult<&str, SnailfishNumber> {\n\n if let Ok((remainder, parsed)) = digit1::<&str, nom::error::Error<&str>>(input) {\n\n Ok((remainder, SnailfishNumber::Literal(parsed.parse().unwrap())))\n\n } else {\n\n pair_number(input)\n\n }\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 32, "score": 198363.79094204126 }, { "content": "fn pair_number(input: &str) -> IResult<&str, SnailfishNumber> {\n\n map_res(delimited(\n\n char('['),\n\n separated_pair(snailfish_number, char(','), snailfish_number),\n\n char(']'),\n\n ), |pair| -> Result<SnailfishNumber, nom::error::Error<&str>> {\n\n Ok(SnailfishNumber::Pair(Box::new(pair)))\n\n })(input)\n\n}\n\n\n\nimpl From<&str> for SnailfishNumber {\n\n fn from(value: &str) -> SnailfishNumber {\n\n let (remainder, parsed) = 
complete(snailfish_number)(value).unwrap();\n\n assert!(remainder.is_empty());\n\n parsed\n\n }\n\n}\n\n\n\nimpl Display for SnailfishNumber {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n", "file_path": "day18/src/main.rs", "rank": 33, "score": 198363.79094204126 }, { "content": "fn solve_part2(x_bounds: (i64, i64), y_bounds: (i64, i64)) -> usize {\n\n let (lower_x, upper_x) = x_bounds;\n\n let (lower_y, upper_y) = y_bounds;\n\n\n\n // firing to the right\n\n assert!(lower_x >= 0);\n\n assert!(upper_x >= lower_x);\n\n\n\n // target is on our level or below us\n\n assert!(upper_y <= 0);\n\n assert!(lower_y <= upper_y);\n\n\n\n // We can just parameter-sweep the space, it's not that large.\n\n //\n\n // We can't fire slower in X than the max-height shot.\n\n // We can't fire faster in X than upper_x, or we'd overshoot immediately.\n\n // We can't fire faster (more positive) in Y than the max-height shot.\n\n // We can't fire slower (more negative) in Y than lower_y, or we'd overshoot immediately.\n\n let (max_height_shot_x, max_height_shot_y) = max_height_shot(x_bounds, y_bounds);\n\n let x_range = max_height_shot_x..=upper_x;\n\n let y_range = lower_y..=max_height_shot_y;\n\n\n\n x_range\n\n .cartesian_product(y_range)\n\n .filter(|shot| does_shot_hit(x_bounds, y_bounds, *shot))\n\n .count()\n\n}\n", "file_path": "day17/src/main.rs", "rank": 34, "score": 196981.85464983425 }, { "content": "fn solve_part2(data: &[usize]) -> usize {\n\n simulate_fish(data, 256)\n\n}\n", "file_path": "day6/src/main.rs", "rank": 35, "score": 196666.55156277452 }, { "content": "fn solve_part1(data: &[usize]) -> usize {\n\n simulate_fish(data, 80)\n\n}\n\n\n", "file_path": "day6/src/main.rs", "rank": 36, "score": 196653.99474586593 }, { "content": "fn minify_part1(data: &[Line]) {\n\n let axis_aligned_lines: Vec<_> = data\n\n .iter()\n\n .filter(|line| line.is_x_aligned() || line.is_y_aligned())\n\n .cloned()\n\n .collect();\n\n\n\n let mut start_lines = axis_aligned_lines.as_slice();\n\n let mut next_lines = &start_lines[..start_lines.len() / 2];\n\n while solve_part1(next_lines) != brute_force_part1(next_lines) {\n\n eprintln!(\"Successfully halved, len {}\", next_lines.len());\n\n\n\n start_lines = next_lines;\n\n next_lines = &start_lines[..start_lines.len() / 2];\n\n }\n\n\n\n eprintln!(\"Moving to single suffix truncation...\");\n\n\n\n next_lines = &start_lines[..start_lines.len() - 1];\n\n while solve_part1(next_lines) != brute_force_part1(next_lines) {\n", "file_path": "day5/src/main.rs", "rank": 37, "score": 195560.14884859664 }, { "content": "fn solve_part2(draws: Vec<u64>, mut boards: Vec<Board>) -> u64 {\n\n let mut winner_boards = 0usize;\n\n let num_boards = boards.len();\n\n let mut has_won: Vec<_> = boards.iter().map(|_| false).collect();\n\n for draw in draws {\n\n for (index, board) in boards.iter_mut().enumerate() {\n\n if has_won[index] {\n\n continue;\n\n }\n\n if board.call_number(draw) {\n\n assert!(!has_won[index]);\n\n has_won[index] = true;\n\n winner_boards += 1;\n\n\n\n if winner_boards == num_boards {\n\n return score_winning_board(board, draw);\n\n }\n\n }\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n", "file_path": "day4/src/main.rs", "rank": 38, "score": 193989.5255351839 }, { "content": "fn solve_part1(draws: Vec<u64>, mut boards: Vec<Board>) -> u64 {\n\n for draw in draws {\n\n for board in boards.iter_mut() {\n\n if board.call_number(draw) {\n\n return score_winning_board(board, draw);\n\n }\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": 
"day4/src/main.rs", "rank": 39, "score": 193978.81617148698 }, { "content": "fn brute_force(data: &[Line], ignore_diagonals: bool) -> usize {\n\n let all_lines: Vec<_> = if ignore_diagonals {\n\n data.iter()\n\n .filter(|line| line.is_x_aligned() || line.is_y_aligned())\n\n .collect()\n\n } else {\n\n data.iter().collect()\n\n };\n\n\n\n let min_x = all_lines\n\n .iter()\n\n .flat_map(|line| [line.start.x, line.end.x])\n\n .min()\n\n .unwrap();\n\n let max_x = all_lines\n\n .iter()\n\n .flat_map(|line| [line.start.x, line.end.x])\n\n .max()\n\n .unwrap();\n\n\n", "file_path": "day5/src/main.rs", "rank": 40, "score": 191692.63351901696 }, { "content": "fn bias_line(data: &[u64], bias: u64) -> impl Iterator<Item = u64> + '_ {\n\n data.iter().map(move |value| {\n\n let next_value = (*value + bias) % 9;\n\n if next_value == 0 {\n\n 9\n\n } else {\n\n next_value\n\n }\n\n })\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 41, "score": 191019.61936101317 }, { "content": "fn reset(map: &mut Vec<Vec<char>>) {\n\n map.iter_mut().for_each(|row| row.iter_mut().for_each(|c| *c = '.'));\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 42, "score": 190479.70349811087 }, { "content": "fn text_signed_int(input: &str) -> IResult<&str, i64> {\n\n map_res(recognize(tuple((opt(char('-')), digit1))), |value: &str| {\n\n value.parse()\n\n })(input)\n\n}\n\n\n", "file_path": "day24/src/parser.rs", "rank": 44, "score": 188701.09831962315 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part2(caves: &HashMap<&str, Cave<'_>>) -> usize {\n\n let current_cave = &caves[START_CAVE];\n\n\n\n let mut visit_stack = vec![START_CAVE];\n\n let mut visited_small_caves = [START_CAVE].iter().copied().collect();\n\n\n\n count_paths_visiting_small_twice(caves, current_cave, &mut visit_stack, &mut visited_small_caves)\n\n}\n", "file_path": "day12/src/main.rs", "rank": 45, "score": 188379.3548456076 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part1(caves: &HashMap<&str, Cave<'_>>) -> usize {\n\n let current_cave = &caves[START_CAVE];\n\n\n\n let mut visit_stack = vec![START_CAVE];\n\n let mut visited_small_caves = [START_CAVE].iter().copied().collect();\n\n\n\n count_paths(caves, current_cave, &mut visit_stack, &mut visited_small_caves)\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 46, "score": 188367.9758145033 }, { "content": "fn advance(from: &[Vec<char>], to: &mut Vec<Vec<char>>) {\n\n reset(to);\n\n\n\n // Move east-facing first.\n\n for (x, row) in from.iter().enumerate() {\n\n for (y, pos) in row.iter().enumerate() {\n\n let move_offset = match *pos {\n\n '.' | 'v' => None,\n\n '>' => Some((0, 1)),\n\n _ => unreachable!(),\n\n };\n\n if let Some((dx, dy)) = move_offset {\n\n let (nx, ny) = wrap_coordinates(from, x + dx, y + dy);\n\n if from[nx][ny] == '.' {\n\n // Free spot! 
Can move over.\n\n to[nx][ny] = *pos;\n\n } else {\n\n // Spot was taken, stay in place.\n\n to[x][y] = *pos;\n\n }\n", "file_path": "day25/src/main.rs", "rank": 47, "score": 187644.18843800147 }, { "content": "fn solve_part2(data: &[Line]) -> usize {\n\n brute_force_part2(data)\n\n}\n", "file_path": "day5/src/main.rs", "rank": 48, "score": 186753.1182283935 }, { "content": "fn solve_part2(data: &[u64]) -> usize {\n\n data.iter().tuple_windows().tuple_windows().map(|((first_a, first_b, first_c), (second_a, second_b, second_c))| {\n\n let sum_first = first_a + first_b + first_c;\n\n let sum_second = second_a + second_b + second_c;\n\n if sum_second > sum_first {\n\n 1\n\n } else {\n\n 0\n\n }\n\n }).sum()\n\n}\n", "file_path": "day1/src/main.rs", "rank": 49, "score": 186753.11822839355 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part1(data: &[Scanner]) -> usize {\n\n let (_, beacons) = locate_everything(data);\n\n beacons.len()\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 50, "score": 186740.5614114849 }, { "content": "fn solve_part1(data: &[u64]) -> usize {\n\n data.iter().tuple_windows().map(|(first, second)| {\n\n if second > first {\n\n 1\n\n } else {\n\n 0\n\n }\n\n }).sum()\n\n}\n\n\n", "file_path": "day1/src/main.rs", "rank": 51, "score": 186740.5614114849 }, { "content": "fn solve_part1(data: &[Line]) -> usize {\n\n let axis_aligned_lines: Vec<_> = data\n\n .iter()\n\n .filter(|line| line.is_x_aligned() || line.is_y_aligned())\n\n .collect();\n\n\n\n let mut all_points: Vec<(Point, bool, usize)> = axis_aligned_lines\n\n .iter()\n\n .copied()\n\n .enumerate()\n\n .flat_map(|(index, line)| {\n\n assert!(line.start.y <= line.end.y);\n\n\n\n [(line.start, false, index), (line.end, true, index)]\n\n })\n\n .collect();\n\n all_points.sort_unstable();\n\n\n\n let mut overlaps = 0usize;\n\n let mut active_points: BTreeMap<u64, usize> = Default::default();\n", "file_path": "day5/src/main.rs", "rank": 52, "score": 186740.5614114849 }, { "content": "fn solve_part1(dots: &[(i64, i64)], folds: &[Fold]) -> usize {\n\n let first_fold = folds.first().unwrap();\n\n let mut current_dots: HashSet<(i64, i64)> = dots.iter().copied().collect();\n\n\n\n apply_fold(&mut current_dots, first_fold);\n\n\n\n current_dots.len()\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 53, "score": 185930.39611731167 }, { "content": "fn does_shot_hit(x_bounds: (i64, i64), y_bounds: (i64, i64), shot: (i64, i64)) -> bool {\n\n let (lower_x, upper_x) = x_bounds;\n\n let (lower_y, upper_y) = y_bounds;\n\n let (x_shot, y_shot) = shot;\n\n\n\n // firing to the right\n\n assert!(lower_x >= 0);\n\n assert!(upper_x >= lower_x);\n\n assert!(x_shot >= 0);\n\n\n\n // target is on our level or below us\n\n assert!(upper_y <= 0);\n\n assert!(lower_y <= upper_y);\n\n\n\n let mut x = 0i64;\n\n let mut y = 0i64;\n\n let mut x_vel = x_shot;\n\n let mut y_vel = y_shot;\n\n loop {\n\n x += x_vel;\n", "file_path": "day17/src/main.rs", "rank": 54, "score": 184943.92032688495 }, { "content": "fn solve<const N: usize>(data: &[Vec<char>]) -> u64 {\n\n let hallway_spots: BTreeSet<(usize, usize)> = data\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(x, row)| {\n\n row.iter().enumerate().filter_map(\n\n move |(y, c)| {\n\n if *c == '.' 
{\n\n Some((x, y))\n\n } else {\n\n None\n\n }\n\n },\n\n )\n\n })\n\n .collect();\n\n\n\n // Make sure the hallway is horizontal on the map.\n\n let hallway_x = hallway_spots.iter().next().unwrap().0;\n\n for &(x, _) in &hallway_spots {\n", "file_path": "day23/src/main.rs", "rank": 55, "score": 184047.54202212655 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part2(data: &[SnailfishNumber]) -> u64 {\n\n data.iter().tuple_combinations().map(|(left, right)| {\n\n let left_mag = (left.clone().normalize() + right.clone()).normalize().magnitude();\n\n let right_mag = (right.clone().normalize() + left.clone()).normalize().magnitude();\n\n\n\n max(left_mag, right_mag)\n\n }).max().unwrap()\n\n}\n", "file_path": "day18/src/main.rs", "rank": 56, "score": 182383.2866660534 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part1(data: &[SnailfishNumber]) -> u64 {\n\n let start = data[0].clone().normalize();\n\n\n\n let final_number = data[1..].iter().fold(start, |acc, elem| {\n\n (acc + elem.clone()).normalize()\n\n });\n\n\n\n final_number.magnitude()\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 57, "score": 182371.14863128628 }, { "content": "fn brute_force_part2(data: &[Line]) -> usize {\n\n brute_force(data, false)\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 58, "score": 181985.9884109408 }, { "content": "fn brute_force_part1(data: &[Line]) -> usize {\n\n brute_force(data, true)\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 59, "score": 181973.85037617362 }, { "content": "fn solve_part1(x_bounds: (i64, i64), y_bounds: (i64, i64)) -> i64 {\n\n let (x_shot, y_shot) = max_height_shot(x_bounds, y_bounds);\n\n\n\n // Since we lose 1 unit of vertical speed per time step all the way down to 0,\n\n // the max height is just the sum 1 + 2 + ... 
+ y_shot, so Gauss' formula applies:\n\n let max_height = y_shot * (y_shot + 1) / 2;\n\n\n\n // Finally, simulate our shot and make sure it does hit the target.\n\n assert!(does_shot_hit(x_bounds, y_bounds, (x_shot, y_shot)));\n\n\n\n max_height\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 60, "score": 175058.3900441266 }, { "content": "fn solve_iterations(original: &str, rules: &[(char, char, char)], iterations: usize) -> usize {\n\n let rules: HashMap<(char, char), char> = rules\n\n .iter()\n\n .copied()\n\n .map(|(a, b, insert)| ((a, b), insert))\n\n .collect();\n\n\n\n let mut polymer_pairs: HashMap<(char, char), usize> = Default::default();\n\n for pair in original.chars().tuple_windows() {\n\n *polymer_pairs.entry(pair).or_default() += 1;\n\n }\n\n *polymer_pairs.entry((' ', original.chars().next().unwrap())).or_default() += 1;\n\n *polymer_pairs.entry((original.chars().next_back().unwrap(), ' ')).or_default() += 1;\n\n\n\n for _ in 0..iterations {\n\n polymer_pairs = expand_polymer(&polymer_pairs, &rules);\n\n }\n\n\n\n let mut counts: HashMap<char, usize> = HashMap::new();\n\n for ((a, b), count) in polymer_pairs {\n", "file_path": "day14/src/main.rs", "rank": 61, "score": 170716.73633664858 }, { "content": "fn apply_fold(current_dots: &mut HashSet<(i64, i64)>, fold: &Fold) {\n\n let dots_to_reflect: Vec<((i64, i64), (i64, i64))> = current_dots.iter().copied().filter_map(|pt| {\n\n let (x, y) = pt;\n\n if fold.fold_along_x {\n\n let delta_x = x - fold.coord;\n\n match delta_x.cmp(&0) {\n\n Ordering::Less => None, // not over the fold line, no reflection needed\n\n Ordering::Greater => Some((pt, (fold.coord - delta_x, y))),\n\n Ordering::Equal => unreachable!(),\n\n }\n\n } else {\n\n let delta_y = y - fold.coord;\n\n match delta_y.cmp(&0) {\n\n Ordering::Less => None, // not over the fold line, no reflection needed\n\n Ordering::Greater => Some((pt, (x, fold.coord - delta_y))),\n\n Ordering::Equal => unreachable!(),\n\n }\n\n }\n\n }).collect();\n\n\n\n for (original_dot, new_dot) in dots_to_reflect {\n\n let removed = current_dots.remove(&original_dot);\n\n assert!(removed);\n\n\n\n current_dots.insert(new_dot); // might overlap, so the return may be true or false here\n\n }\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 62, "score": 168924.9429027309 }, { "content": "fn wrap_coordinates(map: &[Vec<char>], x: usize, y: usize) -> (usize, usize) {\n\n (x % map.len(), y % map[0].len())\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 63, "score": 168312.66489285685 }, { "content": "fn scanner(input: &str) -> IResult<&str, Scanner> {\n\n let (remaining, id) = map_res(\n\n tuple((tag(\"--- scanner \"), digit1, tag(\" ---\"), line_ending)),\n\n |(_, scanner_id, _, _): (&str, &str, &str, &str)| scanner_id.parse::<usize>(),\n\n )(input)?;\n\n\n\n let (rest, (beacons, _)) = tuple((point_list, opt(line_ending)))(remaining)?;\n\n let beacon_locations: HashMap<Point, usize> = beacons\n\n .iter()\n\n .enumerate()\n\n .map(|(index, point)| (*point, index))\n\n .collect();\n\n\n\n Ok((\n\n rest,\n\n Scanner {\n\n id,\n\n beacons,\n\n beacon_locations,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 64, "score": 167168.7941773815 }, { "content": "fn point(input: &str) -> IResult<&str, Point> {\n\n map(\n\n tuple((\n\n text_signed_int,\n\n char(','),\n\n text_signed_int,\n\n char(','),\n\n text_signed_int,\n\n line_ending,\n\n )),\n\n |(x, _, y, _, z, _)| Point { x, y, z },\n\n )(input)\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 65, "score": 
167168.7941773815 }, { "content": "fn solve_display(clues: &[&str], challenge: &[&str]) -> u64 {\n\n // 000\n\n // 1 2\n\n // 1 2\n\n // 333\n\n // 4 5\n\n // 4 5\n\n // 666\n\n let segments: [BTreeSet<usize>; 10] = [\n\n [0, 1, 2, 4, 5, 6].iter().copied().collect(), // 0\n\n [2, 5].iter().copied().collect(), // 1\n\n [0, 2, 3, 4, 6].iter().copied().collect(), // 2\n\n [0, 2, 3, 5, 6].iter().copied().collect(), // 3\n\n [1, 2, 3, 5].iter().copied().collect(), // 4\n\n [0, 1, 3, 5, 6].iter().copied().collect(), // 5\n\n [0, 1, 3, 4, 5, 6].iter().copied().collect(), // 6\n\n [0, 2, 5].iter().copied().collect(), // 7\n\n [0, 1, 2, 3, 4, 5, 6].iter().copied().collect(), // 8\n\n [0, 1, 2, 3, 5, 6].iter().copied().collect(), // 9\n\n ];\n", "file_path": "day8/src/main.rs", "rank": 66, "score": 163889.55736303792 }, { "content": "fn solve_part2(enhancement: &[char], input_image: &[Vec<char>]) -> usize {\n\n let compacted_kernel = compact_kernel(enhancement);\n\n let mut lit_pixels = image_lit_pixels(input_image);\n\n\n\n let stacked_kernel = stack_3x3_kernel(&compacted_kernel);\n\n for _ in 0..25 {\n\n lit_pixels = enhance_image_twice(&stacked_kernel, &lit_pixels);\n\n }\n\n\n\n lit_pixels.len()\n\n}\n", "file_path": "day20/src/main.rs", "rank": 67, "score": 162455.3273947161 }, { "content": "fn solve_part1(enhancement: &[char], input_image: &[Vec<char>]) -> usize {\n\n let compacted_kernel = compact_kernel(enhancement);\n\n let lit_pixels = image_lit_pixels(input_image);\n\n\n\n let stacked_kernel = stack_3x3_kernel(&compacted_kernel);\n\n let fast_solution = enhance_image_twice(&stacked_kernel, &lit_pixels);\n\n\n\n fast_solution.len()\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 68, "score": 162444.29334867414 }, { "content": "fn solve_part2(original: &str, rules: &[(char, char, char)]) -> usize {\n\n solve_iterations(original, rules, 40)\n\n}\n", "file_path": "day14/src/main.rs", "rank": 70, "score": 160511.38952628704 }, { "content": "fn solve_part1(original: &str, rules: &[(char, char, char)]) -> usize {\n\n solve_iterations(original, rules, 10)\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 71, "score": 160500.3554802451 }, { "content": "fn solve_part2(dots: &[(i64, i64)], folds: &[Fold]) -> String {\n\n let mut current_dots: HashSet<(i64, i64)> = dots.iter().copied().collect();\n\n\n\n for fold in folds {\n\n apply_fold(&mut current_dots, fold);\n\n }\n\n\n\n let mut max_x = 0i64;\n\n let mut max_y = 0i64;\n\n for (x, y) in current_dots.iter() {\n\n assert!(*x >= 0);\n\n assert!(*y >= 0);\n\n\n\n max_x = max(max_x, *x);\n\n max_y = max(max_y, *y);\n\n }\n\n\n\n let row_width = (max_x + 2) as usize; // 0..=max_x plus a newline char\n\n let row_count = (max_y + 1) as usize; // 0..=max_y\n\n let mut buffer: Vec<u8> = Vec::with_capacity(0);\n", "file_path": "day13/src/main.rs", "rank": 72, "score": 158713.3734966102 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part2(data: &[u8]) -> u64 {\n\n let bits = BitSlice::<Msb0, _>::from_slice(data).unwrap();\n\n\n\n let packet = Packet::read_complete(bits);\n\n\n\n packet.eval()\n\n}\n", "file_path": "day16/src/main.rs", "rank": 73, "score": 156706.44280211278 }, { "content": "fn solve_part2(data: &[Instruction]) -> u64 {\n\n let (current_analysis, input_ranges, expected_values) = prepare_for_input_search(data);\n\n\n\n // Simulate the program, checking that the register state after each instruction\n\n // matches the register states expected in the analysis for Z=0, and abandoning simulation\n\n // directions that 
produce register states inconsistent with that analysis.\n\n find_extremal_input_that_matches_analysis(\n\n current_analysis,\n\n input_ranges,\n\n expected_values,\n\n false,\n\n )\n\n}\n", "file_path": "day24/src/main.rs", "rank": 74, "score": 156706.44280211278 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part1(data: &[u8]) -> u64 {\n\n let bits = BitSlice::<Msb0, _>::from_slice(data).unwrap();\n\n\n\n let packet = Packet::read_complete(bits);\n\n\n\n packet.iter().map(|p| p.version as u64).sum()\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 75, "score": 156693.88598520414 }, { "content": "fn solve_part1(data: &[Instruction]) -> u64 {\n\n let (current_analysis, input_ranges, expected_values) = prepare_for_input_search(data);\n\n\n\n // Simulate the program, checking that the register state after each instruction\n\n // matches the register states expected in the analysis for Z=0, and abandoning simulation\n\n // directions that produce register states inconsistent with that analysis.\n\n find_extremal_input_that_matches_analysis(current_analysis, input_ranges, expected_values, true)\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 76, "score": 156693.88598520414 }, { "content": "fn simulate_fish(data: &[usize], total_days: usize) -> usize {\n\n let mut today: [usize; 9] = Default::default();\n\n let mut tomorrow: [usize; 9] = Default::default();\n\n\n\n for fish in data {\n\n tomorrow[*fish] += 1;\n\n }\n\n\n\n for _tomorrow_is_after_day in 1..=total_days {\n\n // Tomorrow becomes today.\n\n today[0..9].clone_from_slice(&tomorrow[0..9]);\n\n\n\n // Advance the timers.\n\n tomorrow[0..8].clone_from_slice(&today[1..9usize]);\n\n\n\n tomorrow[8] = today[0]; // Spawn new fish, then\n\n tomorrow[6] += today[0]; // add the spawning fish back to the queue.\n\n }\n\n\n\n tomorrow.iter().sum()\n\n}\n\n\n", "file_path": "day6/src/main.rs", "rank": 77, "score": 156137.91271730303 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part2(risks: &[Vec<u64>]) -> u64 {\n\n let risks = bias_copy_map(risks, 5);\n\n\n\n let mut visit_risks: HashMap<(usize, usize), u64> = Default::default();\n\n let mut pq: BinaryHeap<Reverse<(u64, (usize, usize))>> = Default::default();\n\n\n\n pq.push(Reverse((0, (0, 0))));\n\n\n\n let target = (risks.len() - 1, risks[0].len() - 1);\n\n\n\n loop {\n\n let (next_risk, next_location) = pq.pop().unwrap().0;\n\n if next_location == target {\n\n break next_risk;\n\n }\n\n\n\n process_location(&risks, &mut visit_risks, &mut pq, next_location, next_risk);\n\n }\n\n}\n", "file_path": "day15/src/main.rs", "rank": 78, "score": 150715.77671069393 }, { "content": "#[allow(unused_variables)]\n\nfn solve_part1(risks: &[Vec<u64>]) -> u64 {\n\n let mut visit_risks: HashMap<(usize, usize), u64> = Default::default();\n\n let mut pq: BinaryHeap<Reverse<(u64, (usize, usize))>> = Default::default();\n\n\n\n pq.push(Reverse((0, (0, 0))));\n\n\n\n let target = (risks.len() - 1, risks[0].len() - 1);\n\n\n\n loop {\n\n let (next_risk, next_location) = pq.pop().unwrap().0;\n\n if next_location == target {\n\n break next_risk;\n\n }\n\n\n\n process_location(risks, &mut visit_risks, &mut pq, next_location, next_risk);\n\n }\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 79, "score": 150703.63867592675 }, { "content": "fn fixpoint_iteration(mut current_analysis: Analysis) -> Analysis {\n\n // Iterate the next few passes until a fixpoint is found,\n\n // since the passes create opportunities for each other to optimize further.\n\n let mut value_ranges = 
current_analysis.values.clone();\n\n loop {\n\n current_analysis = current_analysis\n\n .constant_propagation()\n\n .known_operation_results()\n\n .forward_value_range_analysis()\n\n .matched_mul_and_div_or_mod();\n\n\n\n if current_analysis.values == value_ranges {\n\n break current_analysis;\n\n } else {\n\n value_ranges = current_analysis.values.clone();\n\n }\n\n }\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 80, "score": 148314.73218611706 }, { "content": "fn max_height_shot(x_bounds: (i64, i64), y_bounds: (i64, i64)) -> (i64, i64) {\n\n let (lower_x, upper_x) = x_bounds;\n\n let (lower_y, upper_y) = y_bounds;\n\n\n\n // firing to the right\n\n assert!(lower_x >= 0);\n\n assert!(upper_x >= lower_x);\n\n\n\n // target is on our level or below us\n\n assert!(upper_y <= 0);\n\n assert!(lower_y <= upper_y);\n\n\n\n // When firing to the right and aiming below us, we can consider the coordinates separately.\n\n // Since we are maximizing altitude, we can count on having run out of X velocity well before\n\n // encountering the target area.\n\n //\n\n // This means that any trajectory that is sufficient to reach and stay within the target area's\n\n // X coordinate space when it runs out of X velocity would work.\n\n // If we fire at x_spd in the X direction, the total displacement we'd get before running out\n\n // of X velocity is x_shot * (x_shot + 1) / 2.\n", "file_path": "day17/src/main.rs", "rank": 82, "score": 146751.13248603998 }, { "content": "pub fn parse_program(input: &str) -> Vec<Instruction> {\n\n let (remainder, program) = many1(instruction)(input).unwrap();\n\n assert!(remainder.is_empty());\n\n program\n\n}\n", "file_path": "day24/src/parser.rs", "rank": 83, "score": 144414.29929582763 }, { "content": "fn operand(input: &str) -> IResult<&str, Operand> {\n\n if let Ok((remainder, register)) = register(input) {\n\n Ok((remainder, Operand::Register(register)))\n\n } else {\n\n map(text_signed_int, Operand::Literal)(input)\n\n }\n\n}\n\n\n", "file_path": "day24/src/parser.rs", "rank": 84, "score": 143499.9783061047 }, { "content": "fn register(input: &str) -> IResult<&str, Register> {\n\n let (remainder, matched_char) = one_of(\"wxyz\")(input)?;\n\n let register_id = match matched_char {\n\n 'w' => 0,\n\n 'x' => 1,\n\n 'y' => 2,\n\n 'z' => 3,\n\n _ => unreachable!(\"{}\", matched_char),\n\n };\n\n\n\n Ok((remainder, Register(register_id)))\n\n}\n\n\n", "file_path": "day24/src/parser.rs", "rank": 85, "score": 143499.9783061047 }, { "content": "fn instruction(input: &str) -> IResult<&str, Instruction> {\n\n alt((input_instruction, binary_instruction))(input)\n\n}\n\n\n", "file_path": "day24/src/parser.rs", "rank": 86, "score": 143499.9783061047 }, { "content": "fn input_instruction(input: &str) -> IResult<&str, Instruction> {\n\n map(\n\n tuple((tag(\"inp\"), space1, register, opt(line_ending))),\n\n |(_, _, reg, _)| Instruction::Input(reg),\n\n )(input)\n\n}\n\n\n", "file_path": "day24/src/parser.rs", "rank": 87, "score": 140812.76754173863 }, { "content": "fn binary_instruction(input: &str) -> IResult<&str, Instruction> {\n\n map(\n\n tuple((\n\n alt((tag(\"add\"), tag(\"mul\"), tag(\"div\"), tag(\"mod\"), tag(\"eql\"))),\n\n space1,\n\n register,\n\n space1,\n\n operand,\n\n opt(line_ending),\n\n )),\n\n |(instr, _, reg, _, val, _)| match instr {\n\n \"add\" => Instruction::Add(reg, val),\n\n \"mul\" => Instruction::Mul(reg, val),\n\n \"div\" => Instruction::Div(reg, val),\n\n \"mod\" => Instruction::Mod(reg, val),\n\n \"eql\" => Instruction::Equal(reg, val),\n\n _ 
=> unreachable!(\"{}\", instr),\n\n },\n\n )(input)\n\n}\n\n\n", "file_path": "day24/src/parser.rs", "rank": 88, "score": 140812.76754173863 }, { "content": "struct PacketIterator<'a> {\n\n stack: Vec<&'a Packet>,\n\n}\n\n\n\nimpl<'a> PacketIterator<'a> {\n\n fn new(start: &'a Packet) -> Self {\n\n Self { stack: vec![start] }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for PacketIterator<'a> {\n\n type Item = &'a Packet;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if let Some(packet) = self.stack.pop() {\n\n match &packet.data {\n\n PacketType::SumOperator(subpackets)\n\n | PacketType::ProductOperator(subpackets)\n\n | PacketType::MinOperator(subpackets)\n\n | PacketType::MaxOperator(subpackets) => {\n", "file_path": "day16/src/main.rs", "rank": 89, "score": 125302.78035540138 }, { "content": "fn locate_everything(data: &[Scanner]) -> (HashMap<usize, LocatedScanner>, HashSet<Point>) {\n\n // Without loss of generality, the first scanner is at (0, 0, 0) and has correct axes.\n\n let origin = Point { x: 0, y: 0, z: 0 };\n\n let mut all_scanners = hashmap! {\n\n 0 => LocatedScanner {\n\n scanner: data[0].clone(),\n\n location: origin,\n\n orientation: Orientation {\n\n axes: [[1, 0, 0], [0, 1, 0], [0, 0, 1]],\n\n },\n\n beacons_in_global_coords: data[0].beacon_locations.clone(),\n\n }\n\n };\n\n\n\n let signatures = data\n\n .iter()\n\n .map(|s| make_signature(&s.beacons))\n\n .collect_vec();\n\n\n\n let mut unlocated_scanners: BTreeSet<usize> = (1..data.len()).collect();\n", "file_path": "day19/src/main.rs", "rank": 90, "score": 125159.4880835146 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n\n\n let input_data: Vec<SnailfishNumber> = content\n\n .trim_end()\n\n .split('\\n')\n\n .map(SnailfishNumber::from)\n\n .collect();\n\n\n\n match part {\n\n \"1\" => {\n", "file_path": "day18/src/main.rs", "rank": 91, "score": 124557.15681755141 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n\n\n let (dots, fold_instructions) = content.trim_end().split_once(\"\\n\\n\").unwrap();\n\n\n\n let dot_coords: Vec<(i64, i64)> = dots\n\n .split('\\n')\n\n .map(|row| {\n\n let coords = row.split_once(',').unwrap();\n\n (coords.0.parse().unwrap(), coords.1.parse().unwrap())\n\n })\n", "file_path": "day13/src/main.rs", "rank": 92, "score": 124557.15681755141 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input 
file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n\n\n let input_data: Vec<Vec<u64>> = content\n\n .trim_end()\n\n .split('\\n')\n\n .map(|x| x.chars().map(|c| c.to_digit(10).unwrap() as u64).collect())\n\n .collect();\n\n\n\n match part {\n\n \"1\" => {\n", "file_path": "day11/src/main.rs", "rank": 93, "score": 124557.15681755141 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n\n\n let (base, rules_component) = content.trim_end().split_once(\"\\n\\n\").unwrap();\n\n\n\n let rules: Vec<_> = rules_component\n\n .split('\\n')\n\n .map(|row| {\n\n let (from, to) = row.split_once(\" -> \").unwrap();\n\n assert_eq!(from.len(), 2);\n\n assert_eq!(to.len(), 1);\n", "file_path": "day14/src/main.rs", "rank": 94, "score": 124557.15681755141 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n\n\n let input_data: (u64, u64) = content\n\n .trim_end()\n\n .split_once('\\n')\n\n .map(|(first, second)| {\n\n (\n\n first\n\n .strip_prefix(\"Player 1 starting position: \")\n\n .unwrap()\n", "file_path": "day21/src/main.rs", "rank": 95, "score": 124557.15681755141 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n\n\n let edges: Vec<(&str, &str)> = content\n\n .trim_end()\n\n .split('\\n')\n\n .map(|x| x.split_once('-').unwrap())\n\n .collect();\n\n let mut caves: HashMap<&str, Cave<'_>> = HashMap::with_capacity(edges.len() / 2);\n\n for (src, dest) in edges {\n\n let src_cave = caves.entry(src).or_insert_with(|| Cave::new(src));\n", "file_path": "day12/src/main.rs", "rank": 96, "score": 124557.15681755141 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n\n\n let (enhancement, input_image) = content\n\n .trim_end()\n\n .split_once(\"\\n\\n\")\n\n .map(|(enhancement, input_image_data)| {\n\n let enhancements = enhancement.chars().collect_vec();\n\n let input_image = input_image_data\n\n .split('\\n')\n\n 
.map(|row| row.chars().collect_vec())\n", "file_path": "day20/src/main.rs", "rank": 97, "score": 124557.15681755141 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n\n\n let components: (&str, &str) = content\n\n .trim_end()\n\n .strip_prefix(\"target area: x=\")\n\n .unwrap()\n\n .split_once(\", y=\")\n\n .unwrap();\n\n let (lower_x, upper_x) = components.0.split_once(\"..\").unwrap();\n\n let (lower_y, upper_y) = components.1.split_once(\"..\").unwrap();\n", "file_path": "day17/src/main.rs", "rank": 98, "score": 124557.15681755141 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut reversed_args: Vec<_> = args.iter().map(|x| x.as_str()).rev().collect();\n\n\n\n reversed_args\n\n .pop()\n\n .expect(\"Expected the executable name to be the first argument, but was missing\");\n\n\n\n let part = reversed_args.pop().expect(\"part number\");\n\n let input_file = reversed_args.pop().expect(\"input file\");\n\n let content = fs::read_to_string(input_file).unwrap();\n\n let content = content.trim_end();\n\n assert_eq!(content.len() % 2, 0);\n\n\n\n let input_data: Vec<u8> = content\n\n .trim_end()\n\n .chars()\n\n .map(|x| x.to_digit(16).unwrap() as u8)\n\n .tuples()\n\n .map(|(a, b)| a * 16 + b)\n", "file_path": "day16/src/main.rs", "rank": 99, "score": 124557.15681755141 } ]
Rust
src/upstream.rs
ZigzagAK/ws-platform
f00e25dd8bd08d5269cfbd689e18f707c9f8963f
/* * Copyright (C) 2020 Aleksei Konovkin (alkon2000@mail.ru) */ use std::net::SocketAddr; use std::sync::{ Arc, RwLock, atomic::{ AtomicUsize, Ordering } }; use std::collections::{ HashMap, hash_map::Iter }; use std::time::Duration; use std::cmp::min; use crate::connection_pool::*; use crate::error::CoreError; pub trait UpstreamBalance: Send + Sync { fn balance(&self, iter: Iter<SocketAddr, ConnectionPool>) -> Option<SocketAddr>; } pub struct RoundRobin { index: AtomicUsize } impl RoundRobin { pub fn new() -> RoundRobin { RoundRobin { index: AtomicUsize::new(0) } } } impl UpstreamBalance for RoundRobin { fn balance(&self, mut iter: Iter<SocketAddr, ConnectionPool>) -> Option<SocketAddr> { match iter.nth(self.index.fetch_add(1, Ordering::SeqCst) % iter.len()) { Some((addr, _)) => Some(*addr), None => unreachable!() } } } pub struct Upstream { name: String, max_keepalive: usize, max_active: usize, timeout: Option<Duration>, keepalive_timeout: Option<Duration>, keepalive_requests: Option<u64>, active: Arc<usize>, servers: RwLock<[HashMap<SocketAddr, ConnectionPool>; 2]>, balancer: Box<dyn UpstreamBalance> } impl Upstream { pub fn new( balancer: Box<dyn UpstreamBalance>, name: &str, max_keepalive: usize, max_active: usize, timeout: Option<Duration>, keepalive_timeout: Option<Duration>, keepalive_requests: Option<u64> ) -> Upstream { Upstream { max_keepalive: max_keepalive, max_active: max_active, timeout: timeout, keepalive_timeout: keepalive_timeout, keepalive_requests: keepalive_requests, name: name.to_string(), servers: RwLock::new([HashMap::new(), HashMap::new()]), active: Arc::new(0), balancer: balancer } } pub fn add_primary(&mut self, addr: SocketAddr, max_keepalive: usize, max_active: usize) { self.servers.write().unwrap()[0] .insert(addr, ConnectionPool::with_timeouts( &self.name, min(max_keepalive, self.max_keepalive), min(max_active, self.max_active), self.timeout, self.keepalive_timeout, self.keepalive_requests )); } pub fn add_backup(&mut self, addr: SocketAddr, max_keepalive: usize, max_active: usize) { self.servers.write().unwrap()[1] .insert(addr, ConnectionPool::with_timeouts( &self.name, min(max_keepalive, self.max_keepalive), min(max_active, self.max_active), self.timeout, self.keepalive_timeout, self.keepalive_requests )); } pub fn connect(&self, timeout: Option<Duration>) -> Result<Peer, CoreError> { let userdata = Box::new(Arc::clone(&self.active)); if self.active() == self.max_active { return throw!("Bad gateway"); } let servers = self.servers.read().unwrap(); for i in 0..1 { for _ in 0..servers[i].len() { match self.balancer.balance(servers[i].iter()) { Some(addr) => { match servers[i].get(&addr) { Some(pool) => { if let Ok(mut peer) = pool.connect(&addr, timeout) { peer.attach_userdata(userdata); return Ok(peer); } }, None => { log_error!("error", "Can't find '{}' in upstream '{}'", addr, self.name); break; } } }, None => break } } } throw!("Bad gateway") } pub fn active(&self) -> usize { min(self.max_active, Arc::strong_count(&self.active) - 1) } pub fn idle(&self) -> usize { let servers = self.servers.read().unwrap(); let mut count = 0; for i in 0..1 { for server in servers[i].values() { count += server.idle() } } count } }
/* * Copyright (C) 2020 Aleksei Konovkin (alkon2000@mail.ru) */ use std::net::SocketAddr; use std::sync::{ Arc, RwLock, atomic::{ AtomicUsize, Ordering } }; use std::collections::{ HashMap, hash_map::Iter }; use std::time::Duration; use std::cmp::min; use crate::connection_pool::*; use crate::error::CoreError; pub trait UpstreamBalance: Send + Sync { fn balance(&self, iter: Iter<SocketAddr, ConnectionPool>) -> Option<SocketAddr>; } pub struct RoundRobin { index: AtomicUsize } impl RoundRobin { pub fn new() -> RoundRobin { RoundRobin { index: AtomicUsize::new(0) } } } impl UpstreamBalance for RoundRobin { fn balance(&self, mut iter: Iter<SocketAddr, ConnectionPool>) -> Option<SocketAddr> { match iter.nth(self.index.fetch_add(1, Ordering::SeqCst) % iter.len()) { Some((addr, _)) => Some(*addr), None => unreachable!() } } } pub struct Upstream { name: String, max_keepalive: usize, max_active: usize, timeout: Option<Duration>, keepalive_timeout: Option<Duration>, keepalive_requests: Option<u64>, active: Arc<usize>, servers: RwLock<[HashMap<SocketAddr, ConnectionPool>; 2]>, balancer: Box<dyn UpstreamBalance> } impl Upstream { pub fn new( balancer: Box<dyn UpstreamBalance>, name: &str, max_keepalive: usize, max_active: usize, timeout: Option<Duration>, keepalive_timeout: Option<Duratio
g(), servers: RwLock::new([HashMap::new(), HashMap::new()]), active: Arc::new(0), balancer: balancer } } pub fn add_primary(&mut self, addr: SocketAddr, max_keepalive: usize, max_active: usize) { self.servers.write().unwrap()[0] .insert(addr, ConnectionPool::with_timeouts( &self.name, min(max_keepalive, self.max_keepalive), min(max_active, self.max_active), self.timeout, self.keepalive_timeout, self.keepalive_requests )); } pub fn add_backup(&mut self, addr: SocketAddr, max_keepalive: usize, max_active: usize) { self.servers.write().unwrap()[1] .insert(addr, ConnectionPool::with_timeouts( &self.name, min(max_keepalive, self.max_keepalive), min(max_active, self.max_active), self.timeout, self.keepalive_timeout, self.keepalive_requests )); } pub fn connect(&self, timeout: Option<Duration>) -> Result<Peer, CoreError> { let userdata = Box::new(Arc::clone(&self.active)); if self.active() == self.max_active { return throw!("Bad gateway"); } let servers = self.servers.read().unwrap(); for i in 0..1 { for _ in 0..servers[i].len() { match self.balancer.balance(servers[i].iter()) { Some(addr) => { match servers[i].get(&addr) { Some(pool) => { if let Ok(mut peer) = pool.connect(&addr, timeout) { peer.attach_userdata(userdata); return Ok(peer); } }, None => { log_error!("error", "Can't find '{}' in upstream '{}'", addr, self.name); break; } } }, None => break } } } throw!("Bad gateway") } pub fn active(&self) -> usize { min(self.max_active, Arc::strong_count(&self.active) - 1) } pub fn idle(&self) -> usize { let servers = self.servers.read().unwrap(); let mut count = 0; for i in 0..1 { for server in servers[i].values() { count += server.idle() } } count } }
n>, keepalive_requests: Option<u64> ) -> Upstream { Upstream { max_keepalive: max_keepalive, max_active: max_active, timeout: timeout, keepalive_timeout: keepalive_timeout, keepalive_requests: keepalive_requests, name: name.to_strin
random
[]
Rust
sui_core/src/transaction_input_checker.rs
velvia/sui
976b6bbc2573b375776995a6d3239cba5b73c26e
use std::collections::HashSet; use prometheus_exporter::prometheus::IntCounter; use serde::{Deserialize, Serialize}; use sui_types::{ base_types::{ObjectID, ObjectRef, SequenceNumber, SuiAddress}, error::{SuiError, SuiResult}, fp_ensure, gas::{self, SuiGasStatus}, messages::{InputObjectKind, SingleTransactionKind, TransactionData, TransactionEnvelope}, object::{Object, Owner}, }; use tracing::{debug, instrument}; use crate::authority::SuiDataStore; #[instrument(level = "trace", skip_all)] pub async fn check_transaction_input<const A: bool, S, T>( store: &SuiDataStore<A, S>, transaction: &TransactionEnvelope<T>, shared_obj_metric: &IntCounter, ) -> Result<(SuiGasStatus<'static>, Vec<(InputObjectKind, Object)>), SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let (gas_object, mut gas_status) = check_gas( store, transaction.gas_payment_object_ref().0, transaction.data.gas_budget, ) .await?; let objects_by_kind = check_locks(store, &transaction.data, gas_object).await?; if transaction.contains_shared_object() { shared_obj_metric.inc(); gas_status.charge_consensus()?; } Ok((gas_status, objects_by_kind)) } #[instrument(level = "trace", skip_all)] async fn check_gas<const A: bool, S>( store: &SuiDataStore<A, S>, gas_payment_id: ObjectID, gas_budget: u64, ) -> SuiResult<(Object, SuiGasStatus<'static>)> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let gas_object = store.get_object(&gas_payment_id)?; let gas_object = gas_object.ok_or(SuiError::ObjectNotFound { object_id: gas_payment_id, })?; gas::check_gas_balance(&gas_object, gas_budget)?; let gas_status = gas::start_gas_metering(gas_budget, 1, 1)?; Ok((gas_object, gas_status)) } #[instrument(level = "trace", skip_all, fields(num_objects = input_objects.len()))] async fn fetch_objects<const A: bool, S>( store: &SuiDataStore<A, S>, input_objects: &[InputObjectKind], gas_object_opt: Option<Object>, ) -> Result<Vec<Option<Object>>, SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let ids: Vec<_> = input_objects.iter().map(|kind| kind.object_id()).collect(); if let Some(gas_object) = gas_object_opt { debug_assert_eq!(gas_object.id(), ids[ids.len() - 1]); let mut result = store.get_objects(&ids[..ids.len() - 1])?; result.push(Some(gas_object)); Ok(result) } else { store.get_objects(&ids[..]) } } #[instrument(level = "trace", skip_all)] async fn check_locks<const A: bool, S>( store: &SuiDataStore<A, S>, transaction: &TransactionData, gas_object: Object, ) -> Result<Vec<(InputObjectKind, Object)>, SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let input_objects = transaction.input_objects()?; let objects = fetch_objects(store, &input_objects, Some(gas_object)).await?; let mut owned_object_authenticators: HashSet<SuiAddress> = HashSet::new(); for object in objects.iter().flatten() { if !object.is_immutable() { fp_ensure!( owned_object_authenticators.insert(object.id().into()), SuiError::InvalidBatchTransaction { error: format!("Mutable object {} cannot appear in more than one single transactions in a batch", object.id()), } ); } } let mut all_objects = Vec::with_capacity(input_objects.len()); let mut errors = Vec::new(); let transfer_object_ids: HashSet<_> = transaction .kind .single_transactions() .filter_map(|s| { if let SingleTransactionKind::TransferCoin(t) = s { Some(t.object_ref.0) } else { None } }) .collect(); for (object_kind, object) in input_objects.into_iter().zip(objects) { let object = match object { Some(object) => object, None => { 
errors.push(object_kind.object_not_found_error()); continue; } }; if transfer_object_ids.contains(&object.id()) { object.is_transfer_eligible()?; } match check_one_lock( &transaction.signer(), object_kind, &object, &owned_object_authenticators, ) { Ok(()) => all_objects.push((object_kind, object)), Err(e) => { errors.push(e); } } } if !errors.is_empty() { return Err(SuiError::LockErrors { errors }); } fp_ensure!(!all_objects.is_empty(), SuiError::ObjectInputArityViolation); Ok(all_objects) } pub fn filter_owned_objects(all_objects: &[(InputObjectKind, Object)]) -> Vec<ObjectRef> { let owned_objects: Vec<_> = all_objects .iter() .filter_map(|(object_kind, object)| match object_kind { InputObjectKind::MovePackage(_) => None, InputObjectKind::ImmOrOwnedMoveObject(object_ref) => { if object.is_immutable() { None } else { Some(*object_ref) } } InputObjectKind::SharedMoveObject(_) => None, }) .collect(); debug!( num_mutable_objects = owned_objects.len(), "Checked locks and found mutable objects" ); owned_objects } fn check_one_lock( sender: &SuiAddress, object_kind: InputObjectKind, object: &Object, owned_object_authenticators: &HashSet<SuiAddress>, ) -> SuiResult { match object_kind { InputObjectKind::MovePackage(package_id) => { fp_ensure!( object.data.try_as_package().is_some(), SuiError::MoveObjectAsPackage { object_id: package_id } ); } InputObjectKind::ImmOrOwnedMoveObject((object_id, sequence_number, object_digest)) => { fp_ensure!( !object.is_package(), SuiError::MovePackageAsObject { object_id } ); fp_ensure!( sequence_number <= SequenceNumber::MAX, SuiError::InvalidSequenceNumber ); fp_ensure!( object.version() == sequence_number, SuiError::UnexpectedSequenceNumber { object_id, expected_sequence: object.version(), given_sequence: sequence_number, } ); let expected_digest = object.digest(); fp_ensure!( expected_digest == object_digest, SuiError::InvalidObjectDigest { object_id, expected_digest } ); match object.owner { Owner::Immutable => { } Owner::AddressOwner(owner) => { fp_ensure!( sender == &owner, SuiError::IncorrectSigner { error: format!("Object {:?} is owned by account address {:?}, but signer address is {:?}", object_id, owner, sender), } ); } Owner::ObjectOwner(owner) => { fp_ensure!( owned_object_authenticators.contains(&owner), SuiError::IncorrectSigner { error: format!( "Object {:?} is owned by object {:?}, which is not in the input", object.id(), owner ), } ); } Owner::Shared => { return Err(SuiError::NotSharedObjectError); } }; } InputObjectKind::SharedMoveObject(..) => { fp_ensure!(object.is_shared(), SuiError::NotSharedObjectError); } }; Ok(()) }
use std::collections::HashSet; use prometheus_exporter::prometheus::IntCounter; use serde::{Deserialize, Serialize}; use sui_types::{ base_types::{ObjectID, ObjectRef, SequenceNumber, SuiAddress}, error::{SuiError, SuiResult}, fp_ensure, gas::{self, SuiGasStatus}, messages::{InputObjectKind, SingleTransactionKind, TransactionData, TransactionEnvelope}, object::{Object, Owner}, }; use tracing::{debug, instrument}; use crate::authority::SuiDataStore; #[instrument(level = "trace", skip_all)] pub async fn check_transaction_input<const A: bool, S, T>( store: &SuiDataStore<A, S>, transaction: &TransactionEnvelope<T>, shared_obj_metric: &IntCounter, ) -> Result<(SuiGasStatus<'static>, Vec<(InputObjectKind, Object)>), SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let (gas_object, mut gas_status) = check_gas( store, transaction.gas_payment_object_ref().0, transaction.data.gas_budget, ) .await?; let objects_by_kind = check_locks(store, &transaction.data, gas_object).await?; if transaction.contains_shared_object() { shared_obj_metric.inc(); gas_status.charge_consensus()?; } Ok((gas_status, objects_by_kind)) } #[instrument(level = "trace", skip_all)] async fn check_gas<const A: bool, S>( store: &SuiDataStore<A, S>, gas_payment_id: ObjectID, gas_budget: u64, ) -> SuiResult<(Object, SuiGasStatus<'static>)> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let gas_object = store.get_object(&gas_payment_id)?; let gas_object = gas_object.ok_or(SuiError::ObjectNotFound { object_id: gas_payment_id, })?; gas::check_gas_balance(&gas_object, gas_budget)?; let gas_status = gas::start_gas_metering(gas_budget, 1, 1)?; Ok((gas_object, gas_status)) } #[instrument(level = "trace", skip_all, fields(num_objects = input_objects.len()))] async fn fetch_objects<const A: bool, S>( store: &SuiDataStore<A, S>, input_objects: &[InputObjectKind], gas_object_opt: Option<Object>, ) -> Result<Vec<Option<Object>>, SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let ids: Vec<_> = input_objects.iter().map(|kind| kind.object_id()).collect(); if let Some(gas_object) = gas_object_opt { debug_assert_eq!(gas_object.id(), ids[ids.len() - 1]); let mut result = store.get_objects(&ids[..ids.len() - 1])?; result.push(Some(gas_object)); Ok(result) } else { store.get_objects(&ids[..]) } } #[instrument(level = "trace", skip_all)] async fn check_locks<const A: bool, S>( store: &SuiDataStore<A, S>, transaction: &TransactionData, gas_object: Object, ) -> Result<Vec<(InputObjectKind, Object)>, SuiError> where S: Eq + Serialize + for<'de> Deserialize<'de>, { let input_objects = transaction.input_objects()?; let objects = fetch_objects(store, &input_objects, Some(gas_object)).await?; let mut owned_object_authenticators: HashSet<SuiAddress> = HashSet::new(); for object in objects.iter().flatten() { if !object.is_immutable() { fp_ensure!( owned_object_authenticators.insert(object.id().into()), SuiError::InvalidBatchTransaction { error: format!("Mutable object {} cannot appear in more than one single transact
errors.push(object_kind.object_not_found_error()); continue; } }; if transfer_object_ids.contains(&object.id()) { object.is_transfer_eligible()?; } match check_one_lock( &transaction.signer(), object_kind, &object, &owned_object_authenticators, ) { Ok(()) => all_objects.push((object_kind, object)), Err(e) => { errors.push(e); } } } if !errors.is_empty() { return Err(SuiError::LockErrors { errors }); } fp_ensure!(!all_objects.is_empty(), SuiError::ObjectInputArityViolation); Ok(all_objects) } pub fn filter_owned_objects(all_objects: &[(InputObjectKind, Object)]) -> Vec<ObjectRef> { let owned_objects: Vec<_> = all_objects .iter() .filter_map(|(object_kind, object)| match object_kind { InputObjectKind::MovePackage(_) => None, InputObjectKind::ImmOrOwnedMoveObject(object_ref) => { if object.is_immutable() { None } else { Some(*object_ref) } } InputObjectKind::SharedMoveObject(_) => None, }) .collect(); debug!( num_mutable_objects = owned_objects.len(), "Checked locks and found mutable objects" ); owned_objects } fn check_one_lock( sender: &SuiAddress, object_kind: InputObjectKind, object: &Object, owned_object_authenticators: &HashSet<SuiAddress>, ) -> SuiResult { match object_kind { InputObjectKind::MovePackage(package_id) => { fp_ensure!( object.data.try_as_package().is_some(), SuiError::MoveObjectAsPackage { object_id: package_id } ); } InputObjectKind::ImmOrOwnedMoveObject((object_id, sequence_number, object_digest)) => { fp_ensure!( !object.is_package(), SuiError::MovePackageAsObject { object_id } ); fp_ensure!( sequence_number <= SequenceNumber::MAX, SuiError::InvalidSequenceNumber ); fp_ensure!( object.version() == sequence_number, SuiError::UnexpectedSequenceNumber { object_id, expected_sequence: object.version(), given_sequence: sequence_number, } ); let expected_digest = object.digest(); fp_ensure!( expected_digest == object_digest, SuiError::InvalidObjectDigest { object_id, expected_digest } ); match object.owner { Owner::Immutable => { } Owner::AddressOwner(owner) => { fp_ensure!( sender == &owner, SuiError::IncorrectSigner { error: format!("Object {:?} is owned by account address {:?}, but signer address is {:?}", object_id, owner, sender), } ); } Owner::ObjectOwner(owner) => { fp_ensure!( owned_object_authenticators.contains(&owner), SuiError::IncorrectSigner { error: format!( "Object {:?} is owned by object {:?}, which is not in the input", object.id(), owner ), } ); } Owner::Shared => { return Err(SuiError::NotSharedObjectError); } }; } InputObjectKind::SharedMoveObject(..) => { fp_ensure!(object.is_shared(), SuiError::NotSharedObjectError); } }; Ok(()) }
ions in a batch", object.id()), } ); } } let mut all_objects = Vec::with_capacity(input_objects.len()); let mut errors = Vec::new(); let transfer_object_ids: HashSet<_> = transaction .kind .single_transactions() .filter_map(|s| { if let SingleTransactionKind::TransferCoin(t) = s { Some(t.object_ref.0) } else { None } }) .collect(); for (object_kind, object) in input_objects.into_iter().zip(objects) { let object = match object { Some(object) => object, None => {
random
[ { "content": "/// Creates an object for use in the microbench\n\nfn create_gas_object(object_id: ObjectID, owner: SuiAddress) -> Object {\n\n Object::with_id_owner_gas_coin_object_for_testing(\n\n object_id,\n\n SequenceNumber::new(),\n\n owner,\n\n GAS_PER_TX,\n\n )\n\n}\n\n\n", "file_path": "sui/src/benchmark/transaction_creator.rs", "rank": 0, "score": 346471.1508306954 }, { "content": "/// Check whether the given gas_object and gas_budget is legit:\n\n/// 1. If the gas object is owned.\n\n/// 2. If it's enough to pay the flat minimum transaction fee\n\n/// 3. If it's less than the max gas budget allowed\n\n/// 4. If the gas_object actually has enough balance to pay for the budget.\n\npub fn check_gas_balance(gas_object: &Object, gas_budget: u64) -> SuiResult {\n\n ok_or_gas_error!(\n\n gas_object.is_owned(),\n\n \"Gas object must be owned Move object\".to_owned()\n\n )?;\n\n ok_or_gas_error!(\n\n gas_budget <= *MAX_GAS_BUDGET,\n\n format!(\"Gas budget set too high; maximum is {}\", *MAX_GAS_BUDGET)\n\n )?;\n\n ok_or_gas_error!(\n\n gas_budget >= *MIN_GAS_BUDGET,\n\n format!(\n\n \"Gas budget is {}, smaller than minimum requirement {}\",\n\n gas_budget, *MIN_GAS_BUDGET\n\n )\n\n )?;\n\n\n\n let balance = get_gas_balance(gas_object)?;\n\n ok_or_gas_error!(\n\n balance >= gas_budget,\n\n format!(\"Gas balance is {balance}, not enough to pay {gas_budget}\")\n\n )\n\n}\n\n\n", "file_path": "sui_types/src/gas.rs", "rank": 1, "score": 331899.93935877236 }, { "content": "pub fn get_gas_balance(gas_object: &Object) -> SuiResult<u64> {\n\n Ok(GasCoin::try_from(gas_object)?.value())\n\n}\n", "file_path": "sui_types/src/gas.rs", "rank": 2, "score": 330117.84377770254 }, { "content": "pub fn refund_gas(gas_object: &mut Object, amount: u64) {\n\n // The object must be a gas coin as we have checked in transaction handle phase.\n\n let gas_coin = GasCoin::try_from(&*gas_object).unwrap();\n\n let balance = gas_coin.value();\n\n let new_gas_coin = GasCoin::new(*gas_coin.id(), gas_object.version(), balance + amount);\n\n let move_object = gas_object.data.try_as_move_mut().unwrap();\n\n move_object.update_contents(bcs::to_bytes(&new_gas_coin).unwrap());\n\n}\n\n\n", "file_path": "sui_types/src/gas.rs", "rank": 3, "score": 324855.4862640768 }, { "content": "/// Subtract the gas balance of \\p gas_object by \\p amount.\n\n/// This function should never fail, since we checked that the budget is always\n\n/// less than balance, and the amount is capped at the budget.\n\npub fn deduct_gas(gas_object: &mut Object, deduct_amount: u64, rebate_amount: u64) {\n\n // The object must be a gas coin as we have checked in transaction handle phase.\n\n let gas_coin = GasCoin::try_from(&*gas_object).unwrap();\n\n let balance = gas_coin.value();\n\n debug_assert!(balance >= deduct_amount);\n\n let new_gas_coin = GasCoin::new(\n\n *gas_coin.id(),\n\n gas_object.version(),\n\n balance + rebate_amount - deduct_amount,\n\n );\n\n let move_object = gas_object.data.try_as_move_mut().unwrap();\n\n move_object.update_contents(bcs::to_bytes(&new_gas_coin).unwrap());\n\n}\n\n\n", "file_path": "sui_types/src/gas.rs", "rank": 4, "score": 316794.841075155 }, { "content": "// Object { id: VersionedID { id: UniqueID { id: ID { bytes: address } } } .. 
}\n\n// Extract the first field of the struct 4 times to get the id bytes.\n\npub fn get_object_id(object: Value) -> Result<Value, PartialVMError> {\n\n get_nested_struct_field(object, &[0, 0, 0, 0])\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/mod.rs", "rank": 5, "score": 306596.05268611875 }, { "content": "/// Make a transaction to publish a test move contracts package.\n\npub fn publish_move_package_transaction(gas_object: Object) -> Transaction {\n\n let build_config = BuildConfig::default();\n\n let mut path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.push(\"../sui_programmability/examples/basics\");\n\n let modules = sui_framework::build_move_package(&path, build_config, false).unwrap();\n\n\n\n let all_module_bytes = modules\n\n .iter()\n\n .map(|m| {\n\n let mut module_bytes = Vec::new();\n\n m.serialize(&mut module_bytes).unwrap();\n\n module_bytes\n\n })\n\n .collect();\n\n\n\n let gas_object_ref = gas_object.compute_object_reference();\n\n let (sender, keypair) = test_keys().pop().unwrap();\n\n let data = TransactionData::new_module(sender, gas_object_ref, all_module_bytes, MAX_GAS);\n\n let signature = Signature::new(&data, &keypair);\n\n Transaction::new(data, signature)\n\n}\n\n\n", "file_path": "test_utils/src/messages.rs", "rank": 6, "score": 272813.888018806 }, { "content": "pub fn check_transaction_response(reply_message: Result<TransactionInfoResponse, io::Error>) {\n\n match reply_message {\n\n Ok(res) => {\n\n if let Some(e) = res.signed_effects {\n\n if matches!(e.effects.status, ExecutionStatus::Failure { .. }) {\n\n info!(\"Execution Error {:?}\", e.effects.status);\n\n }\n\n }\n\n }\n\n Err(err) => {\n\n error!(\"Received Error {:?}\", err);\n\n }\n\n };\n\n}\n\n\n\npub async fn send_tx_chunks(\n\n tx_chunks: Vec<(Transaction, CertifiedTransaction)>,\n\n address: Multiaddr,\n\n conn: usize,\n\n) -> (u128, Vec<Result<TransactionInfoResponse, io::Error>>) {\n", "file_path": "sui/src/benchmark/load_generator.rs", "rank": 7, "score": 270135.2226017453 }, { "content": "pub fn run_test(path: &Path) -> Result<(), Box<dyn std::error::Error>> {\n\n run_test_impl::<SuiTestAdapter>(path, Some(&*PRE_COMPILED))\n\n}\n", "file_path": "sui_programmability/transactional-test-runner/src/lib.rs", "rank": 9, "score": 262036.32843543193 }, { "content": "fn resolve_object_arg(idx: usize, arg: &SuiJsonValue) -> Result<ObjectID, anyhow::Error> {\n\n // Every elem has to be a string convertible to a ObjectID\n\n match arg.to_json_value() {\n\n JsonValue::String(s) => {\n\n let s = s.trim().to_lowercase();\n\n if !s.starts_with(HEX_PREFIX) {\n\n return Err(anyhow!(\"ObjectID hex string must start with 0x.\",));\n\n }\n\n Ok(ObjectID::from_hex_literal(&s)?)\n\n }\n\n _ => Err(anyhow!(\n\n \"Unable to parse arg {:?} as ObjectID at pos {}. 
Expected {:?} byte hex string \\\n\n prefixed with 0x.\",\n\n ObjectID::LENGTH,\n\n idx,\n\n arg.to_json_value(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "sui_core/src/sui_json.rs", "rank": 10, "score": 253392.18776273038 }, { "content": "pub fn dbg_object_id(name: u8) -> ObjectID {\n\n ObjectID::from_bytes([name; ObjectID::LENGTH]).unwrap()\n\n}\n\n\n\nimpl std::fmt::Debug for ObjectDigest {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n\n let s = hex::encode(&self.0);\n\n write!(f, \"o#{}\", s)?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for ObjectDigest {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.0[..]\n\n }\n\n}\n\n\n\nimpl TryFrom<&[u8]> for ObjectDigest {\n\n type Error = SuiError;\n", "file_path": "sui_types/src/base_types.rs", "rank": 11, "score": 250127.33745076536 }, { "content": "pub fn sui_config_dir() -> Result<PathBuf, anyhow::Error> {\n\n match std::env::var_os(\"SUI_CONFIG_DIR\") {\n\n Some(config_env) => Ok(config_env.into()),\n\n None => match dirs::home_dir() {\n\n Some(v) => Ok(v.join(SUI_DIR).join(SUI_CONFIG_DIR)),\n\n None => bail!(\"Cannot obtain home directory path\"),\n\n },\n\n }\n\n .and_then(|dir| {\n\n if !dir.exists() {\n\n create_dir_all(dir.clone())?;\n\n }\n\n Ok(dir)\n\n })\n\n}\n\n\n\npub const AUTHORITIES_DB_NAME: &str = \"authorities_db\";\n\npub const DEFAULT_STARTING_PORT: u16 = 10000;\n\npub const CONSENSUS_DB_NAME: &str = \"consensus_db\";\n\n\n", "file_path": "sui/src/config/mod.rs", "rank": 12, "score": 240011.27934283967 }, { "content": "pub fn bytes_from_hex<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: for<'a> TryFrom<&'a [u8]>,\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n let value = decode_bytes_hex(&s).map_err(serde::de::Error::custom)?;\n\n Ok(value)\n\n}\n\n\n", "file_path": "sui_types/src/base_types.rs", "rank": 13, "score": 239468.4441962827 }, { "content": "pub fn sui_config_dir() -> Result<PathBuf, anyhow::Error> {\n\n match std::env::var_os(\"SUI_CONFIG_DIR\") {\n\n Some(config_env) => Ok(config_env.into()),\n\n None => match dirs::home_dir() {\n\n Some(v) => Ok(v.join(SUI_DIR).join(SUI_CONFIG_DIR)),\n\n None => anyhow::bail!(\"Cannot obtain home directory path\"),\n\n },\n\n }\n\n .and_then(|dir| {\n\n if !dir.exists() {\n\n std::fs::create_dir_all(dir.clone())?;\n\n }\n\n Ok(dir)\n\n })\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct GenesisConfig {\n\n pub committee_size: usize,\n\n pub accounts: Vec<AccountConfig>,\n", "file_path": "crates/sui-config/src/lib.rs", "rank": 14, "score": 236549.7757845185 }, { "content": "// Extract a field valye that's nested inside value `v`. 
The offset of each nesting\n\n// is determined by `offsets`.\n\npub fn get_nested_struct_field(mut v: Value, offsets: &[usize]) -> Result<Value, PartialVMError> {\n\n for offset in offsets {\n\n v = get_nth_struct_field(v, *offset)?;\n\n }\n\n Ok(v)\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/mod.rs", "rank": 15, "score": 232525.19451848677 }, { "content": "/// make a test shared object.\n\npub fn test_shared_object() -> Object {\n\n let seed = \"0x6666666666666660\";\n\n let shared_object_id = ObjectID::from_hex_literal(seed).unwrap();\n\n let content = GasCoin::new(shared_object_id, OBJECT_START_VERSION, 10);\n\n let obj = MoveObject::new(/* type */ GasCoin::type_(), content.to_bcs_bytes());\n\n Object::new_move(obj, Owner::Shared, TransactionDigest::genesis())\n\n}\n", "file_path": "test_utils/src/objects.rs", "rank": 16, "score": 229311.2625519944 }, { "content": "pub fn bytes_as_hex<B, S>(bytes: &B, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n B: AsRef<[u8]>,\n\n S: serde::ser::Serializer,\n\n{\n\n serializer.serialize_str(&encode_bytes_hex(bytes))\n\n}\n\n\n", "file_path": "sui_types/src/base_types.rs", "rank": 17, "score": 226175.21344837747 }, { "content": "/// Make a few different test transaction containing the same shared object.\n\npub fn test_shared_object_transactions() -> Vec<Transaction> {\n\n // Helper function to load genesis packages.\n\n fn get_genesis_package_by_module(genesis_objects: &[Object], module: &str) -> ObjectRef {\n\n genesis_objects\n\n .iter()\n\n .find_map(|o| match o.data.try_as_package() {\n\n Some(p) => {\n\n if p.serialized_module_map().keys().any(|name| name == module) {\n\n Some(o.compute_object_reference())\n\n } else {\n\n None\n\n }\n\n }\n\n None => None,\n\n })\n\n .unwrap()\n\n }\n\n\n\n // The key pair of the sender of the transaction.\n\n let (sender, keypair) = test_keys().pop().unwrap();\n", "file_path": "test_utils/src/messages.rs", "rank": 18, "score": 225899.9882656373 }, { "content": "/// Return the object ID involved in an event.\n\n/// This depends on the value format for each event type.\n\nfn get_object_id_from_event(event_type_byte: u64, val: &Value) -> Option<ObjectID> {\n\n let val = val.copy_value().unwrap();\n\n let address = if event_type_byte == WRAPPED_OBJECT_EVENT {\n\n val\n\n } else if event_type_byte == UPDATE_OBJECT_EVENT {\n\n get_object_id(val).unwrap()\n\n } else {\n\n let event_type = EventType::try_from_primitive(event_type_byte as u8).unwrap();\n\n match event_type {\n\n EventType::DeleteChildObject => val,\n\n EventType::DeleteObjectID => get_nested_struct_field(val, &[0, 0, 0]).unwrap(),\n\n EventType::User => {\n\n return None;\n\n }\n\n _ => get_object_id(val.copy_value().unwrap()).unwrap(),\n\n }\n\n };\n\n Some(ObjectID::try_from(address.value_as::<AccountAddress>().unwrap().as_slice()).unwrap())\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/test_scenario.rs", "rank": 19, "score": 219976.4009143473 }, { "content": "/// Make a few test gas objects.\n\npub fn test_gas_objects() -> Vec<Object> {\n\n (0..19)\n\n .map(|i| {\n\n let seed = format!(\"0x555555555555555{i}\");\n\n let gas_object_id = ObjectID::from_hex_literal(&seed).unwrap();\n\n let (sender, _) = test_keys().pop().unwrap();\n\n Object::with_id_owner_for_testing(gas_object_id, sender)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "test_utils/src/objects.rs", "rank": 20, "score": 218293.16054618856 }, { "content": "pub fn set_object_move_transaction(\n\n src: SuiAddress,\n\n secret: &dyn 
signature::Signer<Signature>,\n\n object_ref: ObjectRef,\n\n value: u64,\n\n framework_obj_ref: ObjectRef,\n\n gas_object_ref: ObjectRef,\n\n) -> Transaction {\n\n let args = vec![\n\n CallArg::ImmOrOwnedObject(object_ref),\n\n CallArg::Pure(bcs::to_bytes(&value).unwrap()),\n\n ];\n\n\n\n to_transaction(\n\n TransactionData::new_move_call(\n\n src,\n\n framework_obj_ref,\n\n ident_str!(\"ObjectBasics\").to_owned(),\n\n ident_str!(\"set_value\").to_owned(),\n\n Vec::new(),\n\n gas_object_ref,\n\n args,\n\n GAS_VALUE_FOR_TESTING / 2,\n\n ),\n\n secret,\n\n )\n\n}\n\n\n", "file_path": "sui_core/src/unit_tests/authority_aggregator_tests.rs", "rank": 21, "score": 218156.74640862233 }, { "content": "pub fn crate_object_move_transaction(\n\n src: SuiAddress,\n\n secret: &dyn signature::Signer<Signature>,\n\n dest: SuiAddress,\n\n value: u64,\n\n framework_obj_ref: ObjectRef,\n\n gas_object_ref: ObjectRef,\n\n) -> Transaction {\n\n // When creating an ObjectBasics object, we provide the value (u64) and address which will own the object\n\n let arguments = vec![\n\n CallArg::Pure(value.to_le_bytes().to_vec()),\n\n CallArg::Pure(bcs::to_bytes(&AccountAddress::from(dest)).unwrap()),\n\n ];\n\n\n\n to_transaction(\n\n TransactionData::new_move_call(\n\n src,\n\n framework_obj_ref,\n\n ident_str!(\"ObjectBasics\").to_owned(),\n\n ident_str!(\"create\").to_owned(),\n\n Vec::new(),\n\n gas_object_ref,\n\n arguments,\n\n GAS_VALUE_FOR_TESTING / 2,\n\n ),\n\n &*secret,\n\n )\n\n}\n\n\n", "file_path": "sui_core/src/unit_tests/authority_aggregator_tests.rs", "rank": 22, "score": 218156.74640862233 }, { "content": "pub fn delete_object_move_transaction(\n\n src: SuiAddress,\n\n secret: &dyn signature::Signer<Signature>,\n\n object_ref: ObjectRef,\n\n framework_obj_ref: ObjectRef,\n\n gas_object_ref: ObjectRef,\n\n) -> Transaction {\n\n to_transaction(\n\n TransactionData::new_move_call(\n\n src,\n\n framework_obj_ref,\n\n ident_str!(\"ObjectBasics\").to_owned(),\n\n ident_str!(\"delete\").to_owned(),\n\n Vec::new(),\n\n gas_object_ref,\n\n vec![CallArg::ImmOrOwnedObject(object_ref)],\n\n GAS_VALUE_FOR_TESTING / 2,\n\n ),\n\n secret,\n\n )\n\n}\n\n\n", "file_path": "sui_core/src/unit_tests/authority_aggregator_tests.rs", "rank": 23, "score": 218156.74640862233 }, { "content": "pub fn verify_module(module: &CompiledModule) -> SuiResult {\n\n verify_id_leak(module)\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/id_leak_verifier.rs", "rank": 24, "score": 217874.80002227874 }, { "content": "pub fn verify_module(module: &CompiledModule) -> SuiResult {\n\n verify_id_immutable(module)\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/id_immutable_verifier.rs", "rank": 25, "score": 217874.80002227874 }, { "content": "pub fn new_move_vm(natives: NativeFunctionTable) -> Result<MoveVM, SuiError> {\n\n MoveVM::new(natives).map_err(|_| SuiError::ExecutionInvariantViolation)\n\n}\n\n\n\n/// Execute `module::function<type_args>(object_args ++ pure_args)` as a call from `sender` with the given `gas_budget`.\n\n/// Execution will read from/write to the store in `state_view`.\n\n/// IMPORTANT NOTES on the return value:\n\n/// The return value is a two-layer SuiResult. The outer layer indicates whether a system error\n\n/// has occurred (i.e. issues with the sui system, not with user transaction).\n\n/// As long as there are no system issues we return Ok(SuiResult).\n\n/// The inner SuiResult indicates the execution result. 
If execution failed, we return Ok(Err),\n\n/// otherwise we return Ok(Ok).\n\n/// TODO: Do we really need the two layers?\n", "file_path": "sui_programmability/adapter/src/adapter.rs", "rank": 26, "score": 217823.33223640866 }, { "content": "fn get_type(pat_type: &mut PatType) -> Result<Type, syn::Error> {\n\n Ok(\n\n if let Some((pos, attr)) = pat_type\n\n .attrs\n\n .iter()\n\n .find_position(|a| a.path.is_ident(\"schemars\"))\n\n {\n\n let attribute = parse::<NamedAttribute>(attr.tokens.clone().into())?;\n\n\n\n let stream = syn::parse_str(&attribute.value.value())?;\n\n let tokens = respan_token_stream(stream, attribute.value.span());\n\n\n\n let path = syn::parse2(tokens)?;\n\n pat_type.attrs.remove(pos);\n\n path\n\n } else {\n\n pat_type.ty.as_ref().clone()\n\n },\n\n )\n\n}\n\n\n", "file_path": "sui/open_rpc/macros/src/lib.rs", "rank": 27, "score": 215871.46704899907 }, { "content": "/// Extract the package reference from a transaction effect. This is useful to deduce the\n\n/// authority-created package reference after attempting to publish a new Move package.\n\npub fn parse_package_ref(effects: &TransactionEffects) -> Option<ObjectRef> {\n\n effects\n\n .created\n\n .iter()\n\n .find(|(_, owner)| matches!(owner, Owner::Immutable))\n\n .map(|(reference, _)| *reference)\n\n}\n", "file_path": "test_utils/src/messages.rs", "rank": 28, "score": 215376.95961528696 }, { "content": "pub fn get_framework_object_ref() -> ObjectRef {\n\n let genesis = GENESIS.lock().unwrap();\n\n genesis\n\n .objects\n\n .iter()\n\n .find(|o| o.id() == SUI_FRAMEWORK_ADDRESS.into())\n\n .unwrap()\n\n .compute_object_reference()\n\n}\n\n\n", "file_path": "sui_programmability/adapter/src/genesis.rs", "rank": 29, "score": 215184.13896736375 }, { "content": "/// Fixture: a a test shared object.\n\npub fn test_shared_object() -> Object {\n\n let seed = \"0x6666666666666660\";\n\n let shared_object_id = ObjectID::from_hex_literal(seed).unwrap();\n\n let content = GasCoin::new(shared_object_id, OBJECT_START_VERSION, 10);\n\n let obj = MoveObject::new(/* type */ GasCoin::type_(), content.to_bcs_bytes());\n\n Object::new_move(obj, Owner::Shared, TransactionDigest::genesis())\n\n}\n\n\n\n/// Fixture: a few test certificates containing a shared object.\n\npub async fn test_certificates(authority: &AuthorityState) -> Vec<CertifiedTransaction> {\n\n let (sender, keypair) = test_keys().pop().unwrap();\n\n\n\n let mut certificates = Vec::new();\n\n let shared_object_id = test_shared_object().id();\n\n for gas_object in test_gas_objects() {\n\n // Make a sample transaction.\n\n let module = \"ObjectBasics\";\n\n let function = \"create\";\n\n let genesis_package_objects = genesis::clone_genesis_packages();\n\n let package_object_ref = get_genesis_package_by_module(&genesis_package_objects, module);\n", "file_path": "sui_core/src/unit_tests/consensus_tests.rs", "rank": 30, "score": 215128.14078491443 }, { "content": "fn call(verifier: &mut IDLeakAnalysis, function_handle: &FunctionHandle) -> SuiResult {\n\n let guaranteed_safe = is_call_safe_to_leak(verifier, function_handle);\n\n let parameters = verifier\n\n .binary_view\n\n .signature_at(function_handle.parameters);\n\n for _ in 0..parameters.len() {\n\n if verifier.stack.pop().unwrap() == AbstractValue::ID && !guaranteed_safe {\n\n return Err(verification_failure(\n\n \"ID leaked through function call.\".to_string(),\n\n ));\n\n }\n\n }\n\n\n\n let return_ = verifier.binary_view.signature_at(function_handle.return_);\n\n for _ in 0..return_.0.len() {\n\n 
verifier.stack.push(AbstractValue::NonID);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/id_leak_verifier.rs", "rank": 31, "score": 209508.3619660227 }, { "content": "#[instrument(name = \"tx_execute_to_effects\", level = \"debug\", skip_all)]\n\npub fn execute_transaction_to_effects<S: BackingPackageStore>(\n\n shared_object_refs: Vec<ObjectRef>,\n\n temporary_store: &mut AuthorityTemporaryStore<S>,\n\n transaction_data: TransactionData,\n\n transaction_digest: TransactionDigest,\n\n mut transaction_dependencies: BTreeSet<TransactionDigest>,\n\n move_vm: &Arc<MoveVM>,\n\n native_functions: &NativeFunctionTable,\n\n gas_status: SuiGasStatus,\n\n) -> SuiResult<TransactionEffects> {\n\n let mut tx_ctx = TxContext::new(&transaction_data.signer(), &transaction_digest);\n\n\n\n let gas_object_id = transaction_data.gas_payment_object_ref().0;\n\n let status = execute_transaction(\n\n temporary_store,\n\n transaction_data,\n\n gas_object_id,\n\n &mut tx_ctx,\n\n move_vm,\n\n native_functions,\n", "file_path": "sui_core/src/execution_engine.rs", "rank": 32, "score": 206120.17167695527 }, { "content": "fn extract_gas_info(s: &str) -> Option<(ObjectID, SequenceNumber, u64)> {\n\n let tokens = s.split('|').map(|q| q.trim()).collect::<Vec<_>>();\n\n if tokens.len() != 3 {\n\n return None;\n\n }\n\n\n\n let id_str = tokens[0]\n\n .split(':')\n\n .map(|q| q.trim())\n\n .collect::<Vec<_>>()\n\n .iter()\n\n .last()\n\n .unwrap()\n\n .to_owned();\n\n Some((\n\n ObjectID::from_hex_literal(id_str).unwrap(),\n\n SequenceNumber::from_u64(tokens[1].parse::<u64>().unwrap()),\n\n tokens[2].parse::<u64>().unwrap(),\n\n ))\n\n}\n", "file_path": "sui/src/unit_tests/cli_tests.rs", "rank": 33, "score": 204931.20336247867 }, { "content": "/// Fixture: a few test gas objects.\n\npub fn test_gas_objects() -> Vec<Object> {\n\n (0..4)\n\n .map(|i| {\n\n let seed = format!(\"0x555555555555555{i}\");\n\n let gas_object_id = ObjectID::from_hex_literal(&seed).unwrap();\n\n let (sender, _) = test_keys().pop().unwrap();\n\n Object::with_id_owner_for_testing(gas_object_id, sender)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "sui_core/src/unit_tests/consensus_tests.rs", "rank": 34, "score": 204297.0893685021 }, { "content": "fn parse_rpc_method(trait_data: &mut syn::ItemTrait) -> Result<RpcDefinition, syn::Error> {\n\n let mut methods = Vec::new();\n\n for trait_item in &mut trait_data.items {\n\n if let TraitItem::Method(method) = trait_item {\n\n let method_name = if let Some(attr) = find_attr(&method.attrs, \"method\").cloned() {\n\n let token: TokenStream = attr.tokens.clone().into();\n\n parse::<NamedAttribute>(token)?.value.value()\n\n } else {\n\n \"Unknown method name\".to_string()\n\n };\n\n\n\n let doc = extract_doc_comments(&method.attrs).to_string();\n\n\n\n let params: Vec<_> = method\n\n .sig\n\n .inputs\n\n .iter_mut()\n\n .filter_map(|arg| match arg {\n\n syn::FnArg::Receiver(_) => None,\n\n syn::FnArg::Typed(arg) => match *arg.pat.clone() {\n", "file_path": "sui/open_rpc/macros/src/lib.rs", "rank": 35, "score": 202708.04460220467 }, { "content": "pub fn delete_id(\n\n context: &mut NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n // unwrap safe because the interface of native function guarantees it.\n\n let ty = ty_args.pop().unwrap();\n\n let versioned_id = args.pop_back().unwrap();\n\n\n\n // TODO: what should the cost of this 
be?\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::EMIT_EVENT, 0);\n\n\n\n if !context.save_event(vec![], EventType::DeleteObjectID as u64, ty, versioned_id)? {\n\n return Ok(NativeResult::err(cost, 0));\n\n }\n\n\n\n Ok(NativeResult::ok(cost, smallvec![]))\n\n}\n", "file_path": "sui_programmability/framework/src/natives/id.rs", "rank": 36, "score": 202530.13626835667 }, { "content": "fn get_gas_value(o: &Object) -> u64 {\n\n GasCoin::try_from(o.data.try_as_move().unwrap())\n\n .unwrap()\n\n .value()\n\n}\n\n\n", "file_path": "sui/src/benchmark/validator_preparer.rs", "rank": 37, "score": 200844.12474371772 }, { "content": "pub fn get_genesis_package_by_module(genesis_objects: &[Object], module: &str) -> ObjectRef {\n\n genesis_objects\n\n .iter()\n\n .find_map(|o| match o.data.try_as_package() {\n\n Some(p) => {\n\n if p.serialized_module_map().keys().any(|name| name == module) {\n\n Some(o.compute_object_reference())\n\n } else {\n\n None\n\n }\n\n }\n\n None => None,\n\n })\n\n .unwrap()\n\n}\n\n\n\npub async fn call_move(\n\n authority: &AuthorityState,\n\n gas_object_id: &ObjectID,\n\n sender: &SuiAddress,\n", "file_path": "sui_core/src/unit_tests/authority_tests.rs", "rank": 38, "score": 199915.22667884233 }, { "content": "pub fn get_versioned_id(\n\n context: &mut NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n let obj = pop_arg!(args, StructRef);\n\n let id_field = obj.borrow_field(0)?;\n\n\n\n // TODO: what should the cost of this be?\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::SIGNER_BORROW, 0);\n\n\n\n Ok(NativeResult::ok(cost, smallvec![id_field]))\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/id.rs", "rank": 39, "score": 199594.36092521797 }, { "content": "fn get_gas_value(o: &Object) -> u64 {\n\n GasCoin::try_from(o.data.try_as_move().unwrap())\n\n .unwrap()\n\n .value()\n\n}\n\n\n\nasync fn get_object(id: ObjectID, context: &mut WalletContext) -> Option<Object> {\n\n if let ObjectRead::Exists(_, o, _) = context.gateway.get_object_info(id).await.unwrap() {\n\n Some(o)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[allow(clippy::assertions_on_constants)]\n\n#[traced_test]\n\n#[tokio::test]\n\nasync fn test_merge_coin() -> Result<(), anyhow::Error> {\n\n let (network, mut context, address) = setup_network_and_wallet().await?;\n\n\n", "file_path": "sui/src/unit_tests/cli_tests.rs", "rank": 40, "score": 197870.4288510756 }, { "content": "pub fn clone_genesis_packages() -> Vec<Object> {\n\n let genesis = GENESIS.lock().unwrap();\n\n genesis.objects.clone()\n\n}\n\n\n", "file_path": "sui_programmability/adapter/src/genesis.rs", "rank": 41, "score": 197038.9720360494 }, { "content": "/// Make a transaction calling a specific move module & function.\n\npub fn move_transaction(\n\n gas_object: Object,\n\n module: &'static str,\n\n function: &'static str,\n\n package_ref: ObjectRef,\n\n arguments: Vec<CallArg>,\n\n) -> Transaction {\n\n // The key pair of the sender of the transaction.\n\n let (sender, keypair) = test_keys().pop().unwrap();\n\n\n\n // Make the transaction.\n\n let data = TransactionData::new_move_call(\n\n sender,\n\n package_ref,\n\n ident_str!(module).to_owned(),\n\n ident_str!(function).to_owned(),\n\n /* type_args */ vec![],\n\n gas_object.compute_object_reference(),\n\n arguments,\n\n MAX_GAS,\n\n );\n\n let signature = Signature::new(&data, &keypair);\n\n 
Transaction::new(data, signature)\n\n}\n\n\n", "file_path": "test_utils/src/messages.rs", "rank": 42, "score": 196902.73848769887 }, { "content": "pub fn decode_bytes_hex<T: for<'a> TryFrom<&'a [u8]>>(s: &str) -> Result<T, anyhow::Error> {\n\n let s = s.strip_prefix(\"0x\").unwrap_or(s);\n\n let value = hex::decode(s)?;\n\n T::try_from(&value[..]).map_err(|_| anyhow::anyhow!(\"byte deserialization failed\"))\n\n}\n\n\n\nimpl fmt::Display for SuiAddress {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{:#x}\", self)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for SuiAddress {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:#x}\", self)\n\n }\n\n}\n\n\n\nimpl fmt::LowerHex for SuiAddress {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "sui_types/src/base_types.rs", "rank": 43, "score": 194173.6638573202 }, { "content": "/// Make a test authority store in a temporary directory.\n\npub fn test_authority_store() -> AuthorityStore {\n\n let store_path = tempfile::tempdir().unwrap();\n\n AuthorityStore::open(store_path, None)\n\n}\n\n\n", "file_path": "test_utils/src/authority.rs", "rank": 44, "score": 192879.2040237176 }, { "content": "pub fn get_nth_struct_field(v: Value, n: usize) -> Result<Value, PartialVMError> {\n\n let mut itr = v.value_as::<Struct>()?.unpack()?;\n\n Ok(itr.nth(n).unwrap())\n\n}\n", "file_path": "sui_programmability/framework/src/natives/mod.rs", "rank": 45, "score": 190976.89257265977 }, { "content": "pub fn run(args: Args) -> crate::Result<()> {\n\n let direct_dups_config = DirectDepDupsConfig { allow: vec![] };\n\n let banned_deps_config = BannedDepsConfig {\n\n direct: vec![(\n\n \"lazy_static\".to_owned(),\n\n \"use once_cell::sync::Lazy instead\".to_owned(),\n\n )]\n\n .into_iter()\n\n .collect(),\n\n };\n\n let project_linters: &[&dyn ProjectLinter] = &[\n\n &DirectDepDups::new(&direct_dups_config),\n\n &BannedDeps::new(&banned_deps_config),\n\n ];\n\n\n\n let package_linters: &[&dyn PackageLinter] = &[\n\n // &CrateNamesPaths,\n\n &IrrelevantBuildDeps,\n\n // This one seems to be broken\n\n //&UnpublishedPackagesOnlyUsePathDependencies::new(),\n", "file_path": "crates/x/src/lint.rs", "rank": 46, "score": 190969.76408985961 }, { "content": "/// Check via BFS\n\n/// The invariant is that all types at a given level must be the same or be empty\n\npub fn is_homogeneous(val: &JsonValue) -> bool {\n\n let mut deq: VecDeque<&JsonValue> = VecDeque::new();\n\n deq.push_back(val);\n\n is_homogeneous_rec(&mut deq)\n\n}\n\n\n", "file_path": "sui_core/src/sui_json.rs", "rank": 47, "score": 190676.1222414577 }, { "content": "/// Given a list of `modules`, use `ctx` to generate a fresh ID for the new packages.\n\n/// If `is_framework` is true, then the modules can have arbitrary user-defined address,\n\n/// otherwise their addresses must be 0.\n\n/// Mutate each module's self ID to the appropriate fresh ID and update its module handle tables\n\n/// to reflect the new ID's of its dependencies.\n\n/// Returns the newly created package ID.\n\npub fn generate_package_id(\n\n modules: &mut [CompiledModule],\n\n ctx: &mut TxContext,\n\n) -> Result<ObjectID, SuiError> {\n\n let mut sub_map = BTreeMap::new();\n\n let package_id = ctx.fresh_id();\n\n for module in modules.iter() {\n\n let old_module_id = module.self_id();\n\n let old_address = *old_module_id.address();\n\n if old_address != AccountAddress::ZERO {\n\n let handle = module.module_handle_at(module.self_module_handle_idx);\n\n let 
name = module.identifier_at(handle.name);\n\n return Err(SuiError::ModulePublishFailure {\n\n error: format!(\"Publishing module {name} with non-zero address is not allowed\"),\n\n });\n\n }\n\n let new_module_id = ModuleId::new(\n\n AccountAddress::from(package_id),\n\n old_module_id.name().to_owned(),\n\n );\n", "file_path": "sui_programmability/adapter/src/adapter.rs", "rank": 48, "score": 190310.8600097699 }, { "content": "pub fn bytes_to_address(\n\n context: &mut NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(args.len() == 1);\n\n\n\n let addr_bytes = pop_arg!(args, Vec<u8>);\n\n // unwrap safe because this native function is only called from new_from_bytes,\n\n // which already asserts the size of bytes to be equal of account address.\n\n let addr = AccountAddress::from_bytes(addr_bytes).unwrap();\n\n\n\n // TODO: what should the cost of this be?\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::CREATE_SIGNER, 0);\n\n\n\n Ok(NativeResult::ok(cost, smallvec![Value::address(addr)]))\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/id.rs", "rank": 49, "score": 190300.4966433666 }, { "content": "/// Implementation of Move native function\n\n/// `freeze_object<T: key>(obj: T)`\n\npub fn freeze_object(\n\n context: &mut NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n let ty = ty_args.pop().unwrap();\n\n let obj = args.pop_back().unwrap();\n\n let event_type = EventType::FreezeObject;\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::EMIT_EVENT, 1);\n\n if context.save_event(vec![], event_type as u64, ty, obj)? {\n\n Ok(NativeResult::ok(cost, smallvec![]))\n\n } else {\n\n Ok(NativeResult::err(cost, 0))\n\n }\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/transfer.rs", "rank": 50, "score": 190191.55557797267 }, { "content": "/// Implementation of Move native function\n\n/// `share_object<T: key>(obj: T)`\n\npub fn share_object(\n\n context: &mut NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n let ty = ty_args.pop().unwrap();\n\n let obj = args.pop_back().unwrap();\n\n let event_type = EventType::ShareObject;\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::EMIT_EVENT, 1);\n\n if context.save_event(vec![], event_type as u64, ty, obj)? 
{\n\n Ok(NativeResult::ok(cost, smallvec![]))\n\n } else {\n\n Ok(NativeResult::err(cost, 0))\n\n }\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/transfer.rs", "rank": 51, "score": 190191.55557797267 }, { "content": "pub fn is_object(\n\n view: &BinaryIndexedView,\n\n function_type_args: &[AbilitySet],\n\n t: &SignatureToken,\n\n) -> Result<bool, String> {\n\n use SignatureToken as S;\n\n match t {\n\n S::Reference(inner) | S::MutableReference(inner) | S::Vector(inner) => {\n\n is_object(view, function_type_args, inner)\n\n }\n\n _ => is_object_struct(view, function_type_args, t),\n\n }\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/entry_points_verifier.rs", "rank": 52, "score": 190186.56022532057 }, { "content": "#[test]\n\nfn test_object_id_deserialize_from_json_value() {\n\n let obj_id = ObjectID::random();\n\n let json_value = serde_json::to_value(obj_id).expect(\"serde_json::to_value fail.\");\n\n let obj_id2: ObjectID =\n\n serde_json::from_value(json_value).expect(\"serde_json::from_value fail.\");\n\n assert_eq!(obj_id, obj_id2)\n\n}\n\n\n", "file_path": "sui_types/src/unit_tests/base_types_tests.rs", "rank": 53, "score": 188523.74422238453 }, { "content": "/// Store package in state_view and call module initializers\n\npub fn store_package_and_init_modules<\n\n E: Debug,\n\n S: ResourceResolver<Error = E> + ModuleResolver<Error = E> + Storage,\n\n>(\n\n state_view: &mut S,\n\n vm: &MoveVM,\n\n modules: Vec<CompiledModule>,\n\n ctx: &mut TxContext,\n\n gas_status: &mut SuiGasStatus,\n\n) -> SuiResult {\n\n let modules_to_init = modules\n\n .iter()\n\n .filter_map(|module| {\n\n let init_fdef = module.function_defs.iter().find(|fdef| {\n\n let fhandle = module.function_handle_at(fdef.function).name;\n\n let fname = module.identifier_at(fhandle);\n\n fname == INIT_FN_NAME\n\n })?;\n\n\n\n let fhandle = module.function_handle_at(init_fdef.function);\n", "file_path": "sui_programmability/adapter/src/adapter.rs", "rank": 54, "score": 187238.78845979267 }, { "content": "pub fn derive_id(\n\n context: &mut NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(args.len() == 2);\n\n\n\n let ids_created = pop_arg!(args, u64);\n\n let tx_hash = pop_arg!(args, Vec<u8>);\n\n\n\n // TODO(https://github.com/MystenLabs/sui/issues/58): finalize digest format\n\n // unwrap safe because all digests in Move are serialized from the Rust `TransactionDigest`\n\n let digest = TransactionDigest::try_from(tx_hash.as_slice()).unwrap();\n\n let id = Value::address(AccountAddress::from(digest.derive_id(ids_created)));\n\n\n\n // TODO: choose cost\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::CREATE_SIGNER, 0);\n\n\n\n Ok(NativeResult::ok(cost, smallvec![id]))\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/tx_context.rs", "rank": 55, "score": 187184.36942259452 }, { "content": "pub fn update_object(\n\n context: &mut NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert_eq!(ty_args.len(), 1);\n\n debug_assert_eq!(args.len(), 1);\n\n\n\n let ty = ty_args.pop().unwrap();\n\n let obj = args.pop_back().unwrap();\n\n\n\n // Gas amount doesn't matter as this is test only.\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::EMIT_EVENT, 0);\n\n context.save_event(vec![], UPDATE_OBJECT_EVENT, ty, obj)?;\n\n // Run through the events to make 
sure the object we returned didn't violate any rules.\n\n match get_global_inventory(context.events()) {\n\n Ok(_) => Ok(NativeResult::ok(cost, smallvec![])),\n\n Err(abort_code) => Ok(NativeResult::err(cost, abort_code)),\n\n }\n\n}\n", "file_path": "sui_programmability/framework/src/natives/test_scenario.rs", "rank": 56, "score": 187073.15969526785 }, { "content": "/// Helper for a \"canonical\" verification of a module.\n\npub fn verify_module(module: &CompiledModule) -> SuiResult {\n\n struct_with_key_verifier::verify_module(module)?;\n\n global_storage_access_verifier::verify_module(module)?;\n\n id_immutable_verifier::verify_module(module)?;\n\n id_leak_verifier::verify_module(module)?;\n\n entry_points_verifier::verify_module(module)\n\n}\n", "file_path": "sui_programmability/verifier/src/verifier.rs", "rank": 57, "score": 184953.6507088294 }, { "content": "/// Delete the given object\n\npub fn delete_object_for_testing(\n\n context: &mut NativeContext,\n\n ty_args: Vec<Type>,\n\n args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert_eq!(ty_args.len(), 1);\n\n debug_assert_eq!(args.len(), 1);\n\n\n\n // Gas amount doesn't matter as this is test only.\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::EMIT_EVENT, 0);\n\n Ok(NativeResult::ok(cost, smallvec![]))\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/test_scenario.rs", "rank": 58, "score": 184110.61650336746 }, { "content": "/// Implementation of Move native function\n\n/// `delete_child_object_internal<T: key>(child: T)`\n\npub fn delete_child_object_internal(\n\n context: &mut NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n let obj = args.pop_back().unwrap();\n\n let event_type = EventType::DeleteChildObject;\n\n let obj_id = get_object_id(obj)?;\n\n // TODO: Decide the cost.\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::EMIT_EVENT, 1);\n\n if context.save_event(vec![], event_type as u64, Type::Address, obj_id)? {\n\n Ok(NativeResult::ok(cost, smallvec![]))\n\n } else {\n\n Ok(NativeResult::err(cost, 0))\n\n }\n\n}\n", "file_path": "sui_programmability/framework/src/natives/transfer.rs", "rank": 59, "score": 184110.20329379197 }, { "content": "#[cfg(test)]\n\npub fn init_transfer_transaction(\n\n sender: SuiAddress,\n\n secret: &KeyPair,\n\n recipient: SuiAddress,\n\n object_ref: ObjectRef,\n\n gas_object_ref: ObjectRef,\n\n) -> Transaction {\n\n let data = TransactionData::new_transfer(recipient, object_ref, sender, gas_object_ref, 10000);\n\n let signature = Signature::new(&data, secret);\n\n Transaction::new(data, signature)\n\n}\n\n\n", "file_path": "sui_core/src/unit_tests/authority_tests.rs", "rank": 60, "score": 184107.6280462245 }, { "content": "pub fn emit_wrapped_object_events(\n\n context: &mut NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert_eq!(ty_args.len(), 1);\n\n debug_assert_eq!(args.len(), 2);\n\n\n\n let id_type = ty_args.pop().unwrap();\n\n let removed = pop_arg!(args, VectorRef);\n\n let tx_begin_idx = pop_arg!(args, u64) as usize;\n\n\n\n let mut removed_ids: BTreeSet<ObjectID> = BTreeSet::new();\n\n for i in 0..removed.len(&id_type)?.value_as::<u64>()? 
{\n\n let id = removed.borrow_elem(i as usize, &id_type)?;\n\n let id_bytes = get_nested_struct_field(id.value_as::<StructRef>()?.read_ref()?, &[0])?;\n\n removed_ids.insert(id_bytes.value_as::<AccountAddress>()?.into());\n\n }\n\n\n\n let processed_ids: BTreeSet<_> = context.events()[tx_begin_idx..]\n", "file_path": "sui_programmability/framework/src/natives/test_scenario.rs", "rank": 61, "score": 181272.9920846356 }, { "content": "pub fn get_object_owned_inventory(\n\n context: &mut NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert_eq!(ty_args.len(), 1);\n\n debug_assert_eq!(args.len(), 3);\n\n\n\n let tx_end_index = pop_arg!(args, u64) as usize;\n\n let parent_object = pop_arg!(args, AccountAddress);\n\n let signer_address = pop_arg!(args, AccountAddress);\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::EMIT_EVENT, 0);\n\n match get_inventory_for(\n\n Owner::AddressOwner(account_to_sui_address(signer_address)),\n\n Some(parent_object),\n\n &ty_args[0],\n\n tx_end_index,\n\n context.events(),\n\n ) {\n\n Ok(inventory) => Ok(NativeResult::ok(\n\n cost,\n\n smallvec![Value::vector_for_testing_only(inventory)],\n\n )),\n\n Err(abort_code) => Ok(NativeResult::err(cost, abort_code)),\n\n }\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/natives/test_scenario.rs", "rank": 62, "score": 181272.9920846356 }, { "content": "pub fn authority_genesis_objects(\n\n authority_count: usize,\n\n objects_per_authority: Vec<Object>,\n\n) -> Vec<Vec<Object>> {\n\n let mut objects = vec![];\n\n for _ in 0..authority_count {\n\n objects.push(objects_per_authority.clone());\n\n }\n\n objects\n\n}\n\n\n\npub async fn init_local_authorities(\n\n genesis_objects: Vec<Vec<Object>>,\n\n) -> (\n\n AuthorityAggregator<LocalAuthorityClient>,\n\n Vec<Arc<AuthorityState>>,\n\n) {\n\n let mut key_pairs = Vec::new();\n\n let mut voting_rights = BTreeMap::new();\n\n for _ in 0..genesis_objects.len() {\n", "file_path": "sui_core/src/unit_tests/authority_aggregator_tests.rs", "rank": 63, "score": 181272.9920846356 }, { "content": "/// Checks valid rules rules for entry points, both for module initialization and transactions\n\n///\n\n/// For module initialization\n\n/// - The existence of the function is optional\n\n/// - The function must have the name specified by `INIT_FN_NAME`\n\n/// - The function must have `Visibility::Private`\n\n/// - The function can have a single parameter: &mut TxContext (see `is_tx_context`)\n\n/// - Alternatively, the function can have zero parameters\n\n///\n\n/// For transaction entry points\n\n/// - The function must have `Visibility::Script`\n\n/// - The function must have at least one parameter: &mut TxContext (see `is_tx_context`)\n\n/// - The transaction context parameter must be the last parameter\n\n/// - The function cannot have any return values\n\npub fn verify_module(module: &CompiledModule) -> SuiResult {\n\n for func_def in &module.function_defs {\n\n let handle = module.function_handle_at(func_def.function);\n\n let name = module.identifier_at(handle.name);\n\n if name == INIT_FN_NAME {\n\n verify_init_function(module, func_def)\n\n .map_err(|error| SuiError::ModuleVerificationFailure { error })?;\n\n continue;\n\n }\n\n\n\n // find candidate entry functions and checke their parameters\n\n // (ignore other functions)\n\n if func_def.visibility != Visibility::Script {\n\n // it's not an entry function as a non-script function\n\n // cannot be called from Sui\n\n 
continue;\n\n }\n\n verify_entry_function_impl(module, func_def)\n\n .map_err(|error| SuiError::ModuleVerificationFailure { error })?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/entry_points_verifier.rs", "rank": 64, "score": 179901.0117230396 }, { "content": "pub fn verify_module(module: &CompiledModule) -> SuiResult {\n\n verify_key_structs(module)\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/struct_with_key_verifier.rs", "rank": 65, "score": 179880.41876842867 }, { "content": "/// Generate a keypair from the specified RNG (useful for testing with seedable rngs).\n\npub fn get_key_pair_from_rng<R>(csprng: &mut R) -> (SuiAddress, KeyPair)\n\nwhere\n\n R: rand::CryptoRng + rand::RngCore,\n\n{\n\n let kp = DalekKeypair::generate(csprng);\n\n let keypair = KeyPair {\n\n key_pair: kp,\n\n public_key_cell: OnceCell::new(),\n\n };\n\n (SuiAddress::from(keypair.public_key_bytes()), keypair)\n\n}\n\n\n", "file_path": "sui_types/src/crypto.rs", "rank": 66, "score": 178872.70042057108 }, { "content": "/// Check via BFS\n\n/// The invariant is that all types at a given level must be the same or be empty\n\nfn is_homogeneous_rec(curr_q: &mut VecDeque<&JsonValue>) -> bool {\n\n if curr_q.is_empty() {\n\n // Nothing to do\n\n return true;\n\n }\n\n // Queue for the next level\n\n let mut next_q = VecDeque::new();\n\n // The types at this level must be the same\n\n let mut level_type = ValidJsonType::Any;\n\n\n\n // Process all in this queue/level\n\n while !curr_q.is_empty() {\n\n // Okay to unwrap since we know values exist\n\n let curr = match curr_q.pop_front().unwrap() {\n\n JsonValue::Bool(_) => ValidJsonType::Bool,\n\n JsonValue::Number(_) => ValidJsonType::Number,\n\n JsonValue::String(_) => ValidJsonType::String,\n\n JsonValue::Array(w) => {\n\n // Add to the next level\n\n w.iter().for_each(|t| next_q.push_back(t));\n", "file_path": "sui_core/src/sui_json.rs", "rank": 67, "score": 177638.607705048 }, { "content": "pub fn verify_module(module: &CompiledModule) -> SuiResult {\n\n verify_global_storage_access(module)\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/global_storage_access_verifier.rs", "rank": 68, "score": 177503.0525995857 }, { "content": "fn convert_string_to_u128(s: &str) -> Result<u128, anyhow::Error> {\n\n // Try as normal number\n\n if let Ok(v) = s.parse::<u128>() {\n\n return Ok(v);\n\n }\n\n\n\n // Check prefix\n\n // For now only Hex supported\n\n // TODO: add support for bin and octal?\n\n\n\n let s = s.trim().to_lowercase();\n\n if !s.starts_with(HEX_PREFIX) {\n\n return Err(anyhow!(\"Unable to convert {s} to unsigned int.\",));\n\n }\n\n u128::from_str_radix(s.trim_start_matches(HEX_PREFIX), 16).map_err(|e| e.into())\n\n}\n", "file_path": "sui_core/src/sui_json.rs", "rank": 69, "score": 175199.84670908537 }, { "content": "fn verify_id_immutable(module: &CompiledModule) -> SuiResult {\n\n let view = BinaryIndexedView::Module(module);\n\n for func_def in &module.function_defs {\n\n if func_def.code.is_none() {\n\n continue;\n\n }\n\n let code = &func_def.code.as_ref().unwrap().code;\n\n let check = |field_idx: FieldHandleIndex| {\n\n let field = view.field_handle_at(field_idx).unwrap();\n\n let struct_idx = view.struct_def_at(field.owner).unwrap().struct_handle;\n\n // The struct_with_key_verifier already checked that the first field of a key struct\n\n // must be the ID field.\n\n if view.struct_handle_at(struct_idx).abilities.has_key() && field.field == 0 {\n\n return Err(verification_failure(format!(\n\n \"In 
function {}: ID field of struct {} cannot be mut borrowed because ID is immutable.\",\n\n view.identifier_at(view.function_handle_at(func_def.function).name),\n\n view.identifier_at(view.struct_handle_at(struct_idx).name))));\n\n }\n\n Ok(())\n\n };\n", "file_path": "sui_programmability/verifier/src/id_immutable_verifier.rs", "rank": 70, "score": 175107.51168286504 }, { "content": "fn verify_id_leak(module: &CompiledModule) -> SuiResult {\n\n let binary_view = BinaryIndexedView::Module(module);\n\n for (index, func_def) in module.function_defs.iter().enumerate() {\n\n let code = match func_def.code.as_ref() {\n\n Some(code) => code,\n\n None => continue,\n\n };\n\n let handle = binary_view.function_handle_at(func_def.function);\n\n let func_view =\n\n FunctionView::function(module, FunctionDefinitionIndex(index as u16), code, handle);\n\n let initial_state = AbstractState::new(&func_view);\n\n let mut verifier = IDLeakAnalysis::new(&binary_view, &func_view);\n\n let inv_map = verifier.analyze_function(initial_state, &func_view);\n\n // Report all the join failures\n\n for (_block_id, BlockInvariant { post, .. }) in inv_map {\n\n match post {\n\n BlockPostcondition::Error(err) => match err {\n\n SuiError::ModuleVerificationFailure { error } => {\n\n return Err(SuiError::ModuleVerificationFailure {\n\n error: format!(\n", "file_path": "sui_programmability/verifier/src/id_leak_verifier.rs", "rank": 71, "score": 175107.51168286504 }, { "content": "pub fn to_transaction(data: TransactionData, signer: &dyn Signer<Signature>) -> Transaction {\n\n let signature = Signature::new(&data, signer);\n\n Transaction::new(data, signature)\n\n}\n\n\n\npub async fn do_transaction<A: AuthorityAPI>(authority: &A, transaction: &Transaction) {\n\n authority\n\n .handle_transaction(transaction.clone())\n\n .await\n\n .unwrap();\n\n}\n\n\n\npub async fn extract_cert<A: AuthorityAPI>(\n\n authorities: &[&A],\n\n committee: &Committee,\n\n transaction_digest: &TransactionDigest,\n\n) -> CertifiedTransaction {\n\n let mut votes = vec![];\n\n let mut transaction: Option<SignedTransaction> = None;\n\n for authority in authorities {\n", "file_path": "sui_core/src/unit_tests/authority_aggregator_tests.rs", "rank": 72, "score": 175096.77461880067 }, { "content": "/// Process the event log to determine the global set of live objects\n\n/// Returns the abort_code if an error is encountered.\n\nfn get_global_inventory(events: &[Event]) -> Result<Inventory, u64> {\n\n let mut inventory = Inventory::new();\n\n // Since we allow transfer object to ID, it's possible that when we transfer\n\n // an object to a parenet object, the parent object does not yet exist in the event log.\n\n // And without the parent object we cannot know the ultimate signer.\n\n // To handle this, for child objects whose parent is not yet known, we add them\n\n // to the unresolved_signer_parents map, which maps from parent object ID\n\n // to the list of child objects it has. Whenever a new object is seen, we check the map\n\n // and resolve if the object is an unresolved parent.\n\n let mut unresolved_signer_parents: BTreeMap<ObjectID, BTreeSet<ObjectID>> = BTreeMap::new();\n\n for (recipient, event_type_byte, type_, _layout, val) in events {\n\n let obj_id = if let Some(obj_id) = get_object_id_from_event(*event_type_byte, val) {\n\n obj_id\n\n } else {\n\n continue;\n\n };\n\n\n\n if *event_type_byte == WRAPPED_OBJECT_EVENT {\n\n // special, TestScenario-only event for object wrapping. 
treat the same as DeleteObjectID for inventory purposes--a wrapped object is not available for use\n\n assert!(inventory.remove(&obj_id).is_some());\n", "file_path": "sui_programmability/framework/src/natives/test_scenario.rs", "rank": 73, "score": 174978.7916005634 }, { "content": "/// Make a test certificates for each input transaction.\n\npub fn make_certificates(transactions: Vec<Transaction>) -> Vec<CertifiedTransaction> {\n\n let committee = test_committee();\n\n let mut certificates = Vec::new();\n\n for tx in transactions {\n\n let mut aggregator = SignatureAggregator::try_new(tx.clone(), &committee).unwrap();\n\n for (_, key) in test_keys() {\n\n let vote = SignedTransaction::new(\n\n /* epoch */ 0,\n\n tx.clone(),\n\n *key.public_key_bytes(),\n\n &key,\n\n );\n\n if let Some(certificate) = aggregator\n\n .append(vote.auth_sign_info.authority, vote.auth_sign_info.signature)\n\n .unwrap()\n\n {\n\n certificates.push(certificate);\n\n break;\n\n }\n\n }\n\n }\n\n certificates\n\n}\n\n\n", "file_path": "test_utils/src/messages.rs", "rank": 74, "score": 174321.46836372025 }, { "content": "fn split_and_unescape(line: &str) -> Result<Vec<String>, anyhow::Error> {\n\n let mut commands = Vec::new();\n\n let split: Vec<String> = shell_words::split(line)?;\n\n\n\n for word in split {\n\n let command =\n\n unescape(&word).ok_or_else(|| anyhow!(\"Error: Unhandled escape sequence {word}\"))?;\n\n commands.push(command);\n\n }\n\n Ok(commands)\n\n}\n\n\n", "file_path": "sui/src/shell.rs", "rank": 75, "score": 173846.98210689006 }, { "content": "fn make_module_handle(id: &ModuleId, m: &mut CompiledModule) -> ModuleHandle {\n\n ModuleHandle {\n\n address: ModuleHandleRewriter::get_or_create_address(id.address(), m),\n\n name: ModuleHandleRewriter::get_or_create_identifier(id.name(), m),\n\n }\n\n}\n\n\n", "file_path": "sui_programmability/adapter/src/unit_tests/bytecode_rewriter_tests.rs", "rank": 76, "score": 172500.86550421093 }, { "content": "pub fn run_benchmark(benchmark: Benchmark) -> BenchmarkResult {\n\n // Only microbenchmark is supported\n\n info!(\"benchmark : {:?}\", benchmark);\n\n BenchmarkResult::MicroBenchmark(run_microbenchmark(benchmark))\n\n}\n\n\n", "file_path": "sui/src/benchmark.rs", "rank": 77, "score": 170685.78605561497 }, { "content": "fn verification_failure(error: String) -> SuiError {\n\n SuiError::ModuleVerificationFailure { error }\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/lib.rs", "rank": 78, "score": 168999.9578649567 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn execute<E: Debug, S: ResourceResolver<Error = E> + ModuleResolver<Error = E> + Storage>(\n\n vm: &MoveVM,\n\n state_view: &mut S,\n\n module_id: ModuleId,\n\n function: &Identifier,\n\n type_args: Vec<TypeTag>,\n\n args: Vec<CallArg>,\n\n gas_status: &mut SuiGasStatus,\n\n ctx: &mut TxContext,\n\n) -> SuiResult<()> {\n\n let objects = args\n\n .iter()\n\n .filter_map(|arg| match arg {\n\n CallArg::Pure(_) => None,\n\n CallArg::ImmOrOwnedObject((id, _, _)) | CallArg::SharedObject(id) => {\n\n Some((*id, state_view.read_object(id)?))\n\n }\n\n })\n\n .collect();\n\n let module = vm.load_module(&module_id, state_view)?;\n", "file_path": "sui_programmability/adapter/src/adapter.rs", "rank": 79, "score": 167315.14116273716 }, { "content": "pub fn publish<E: Debug, S: ResourceResolver<Error = E> + ModuleResolver<Error = E> + Storage>(\n\n state_view: &mut S,\n\n natives: NativeFunctionTable,\n\n module_bytes: Vec<Vec<u8>>,\n\n ctx: &mut TxContext,\n\n gas_status: &mut 
SuiGasStatus,\n\n) -> SuiResult {\n\n gas_status.charge_publish_package(module_bytes.iter().map(|v| v.len()).sum())?;\n\n let mut modules = module_bytes\n\n .iter()\n\n .map(|b| CompiledModule::deserialize(b))\n\n .collect::<PartialVMResult<Vec<CompiledModule>>>()\n\n .map_err(|err| SuiError::ModuleDeserializationFailure {\n\n error: err.to_string(),\n\n })?;\n\n\n\n fp_ensure!(\n\n !modules.is_empty(),\n\n SuiError::ModulePublishFailure {\n\n error: \"Publishing empty list of modules\".to_string(),\n\n }\n\n );\n\n\n\n let package_id = generate_package_id(&mut modules, ctx)?;\n\n let vm = verify_and_link(state_view, &modules, package_id, natives, gas_status)?;\n\n store_package_and_init_modules(state_view, &vm, modules, ctx, gas_status)\n\n}\n\n\n", "file_path": "sui_programmability/adapter/src/adapter.rs", "rank": 80, "score": 167315.14116273716 }, { "content": "pub fn build_and_verify_user_package(path: &Path) -> SuiResult<Vec<CompiledModule>> {\n\n let build_config = BuildConfig {\n\n dev_mode: false,\n\n ..Default::default()\n\n };\n\n let modules = build_move_package(path, build_config, false)?;\n\n verify_modules(&modules)?;\n\n Ok(modules)\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/lib.rs", "rank": 81, "score": 164914.0748470849 }, { "content": "/// Returns a Context for OpenTelemetry tracing from a TransactionDigest\n\n// NOTE: See https://github.com/MystenLabs/sui/issues/852\n\n// The current code doesn't really work. Maybe the traceparent needs to be a specific format,\n\n// prohably needs to be the propagated trace ID from the source.\n\npub fn context_from_digest(digest: TransactionDigest) -> Context {\n\n // TODO: don't create a HashMap, that wastes memory and costs an allocation!\n\n let mut carrier = HashMap::new();\n\n // TODO: figure out exactly what key to use. 
I suspect it has to be the parent span ID in OpenTelemetry format.\n\n carrier.insert(\"traceparent\".to_string(), hex::encode(digest.0));\n\n // carrier.insert(\"tx_digest\".to_string(), hex::encode(digest.0));\n\n\n\n global::get_text_map_propagator(|propagator| propagator.extract(&carrier))\n\n}\n\n\n\nimpl ObjectDigest {\n\n pub const MIN: ObjectDigest = ObjectDigest([u8::MIN; 32]);\n\n pub const MAX: ObjectDigest = ObjectDigest([u8::MAX; 32]);\n\n\n\n /// A marker that signifies the object is deleted.\n\n pub const OBJECT_DIGEST_DELETED: ObjectDigest = ObjectDigest([99; 32]);\n\n\n\n /// A marker that signifies the object is wrapped into another object.\n\n pub const OBJECT_DIGEST_WRAPPED: ObjectDigest = ObjectDigest([88; 32]);\n\n\n", "file_path": "sui_types/src/base_types.rs", "rank": 82, "score": 164686.42998302757 }, { "content": "pub fn get_new_address() -> SuiAddress {\n\n crate::crypto::get_key_pair().0\n\n}\n\n\n", "file_path": "sui_types/src/base_types.rs", "rank": 84, "score": 163873.05070927524 }, { "content": "pub fn get_sui_framework_modules(lib_dir: &Path) -> SuiResult<Vec<CompiledModule>> {\n\n let modules = build_framework(lib_dir)?;\n\n verify_modules(&modules)?;\n\n Ok(modules)\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/lib.rs", "rank": 85, "score": 162725.75937097915 }, { "content": "pub fn get_move_stdlib_modules(lib_dir: &Path) -> SuiResult<Vec<CompiledModule>> {\n\n let denylist = vec![\n\n ident_str!(\"Capability\").to_owned(),\n\n ident_str!(\"Event\").to_owned(),\n\n ident_str!(\"GUID\").to_owned(),\n\n #[cfg(not(test))]\n\n ident_str!(\"Debug\").to_owned(),\n\n ];\n\n let modules: Vec<CompiledModule> = build_framework(lib_dir)?\n\n .into_iter()\n\n .filter(|m| !denylist.contains(&m.self_id().name().to_owned()))\n\n .collect();\n\n verify_modules(&modules)?;\n\n Ok(modules)\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/lib.rs", "rank": 86, "score": 162725.75937097915 }, { "content": "#[test]\n\nfn mut_borrow_key_struct_id_field() {\n\n /*\n\n struct Foo has key {\n\n id: 0x2::ID::VersionedID\n\n }\n\n\n\n fun foo(f: Foo) {\n\n let ref = &mut f.id;\n\n }\n\n */\n\n let (mut module, id_struct) = ModuleBuilder::default();\n\n let foo_struct = module.add_struct(\n\n module.get_self_index(),\n\n \"Foo\",\n\n AbilitySet::EMPTY | Ability::Key,\n\n vec![(\"id\", SignatureToken::Struct(id_struct.handle))],\n\n );\n\n let foo_func = module.add_function(\n\n module.get_self_index(),\n\n \"foo\",\n", "file_path": "sui_programmability/verifier/tests/id_immutable_verification_test.rs", "rank": 87, "score": 162696.86493949947 }, { "content": "fn pack(verifier: &mut IDLeakAnalysis, struct_def: &StructDefinition) {\n\n let mut has_id = false;\n\n for _ in 0..num_fields(struct_def) {\n\n has_id |= verifier.stack.pop().unwrap() == AbstractValue::ID;\n\n }\n\n verifier.stack.push(if has_id {\n\n AbstractValue::ID\n\n } else {\n\n AbstractValue::NonID\n\n });\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/id_leak_verifier.rs", "rank": 88, "score": 162517.3873115581 }, { "content": "fn unpack(verifier: &mut IDLeakAnalysis, struct_def: &StructDefinition) {\n\n // When unpacking, fields of the struct will be pushed to the stack in order.\n\n // An object whose struct type has key ability must have the first field as \"id\",\n\n // representing the ID field of the object. 
It's the focus of our tracking.\n\n // The struct_with_key_verifier verifies that the first field must be the id field.\n\n verifier.stack.pop().unwrap();\n\n let handle = verifier\n\n .binary_view\n\n .struct_handle_at(struct_def.struct_handle);\n\n verifier.stack.push(if handle.abilities.has_key() {\n\n AbstractValue::ID\n\n } else {\n\n AbstractValue::NonID\n\n });\n\n for _ in 1..num_fields(struct_def) {\n\n verifier.stack.push(AbstractValue::NonID);\n\n }\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/id_leak_verifier.rs", "rank": 89, "score": 162517.3873115581 }, { "content": "fn make_prim_move_type_layout(param: &SignatureToken) -> Result<MoveTypeLayout, anyhow::Error> {\n\n Ok(match param {\n\n SignatureToken::Bool => MoveTypeLayout::Bool,\n\n SignatureToken::U8 => MoveTypeLayout::U8,\n\n SignatureToken::U64 => MoveTypeLayout::U64,\n\n SignatureToken::U128 => MoveTypeLayout::U128,\n\n SignatureToken::Address => MoveTypeLayout::Address,\n\n SignatureToken::Signer => MoveTypeLayout::Signer,\n\n SignatureToken::Vector(inner) => {\n\n MoveTypeLayout::Vector(Box::new(make_prim_move_type_layout(inner)?))\n\n }\n\n SignatureToken::Struct(_)\n\n | SignatureToken::StructInstantiation(_, _)\n\n | SignatureToken::Reference(_)\n\n | SignatureToken::MutableReference(_)\n\n | SignatureToken::TypeParameter(_) => {\n\n debug_assert!(\n\n false,\n\n \"Should be unreachable. Args should be primitive types only\"\n\n );\n\n bail!(\"Could not serialize argument of type {:?}\", param)\n\n }\n\n })\n\n}\n\n\n", "file_path": "sui_core/src/sui_json.rs", "rank": 90, "score": 161159.66831157103 }, { "content": "fn unwrap_err_to_string<T: Display, F: FnOnce() -> Result<T, anyhow::Error>>(func: F) -> String {\n\n match func() {\n\n Ok(s) => format!(\"{s}\"),\n\n Err(err) => format!(\"{err}\").red().to_string(),\n\n }\n\n}\n\n\n\nimpl WalletCommandResult {\n\n pub fn print(&self, pretty: bool) {\n\n let line = if pretty {\n\n format!(\"{self}\")\n\n } else {\n\n format!(\"{:?}\", self)\n\n };\n\n // Log line by line\n\n for line in line.lines() {\n\n // Logs write to a file on the side. 
Print to stdout and also log to file, for tests to pass.\n\n println!(\"{line}\");\n\n info!(\"{line}\")\n\n }\n", "file_path": "sui/src/wallet_commands.rs", "rank": 91, "score": 160533.77817748862 }, { "content": "#[test]\n\nfn mut_borrow_non_key_struct_id_field() {\n\n /*\n\n struct Foo {\n\n id: 0x2::ID::VersionedID\n\n }\n\n fun foo(f: Foo) {\n\n let ref = &mut f.id;\n\n }\n\n */\n\n let (mut module, id_struct) = ModuleBuilder::default();\n\n let foo_struct = module.add_struct(\n\n module.get_self_index(),\n\n \"Foo\",\n\n AbilitySet::EMPTY,\n\n vec![(\"id\", SignatureToken::Struct(id_struct.handle))],\n\n );\n\n let foo_func = module.add_function(\n\n module.get_self_index(),\n\n \"foo\",\n\n vec![SignatureToken::Struct(foo_struct.handle)],\n", "file_path": "sui_programmability/verifier/tests/id_immutable_verification_test.rs", "rank": 92, "score": 160300.32105629408 }, { "content": "#[test]\n\nfn mut_borrow_key_struct_non_id_field() {\n\n /*\n\n struct Foo has key {\n\n id: 0x2::ID::VersionedID,\n\n other: 0x2::ID::VersionedID\n\n }\n\n fun foo(f: Foo) {\n\n let ref = &mut f.other;\n\n }\n\n */\n\n let (mut module, id_struct) = ModuleBuilder::default();\n\n let foo_struct = module.add_struct(\n\n module.get_self_index(),\n\n \"Foo\",\n\n AbilitySet::EMPTY | Ability::Key,\n\n vec![\n\n (\"id\", SignatureToken::Struct(id_struct.handle)),\n\n (\"other\", SignatureToken::Struct(id_struct.handle)),\n\n ],\n\n );\n", "file_path": "sui_programmability/verifier/tests/id_immutable_verification_test.rs", "rank": 93, "score": 160300.32105629408 }, { "content": "#[test]\n\nfn mut_borrow_generic_key_struct_id_field() {\n\n /*\n\n struct Foo<T> has key {\n\n id: 0x2::ID::VersionedID\n\n }\n\n\n\n fun foo(f: Foo<u64>) {\n\n let ref = &mut f.id;\n\n }\n\n */\n\n let (mut module, id_struct) = ModuleBuilder::default();\n\n let foo_struct = module.add_struct(\n\n module.get_self_index(),\n\n \"Foo\",\n\n AbilitySet::EMPTY | Ability::Key,\n\n vec![(\"id\", SignatureToken::Struct(id_struct.handle))],\n\n );\n\n let inst = module.add_field_instantiation(foo_struct.fields[0], vec![SignatureToken::U64]);\n\n let foo_func = module.add_function(\n\n module.get_self_index(),\n", "file_path": "sui_programmability/verifier/tests/id_immutable_verification_test.rs", "rank": 94, "score": 160300.32105629408 }, { "content": "/// Sui::ID::delete function is allowed to take an ID by value.\n\nfn is_call_safe_to_leak(verifier: &IDLeakAnalysis, function_handle: &FunctionHandle) -> bool {\n\n let m = verifier\n\n .binary_view\n\n .module_handle_at(function_handle.module);\n\n verifier.binary_view.address_identifier_at(m.address) == &SUI_FRAMEWORK_ADDRESS\n\n && verifier.binary_view.identifier_at(m.name).as_str() == \"ID\"\n\n && verifier\n\n .binary_view\n\n .identifier_at(function_handle.name)\n\n .as_str()\n\n == \"delete\"\n\n}\n\n\n", "file_path": "sui_programmability/verifier/src/id_leak_verifier.rs", "rank": 95, "score": 158367.28586089937 }, { "content": "#[test]\n\nfn single_template_object_param() {\n\n /*\n\n struct ObjStruct<Ty0> has key\n\n\n\n public foo<Ty0>(loc0: ObjStruct<Ty0>, loc1: &mut TxContext) {\n\n }\n\n\n\n it's a valid entry function and verification should SUCCEED\n\n */\n\n let (mut builder, _) = ModuleBuilder::default();\n\n\n\n let tx_context = add_tx_context(&mut builder);\n\n\n\n let obj_struct = builder.add_struct_verbose(\n\n builder.get_self_index(),\n\n \"ObjStruct\",\n\n AbilitySet::EMPTY | Ability::Key,\n\n vec![],\n\n vec![StructTypeParameter {\n\n constraints: AbilitySet::EMPTY,\n", 
"file_path": "sui_programmability/verifier/tests/entry_points_verification.rs", "rank": 96, "score": 157836.1072106512 }, { "content": "/// Create and return objects wrapping the genesis modules for sui\n\nfn create_genesis_module_objects(lib_dir: &Path) -> SuiResult<Genesis> {\n\n let sui_modules = sui_framework::get_sui_framework_modules(lib_dir)?;\n\n let std_modules =\n\n sui_framework::get_move_stdlib_modules(&lib_dir.join(\"deps\").join(\"move-stdlib\"))?;\n\n let objects = vec![\n\n Object::new_package(std_modules.clone(), TransactionDigest::genesis()),\n\n Object::new_package(sui_modules.clone(), TransactionDigest::genesis()),\n\n ];\n\n let modules = vec![std_modules, sui_modules];\n\n Ok(Genesis { objects, modules })\n\n}\n", "file_path": "sui_programmability/adapter/src/genesis.rs", "rank": 97, "score": 157537.11529337993 }, { "content": "fn main() -> Result<()> {\n\n let args = Args::parse();\n\n\n\n match args.cmd {\n\n Command::Lint(args) => lint::run(args),\n\n }\n\n}\n", "file_path": "crates/x/src/main.rs", "rank": 98, "score": 156944.28410662367 }, { "content": "pub fn run_move_unit_tests(path: &Path, config: Option<UnitTestingConfig>) -> SuiResult {\n\n use move_cli::package::cli::{self, UnitTestResult};\n\n use sui_types::{MOVE_STDLIB_ADDRESS, SUI_FRAMEWORK_ADDRESS};\n\n\n\n let config = config\n\n .unwrap_or_else(|| UnitTestingConfig::default_with_bound(Some(MAX_UNIT_TEST_INSTRUCTIONS)));\n\n\n\n let result = cli::run_move_unit_tests(\n\n path,\n\n BuildConfig::default(),\n\n UnitTestingConfig {\n\n report_stacktrace_on_abort: true,\n\n instruction_execution_bound: MAX_UNIT_TEST_INSTRUCTIONS,\n\n ..config\n\n },\n\n natives::all_natives(MOVE_STDLIB_ADDRESS, SUI_FRAMEWORK_ADDRESS),\n\n /* compute_coverage */ false,\n\n )\n\n .map_err(|err| SuiError::MoveUnitTestFailure {\n\n error: format!(\"{:?}\", err),\n\n })?;\n\n if result == UnitTestResult::Failure {\n\n Err(SuiError::MoveUnitTestFailure {\n\n error: \"Test failed\".to_string(),\n\n })\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "sui_programmability/framework/src/lib.rs", "rank": 99, "score": 156584.9550648103 } ]
Rust
src/app/core/m3u8/m3u8.rs
PeterDing/aget-rs
ebf43a1a2ab9ae88efd66d2f298643c119021372
use std::{cell::Cell, path::PathBuf, rc::Rc, time::Duration}; use futures::{ channel::mpsc::{channel, Sender}, select, stream::StreamExt, SinkExt, }; use actix_rt::{ spawn, time::{delay_for, interval}, System, }; use crate::{ app::{ core::m3u8::common::{get_m3u8, M3u8Segment, SharedM3u8SegmentList}, receive::m3u8_receiver::M3u8Receiver, record::{bytearray_recorder::ByteArrayRecorder, common::RECORDER_FILE_SUFFIX}, }, common::{ bytes::bytes_type::{Buf, Bytes}, crypto::decrypt_aes128, errors::{Error, Result}, net::{ net::{build_http_client, request}, ConnectorConfig, HttpClient, Method, Uri, }, }, features::{args::Args, running::Runnable, stack::StackLike}, }; pub struct M3u8Handler { output: PathBuf, method: Method, uri: Uri, headers: Vec<(String, String)>, data: Option<Bytes>, connector_config: ConnectorConfig, concurrency: u64, proxy: Option<String>, client: HttpClient, } impl M3u8Handler { pub fn new(args: &impl Args) -> Result<M3u8Handler> { let headers = args.headers(); let timeout = args.timeout(); let dns_timeout = args.dns_timeout(); let keep_alive = args.keep_alive(); let lifetime = args.lifetime(); let connector_config = ConnectorConfig { timeout, dns_timeout, keep_alive, lifetime, disable_redirects: true, }; let hds: Vec<(&str, &str)> = headers .iter() .map(|(k, v)| (k.as_str(), v.as_str())) .collect(); let client = build_http_client( hds.as_ref(), timeout, dns_timeout, keep_alive, lifetime, true, ); debug!("M3u8Handler::new"); Ok(M3u8Handler { output: args.output(), method: args.method(), uri: args.uri(), headers, data: args.data().map(|ref mut d| d.to_bytes()), connector_config, concurrency: args.concurrency(), proxy: None, client, }) } async fn start(self) -> Result<()> { debug!("M3u8Handler::start"); debug!("M3u8Handler: check whether task is completed"); let mut bytearrayrecorder = ByteArrayRecorder::new(&*(self.output.to_string_lossy() + RECORDER_FILE_SUFFIX))?; if self.output.exists() && !bytearrayrecorder.exists() { return Ok(()); } debug!("M3u8Handler: get m3u8"); let mut ls = get_m3u8( &self.client, self.method.clone(), self.uri.clone(), self.data.clone(), ) .await?; ls.reverse(); if bytearrayrecorder.exists() { bytearrayrecorder.open()?; let total = bytearrayrecorder.index(0)?; if total != ls.len() as u64 { return Err(Error::PartsAreNotConsistent); } else { let index = bytearrayrecorder.index(1)?; ls.truncate((total - index) as usize); } } else { bytearrayrecorder.open()?; bytearrayrecorder.write(0, ls.len() as u64)?; } let index = ls.last().unwrap().index; let sharedindex = Rc::new(Cell::new(index)); let stack = SharedM3u8SegmentList::new(ls); debug!("M3u8Handler: segments", stack.len()); let (sender, receiver) = channel::<(u64, Bytes)>(self.concurrency as usize + 10); let concurrency = std::cmp::min(stack.len() as u64, self.concurrency); for i in 1..concurrency + 1 { let mut task = RequestTask::new( self.client.clone(), stack.clone(), sender.clone(), i, sharedindex.clone(), self.connector_config.timeout, ); spawn(async move { task.start().await; }); } drop(sender); debug!("M3u8Handler: create receiver"); let mut m3u8receiver = M3u8Receiver::new(&self.output)?; m3u8receiver.start(receiver).await?; bytearrayrecorder.remove().unwrap_or(()); Ok(()) } } impl Runnable for M3u8Handler { fn run(self) -> Result<()> { let mut sys = System::new("M3u8Handler"); sys.block_on(self.start()) } } struct RequestTask { client: HttpClient, stack: SharedM3u8SegmentList, sender: Sender<(u64, Bytes)>, id: u64, shared_index: Rc<Cell<u64>>, timeout: Duration, } impl RequestTask { fn new( 
client: HttpClient, stack: SharedM3u8SegmentList, sender: Sender<(u64, Bytes)>, id: u64, sharedindex: Rc<Cell<u64>>, timeout: Duration, ) -> RequestTask { RequestTask { client, stack, sender, id, shared_index: sharedindex, timeout, } } async fn start(&mut self) { debug!("Fire RequestTask", self.id); while let Some(segment) = self.stack.pop() { loop { match self.req(segment.clone()).await { Err(Error::InnerError(msg)) => { print_err!(format!("RequestTask {}: InnerError", self.id), msg); System::current().stop(); } Err(err @ Error::Timeout) => { debug!(err); } Err(err) => { debug!(format!("RequestTask {}: error", self.id), err); delay_for(Duration::from_secs(1)).await; } _ => break, } } } } async fn req(&mut self, segment: M3u8Segment) -> Result<()> { let resp = request( &self.client, segment.method.clone(), segment.uri.clone(), segment.data.clone(), None, ) .await?; let index = segment.index; let mut buf: Vec<u8> = vec![]; let mut resp = resp.fuse(); let mut tick = interval(self.timeout).fuse(); let mut fire = false; loop { select! { item = resp.next() => { if let Some(item) = item { match item { Ok(chunk) => { buf.extend(chunk); } Err(err) => return Err(err.into()), } } else { break; } } _ = tick.next() => { if fire { return Err(Error::Timeout); } else { fire = true; } } } } let de = if let (Some(key), Some(iv)) = (segment.key, segment.iv) { decrypt_aes128(&key[..], &iv[..], buf.as_ref())? } else { buf.to_vec() }; loop { if self.shared_index.get() == index { if let Err(err) = self.sender.send((index, Bytes::from(de))).await { return Err(Error::InnerError(format!( "Error at `http::RequestTask`: Sender error: {:?}", err ))); } self.shared_index.set(index + 1); return Ok(()); } else { delay_for(Duration::from_millis(500)).await; } } } }
use std::{cell::Cell, path::PathBuf, rc::Rc, time::Duration}; use futures::{ channel::mpsc::{channel, Sender}, select, stream::StreamExt, SinkExt, }; use actix_rt::{ spawn, time::{delay_for, interval}, System, }; use crate::{ app::{ core::m3u8::common::{get_m3u8, M3u8Segment, SharedM3u8SegmentList}, receive::m3u8_receiver::M3u8Receiver, record::{bytearray_recorder::ByteArrayRecorder, common::RECORDER_FILE_SUFFIX}, }, common::{ bytes::bytes_type::{Buf, Bytes}, crypto::decrypt_aes128, errors::{Error, Result}, net::{ net::{build_http_client, request}, ConnectorConfig, HttpClient, Method, Uri, }, }, features::{args::Args, running::Runnable, stack::StackLike}, }; pub struct M3u8Handler { output: PathBuf, method: Method, uri: Uri, headers: Vec<(String, String)>, data: Option<Bytes>, connector_config: ConnectorConfig, concurrency: u64, proxy: Option<String>, client: HttpClient, } impl M3u8Handler { pub fn new(args: &impl Args) -> Result<M3u8Handler> { let headers = args.headers(); let timeout = args.timeout(); let dns_timeout = args.dns_timeout(); let keep_alive = args.keep_alive(); let lifetime = args.lifetime(); let connector_config = ConnectorConfig { timeout, dns_timeout, keep_alive, lifetime, disable_redirects: true, }; let hds: Vec<(&str, &str)> = headers .iter() .map(|(k, v)| (k.as_str(), v.as_str())) .collect(); let client = build_http_client( hds.as_ref(), timeout, dns_timeout, keep_alive, lifetime, true, ); debug!("M3u8Handler::new"); Ok(M3u8Handler { output: args.output(), method: args.method(), uri: args.uri(), headers, data: args.data().map(|ref mut d| d.to_bytes()), connector_config, concurrency: args.concurrency(), proxy: None, client, }) } async fn start(self) -> Result<()> { debug!("M3u8Handler::start"); debug!("M3u8Handler: check whether task is completed"); let mut bytearrayrecorder = ByteArrayRecorder::new(&*(self.output.to_string_lossy() + RECORDER_FILE_SUFFIX))?; if self.output.exists() && !bytearrayrecorder.exists() { return Ok(()); } debug!("M3u8Handler: get m3u8"); let mut ls = get_m3u8( &self.client, self.method.clone(), self.uri.clone(), self.data.clone(), ) .await?; ls.reverse(); if bytearrayrecorder.exists() { bytearrayrecorder.open()?; let total = bytearrayrecorder.index(0)?; if total != ls.len() as u64 { return Err(Error::PartsAreNotConsistent); } else { let index = bytearrayrecorder.index(1)?; ls.truncate((total - index) as usize); } } else { bytearrayrecorder.open()?; bytearrayrecorder.write(0, ls.len() as u64)?; } let index = ls.last().unwrap().index; let sharedindex = Rc::new(Cell::new(index)); let stack = SharedM3u8SegmentList::new(ls); debug!("M3u8Handler: segments", stack.len()); let (sender, receiver) = channel::<(u64, Bytes)>(self.concurrency as usize + 10); let concurrency = std::cmp::min(stack.len() as u64, self.concurrency); for i in 1..concurrency + 1 { let mut task = RequestTask::new( self.client.clone(), stack.clone(), sender.clone(), i, sharedindex.clone(), self.connector_config.timeout, ); spawn(async move { task.start().await; }); } drop(sender); debug!("M3u8Handler: create receiver"); let mut m3u8receiver = M3u8Receiver::new(&self.output)?; m3u8receiver.start(receiver).await?; bytearrayrecorder.remove().unwrap_or(()); Ok(()) } } impl Runnable for M3u8Handler { fn run(self) -> Result<()> { let mut sys = System::new("M3u8Handler"); sys.block_on(self.start()) } } struct RequestTask { client: HttpClient, stack: SharedM3u8SegmentList, sender: Sender<(u64, Bytes)>, id: u64, shared_index: Rc<Cell<u64>>, timeout: Duration, } impl RequestTask { fn new( 
client: HttpClient, stack: SharedM3u8SegmentList, sender: Sender<(u64, Bytes)>, id: u64, sharedindex: Rc<Cell<u64>>, timeout: Duration, ) -> RequestTask { RequestTask { client, stack, sender, id, shared_index: sharedindex, timeout, } } async fn start(&mut self) { debug!("Fire RequestTask", self.id); while let Some(segment) = self.stack.pop() { loop { match self.req(segment.clone()).await { Err(Error::InnerError(msg)) => { print_err!(format!("RequestTask {}: InnerError", self.id), msg); System::current().stop(); } Err(err @ Error::Timeout) => { debug!(err); } Err(err) => { debug!(format!("RequestTask {}: error", self.id), err); delay_for(Duration::from_secs(1)).await; } _ => break, } } } } async fn req(&mut self, segment: M3u8Segment) -> Result<()> { let resp =
.await?; let index = segment.index; let mut buf: Vec<u8> = vec![]; let mut resp = resp.fuse(); let mut tick = interval(self.timeout).fuse(); let mut fire = false; loop { select! { item = resp.next() => { if let Some(item) = item { match item { Ok(chunk) => { buf.extend(chunk); } Err(err) => return Err(err.into()), } } else { break; } } _ = tick.next() => { if fire { return Err(Error::Timeout); } else { fire = true; } } } } let de = if let (Some(key), Some(iv)) = (segment.key, segment.iv) { decrypt_aes128(&key[..], &iv[..], buf.as_ref())? } else { buf.to_vec() }; loop { if self.shared_index.get() == index { if let Err(err) = self.sender.send((index, Bytes::from(de))).await { return Err(Error::InnerError(format!( "Error at `http::RequestTask`: Sender error: {:?}", err ))); } self.shared_index.set(index + 1); return Ok(()); } else { delay_for(Duration::from_millis(500)).await; } } } }
request( &self.client, segment.method.clone(), segment.uri.clone(), segment.data.clone(), None, )
call_expression
[ { "content": "pub fn parse_header(raw: &str) -> Result<(&str, &str), Error> {\n\n if let Some(index) = raw.find(\": \") {\n\n return Ok((&raw[..index], &raw[index + 2..]));\n\n }\n\n if let Some(index) = raw.find(\":\") {\n\n return Ok((&raw[..index], &raw[index + 1..]));\n\n }\n\n Err(Error::InvalidHeader(raw.to_string()))\n\n}\n\n\n", "file_path": "src/common/net/net.rs", "rank": 0, "score": 251037.9903080673 }, { "content": "pub fn join_uri(base_uri: &Uri, uri: &str) -> Result<Uri> {\n\n let new_uri: Uri = if !uri.to_lowercase().starts_with(\"http\") {\n\n let base_url = Url::parse(&format!(\"{}\", base_uri))?;\n\n base_url.join(uri)?.as_str().parse()?\n\n } else {\n\n uri.parse()?\n\n };\n\n Ok(new_uri)\n\n}\n", "file_path": "src/common/net/net.rs", "rank": 1, "score": 231738.95582656504 }, { "content": "/// Check whether the response is success\n\n/// Check if status is within 200-299.\n\npub fn is_success<T>(resp: &ClientResponse<T>) -> Result<(), Error> {\n\n let status = resp.status();\n\n if !status.is_success() {\n\n Err(Error::Unsuccess(status.as_u16()))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Send a request with a range header, returning the final uri\n\npub async fn redirect(\n\n client: &HttpClient,\n\n method: Method,\n\n uri: Uri,\n\n data: Option<Bytes>,\n\n) -> Result<Uri> {\n\n let mut uri = uri;\n\n loop {\n\n let req = client\n\n .request(method.clone(), uri.clone())\n", "file_path": "src/common/net/net.rs", "rank": 2, "score": 228092.08353180983 }, { "content": "pub fn parse_headers<'a, I: IntoIterator<Item = &'a str>>(\n\n raws: I,\n\n) -> Result<Vec<(&'a str, &'a str)>, Error> {\n\n let mut headers = vec![];\n\n for raw in raws {\n\n let pair = parse_header(raw)?;\n\n headers.push(pair);\n\n }\n\n Ok(headers)\n\n}\n\n\n", "file_path": "src/common/net/net.rs", "rank": 3, "score": 199160.57902027803 }, { "content": "pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {\n\n (0..s.len())\n\n .step_by(2)\n\n .map(|i| u8::from_str_radix(&s[i..i + 2], 16))\n\n .collect()\n\n}\n", "file_path": "src/common/bytes/bytes.rs", "rank": 4, "score": 190630.69816979984 }, { "content": "/// Builder a http client of curl\n\npub fn build_http_client(\n\n headers: &[(&str, &str)],\n\n timeout: Duration,\n\n dns_timeout: Duration,\n\n keep_alive: Duration,\n\n lifetime: Duration,\n\n disable_redirects: bool,\n\n) -> HttpClient {\n\n let conn = Connector::new()\n\n // Set total number of simultaneous connections per type of scheme.\n\n //\n\n // If limit is 0, the connector has no limit.\n\n // The default limit size is 100.\n\n .limit(0)\n\n // Connection timeout\n\n //\n\n // i.e. max time to connect to remote host including dns name resolution.\n\n // Set to 1 second by default.\n\n .timeout(dns_timeout)\n\n // Set keep-alive period for opened connection.\n", "file_path": "src/common/net/net.rs", "rank": 5, "score": 182323.8233452661 }, { "content": "/// Return the memory representation of this integer as a byte array in big-endian (network) byte\n\n/// order.\n\npub fn u64_to_u8x8(u: u64) -> [u8; 8] {\n\n u.to_be_bytes()\n\n}\n\n\n", "file_path": "src/common/bytes/bytes.rs", "rank": 7, "score": 171044.15392392984 }, { "content": "pub fn escape_nonascii(target: &str) -> String {\n\n utf8_percent_encode(target, FRAGMENT).to_string()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::escape_nonascii;\n\n\n\n #[test]\n\n fn test_escape_nonascii() {\n\n let s = \":ss/s 来;】/ 【【 ? 
是的 & 水电费=45 进来看\";\n\n println!(\"{}\", s);\n\n println!(\"{}\", escape_nonascii(s));\n\n }\n\n}\n", "file_path": "src/common/character.rs", "rank": 8, "score": 170235.96390840146 }, { "content": "/// Create an integer value from its representation as a byte array in big endian.\n\npub fn u8x8_to_u64(u8x8: &[u8; 8]) -> u64 {\n\n u64::from_be_bytes(*u8x8)\n\n}\n\n\n", "file_path": "src/common/bytes/bytes.rs", "rank": 9, "score": 166934.59898106346 }, { "content": "/// Return done and undone bars' string\n\npub fn du_bars(bar_done_length: usize, bar_undone_length: usize) -> (String, String) {\n\n let (bar, bar_right, bar_left) = bars();\n\n\n\n let bar_done_str = if bar_done_length > 0 {\n\n if bar_undone_length > 0 {\n\n bar.repeat((bar_done_length - 1) as usize) + bar_right\n\n } else {\n\n // Remove bar_right when bar_undone_length is zero\n\n bar.repeat(bar_done_length as usize)\n\n }\n\n } else {\n\n \"\".to_owned()\n\n };\n\n\n\n let bar_undone_str = if bar_undone_length > 0 {\n\n bar_left.to_owned() + &bar.repeat(bar_undone_length as usize - 1)\n\n } else {\n\n \"\".to_owned()\n\n };\n\n\n\n (bar_done_str, bar_undone_str)\n\n}\n", "file_path": "src/app/show/common.rs", "rank": 10, "score": 161879.0627401157 }, { "content": "#[cfg(target_os = \"windows\")]\n\npub fn bars() -> (&'static str, &'static str, &'static str) {\n\n // bar, bar_right, bar_left\n\n (\"▓\", \"░\", \" \")\n\n}\n\n\n", "file_path": "src/app/show/common.rs", "rank": 11, "score": 156955.205322298 }, { "content": "pub fn terminal_width() -> u64 {\n\n if let Some((width, _)) = dimensions() {\n\n width as u64\n\n } else {\n\n // for envrionment in which atty is not available,\n\n // example, at ci of osx\n\n MIN_TERMINAL_WIDTH\n\n }\n\n}\n", "file_path": "src/common/terminal.rs", "rank": 12, "score": 147139.48143199817 }, { "content": "pub fn u32_to_u8x4(u: u32) -> [u8; 4] {\n\n u.to_be_bytes()\n\n}\n\n\n", "file_path": "src/common/bytes/bytes.rs", "rank": 13, "score": 124806.44578750247 }, { "content": "/// Directly request the resource without range header\n\nstruct DirectRequestTask {\n\n client: HttpClient,\n\n method: Method,\n\n uri: Uri,\n\n data: Option<Bytes>,\n\n sender: Sender<(RangePair, Bytes)>,\n\n}\n\n\n\nimpl DirectRequestTask {\n\n fn new(\n\n client: HttpClient,\n\n method: Method,\n\n uri: Uri,\n\n data: Option<Bytes>,\n\n sender: Sender<(RangePair, Bytes)>,\n\n ) -> DirectRequestTask {\n\n DirectRequestTask {\n\n client,\n\n method,\n\n uri,\n", "file_path": "src/app/core/http.rs", "rank": 14, "score": 122027.40458811684 }, { "content": "/// Request the resource with a range header which is in the `SharedRangList`\n\nstruct RangeRequestTask {\n\n client: HttpClient,\n\n method: Method,\n\n uri: Uri,\n\n data: Option<Bytes>,\n\n stack: SharedRangList,\n\n sender: Sender<(RangePair, Bytes)>,\n\n id: u64,\n\n timeout: Duration,\n\n}\n\n\n\nimpl RangeRequestTask {\n\n fn new(\n\n client: HttpClient,\n\n method: Method,\n\n uri: Uri,\n\n data: Option<Bytes>,\n\n stack: SharedRangList,\n\n sender: Sender<(RangePair, Bytes)>,\n\n id: u64,\n", "file_path": "src/app/core/http.rs", "rank": 15, "score": 122027.28489643734 }, { "content": "/// Split a close `RangePair` to many piece of pairs that each of their size is equal to\n\n/// `chunk_size`, but the last piece size can be less then `chunk_size`.\n\npub fn split_pair(pair: &RangePair, chunk_size: u64) -> RangeList {\n\n let mut stack = Vec::new();\n\n\n\n let mut begin = pair.begin;\n\n let interval_end = pair.end;\n\n\n\n while begin + chunk_size - 1 <= 
interval_end {\n\n let end = begin + chunk_size - 1;\n\n stack.push(RangePair::new(begin, end));\n\n begin += chunk_size;\n\n }\n\n\n\n if begin <= interval_end {\n\n stack.push(RangePair::new(begin, interval_end));\n\n }\n\n\n\n stack\n\n}\n", "file_path": "src/common/range.rs", "rank": 16, "score": 111966.51048560257 }, { "content": "pub fn build_app<'a>() -> ClapApp<'a, 'a> {\n\n ClapApp::new(crate_name!())\n\n .version(crate_version!())\n\n .global_setting(AppSettings::ColoredHelp)\n\n .global_setting(AppSettings::DeriveDisplayOrder)\n\n .global_setting(AppSettings::UnifiedHelpMessage)\n\n .global_setting(AppSettings::HidePossibleValuesInHelp)\n\n .about(\"Aget - Asynchronous Downloader\")\n\n .arg(\n\n Arg::with_name(\"URL\")\n\n .required(true)\n\n .empty_values(false)\n\n .multiple(false)\n\n .help(\"URL to request.\")\n\n )\n\n .arg(\n\n Arg::with_name(\"method\")\n\n .short(\"X\")\n\n .long(\"method\")\n\n .default_value(\"GET\")\n", "file_path": "src/arguments/clap_app.rs", "rank": 17, "score": 102248.59532564424 }, { "content": "pub fn decrypt_aes128(key: &[u8], iv: &[u8], enc: &[u8]) -> Result<Vec<u8>> {\n\n let cipher = Cipher::aes_128_cbc();\n\n Ok(decrypt(cipher, key, Some(iv), enc)?)\n\n}\n", "file_path": "src/common/crypto.rs", "rank": 18, "score": 102190.53362873045 }, { "content": "/// Use the last of components of uri as a file name\n\npub trait UriFileName {\n\n fn file_name(&self) -> Result<&str>;\n\n}\n\n\n\nimpl UriFileName for Uri {\n\n fn file_name(&self) -> Result<&str> {\n\n let path = Path::new(self.path());\n\n if let Some(file_name) = path.file_name() {\n\n Ok(file_name.to_str().unwrap())\n\n } else {\n\n Err(Error::NoFilename)\n\n }\n\n }\n\n}\n", "file_path": "src/common/uri.rs", "rank": 19, "score": 99724.62713563672 }, { "content": " total: u64,\n\n completed: u64,\n\n seek: u64,\n\n}\n\n\n\nimpl M3u8Receiver {\n\n pub fn new<P: AsRef<Path>>(output: P) -> Result<M3u8Receiver> {\n\n let mut outputfile = File::new(&output, true)?;\n\n outputfile.open()?;\n\n\n\n // Record 3 variables in a `ByteArrayRecorder`:\n\n // [0-index, total segment number][1-index, completed segment number][2-index, seek offset]\n\n let mut bytearrayrecorder =\n\n ByteArrayRecorder::new(&*(output.as_ref().to_string_lossy() + RECORDER_FILE_SUFFIX))?;\n\n bytearrayrecorder.open()?;\n\n let total = bytearrayrecorder.index(0)?;\n\n let completed = bytearrayrecorder.index(1)?;\n\n let seek = bytearrayrecorder.index(2)?;\n\n\n\n Ok(M3u8Receiver {\n", "file_path": "src/app/receive/m3u8_receiver.rs", "rank": 20, "score": 88250.82488462763 }, { "content": " let total = self.total;\n\n let completed = self.completed;\n\n let rate = self.ratestatus.rate();\n\n let length = self.seek;\n\n\n\n self.shower.print_status(completed, total, length, rate)?;\n\n self.ratestatus.clean();\n\n Ok(())\n\n }\n\n\n\n pub async fn start(&mut self, receiver: Receiver<(u64, Bytes)>) -> Result<()> {\n\n self.show_infos()?;\n\n\n\n let mut tick = interval(Duration::from_secs(2)).fuse();\n\n let mut receiver = receiver.fuse();\n\n loop {\n\n select! 
{\n\n item = receiver.next() => {\n\n if let Some((index, chunk)) = item {\n\n let len = chunk.len() as u64;\n", "file_path": "src/app/receive/m3u8_receiver.rs", "rank": 21, "score": 88241.00924180253 }, { "content": "use std::{io::SeekFrom, path::Path, time::Duration};\n\n\n\nuse futures::{channel::mpsc::Receiver, select, stream::StreamExt};\n\n\n\nuse actix_rt::time::interval;\n\n\n\nuse crate::{\n\n app::{\n\n record::{bytearray_recorder::ByteArrayRecorder, common::RECORDER_FILE_SUFFIX},\n\n show::m3u8_show::M3u8Shower,\n\n status::rate_status::RateStatus,\n\n },\n\n common::{bytes::bytes_type::Bytes, errors::Result, file::File},\n\n};\n\n\n\npub struct M3u8Receiver {\n\n output: File,\n\n bytearrayrecorder: ByteArrayRecorder,\n\n ratestatus: RateStatus,\n\n shower: M3u8Shower,\n", "file_path": "src/app/receive/m3u8_receiver.rs", "rank": 22, "score": 88238.89392586402 }, { "content": " output: outputfile,\n\n bytearrayrecorder,\n\n ratestatus: RateStatus::new(),\n\n shower: M3u8Shower::new(),\n\n total,\n\n completed,\n\n seek,\n\n })\n\n }\n\n\n\n fn show_infos(&mut self) -> Result<()> {\n\n let file_name = &self.output.file_name().unwrap_or(\"[No Name]\");\n\n let total = self.total;\n\n self.shower.print_file(file_name)?;\n\n self.shower.print_total(total)?;\n\n self.show_status()?;\n\n Ok(())\n\n }\n\n\n\n fn show_status(&mut self) -> Result<()> {\n", "file_path": "src/app/receive/m3u8_receiver.rs", "rank": 23, "score": 88229.49578805781 }, { "content": "\n\n // Write chunk to file\n\n self.output.write(&chunk[..], Some(SeekFrom::Start(self.seek)))?;\n\n\n\n // Record info\n\n self.bytearrayrecorder.write(1, index + 1)?; // Write completed\n\n self.bytearrayrecorder.write(2, self.seek + len)?; // Write seek offset\n\n self.completed = index + 1;\n\n self.seek += len ;\n\n\n\n // Update rate\n\n self.ratestatus.add(len);\n\n } else {\n\n break;\n\n }\n\n },\n\n _ = tick.next() => {\n\n self.show_status()?;\n\n },\n\n }\n\n }\n\n self.show_status()?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/app/receive/m3u8_receiver.rs", "rank": 24, "score": 88221.30453791452 }, { "content": "/// This a arg which gives parameters for apps\n\npub trait Args {\n\n /// Path of output\n\n fn output(&self) -> PathBuf;\n\n\n\n /// Request method for http\n\n fn method(&self) -> Method;\n\n\n\n /// The uri of a task\n\n fn uri(&self) -> Uri;\n\n\n\n /// The data for http post request\n\n fn data(&self) -> Option<BytesMut>;\n\n\n\n /// Request headers\n\n fn headers(&self) -> Vec<(String, String)>;\n\n\n\n /// Proxy: http, https, socks4, socks5\n\n fn proxy(&self) -> Option<String>;\n\n\n\n /// The maximum time the request is allowed to take.\n", "file_path": "src/features/args.rs", "rank": 25, "score": 79255.89920128213 }, { "content": "#[derive(Debug, Clone)]\n\npub struct M3u8Segment {\n\n pub index: u64,\n\n pub method: Method,\n\n pub uri: Uri,\n\n pub data: Option<Bytes>,\n\n pub key: Option<[u8; 16]>,\n\n pub iv: Option<[u8; 16]>,\n\n}\n\n\n\npub type M3u8SegmentList = Vec<M3u8Segment>;\n\n\n\npub type SharedM3u8SegmentList = SharedVec<M3u8Segment>;\n\n\n\npub async fn get_m3u8(\n\n client: &HttpClient,\n\n method: Method,\n\n uri: Uri,\n\n data: Option<Bytes>,\n\n) -> Result<M3u8SegmentList> {\n", "file_path": "src/app/core/m3u8/common.rs", "rank": 26, "score": 77816.80850172127 }, { "content": " }\n\n Ok(list)\n\n}\n\n\n\nasync fn get_key(client: &HttpClient, method: Method, uri: Uri) -> Result<[u8; 16]> {\n\n let mut resp = request(client, method.clone(), uri.clone(), None, None).await?;\n\n let 
cn = resp.body().await?;\n\n let mut buf = [0; 16];\n\n buf[..].clone_from_slice(&cn);\n\n Ok(buf)\n\n}\n", "file_path": "src/app/core/m3u8/common.rs", "rank": 27, "score": 77816.38664645785 }, { "content": "use std::collections::HashMap;\n\n\n\nuse m3u8_rs::{\n\n parse_playlist_res,\n\n playlist::{Key, Playlist},\n\n};\n\n\n\nuse crate::common::{\n\n bytes::{\n\n bytes::{decode_hex, u32_to_u8x4},\n\n bytes_type::Bytes,\n\n },\n\n errors::{Error, Result},\n\n list::SharedVec,\n\n net::{\n\n net::{join_uri, redirect, request},\n\n HttpClient, Method, Uri,\n\n },\n\n};\n\n\n", "file_path": "src/app/core/m3u8/common.rs", "rank": 28, "score": 77814.39443939464 }, { "content": " (None, None)\n\n }\n\n } else {\n\n (None, None)\n\n };\n\n\n\n list.push(M3u8Segment {\n\n index: idx,\n\n method: Method::GET,\n\n uri: seg_uri.clone(),\n\n data: None,\n\n key,\n\n iv,\n\n });\n\n index += 1;\n\n idx += 1;\n\n }\n\n }\n\n Err(_) => return Err(Error::M3U8ParseFail),\n\n }\n", "file_path": "src/app/core/m3u8/common.rs", "rank": 29, "score": 77810.98656747761 }, { "content": " let mut resp = request(client, method.clone(), u.clone(), data.clone(), None).await?;\n\n let cn = resp.body().await?;\n\n let mut cn = cn.to_vec();\n\n\n\n // Adding \"\\n\" for the case when response content has not \"\\n\" at end.\n\n cn.extend(b\"\\n\");\n\n\n\n // Parse m3u8 content\n\n let parsed = parse_playlist_res(cn.as_ref());\n\n match parsed {\n\n Ok(Playlist::MasterPlaylist(mut pl)) => {\n\n pl.variants.reverse();\n\n for variant in &pl.variants {\n\n let uri = join_uri(&base_uri, &variant.uri)?;\n\n uris.push(uri);\n\n }\n\n }\n\n Ok(Playlist::MediaPlaylist(pl)) => {\n\n let mut index = pl.media_sequence as u64;\n\n let mut key_m: Option<Key> = None;\n", "file_path": "src/app/core/m3u8/common.rs", "rank": 30, "score": 77810.60744160207 }, { "content": " // uri -> (key, iv)\n\n let mut keymap: HashMap<Uri, [u8; 16]> = HashMap::new();\n\n let mut uris = vec![uri];\n\n let mut list = vec![];\n\n\n\n let mut idx = 0;\n\n\n\n while let Some(uri) = uris.pop() {\n\n debug!(\"m3u8\", uri);\n\n let u = redirect(client, method.clone(), uri.clone(), data.clone()).await?;\n\n\n\n if u != uri {\n\n debug!(\"m3u8 redirect to\", u);\n\n uris.push(u.clone());\n\n continue;\n\n }\n\n\n\n let base_uri = u.clone();\n\n\n\n // Read m3u8 content\n", "file_path": "src/app/core/m3u8/common.rs", "rank": 31, "score": 77802.182878397 }, { "content": " iv[12..].clone_from_slice(&index_bin);\n\n iv\n\n };\n\n if let Some(uri) = &key.uri {\n\n let key_uri = join_uri(&base_uri, &uri)?;\n\n if let Some(k) = keymap.get(&key_uri) {\n\n (Some(*k), Some(iv))\n\n } else {\n\n let k = get_key(client, Method::GET, key_uri.clone()).await?;\n\n keymap.insert(key_uri.clone(), k);\n\n debug!(\n\n \"Get key, iv\",\n\n (\n\n std::str::from_utf8_unchecked(&k),\n\n std::str::from_utf8_unchecked(&iv)\n\n )\n\n );\n\n (Some(k), Some(iv))\n\n }\n\n } else {\n", "file_path": "src/app/core/m3u8/common.rs", "rank": 32, "score": 77800.00543466199 }, { "content": " for segment in &pl.segments {\n\n let seg_uri = join_uri(&base_uri, &segment.uri)?;\n\n\n\n // In `pl.segment`, the same key will not repeat, if previous key appears.\n\n let segment_key = if segment.key.is_none() && key_m.is_some() {\n\n &key_m\n\n } else {\n\n key_m = segment.key.clone();\n\n &segment.key\n\n };\n\n\n\n let (key, iv) = if let Some(key) = segment_key {\n\n let iv = if let Some(iv) = &key.iv {\n\n let mut i = [0; 16];\n\n let buf = decode_hex(&iv[2..])?;\n\n i.clone_from_slice(&buf[..]);\n\n 
i\n\n } else {\n\n let mut iv = [0; 16];\n\n let index_bin = u32_to_u8x4(index as u32);\n", "file_path": "src/app/core/m3u8/common.rs", "rank": 33, "score": 77796.20199137596 }, { "content": "pub trait StackLike<T> {\n\n fn push(&mut self, item: T);\n\n\n\n fn pop(&mut self) -> Option<T>;\n\n\n\n fn len(&self) -> usize;\n\n}\n", "file_path": "src/features/stack.rs", "rank": 34, "score": 74289.81842559988 }, { "content": "pub trait Runnable {\n\n fn run(self) -> Result<()>;\n\n}\n", "file_path": "src/features/running.rs", "rank": 35, "score": 71436.48220164132 }, { "content": "/// Convert seconds to date format\n\npub trait ToDate {\n\n fn date(&self) -> String;\n\n}\n\n\n\nimpl ToDate for u64 {\n\n fn date(&self) -> String {\n\n let mut num = *self as f64;\n\n if num < 60.0 {\n\n return format!(\"{:.0}s\", num);\n\n }\n\n num /= 60.0;\n\n if num < 60.0 {\n\n return format!(\"{:.0}m\", num);\n\n }\n\n num /= 60.0;\n\n if num < 24.0 {\n\n return format!(\"{:.0}h\", num);\n\n }\n\n num /= 24.0;\n\n return format!(\"{:.0}d\", num);\n\n }\n\n}\n", "file_path": "src/common/liberal.rs", "rank": 36, "score": 69131.61388092276 }, { "content": "/// Convert number to human-readable\n\npub trait HumanReadable {\n\n fn human_readable(&self) -> String;\n\n}\n\n\n\nimpl HumanReadable for u64 {\n\n fn human_readable(&self) -> String {\n\n let mut num = *self as f64;\n\n for s in &SIZES {\n\n if num < 1024.0 {\n\n return format!(\"{:.1}{}\", num, s);\n\n }\n\n num /= 1024.0;\n\n }\n\n return format!(\"{:.1}{}\", num, SIZES[SIZES.len() - 1]);\n\n }\n\n}\n\n\n\nimpl HumanReadable for f64 {\n\n fn human_readable(&self) -> String {\n\n let mut num = *self;\n\n for s in &SIZES {\n\n if num < 1024.0 {\n\n return format!(\"{:.1}{}\", num, s);\n\n }\n\n num /= 1024.0;\n\n }\n\n return format!(\"{:.1}{}\", num, SIZES[SIZES.len() - 1]);\n\n }\n\n}\n", "file_path": "src/common/size.rs", "rank": 37, "score": 66900.41151074118 }, { "content": "use std::num::ParseIntError;\n\n\n\nuse crate::common::errors::Result;\n\n\n\n/// Create an integer value from its representation as a byte array in big endian.\n", "file_path": "src/common/bytes/bytes.rs", "rank": 38, "score": 65002.36685549567 }, { "content": "/// Convert liberal number to u64\n\n/// e.g.\n\n/// 100k -> 100 * 1024\n\npub trait ParseLiteralNumber {\n\n fn literal_number(&self) -> Result<u64, Error>;\n\n}\n\n\n\nimpl ParseLiteralNumber for &str {\n\n fn literal_number(&self) -> Result<u64, Error> {\n\n let (num, unit) = self.split_at(self.len() - 1);\n\n if unit.parse::<u8>().is_err() {\n\n let mut num = num.parse::<u64>()?;\n\n for s in &SIZES {\n\n if s == &unit.to_uppercase() {\n\n return Ok(num);\n\n } else {\n\n num *= 1024;\n\n }\n\n }\n\n Ok(num)\n\n } else {\n\n let num = self.parse::<u64>()?;\n\n Ok(num)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/common/liberal.rs", "rank": 39, "score": 64850.66907472277 }, { "content": " }\n\n Ok(uri)\n\n}\n\n\n\n/// Get the content length of the resource\n\npub async fn redirect_and_contentlength(\n\n client: &HttpClient,\n\n method: Method,\n\n uri: Uri,\n\n data: Option<Bytes>,\n\n) -> Result<(Uri, ContentLengthValue)> {\n\n let mut uri = uri;\n\n loop {\n\n let req = client\n\n .request(method.clone(), uri.clone())\n\n .set_header_if_none(\"Accept\", \"*/*\") // set accept if none\n\n .set_header(header::RANGE, \"bytes=0-1\");\n\n\n\n let resp = if let Some(d) = data.clone() {\n\n req.send_body(d).await?\n", "file_path": "src/common/net/net.rs", "rank": 40, "score": 64675.44173417738 }, { "content": " method: 
Method,\n\n uri: Uri,\n\n data: Option<Bytes>,\n\n range: Option<RangePair>,\n\n) -> Result<RClientResponse> {\n\n let mut uri = uri;\n\n loop {\n\n let mut req = client\n\n .request(method.clone(), uri.clone())\n\n .set_header_if_none(\"Accept\", \"*/*\"); // set accept if none\n\n\n\n if let Some(RangePair { begin, end }) = range {\n\n req = req.set_header(header::RANGE, format!(\"bytes={}-{}\", begin, end));\n\n } else {\n\n req = req.set_header(header::RANGE, format!(\"bytes=0-\"));\n\n }\n\n\n\n let resp = if let Some(d) = data.clone() {\n\n req.send_body(d).await?\n\n } else {\n", "file_path": "src/common/net/net.rs", "rank": 41, "score": 64667.59231937414 }, { "content": "use std::time::Duration;\n\n\n\nuse crate::common::{\n\n bytes::bytes_type::Bytes,\n\n errors::{Error, Result},\n\n net::{\n\n header, ClientResponse, Connector, ContentLengthValue, HttpClient, Method, RClientResponse,\n\n Uri, Url,\n\n },\n\n range::RangePair,\n\n};\n\n\n", "file_path": "src/common/net/net.rs", "rank": 42, "score": 64667.49890366133 }, { "content": " }\n\n }\n\n }\n\n }\n\n if let Some(h) = resp.headers().get(header::CONTENT_LENGTH) {\n\n if let Ok(s) = h.to_str() {\n\n if let Ok(length) = s.parse::<u64>() {\n\n return Ok((uri, ContentLengthValue::DirectLength(length.clone())));\n\n }\n\n }\n\n }\n\n break;\n\n }\n\n }\n\n Ok((uri, ContentLengthValue::NoLength))\n\n}\n\n\n\n/// Send a request\n\npub async fn request(\n\n client: &HttpClient,\n", "file_path": "src/common/net/net.rs", "rank": 43, "score": 64667.222684666165 }, { "content": " } else {\n\n req.send().await?\n\n };\n\n\n\n let headers = resp.headers();\n\n if resp.status().is_redirection() {\n\n if let Some(location) = headers.get(header::LOCATION) {\n\n let uri_str = location.to_str()?;\n\n uri = join_uri(&uri, uri_str)?;\n\n continue;\n\n } else {\n\n return Err(Error::NoLocation(format!(\"{}\", uri)));\n\n }\n\n } else {\n\n is_success(&resp)?;\n\n if let Some(h) = headers.get(header::CONTENT_RANGE) {\n\n if let Ok(s) = h.to_str() {\n\n if let Some(index) = s.find(\"/\") {\n\n if let Ok(length) = &s[index + 1..].parse::<u64>() {\n\n return Ok((uri, ContentLengthValue::RangeLength(length.clone())));\n", "file_path": "src/common/net/net.rs", "rank": 44, "score": 64665.894122150945 }, { "content": " .set_header_if_none(\"Accept\", \"*/*\") // set accept if none\n\n .set_header(header::RANGE, \"bytes=0-1\");\n\n\n\n let resp = if let Some(d) = data.clone() {\n\n req.send_body(d).await?\n\n } else {\n\n req.send().await?\n\n };\n\n\n\n if !resp.status().is_redirection() {\n\n is_success(&resp)?; // Return unsuccess code\n\n break;\n\n }\n\n\n\n let headers = resp.headers();\n\n if let Some(location) = headers.get(header::LOCATION) {\n\n uri = location.to_str()?.parse()?;\n\n } else {\n\n break;\n\n }\n", "file_path": "src/common/net/net.rs", "rank": 45, "score": 64664.56928256931 }, { "content": " req.send().await?\n\n };\n\n\n\n if resp.status().is_redirection() {\n\n if let Some(location) = resp.headers().get(header::LOCATION) {\n\n let uri_str = location.to_str()?;\n\n uri = join_uri(&uri, uri_str)?;\n\n continue;\n\n } else {\n\n return Err(Error::NoLocation(format!(\"{}\", uri)));\n\n }\n\n } else {\n\n is_success(&resp)?;\n\n return Ok(resp);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/common/net/net.rs", "rank": 46, "score": 64662.078052630095 }, { "content": " //\n\n // Keep-alive period is the period between connection usage. 
If\n\n // the delay between repeated usages of the same connection\n\n // exceeds this period, the connection is closed.\n\n // Default keep-alive period is 15 seconds.\n\n .conn_keep_alive(keep_alive)\n\n // Set max lifetime period for connection.\n\n //\n\n // Connection lifetime is max lifetime of any opened connection\n\n // until it is closed regardless of keep-alive period.\n\n // Default lifetime period is 75 seconds.\n\n .conn_lifetime(lifetime)\n\n .finish();\n\n\n\n let mut builder = HttpClient::builder()\n\n .connector(conn)\n\n // Set request timeout\n\n //\n\n // Request timeout is the total time before a response must be received.\n\n // Default value is 5 seconds.\n", "file_path": "src/common/net/net.rs", "rank": 47, "score": 64650.44016488149 }, { "content": " .timeout(timeout)\n\n // Here we do not use default headers.\n\n .no_default_headers();\n\n\n\n if disable_redirects {\n\n builder = builder.disable_redirects();\n\n }\n\n\n\n // Add Default headers\n\n for (k, v) in headers {\n\n builder = builder.header(*k, *v);\n\n }\n\n\n\n builder.finish()\n\n}\n\n\n", "file_path": "src/common/net/net.rs", "rank": 48, "score": 64645.515749759164 }, { "content": "pub use bytes::{Buf, BufMut, Bytes, BytesMut};\n", "file_path": "src/common/bytes/bytes_type.rs", "rank": 49, "score": 62750.786107919135 }, { "content": " // Total content length of the uri\n\n total: u64,\n\n}\n\n\n\nimpl HttpReceiver {\n\n pub fn new<P: AsRef<Path>>(\n\n output: P,\n\n direct: bool,\n\n content_length: u64,\n\n ) -> Result<HttpReceiver> {\n\n let mut outputfile = File::new(&output, true)?;\n\n outputfile.open()?;\n\n\n\n let (rangerecorder, total, completed) = if direct {\n\n (None, content_length, 0)\n\n } else {\n\n let mut rangerecorder =\n\n RangeRecorder::new(&*(output.as_ref().to_string_lossy() + RECORDER_FILE_SUFFIX))?;\n\n rangerecorder.open()?;\n\n let total = rangerecorder.total()?;\n", "file_path": "src/app/receive/http_receiver.rs", "rank": 50, "score": 60906.940792722555 }, { "content": " fn record_pair(&mut self, pair: RangePair) -> Result<()> {\n\n if let Some(ref mut rangerecorder) = self.rangerecorder {\n\n rangerecorder.write_pair(pair)?;\n\n }\n\n Ok(())\n\n }\n\n pub async fn start(&mut self, receiver: Receiver<(RangePair, Bytes)>) -> Result<()> {\n\n self.show_infos()?;\n\n\n\n let mut tick = interval(Duration::from_secs(2)).fuse();\n\n let mut receiver = receiver.fuse();\n\n loop {\n\n select! 
{\n\n item = receiver.next() => {\n\n if let Some((pair, chunk)) = item {\n\n self.output.write(&chunk[..], Some(SeekFrom::Start(pair.begin)))?;\n\n self.record_pair(pair)?;\n\n self.ratestatus.add(pair.length());\n\n } else {\n\n break;\n", "file_path": "src/app/receive/http_receiver.rs", "rank": 51, "score": 60901.64960475152 }, { "content": "use std::{io::SeekFrom, path::Path, time::Duration};\n\n\n\nuse futures::{channel::mpsc::Receiver, select, stream::StreamExt};\n\n\n\nuse actix_rt::time::interval;\n\n\n\nuse crate::{\n\n app::{\n\n record::{common::RECORDER_FILE_SUFFIX, range_recorder::RangeRecorder},\n\n show::http_show::HttpShower,\n\n status::rate_status::RateStatus,\n\n },\n\n common::{bytes::bytes_type::Bytes, errors::Result, file::File, range::RangePair},\n\n};\n\n\n\npub struct HttpReceiver {\n\n output: File,\n\n rangerecorder: Option<RangeRecorder>,\n\n ratestatus: RateStatus,\n\n shower: HttpShower,\n", "file_path": "src/app/receive/http_receiver.rs", "rank": 52, "score": 60901.52309635907 }, { "content": " let completed = rangerecorder.count()?;\n\n (Some(rangerecorder), total, completed)\n\n };\n\n\n\n let mut ratestatus = RateStatus::new();\n\n ratestatus.set_total(completed);\n\n\n\n Ok(HttpReceiver {\n\n output: outputfile,\n\n rangerecorder,\n\n ratestatus,\n\n shower: HttpShower::new(),\n\n // receiver,\n\n total,\n\n })\n\n }\n\n\n\n fn show_infos(&mut self) -> Result<()> {\n\n if self.rangerecorder.is_none() {\n\n self.shower\n", "file_path": "src/app/receive/http_receiver.rs", "rank": 53, "score": 60899.85342006041 }, { "content": " .print_msg(\"Server doesn't support range request.\")?;\n\n }\n\n\n\n let file_name = &self.output.file_name().unwrap_or(\"[No Name]\");\n\n let total = self.total;\n\n self.shower.print_file(file_name)?;\n\n self.shower.print_total(total)?;\n\n // self.shower.print_concurrency(concurrency)?;\n\n self.show_status()?;\n\n Ok(())\n\n }\n\n\n\n fn show_status(&mut self) -> Result<()> {\n\n let total = self.total;\n\n let completed = self.ratestatus.total();\n\n let rate = self.ratestatus.rate();\n\n\n\n let eta = if self.rangerecorder.is_some() || self.total != 0 {\n\n let remains = total - completed;\n\n // rate > 1.0 for overflow\n", "file_path": "src/app/receive/http_receiver.rs", "rank": 54, "score": 60895.387797546355 }, { "content": " if remains > 0 && rate > 1.0 {\n\n let eta = (remains as f64 / rate) as u64;\n\n // eta is large than 99 days, return 0\n\n if eta > 99 * 24 * 60 * 60 {\n\n 0\n\n } else {\n\n eta\n\n }\n\n } else {\n\n 0\n\n }\n\n } else {\n\n 0\n\n };\n\n\n\n self.shower.print_status(completed, total, rate, eta)?;\n\n self.ratestatus.clean();\n\n Ok(())\n\n }\n\n\n", "file_path": "src/app/receive/http_receiver.rs", "rank": 55, "score": 60884.85018390028 }, { "content": " }\n\n },\n\n _ = tick.next() => {\n\n self.show_status()?;\n\n },\n\n }\n\n }\n\n self.show_status()?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/app/receive/http_receiver.rs", "rank": 56, "score": 60877.42109501298 }, { "content": "use std::path::Path;\n\n\n\nuse crate::common::{\n\n errors::{Error, Result},\n\n net::Uri,\n\n};\n\n\n\n/// Use the last of components of uri as a file name\n", "file_path": "src/common/uri.rs", "rank": 71, "score": 59512.95649597471 }, { "content": "#[derive(Debug, Clone)]\n\npub enum TaskType {\n\n HTTP,\n\n M3U8,\n\n}\n", "file_path": "src/common/tasks.rs", "rank": 72, "score": 59465.73720987406 }, { "content": "use std::{io::Error as IoError, num, result};\n\n\n\nuse thiserror::Error as ThisError;\n\n\n\nuse 
url::ParseError as UrlParseError;\n\n\n\nuse awc::error::{PayloadError, SendRequestError};\n\n\n\nuse openssl;\n\n\n\npub type Result<T, E = Error> = result::Result<T, E>;\n\n\n\n#[derive(Debug, ThisError)]\n\npub enum Error {\n\n // For Arguments\n\n #[error(\"Output path is invalid: {0}\")]\n\n InvalidPath(String),\n\n #[error(\"Uri is invalid: {0}\")]\n\n InvaildUri(#[from] http::uri::InvalidUri),\n\n #[error(\"Header is invalid: {0}\")]\n", "file_path": "src/common/errors.rs", "rank": 73, "score": 59360.2797560646 }, { "content": "\n\nimpl From<http::header::ToStrError> for Error {\n\n fn from(err: http::header::ToStrError) -> Error {\n\n Error::NetError(format!(\"{}\", err))\n\n }\n\n}\n\n\n\nimpl From<http::Error> for Error {\n\n fn from(err: http::Error) -> Error {\n\n Error::NetError(format!(\"{}\", err))\n\n }\n\n}\n\n\n\nimpl From<SendRequestError> for Error {\n\n fn from(err: SendRequestError) -> Error {\n\n Error::NetError(format!(\"{}\", err))\n\n }\n\n}\n\n\n\nimpl From<PayloadError> for Error {\n\n fn from(err: PayloadError) -> Error {\n\n Error::NetError(format!(\"{}\", err))\n\n }\n\n}\n", "file_path": "src/common/errors.rs", "rank": 74, "score": 59360.01914073284 }, { "content": " #[error(\"Procedure timeout\")]\n\n Timeout,\n\n\n\n // For Network\n\n #[error(\"Network error: {0}\")]\n\n NetError(String),\n\n #[error(\"Uncompleted Read\")]\n\n UncompletedRead,\n\n #[error(\"{0} is unsupported\")]\n\n UnsupportedMethod(String),\n\n #[error(\"header is invalid: {0}\")]\n\n HeaderParseError(String),\n\n #[error(\"header is invalid: {0}\")]\n\n UrlParseError(#[from] UrlParseError),\n\n #[error(\"BUG: {0}\")]\n\n Bug(String),\n\n #[error(\"The two content lengths are not equal between the response and the aget file.\")]\n\n ContentLengthIsNotConsistent,\n\n\n\n // For m3u8\n", "file_path": "src/common/errors.rs", "rank": 75, "score": 59355.9132058437 }, { "content": " #[error(\"Fail to parse m3u8 file.\")]\n\n M3U8ParseFail,\n\n #[error(\"The two m3u8 parts are not equal between the response and the aget file.\")]\n\n PartsAreNotConsistent,\n\n\n\n #[error(\"An internal error: {0}\")]\n\n InnerError(String),\n\n #[error(\"Content does not has length\")]\n\n NoContentLength,\n\n #[error(\"header is invalid: {0}\")]\n\n InvaildHeader(String),\n\n #[error(\"response status code is: {0}\")]\n\n Unsuccess(u16),\n\n #[error(\"Redirect to: {0}\")]\n\n Redirect(String),\n\n #[error(\"No Location for redirection: {0}\")]\n\n NoLocation(String),\n\n #[error(\"Fail to decrypt aes128 data: {0}\")]\n\n AES128DecryptFail(#[from] openssl::error::ErrorStack),\n\n}\n", "file_path": "src/common/errors.rs", "rank": 76, "score": 59352.48524587788 }, { "content": " InvalidHeader(String),\n\n #[error(\"No filename.\")]\n\n NoFilename,\n\n #[error(\"Directory is not found\")]\n\n NotFoundDirectory,\n\n #[error(\"The file already exists.\")]\n\n FileExists,\n\n #[error(\"The path is a directory.\")]\n\n PathIsDirectory,\n\n #[error(\"Can't parse string as number: {0}\")]\n\n IsNotNumber(#[from] num::ParseIntError),\n\n #[error(\"Io Error: {0}\")]\n\n Io(#[from] IoError),\n\n #[error(\"{0} task is not supported\")]\n\n UnsupportedTask(String),\n\n\n\n // For IO\n\n #[error(\"IO: Unexpected EOF\")]\n\n UnexpectedEof,\n\n\n", "file_path": "src/common/errors.rs", "rank": 77, "score": 59348.49570708056 }, { "content": "pub mod bytes;\n\npub mod bytes_type;\n", "file_path": "src/common/bytes/mod.rs", "rank": 78, "score": 56686.07832107629 }, { "content": "pub mod net;\n\n\n\nuse std::{boxed::Box, pin::Pin, 
time::Duration};\n\n\n\npub use http::{self, header, HeaderMap, HeaderValue, Method, Request, Response, Uri};\n\n\n\npub use url::Url;\n\n\n\npub use awc::{\n\n error as net_error, Client as HttpClient, ClientBuilder, ClientRequest, ClientResponse,\n\n Connector,\n\n};\n\n\n\nuse bytes::Bytes;\n\nuse futures::Stream;\n\n\n\nuse actix_http::{encoding::Decoder, error::PayloadError, Payload};\n\n\n\npub type RClientResponse =\n\n ClientResponse<Decoder<Payload<Pin<Box<dyn Stream<Item = Result<Bytes, PayloadError>>>>>>>;\n", "file_path": "src/common/net/mod.rs", "rank": 79, "score": 56441.01849327652 }, { "content": "\n\n#[derive(Debug)]\n\npub enum ContentLengthValue {\n\n RangeLength(u64),\n\n DirectLength(u64),\n\n NoLength,\n\n}\n\n\n\npub struct ConnectorConfig {\n\n pub timeout: Duration,\n\n pub dns_timeout: Duration,\n\n pub keep_alive: Duration,\n\n pub lifetime: Duration,\n\n pub disable_redirects: bool,\n\n}\n", "file_path": "src/common/net/mod.rs", "rank": 80, "score": 56424.97367363374 }, { "content": "pub mod http_receiver;\n\npub mod m3u8_receiver;\n", "file_path": "src/app/receive/mod.rs", "rank": 81, "score": 54796.92821990638 }, { "content": "pub const RECORDER_FILE_SUFFIX: &'static str = \".rc.aget\";\n", "file_path": "src/app/record/common.rs", "rank": 82, "score": 52741.46262732615 }, { "content": "#[cfg(target_os = \"windows\")]\n", "file_path": "src/app/show/common.rs", "rank": 83, "score": 52734.86015427085 }, { "content": " \"{}: {}\",\n\n Blue.bold().paint(\"Segments\"),\n\n total,\n\n )?;\n\n Ok(())\n\n }\n\n\n\n pub fn print_concurrency(&mut self, concurrency: u64) -> Result<()> {\n\n writeln!(\n\n &mut self.stdout,\n\n \"{}: {}\\n\",\n\n Yellow.bold().paint(\"concurrency\"),\n\n concurrency,\n\n )?;\n\n Ok(())\n\n }\n\n\n\n pub fn print_status(\n\n &mut self,\n\n completed: u64,\n", "file_path": "src/app/show/m3u8_show.rs", "rank": 84, "score": 51764.018825477695 }, { "content": "\n\n pub fn print_msg(&mut self, msg: &str) -> Result<()> {\n\n writeln!(&mut self.stdout, \"\\n {}\", Yellow.italic().paint(msg))?;\n\n Ok(())\n\n }\n\n\n\n pub fn print_file(&mut self, path: &str) -> Result<()> {\n\n writeln!(\n\n &mut self.stdout,\n\n // \"\\n {}: {}\",\n\n \"\\n{}: {}\",\n\n Green.bold().paint(\"File\"),\n\n path,\n\n )?;\n\n Ok(())\n\n }\n\n\n\n pub fn print_total(&mut self, total: u64) -> Result<()> {\n\n writeln!(\n\n &mut self.stdout,\n", "file_path": "src/app/show/m3u8_show.rs", "rank": 85, "score": 51761.85873437123 }, { "content": "use std::io::{stdout, Stdout, Write};\n\n\n\nuse crate::{\n\n app::show::common::du_bars,\n\n common::{\n\n colors::{Black, Blue, Green, Red, Yellow},\n\n errors::Result,\n\n size::HumanReadable,\n\n terminal::terminal_width,\n\n },\n\n};\n\n\n\npub struct M3u8Shower {\n\n stdout: Stdout,\n\n}\n\n\n\nimpl M3u8Shower {\n\n pub fn new() -> M3u8Shower {\n\n M3u8Shower { stdout: stdout() }\n\n }\n", "file_path": "src/app/show/m3u8_show.rs", "rank": 86, "score": 51761.406571014246 }, { "content": " total: u64,\n\n length: u64,\n\n rate: f64,\n\n ) -> Result<()> {\n\n let percent = completed as f64 / total as f64;\n\n\n\n let completed_str = completed.to_string();\n\n let total_str = total.to_string();\n\n let length_str = length.human_readable();\n\n let percent_str = format!(\"{:.2}\", percent * 100.0);\n\n let rate_str = rate.human_readable();\n\n\n\n // maximum info length is `completed_str.len()` + `total_str.len()` + 26\n\n // e.g.\n\n // 100/1021 97.98% 10m 1003.1B/s eta: 12s\n\n let info = format!(\n\n \"{completed}/{total} {length} 
{percent}% {rate}/s\",\n\n completed = completed_str,\n\n total = total_str,\n\n length = length_str,\n", "file_path": "src/app/show/m3u8_show.rs", "rank": 87, "score": 51755.8544236758 }, { "content": " percent = percent_str,\n\n rate = rate_str,\n\n );\n\n\n\n // set default info length\n\n let info_length = total_str.len() * 2 + 26;\n\n let miss = info_length - info.len();\n\n\n\n let terminal_width = terminal_width();\n\n let bar_length = terminal_width - info_length as u64 - 3;\n\n let bar_done_length = (bar_length as f64 * percent) as u64;\n\n let bar_undone_length = bar_length - bar_done_length;\n\n\n\n let (bar_done_str, bar_undone_str) =\n\n du_bars(bar_done_length as usize, bar_undone_length as usize);\n\n\n\n write!(\n\n &mut self.stdout,\n\n \"\\r{completed}/{total} {length} {percent}% {rate}/s{miss} {bar_done}{bar_undone} \",\n\n completed = Red.bold().paint(completed_str),\n", "file_path": "src/app/show/m3u8_show.rs", "rank": 88, "score": 51751.30514646047 }, { "content": "pub mod common;\n\npub mod m3u8;\n\n\n\npub use m3u8::M3u8Handler;\n", "file_path": "src/app/core/m3u8/mod.rs", "rank": 89, "score": 51751.20392414287 }, { "content": " total = Green.bold().paint(total_str),\n\n length = Red.bold().paint(length_str),\n\n percent = Yellow.bold().paint(percent_str),\n\n rate = Blue.bold().paint(rate_str),\n\n miss = \" \".repeat(miss),\n\n bar_done = Red.bold().paint(bar_done_str),\n\n bar_undone = Black.bold().paint(bar_undone_str),\n\n )?;\n\n\n\n self.stdout.flush()?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/app/show/m3u8_show.rs", "rank": 90, "score": 51745.47628611956 }, { "content": "fn main() {\n\n // From actix-rt(1.1.1), actix-rt does not create default System;\n\n let _ = System::new(\"default\");\n\n\n\n let cmdargs = CmdArgs::new();\n\n\n\n // Set debug\n\n if cmdargs.debug() {\n\n unsafe {\n\n DEBUG = true;\n\n }\n\n debug!(\"Args\", cmdargs);\n\n }\n\n\n\n // Set quiet\n\n if cmdargs.quiet() {\n\n unsafe {\n\n QUIET = true;\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 91, "score": 49342.67808989957 }, { "content": "use std::{path::PathBuf, time::Duration};\n\n\n\nuse crate::common::{\n\n bytes::bytes_type::BytesMut,\n\n net::{Method, Uri},\n\n tasks::TaskType,\n\n};\n\n\n\n/// This a arg which gives parameters for apps\n", "file_path": "src/features/args.rs", "rank": 92, "score": 30439.783577437534 }, { "content": " // Default lifetime period is 75 seconds.\n\n fn lifetime(&self) -> Duration;\n\n\n\n // Always return `true`\n\n fn disable_redirects(&self) -> bool;\n\n\n\n /// The number of concurrency\n\n fn concurrency(&self) -> u64;\n\n\n\n /// The chunk size of each concurrency for http task\n\n fn chunk_size(&self) -> u64;\n\n\n\n /// The number of retry of a task\n\n fn retries(&self) -> u64;\n\n\n\n /// The internal of each retry\n\n fn retry_wait(&self) -> u64;\n\n\n\n /// Task type\n\n fn task_type(&self) -> TaskType;\n\n\n\n /// To debug mode, if it returns true\n\n fn debug(&self) -> bool;\n\n\n\n /// To quiet mode, if it return true\n\n fn quiet(&self) -> bool;\n\n}\n", "file_path": "src/features/args.rs", "rank": 93, "score": 30424.7288622771 }, { "content": " fn timeout(&self) -> Duration;\n\n\n\n // Connection timeout\n\n //\n\n // i.e. max time to connect to remote host including dns name resolution.\n\n // Set to 1 second by default.\n\n fn dns_timeout(&self) -> Duration;\n\n\n\n // Set keep-alive period for opened connection.\n\n //\n\n // Keep-alive period is the period between connection usage. 
If\n\n // the delay between repeated usages of the same connection\n\n // exceeds this period, the connection is closed.\n\n // Default keep-alive period is 15 seconds.\n\n fn keep_alive(&self) -> Duration;\n\n\n\n // Set max lifetime period for connection.\n\n //\n\n // Connection lifetime is max lifetime of any opened connection\n\n // until it is closed regardless of keep-alive period.\n", "file_path": "src/features/args.rs", "rank": 94, "score": 30414.96386487614 }, { "content": " /// To quiet mode, if it return true\n\n fn quiet(&self) -> bool {\n\n self.matches.is_present(\"quiet\")\n\n }\n\n}\n\n\n\nimpl fmt::Debug for CmdArgs {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"CmdArgs\")\n\n .field(\"output\", &self.output())\n\n .field(\"method\", &self.method())\n\n .field(\"uri\", &self.uri())\n\n .field(\"data\", &self.data())\n\n .field(\"headers\", &self.headers())\n\n .field(\"proxy\", &self.proxy())\n\n .field(\"timeout\", &self.timeout())\n\n .field(\"dns_timeout\", &self.dns_timeout())\n\n .field(\"keep_alive\", &self.keep_alive())\n\n .field(\"lifetime\", &self.lifetime())\n\n .field(\"disable_redirects\", &self.disable_redirects())\n", "file_path": "src/arguments/cmd_args.rs", "rank": 95, "score": 29063.315513781356 }, { "content": " net::{net::parse_headers, Method, Uri},\n\n tasks::TaskType,\n\n },\n\n config::Config,\n\n features::args::Args,\n\n};\n\n\n\npub struct CmdArgs {\n\n matches: ArgMatches<'static>,\n\n config: Config,\n\n}\n\n\n\nimpl CmdArgs {\n\n pub fn new() -> CmdArgs {\n\n #[cfg(windows)]\n\n let _ = enable_ansi_support();\n\n\n\n let args = env::args();\n\n let inner = build_app();\n\n let matches = inner.get_matches_from(args);\n", "file_path": "src/arguments/cmd_args.rs", "rank": 96, "score": 29059.511909685523 }, { "content": " if let Ok(p) = env::var(\"HTTPS_PROXY\") {\n\n return Some(p);\n\n }\n\n\n\n if let Ok(p) = env::var(\"ALL_PROXY\") {\n\n return Some(p);\n\n }\n\n\n\n None\n\n }\n\n\n\n // Set request timeout\n\n //\n\n // Request timeout is the total time before a response must be received.\n\n // Default value is 5 seconds.\n\n fn timeout(&self) -> Duration {\n\n Duration::from_secs(\n\n self.matches\n\n .value_of(\"timeout\")\n\n .map(|i| i.parse::<u64>().unwrap())\n", "file_path": "src/arguments/cmd_args.rs", "rank": 97, "score": 29057.35435345957 }, { "content": " self.matches.value_of(\"data\").map(|d| BytesMut::from(d))\n\n }\n\n\n\n /// Request headers\n\n fn headers(&self) -> Vec<(String, String)> {\n\n let mut headers = if let Some(headers) = self.matches.values_of(\"header\") {\n\n parse_headers(headers)\n\n .unwrap()\n\n .into_iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string()))\n\n .collect::<Vec<(String, String)>>()\n\n } else {\n\n vec![]\n\n };\n\n\n\n for (uk, uv) in self.config.headers.as_ref().unwrap_or(&vec![]) {\n\n let mut has = false;\n\n for (k, _) in headers.iter() {\n\n if k.to_lowercase() == *uk {\n\n has = true;\n", "file_path": "src/arguments/cmd_args.rs", "rank": 98, "score": 29056.452373164684 }, { "content": " timeout: Duration,\n\n ) -> RangeRequestTask {\n\n RangeRequestTask {\n\n client,\n\n method,\n\n uri,\n\n data,\n\n stack,\n\n sender,\n\n id,\n\n timeout,\n\n }\n\n }\n\n\n\n async fn start(&mut self) {\n\n debug!(\"Fire RangeRequestTask\", self.id);\n\n while let Some(pair) = self.stack.pop() {\n\n match self.req(pair).await {\n\n // Exit whole process when `Error::InnerError` is returned\n\n Err(Error::InnerError(msg)) => {\n", "file_path": 
"src/app/core/http.rs", "rank": 99, "score": 44.61112139019764 } ]
Rust
src/input_byte_stream.rs
sunfishcode/nameless
f5986c3f999db099c05214d6fe61bd04bf81bf93
use crate::open_input::{open_input, Input};
use crate::{MediaType, Pseudonym};
use clap::TryFromOsArg;
use io_streams::StreamReader;
use layered_io::{Bufferable, LayeredReader, ReadLayered, Status};
use std::ffi::OsStr;
use std::fmt::{self, Debug, Formatter};
use std::io::{self, IoSliceMut, Read};
use terminal_io::NeverTerminalReader;

pub struct InputByteStream {
    name: String,
    reader: LayeredReader<NeverTerminalReader<StreamReader>>,
    media_type: MediaType,
    initial_size: Option<u64>,
}

impl InputByteStream {
    #[inline]
    pub fn media_type(&self) -> &MediaType {
        &self.media_type
    }

    #[inline]
    pub fn initial_size(&self) -> Option<u64> {
        self.initial_size
    }

    #[inline]
    pub fn pseudonym(&self) -> Pseudonym {
        Pseudonym::new(self.name.clone())
    }

    fn from_input(input: Input) -> Self {
        let reader = NeverTerminalReader::new(input.reader);
        let reader = LayeredReader::new(reader);
        Self {
            name: input.name,
            reader,
            media_type: input.media_type,
            initial_size: input.initial_size,
        }
    }
}

#[doc(hidden)]
impl TryFromOsArg for InputByteStream {
    type Error = anyhow::Error;

    #[inline]
    fn try_from_os_str_arg(os: &OsStr) -> anyhow::Result<Self> {
        open_input(os).map(Self::from_input)
    }
}

impl ReadLayered for InputByteStream {
    #[inline]
    fn read_with_status(&mut self, buf: &mut [u8]) -> io::Result<(usize, Status)> {
        self.reader.read_with_status(buf)
    }

    #[inline]
    fn read_vectored_with_status(
        &mut self,
        bufs: &mut [IoSliceMut<'_>],
    ) -> io::Result<(usize, Status)> {
        self.reader.read_vectored_with_status(bufs)
    }
}

impl Read for InputByteStream {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.reader.read(buf)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        self.reader.read_vectored(bufs)
    }

    #[cfg(can_vector)]
    #[inline]
    fn is_read_vectored(&self) -> bool {
        self.reader.is_read_vectored()
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        self.reader.read_to_end(buf)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        self.reader.read_to_string(buf)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        self.reader.read_exact(buf)
    }
}

impl Bufferable for InputByteStream {
    #[inline]
    fn abandon(&mut self) {
        self.reader.abandon()
    }
}

impl Debug for InputByteStream {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut b = f.debug_struct("InputByteStream");
        b.field("media_type", &self.media_type);
        b.field("initial_size", &self.initial_size);
        b.finish()
    }
}

#[test]
fn data_url_plain() {
    let mut s = String::new();
    InputByteStream::try_from_os_str_arg("data:,Hello%2C%20World!".as_ref())
        .unwrap()
        .read_to_string(&mut s)
        .unwrap();
    assert_eq!(s, "Hello, World!");
}

#[test]
fn data_url_base64() {
    let mut s = String::new();
    InputByteStream::try_from_os_str_arg("data:text/plain;base64,SGVsbG8sIFdvcmxkIQ==".as_ref())
        .unwrap()
        .read_to_string(&mut s)
        .unwrap();
    assert_eq!(s, "Hello, World!");
}
use crate::open_input::{open_input, Input};
use crate::{MediaType, Pseudonym};
use clap::TryFromOsArg;
use io_streams::StreamReader;
use layered_io::{Bufferable, LayeredReader, ReadLayered, Status};
use std::ffi::OsStr;
use std::fmt::{self, Debug, Formatter};
use std::io::{self, IoSliceMut, Read};
use terminal_io::NeverTerminalReader;

pub struct InputByteStream {
    name: String,
    reader: LayeredReader<NeverTerminalReader<StreamReader>>,
    media_type: MediaType,
    initial_size: Option<u64>,
}

impl InputByteStream {
    #[inline]
    pub fn media_type(&self) -> &MediaType {
        &self.media_type
    }

    #[inline]
    pub fn initial_size(&self) -> Option<u64> {
        self.initial_size
    }

    #[inline]
    pub fn pseudonym(&self) -> Pseudonym {
        Pseudonym::new(self.name.clone())
    }

    fn from_input(input: Input) -> Self {
        let reader = NeverTerminalReader::new(input.reader);
        let reader = LayeredReader::new(reader);
        Self {
            name: input.name,
            reader,
            media_type: input.media_type,
            initial_size: input.initial_size,
        }
    }
}

#[doc(hidden)]
impl TryFromOsArg for InputByteStream {
    type Error = anyhow::Error;

    #[inline]
    fn try_from_os_str_arg(os: &OsStr) -> anyhow::Result<Self> {
        open_input(os).map(Self::from_input)
    }
}

impl ReadLayered for InputByteStream {
    #[inline]
    fn read_with_status(&mut self, buf: &mut [u8]) -> io::Result<(usize, Status)> {
        self.reader.read_with_status(buf)
    }

    #[inline]
    fn read_vectored_with_status(
        &mut self,
        bufs: &mut [IoSliceMut<'_>],
    ) -> io::Result<(usize, Status)> {
        self.reader.read_vectored_with_status(bufs)
    }
}

impl Read for InputByteStream {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.reader.read(buf)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        self.reader.read_vectored(bufs)
    }

    #[cfg(can_vector)]
    #[inline]
    fn is_read_vectored(&self) -> bool {
        self.reader.is_read_vectored()
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        self.reader.read_to_end(buf)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        self.reader.read_to_string(buf)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        self.reader.read_exact(buf)
    }
}

impl Bufferable for InputByteStream {
    #[inline]
    fn abandon(&mut self) {
        self.reader.abandon()
    }
}

impl Debug for InputByteStream {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut b = f.debug_struct("InputByteStream");
        b.field("media_type", &self.media_type);
        b.field("initial_size", &self.initial_size);
        b.finish()
    }
}

#[test]
#[test]
fn data_url_base64() {
    let mut s = String::new();
    InputByteStream::try_from_os_str_arg("data:text/plain;base64,SGVsbG8sIFdvcmxkIQ==".as_ref())
        .unwrap()
        .read_to_string(&mut s)
        .unwrap();
    assert_eq!(s, "Hello, World!");
}
fn data_url_plain() {
    let mut s = String::new();
    InputByteStream::try_from_os_str_arg("data:,Hello%2C%20World!".as_ref())
        .unwrap()
        .read_to_string(&mut s)
        .unwrap();
    assert_eq!(s, "Hello, World!");
}
function_block-full_function
[ { "content": "#[kommand::main]\n\nfn main(mut input: InputByteStream, mut output: OutputByteStream) -> anyhow::Result<()> {\n\n copy(&mut input, &mut output)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/copy.rs", "rank": 0, "score": 139863.88478210891 }, { "content": "#[kommand::main]\n\nfn main(mut input: InputByteStream, mut output: OutputByteStream) -> anyhow::Result<()> {\n\n copy(&mut input, &mut output)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/kommand.rs", "rank": 1, "score": 139863.88478210891 }, { "content": "fn repl(mut io: BufReaderLineWriter<InteractiveTextStream>, color: bool) -> io::Result<()> {\n\n let mut s = String::new();\n\n\n\n loop {\n\n if color {\n\n write!(io, \"\\u{1b}[01;36mprompt>\\u{1b}[0m \\u{34f}\")?;\n\n } else {\n\n write!(io, \"prompt> \\u{34f}\")?;\n\n }\n\n\n\n if io.read_line(&mut s)? == 0 {\n\n // End of stream.\n\n io.abandon();\n\n return Ok(());\n\n }\n\n\n\n if s.trim() == \"exit\" {\n\n io.abandon();\n\n return Ok(());\n\n }\n\n\n\n eprintln!(\"[logging \\\"{}\\\"]\", s.trim().escape_default());\n\n writeln!(io, \"[received \\\"{}\\\"]\", s.trim().escape_default())?;\n\n\n\n s.clear();\n\n }\n\n}\n", "file_path": "examples/repl.rs", "rank": 2, "score": 138554.27300824152 }, { "content": "#[test]\n\n#[cfg_attr(windows, ignore)] // TODO: Improve path handling on Windows.\n\nfn test_path_to_name() {\n\n assert_eq!(path_to_name(\"file\", Path::new(\"/\")).unwrap(), \"/\");\n\n assert_eq!(path_to_name(\"file\", Path::new(\"/foo\")).unwrap(), \"/foo\");\n\n assert_eq!(\n\n path_to_name(\"file\", Path::new(\"/foo:bar\")).unwrap(),\n\n \"file:///foo%3Abar\"\n\n );\n\n assert_eq!(path_to_name(\"file\", Path::new(\"foo\")).unwrap(), \"foo\");\n\n // TODO: Redo how relative paths are handled.\n\n // assert_eq!(path_to_name(\"file\", Path::new(\"./foo\")).unwrap(), \"./foo\");\n\n // assert_eq!(\n\n // path_to_name(\"file\",\n\n // OsStr::from_bytes(b\"f\\xffoo\").as_ref()).unwrap(), \"\\\"./f\\\\\n\n // u{fffd}oo\\\"\"\n\n //);\n\n}\n", "file_path": "src/path_to_name.rs", "rank": 3, "score": 107020.68981322231 }, { "content": "#[kommand::main]\n\nfn main(pattern: Regex, mut inputs: Vec<InputTextStream>) -> anyhow::Result<()> {\n\n let mut output = OutputTextStream::try_from_os_str_arg(\"-\".as_ref())?;\n\n\n\n if inputs.is_empty() {\n\n inputs.push(InputTextStream::try_from_os_str_arg(\"-\".as_ref())?);\n\n }\n\n\n\n let print_inputs = inputs.len() > 1;\n\n\n\n 'inputs: for input in inputs {\n\n let pseudonym = input.pseudonym();\n\n let reader = BufReader::new(input);\n\n for line in reader.lines() {\n\n let line = line?;\n\n if pattern.is_match(&line) {\n\n if let Err(e) = (|| -> io::Result<()> {\n\n if print_inputs {\n\n output.write_pseudonym(&pseudonym)?;\n\n write!(output, \":\")?;\n\n }\n", "file_path": "examples/text-grep.rs", "rank": 4, "score": 95040.29854151035 }, { "content": "#[cfg(not(windows))]\n\nfn spawn_child(os: &OsStr, lossy: &str) -> anyhow::Result<Input> {\n\n use std::process::{Command, Stdio};\n\n assert!(lossy.starts_with(\"$(\"));\n\n if !lossy.ends_with(')') {\n\n return Err(anyhow!(\"child string must end in ')'\"));\n\n }\n\n let s = if let Some(s) = os.to_str() {\n\n s\n\n } else {\n\n return Err(anyhow!(\"Non-UTF-8 child strings not yet supported\"));\n\n };\n\n let words = shell_words::split(&s[2..s.len() - 1])?;\n\n let (first, rest) = words\n\n .split_first()\n\n .ok_or_else(|| anyhow!(\"child stream specified with '(...)' must contain a command\"))?;\n\n let child = Command::new(first)\n\n .args(rest)\n\n 
.stdin(Stdio::null())\n\n .stdout(Stdio::piped())\n\n .spawn()?;\n\n let reader = StreamReader::child_stdout(child.stdout.unwrap());\n\n Ok(Input {\n\n name: s.to_owned(),\n\n reader,\n\n media_type: MediaType::unknown(),\n\n initial_size: None,\n\n })\n\n}\n", "file_path": "src/open_input.rs", "rank": 5, "score": 92634.23066202758 }, { "content": "fn acquire_stdin() -> anyhow::Result<Input> {\n\n let reader = StreamReader::stdin()?;\n\n Ok(Input {\n\n name: \"-\".to_owned(),\n\n reader,\n\n media_type: MediaType::unknown(),\n\n initial_size: None,\n\n })\n\n}\n\n\n", "file_path": "src/open_input.rs", "rank": 6, "score": 91078.40642629049 }, { "content": "/// Test whether the rustc at `var(\"RUSTC\")` supports the given feature.\n\nfn has_feature(feature: &str) -> bool {\n\n let out_dir = var(\"OUT_DIR\").unwrap();\n\n let rustc = var(\"RUSTC\").unwrap();\n\n\n\n let mut child = std::process::Command::new(rustc)\n\n .arg(\"--crate-type=rlib\") // Don't require `main`.\n\n .arg(\"--emit=metadata\") // Do as little as possible but still parse.\n\n .arg(\"--out-dir\")\n\n .arg(out_dir) // Put the output somewhere inconsequential.\n\n .arg(\"-\") // Read from stdin.\n\n .stdin(std::process::Stdio::piped()) // Stdin is a pipe.\n\n .spawn()\n\n .unwrap();\n\n\n\n writeln!(child.stdin.take().unwrap(), \"#![feature({})]\", feature).unwrap();\n\n\n\n child.wait().unwrap().success()\n\n}\n", "file_path": "build.rs", "rank": 7, "score": 90340.81969104473 }, { "content": "#[kommand::main]\n\nfn main(input: Option<InputByteStream>, output: Option<OutputByteStream>) -> anyhow::Result<()> {\n\n let mut input = if let Some(input) = input {\n\n input\n\n } else {\n\n InputByteStream::try_from_os_str_arg(\"-\".as_ref())?\n\n };\n\n let mut output = if let Some(output) = output {\n\n output\n\n } else {\n\n OutputByteStream::try_from_os_str_arg(\"-\".as_ref())?\n\n };\n\n\n\n copy(&mut input, &mut output)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/copy_with_defaults.rs", "rank": 8, "score": 86809.9128524091 }, { "content": "fn open_path(path: &Path) -> anyhow::Result<Input> {\n\n let name = path_to_name(\"file\", path)?;\n\n // TODO: Should we have our own error type?\n\n let file = File::open(path).map_err(|err| anyhow!(\"{}: {}\", path.display(), err))?;\n\n if path.extension() == Some(Path::new(\"gz\").as_os_str()) {\n\n // TODO: We shouldn't really need to allocate a `PathBuf` here.\n\n let path = path.with_extension(\"\");\n\n let media_type = MediaType::from_extension(path.extension());\n\n let initial_size = None;\n\n let reader = GzDecoder::new(file);\n\n let reader = StreamReader::piped_thread(Box::new(reader))?;\n\n Ok(Input {\n\n name,\n\n reader,\n\n media_type,\n\n initial_size,\n\n })\n\n } else {\n\n let media_type = MediaType::from_extension(path.extension());\n\n let initial_size = Some(file.metadata()?.len());\n\n let reader = StreamReader::file(file);\n\n Ok(Input {\n\n name,\n\n reader,\n\n media_type,\n\n initial_size,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/open_input.rs", "rank": 9, "score": 83748.64104322964 }, { "content": "fn open_url(url: Url) -> anyhow::Result<Input> {\n\n match url.scheme() {\n\n \"http\" | \"https\" => open_http_url_str(url.as_str()),\n\n \"data\" => open_data_url_str(url.as_str()),\n\n \"file\" => {\n\n if !url.username().is_empty()\n\n || url.password().is_some()\n\n || url.has_host()\n\n || url.port().is_some()\n\n || url.query().is_some()\n\n || url.fragment().is_some()\n\n {\n\n return Err(anyhow!(\"file URL should only contain a path\"));\n\n 
}\n\n // TODO: https://docs.rs/url/latest/url/struct.Url.html#method.to_file_path\n\n // is ambiguous about how it can fail. What is `Path::new_opt`?\n\n open_path(\n\n &url.to_file_path()\n\n .map_err(|_: ()| anyhow!(\"unknown file URL weirdness\"))?,\n\n )\n\n }\n\n #[cfg(feature = \"ssh2\")]\n\n \"scp\" => open_scp_url(&url),\n\n other => Err(anyhow!(\"unsupported URL scheme \\\"{}\\\"\", other)),\n\n }\n\n}\n\n\n", "file_path": "src/open_input.rs", "rank": 10, "score": 83748.64104322964 }, { "content": "#[cfg(feature = \"ssh2\")]\n\nfn open_scp_url(scp_url: &Url) -> anyhow::Result<Input> {\n\n if scp_url.query().is_some() || scp_url.fragment().is_some() {\n\n return Err(anyhow!(\"scp URL should only contain a socket address, optional username, optional password, and optional path\"));\n\n }\n\n\n\n let host_str = match scp_url.host_str() {\n\n Some(host_str) => host_str,\n\n None => return Err(anyhow!(\"ssh URL should have a host\")),\n\n };\n\n let port = match scp_url.port() {\n\n Some(port) => port,\n\n None => 22, // default ssh port\n\n };\n\n let tcp = TcpStream::connect((host_str, port))?;\n\n let mut sess = Session::new().unwrap();\n\n sess.set_tcp_stream(tcp);\n\n sess.handshake().unwrap();\n\n\n\n let username = if scp_url.username().is_empty() {\n\n whoami::username()\n", "file_path": "src/open_input.rs", "rank": 11, "score": 80802.6397794892 }, { "content": "fn open_http_url_str(http_url_str: &str) -> anyhow::Result<Input> {\n\n // TODO: Set any headers, like \"Accept\"?\n\n let response = ureq::get(http_url_str)\n\n .call()\n\n .map_err(|e| anyhow!(\"HTTP error fetching {}: {}\", http_url_str, e))?;\n\n\n\n let initial_size = Some(\n\n response\n\n .header(\"Content-Length\")\n\n .ok_or_else(|| anyhow!(\"invalid Content-Length header\"))?\n\n .parse()?,\n\n );\n\n let media_type = response.content_type();\n\n let media_type = MediaType::from_mime(Mime::from_str(media_type)?);\n\n\n\n let reader = response.into_reader();\n\n let reader = StreamReader::piped_thread(Box::new(reader))?;\n\n Ok(Input {\n\n name: http_url_str.to_owned(),\n\n media_type,\n\n reader,\n\n initial_size,\n\n })\n\n}\n\n\n", "file_path": "src/open_input.rs", "rank": 12, "score": 78166.61009176541 }, { "content": "fn open_data_url_str(data_url_str: &str) -> anyhow::Result<Input> {\n\n // TODO: `DataUrl` should really implement `std::error::Error`.\n\n let data_url =\n\n DataUrl::process(data_url_str).map_err(|e| anyhow!(\"invalid data URL syntax: {:?}\", e))?;\n\n // TODO: `DataUrl` should really really implement `std::error::Error`.\n\n let (body, fragment) = data_url\n\n .decode_to_vec()\n\n .map_err(|_| anyhow!(\"invalid base64 encoding\"))?;\n\n\n\n if fragment.is_some() {\n\n return Err(anyhow!(\"data urls with fragments are unsupported\"));\n\n }\n\n\n\n // Awkwardly convert from `data_url::Mime` to `mime::Mime`.\n\n // TODO: Consider submitting patches to `data_url` to streamline this.\n\n let media_type =\n\n MediaType::from_mime(Mime::from_str(&data_url.mime_type().to_string()).unwrap());\n\n\n\n let reader = StreamReader::bytes(&body)?;\n\n Ok(Input {\n\n name: data_url_str.to_owned(),\n\n reader,\n\n media_type,\n\n initial_size: Some(data_url_str.len().try_into().unwrap()),\n\n })\n\n}\n\n\n\n// Handle URLs of the form `scp://[user@]host[:port][/path]`.\n", "file_path": "src/open_input.rs", "rank": 13, "score": 78166.61009176541 }, { "content": "#[test]\n\nfn mime_union() {\n\n assert_eq!(\n\n MediaType::from_mime(Mime::from_str(\"image/jpeg\").unwrap())\n\n 
.union(MediaType::from_mime(Mime::from_str(\"image/png\").unwrap()))\n\n .mime(),\n\n &Mime::from_str(\"image/*\").unwrap()\n\n );\n\n}\n", "file_path": "src/media_type.rs", "rank": 14, "score": 72472.2897420718 }, { "content": "#[test]\n\nfn mime_from_extension() {\n\n use std::path::Path;\n\n assert_eq!(MediaType::from_extension(None), MediaType::unknown());\n\n assert_eq!(\n\n MediaType::from_extension(Some(Path::new(\"jpg\").as_ref())).mime(),\n\n &Mime::from_str(\"image/jpeg\").unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/media_type.rs", "rank": 15, "score": 72472.2897420718 }, { "content": "fn use_feature(feature: &str) {\n\n println!(\"cargo:rustc-cfg={}\", feature);\n\n}\n\n\n", "file_path": "build.rs", "rank": 16, "score": 70379.36921942586 }, { "content": "fn generate_env_initializer(default: Box<Expr>, pat_ident: Ident2, result_type: Box<Type>) -> Expr {\n\n let case_insensitive = false;\n\n parse_quote! {\n\n match _kommand_env.#pat_ident {\n\n Some(os_str) => match {\n\n use std::convert::{Infallible, TryFrom};\n\n use std::ffi::{OsStr, OsString};\n\n use std::str::FromStr;\n\n use std::marker::PhantomData;\n\n\n\n struct Wrap<T>(T);\n", "file_path": "kommand/src/lib.rs", "rank": 17, "score": 70252.3826844115 }, { "content": "#[proc_macro_attribute]\n\npub fn initialize(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n // Placeholder\n\n item\n\n}\n", "file_path": "reaktor/src/lib.rs", "rank": 18, "score": 70189.0000157805 }, { "content": "#[proc_macro_attribute]\n\npub fn main(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let mut input = parse_macro_input!(item as syn::ItemFn);\n\n let ret = &input.sig.output;\n\n let name = &input.sig.ident;\n\n let mut body = &mut input.block;\n\n let asyncness = &input.sig.asyncness;\n\n let attrs = &input.attrs;\n\n\n\n if name != \"main\" {\n\n return TokenStream::from(quote_spanned! { name.span() =>\n\n compile_error!(\"only `main` can be tagged with `#[kommand::main]`\");\n\n });\n\n }\n\n\n\n // Traverse the function body and find all the `#[env_or_default]` variables.\n\n let mut env_visitor = EnvVisitor::default();\n\n env_visitor.visit_block_mut(&mut body);\n\n if let Some((message, span)) = env_visitor.err {\n\n return TokenStream::from(quote_spanned! 
{ span =>\n\n compile_error!(#message);\n", "file_path": "kommand/src/lib.rs", "rank": 19, "score": 70189.0000157805 }, { "content": "#[cfg(not(windows))]\n\nfn spawn_child(os: &OsStr, lossy: &str, media_type: MediaType) -> anyhow::Result<Output> {\n\n use std::process::{Command, Stdio};\n\n assert!(lossy.starts_with(\"$(\"));\n\n if !lossy.ends_with(')') {\n\n return Err(anyhow!(\"child string must end in ')'\"));\n\n }\n\n let s = if let Some(s) = os.to_str() {\n\n s\n\n } else {\n\n return Err(anyhow!(\"Non-UTF-8 child strings not yet supported\"));\n\n };\n\n let words = shell_words::split(&s[2..s.len() - 1])?;\n\n let (first, rest) = words\n\n .split_first()\n\n .ok_or_else(|| anyhow!(\"child stream specified with '(...)' must contain a command\"))?;\n\n let child = Command::new(first)\n\n .args(rest)\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::null())\n\n .spawn()?;\n\n let writer = StreamWriter::child_stdin(child.stdin.unwrap());\n\n Ok(Output {\n\n name: lossy.to_owned(),\n\n writer,\n\n media_type,\n\n })\n\n}\n", "file_path": "src/open_output.rs", "rank": 20, "score": 70128.7239829918 }, { "content": "fn use_feature_or_nothing(feature: &str) {\n\n if has_feature(feature) {\n\n use_feature(feature);\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 21, "score": 68091.13136743374 }, { "content": "#[test]\n\nfn data_url_base64() {\n\n let mut s = String::new();\n\n InputTextStream::try_from_os_str_arg(\"data:text/plain;base64,SGVsbG8sIFdvcmxkIQ==\".as_ref())\n\n .unwrap()\n\n .read_to_string(&mut s)\n\n .unwrap();\n\n assert_eq!(s, \"Hello, World!\\n\");\n\n}\n", "file_path": "src/input_text_stream.rs", "rank": 22, "score": 67453.02993203401 }, { "content": "#[test]\n\nfn data_url_plain() {\n\n let mut s = String::new();\n\n InputTextStream::try_from_os_str_arg(\"data:,Hello%2C%20World!\".as_ref())\n\n .unwrap()\n\n .read_to_string(&mut s)\n\n .unwrap();\n\n assert_eq!(s, \"Hello, World!\\n\");\n\n}\n\n\n", "file_path": "src/input_text_stream.rs", "rank": 23, "score": 67453.02993203401 }, { "content": "fn exclude<T: std::fmt::Debug>(bound: Bound<T>) -> Bound<T> {\n\n match bound {\n\n Bound::Included(offset) => Bound::Excluded(offset),\n\n Bound::Excluded(_offset) => panic!(\"bound is already excluded\"),\n\n Bound::Unbounded => Bound::Unbounded,\n\n }\n\n}\n", "file_path": "kommand/src/lib.rs", "rank": 26, "score": 66263.5721070653 }, { "content": "#[kommand::main]\n\nfn main(inputs: Vec<InputTextStream>) -> anyhow::Result<()> {\n\n let mut output = OutputTextStream::try_from_os_str_arg(\"-\".as_ref())?;\n\n\n\n for mut input in inputs {\n\n copy(&mut input, &mut output)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/cat.rs", "rank": 27, "score": 65977.47258287288 }, { "content": "fn acquire_stdout(media_type: MediaType) -> anyhow::Result<Output> {\n\n let stdout = StreamWriter::stdout()?;\n\n\n\n Ok(Output {\n\n name: \"-\".to_string(),\n\n writer: stdout,\n\n media_type,\n\n })\n\n}\n\n\n", "file_path": "src/open_output.rs", "rank": 28, "score": 63794.15990466034 }, { "content": "fn open_path(path: &Path, media_type: MediaType) -> anyhow::Result<Output> {\n\n let name = path_to_name(\"file\", path)?;\n\n let file = File::create(path).map_err(|err| anyhow!(\"{}: {}\", path.display(), err))?;\n\n if path.extension() == Some(Path::new(\"gz\").as_os_str()) {\n\n // TODO: We shouldn't really need to allocate a `PathBuf` here.\n\n let path = path.with_extension(\"\");\n\n let media_type = MediaType::union(media_type, MediaType::from_extension(path.extension()));\n\n // 6 is 
the default gzip compression level.\n\n let writer =\n\n StreamWriter::piped_thread(Box::new(GzEncoder::new(file, Compression::new(6))))?;\n\n Ok(Output {\n\n name,\n\n writer,\n\n media_type,\n\n })\n\n } else {\n\n let media_type = MediaType::union(media_type, MediaType::from_extension(path.extension()));\n\n let writer = StreamWriter::file(file);\n\n Ok(Output {\n\n name,\n\n writer,\n\n media_type,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/open_output.rs", "rank": 29, "score": 59896.449818398774 }, { "content": "fn open_url(url: Url, media_type: MediaType) -> anyhow::Result<Output> {\n\n match url.scheme() {\n\n // TODO: POST the data to HTTP? But the `Write` trait makes this\n\n // tricky because there's no hook for closing and finishing the\n\n // stream. `Drop` can't fail.\n\n \"http\" | \"https\" => Err(anyhow!(\"output to HTTP not supported yet\")),\n\n \"file\" => {\n\n if !url.username().is_empty()\n\n || url.password().is_some()\n\n || url.has_host()\n\n || url.port().is_some()\n\n || url.query().is_some()\n\n || url.fragment().is_some()\n\n {\n\n return Err(anyhow!(\"file URL should only contain a path\"));\n\n }\n\n // TODO: https://docs.rs/url/latest/url/struct.Url.html#method.to_file_path\n\n // is ambiguous about how it can fail. What is `Path::new_opt`?\n\n open_path(\n\n &url.to_file_path()\n\n .map_err(|_: ()| anyhow!(\"unknown file URL weirdness\"))?,\n\n media_type,\n\n )\n\n }\n\n \"data\" => Err(anyhow!(\"output to data URL isn't possible\")),\n\n other => Err(anyhow!(\"unsupported URL scheme \\\"{}\\\"\", other)),\n\n }\n\n}\n\n\n", "file_path": "src/open_output.rs", "rank": 30, "score": 59896.449818398774 }, { "content": "#[derive(Debug, Clap)]\n\n#[clap(name = \"example\", about = \"An example of StructOpt usage.\")]\n\nstruct Opt {\n\n /// Activate debug mode\n\n // short and long flags (-d, --debug) will be deduced from the field's name\n\n #[clap(short, long)]\n\n debug: bool,\n\n\n\n /// Set speed\n\n // we don't want to name it \"speed\", need to look smart\n\n #[clap(short = 'v', long = \"velocity\", default_value = \"42\")]\n\n speed: f64,\n\n\n\n /// Input source\n\n input: InputByteStream,\n\n\n\n /// Output sink, stdout if not present\n\n output: Option<OutputByteStream>,\n\n}\n\n\n", "file_path": "examples/clap.rs", "rank": 31, "score": 58021.85411183783 }, { "content": "#[derive(Default)]\n\nstruct EnvVisitor {\n\n err: Option<(String, Span2)>,\n\n vars: HashSet<String>,\n\n}\n\n\n\nimpl VisitMut for EnvVisitor {\n\n fn visit_stmt_mut(&mut self, stmt: &mut Stmt) {\n\n // We're looking for syntax like this:\n\n //\n\n // ```rust\n\n // #[env_or_default]\n\n // let foo: i32 = 0;\n\n // ```\n\n if let Stmt::Local(local) = stmt {\n\n let mut has_other_attrs = false;\n\n let mut has_env = false;\n\n for attr in &local.attrs {\n\n if attr.path.is_ident(\"env_or_default\") {\n\n has_env = true;\n\n } else {\n", "file_path": "kommand/src/lib.rs", "rank": 32, "score": 55338.90866709688 }, { "content": "#[kommand::main]\n\nfn main(output: LazyOutput<OutputTextStream>, inputs: Vec<InputTextStream>) -> anyhow::Result<()> {\n\n let media_type = match inputs.iter().next() {\n\n Some(first) if inputs.iter().map(InputTextStream::media_type).all_equal() => {\n\n first.media_type().clone()\n\n }\n\n _ => MediaType::text(),\n\n };\n\n\n\n let mut output = output.materialize(media_type)?;\n\n\n\n for mut input in inputs {\n\n copy_text(&mut input, &mut output)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/text-cat.rs", "rank": 33, "score": 55173.21868415493 }, 
{ "content": "#[doc(hidden)]\n\npub trait FromLazyOutput {\n\n type Err;\n\n\n\n fn from_lazy_output(name: OsString, media_type: MediaType) -> Result<Self, Self::Err>\n\n where\n\n Self: Sized;\n\n}\n\n\n\n/// A placeholder for an output stream which is created lazily. It is created\n\n/// when `materialize` is called.\n\npub struct LazyOutput<T: FromLazyOutput> {\n\n name: OsString,\n\n _phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<T: FromLazyOutput> LazyOutput<T> {\n\n /// Consume `self` and materialize an output stream.\n\n #[inline]\n\n pub fn materialize(self, media_type: MediaType) -> Result<T, T::Err> {\n\n T::from_lazy_output(self.name, media_type)\n", "file_path": "src/lazy_output.rs", "rank": 34, "score": 51935.289575025716 }, { "content": "fn main() {\n\n use_feature_or_nothing(\"can_vector\"); // https://github.com/rust-lang/rust/issues/69941\n\n use_feature_or_nothing(\"clamp\"); // https://github.com/rust-lang/rust/issues/44095\n\n use_feature_or_nothing(\"extend_one\"); // https://github.com/rust-lang/rust/issues/72631\n\n use_feature_or_nothing(\"io_error_more\"); // https://github.com/rust-lang/rust/issues/86442\n\n use_feature_or_nothing(\"pattern\"); // https://github.com/rust-lang/rust/issues/27721\n\n use_feature_or_nothing(\"seek_convenience\"); // https://github.com/rust-lang/rust/issues/59359\n\n use_feature_or_nothing(\"seek_stream_len\"); // https://github.com/rust-lang/rust/issues/59359\n\n use_feature_or_nothing(\"shrink_to\"); // https://github.com/rust-lang/rust/issues/56431\n\n use_feature_or_nothing(\"toowned_clone_into\"); // https://github.com/rust-lang/rust/issues/41263\n\n use_feature_or_nothing(\"try_reserve\"); // https://github.com/rust-lang/rust/issues/56431\n\n use_feature_or_nothing(\"unix_socket_peek\"); // https://github.com/rust-lang/rust/issues/76923\n\n use_feature_or_nothing(\"windows_by_handle\"); // https://github.com/rust-lang/rust/issues/63010\n\n use_feature_or_nothing(\"with_options\"); // https://github.com/rust-lang/rust/issues/65439\n\n use_feature_or_nothing(\"write_all_vectored\"); // https://github.com/rust-lang/rust/issues/70436\n\n // https://doc.rust-lang.org/unstable-book/library-features/windows-file-type-ext.html\n\n use_feature_or_nothing(\"windows_file_type_ext\");\n\n\n\n // Don't rerun this on changes other than build.rs, as we only depend on\n\n // the rustc version.\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n\n\n", "file_path": "build.rs", "rank": 35, "score": 47800.94358544653 }, { "content": "#[cfg(not(windows))]\n\nfn spawn_child(os: &OsStr, lossy: &str) -> anyhow::Result<Interactive> {\n\n use std::process::Command;\n\n assert!(lossy.starts_with(\"$(\"));\n\n if !lossy.ends_with(')') {\n\n return Err(anyhow!(\"child string must end in ')'\"));\n\n }\n\n let s = if let Some(s) = os.to_str() {\n\n s\n\n } else {\n\n return Err(anyhow!(\"Non-UTF-8 child strings not yet supported\"));\n\n };\n\n let words = shell_words::split(&s[2..s.len() - 1])?;\n\n let (first, rest) = words\n\n .split_first()\n\n .ok_or_else(|| anyhow!(\"child stream specified with '(...)' must contain a command\"))?;\n\n let mut command = Command::new(first);\n\n command.args(rest);\n\n let duplexer = StreamDuplexer::duplex_with_command(command)?;\n\n Ok(Interactive {\n\n name: lossy.to_owned(),\n\n duplexer,\n\n })\n\n}\n", "file_path": "src/open_interactive.rs", "rank": 36, "score": 47036.315735375305 }, { "content": "fn main() {\n\n let opt = Opt::parse();\n\n println!(\"{:?}\", opt);\n\n}\n", "file_path": "examples/clap.rs", "rank": 
37, "score": 46348.651254567194 }, { "content": "#[kommand::main]\n\nfn main(\n\n // A flag, true if used in the command line. The name of the argument will be,\n\n // by default, based on the name of the field.\n\n #[kommand(short, long)] debug: bool,\n\n // The number of occurrences of the `v/verbose` flag\n\n #[kommand(short, long, parse(from_occurrences))] verbose: u8,\n\n #[kommand(short, long, default_value = \"42\")] speed: f64,\n\n #[kommand(short, long)] output: OutputByteStream,\n\n // the long option will be translated by default to kebab case, i.e. `--nb-cars`.\n\n #[kommand(short = 'c', long)] nb_cars: Option<i32>,\n\n #[kommand(short, long)] level: Vec<String>,\n\n #[kommand(name = \"INPUT\")] inputs: Vec<InputByteStream>,\n\n) {\n\n dbg!(debug, verbose, speed, output, nb_cars, level, inputs);\n\n}\n", "file_path": "examples/basic.rs", "rank": 38, "score": 46348.651254567194 }, { "content": "#[kommand::main]\n\nfn main(\n\n pattern: Regex,\n\n output: LazyOutput<OutputTextStream>,\n\n inputs: Vec<InputTextStream>,\n\n #[kommand(short = 'l', long)] inputs_with_matches: bool,\n\n) -> anyhow::Result<()> {\n\n let mut output = output.materialize(MediaType::text())?;\n\n\n\n let print_inputs = inputs.len() > 1;\n\n\n\n 'next_input: for input in inputs {\n\n let pseudonym = input.pseudonym();\n\n for line in BufReader::new(input).lines() {\n\n let line = line?;\n\n if pattern.is_match(&line) {\n\n if inputs_with_matches {\n\n output.write_pseudonym(&pseudonym)?;\n\n writeln!(output, \"\")?;\n\n continue 'next_input;\n\n }\n", "file_path": "examples/grep.rs", "rank": 39, "score": 46348.651254567194 }, { "content": "/// Parse the `about` string as Markdown to find the `Arguments` section and\n\n/// extract the argument names and descriptions.\n\n///\n\n/// Recognize an `Arguments` header, followed by a list of `name - description`\n\n/// descriptions of the arguments. This is the syntax used in\n\n/// [official examples].\n\n///\n\n/// [official examples]: https://doc.rust-lang.org/rust-by-example/meta/doc.html#doc-comments\n\n///\n\n/// For example:\n\n///\n\n/// ```rust,ignore\n\n/// # Arguments\n\n///\n\n/// * `x` - x marks the spot\n\n/// * `y` - why ask y\n\n/// fn main(x: i32, y: i32) {\n\n/// ...\n\n/// }\n\n/// ```\n\nfn parse_arguments_from_comment(\n\n about: &str,\n\n span: Span2,\n\n) -> Result<(String, Vec<(String, String)>), TokenStream> {\n\n let mut p = Parser::new_ext(&about, opts()).into_offset_iter();\n\n while let Some((event, start_offset)) = p.next() {\n\n if matches!(event, Event::Start(Tag::Heading(HeadingLevel::H1, _, _))) {\n\n if let Some((Event::Text(content), _)) = p.next() {\n\n if &*content != \"Arguments\"\n\n || !matches!(\n\n p.next(),\n\n Some((Event::End(Tag::Heading(HeadingLevel::H1, _, _)), _))\n\n )\n\n {\n\n continue;\n\n }\n\n if let Some((Event::Start(Tag::List(None)), _)) = p.next() {\n\n return parse_arguments_list(start_offset, p, span, about);\n\n }\n\n return Err(TokenStream::from(quote_spanned! 
{ span =>\n\n compile_error!(\"`# Arguments` section does not contain a name/description list\");\n\n }));\n\n }\n\n }\n\n }\n\n\n\n // No `Arguments` section; just leave everything undocumented.\n\n Ok((about.to_string(), Vec::new()))\n\n}\n\n\n", "file_path": "kommand/src/lib.rs", "rank": 40, "score": 42707.04754744023 }, { "content": "fn parse_arguments_list(\n\n start_offset: Range<usize>,\n\n mut p: OffsetIter,\n\n span: Span2,\n\n about: &str,\n\n) -> Result<(String, Vec<(String, String)>), TokenStream> {\n\n let mut arg_info = Vec::new();\n\n\n\n while let Some((Event::Start(Tag::Item), _)) = p.next() {\n\n if let Some((Event::Code(var_name), _)) = p.next() {\n\n if let Some((Event::Text(var_description), _)) = p.next() {\n\n if let Some(parsed_description) = var_description.trim().strip_prefix(\"-\") {\n\n // We've parsed a row of the list. Record it.\n\n arg_info.push((var_name.to_string(), parsed_description.trim().to_string()));\n\n\n\n if matches!(p.next(), Some((Event::End(Tag::Item), _))) {\n\n // If we make it to the end of the item successfully,\n\n // continue to look for another item.\n\n continue;\n\n }\n", "file_path": "kommand/src/lib.rs", "rank": 41, "score": 42697.82758977422 }, { "content": "// Match rustdoc's options.\n\nfn opts() -> Options {\n\n Options::ENABLE_TABLES\n\n | Options::ENABLE_FOOTNOTES\n\n | Options::ENABLE_STRIKETHROUGH\n\n | Options::ENABLE_TASKLISTS\n\n}\n\n\n", "file_path": "kommand/src/lib.rs", "rank": 42, "score": 42509.65976386556 }, { "content": "/// Parse the `about` string as Markdown to find the `Environment Variables`\n\n/// section and extract the environment variable names and descriptions.\n\n///\n\n/// Recognize an `Environment Variables` header, followed by a list of\n\n/// `name - description` descriptions of the environment variables.\n\n///\n\n/// For example:\n\n///\n\n/// ```rust,ignore\n\n/// # Environment Variables\n\n///\n\n/// * `app_z` - z for zest\n\n/// * `app_w` - there isn't a trouble, you know it's a w\n\n/// fn main() {\n\n/// ...\n\n/// }\n\n/// ```\n\nfn parse_env_vars_from_comment(\n\n about: &str,\n\n span: Span2,\n\n) -> Result<(String, Vec<(String, String)>), TokenStream> {\n\n let mut p = Parser::new_ext(&about, opts()).into_offset_iter();\n\n while let Some((event, start_offset)) = p.next() {\n\n if matches!(event, Event::Start(Tag::Heading(HeadingLevel::H1, _, _))) {\n\n if let Some((Event::Text(content), _)) = p.next() {\n\n if &*content != \"Environment Variables\"\n\n || !matches!(\n\n p.next(),\n\n Some((Event::End(Tag::Heading(HeadingLevel::H1, _, _)), _))\n\n )\n\n {\n\n continue;\n\n }\n\n if let Some((Event::Start(Tag::List(None)), _)) = p.next() {\n\n return parse_env_vars_list(start_offset, p, span, about);\n\n }\n\n return Err(TokenStream::from(quote_spanned! 
{ span =>\n\n compile_error!(\"`# Arguments` section does not contain a name/description list\");\n\n }));\n\n }\n\n }\n\n }\n\n\n\n // No `Environment Variables` section; just leave everything undocumented.\n\n Ok((about.to_owned(), Vec::new()))\n\n}\n\n\n", "file_path": "kommand/src/lib.rs", "rank": 43, "score": 41678.357868730185 }, { "content": "fn parse_env_vars_list(\n\n start_offset: Range<usize>,\n\n mut p: OffsetIter,\n\n span: Span2,\n\n about: &str,\n\n) -> Result<(String, Vec<(String, String)>), TokenStream> {\n\n let mut env_info = Vec::new();\n\n\n\n while let Some((Event::Start(Tag::Item), _)) = p.next() {\n\n if let Some((Event::Code(var_name), _)) = p.next() {\n\n if let Some((Event::Text(var_description), _)) = p.next() {\n\n if let Some(parsed_description) = var_description.trim().strip_prefix(\"-\") {\n\n // We've parsed a row of the list. Record it.\n\n env_info.push((var_name.to_string(), parsed_description.trim().to_string()));\n\n\n\n if matches!(p.next(), Some((Event::End(Tag::Item), _))) {\n\n // If we make it to the end of the item successfully,\n\n // continue to look for another item.\n\n continue;\n\n }\n", "file_path": "kommand/src/lib.rs", "rank": 44, "score": 41670.41899999924 }, { "content": "#[kommand::main]\n\nfn main(duration: Duration) {\n\n std::thread::sleep(duration.into());\n\n}\n", "file_path": "examples/sleep.rs", "rank": 45, "score": 41399.912421664434 }, { "content": "#[kommand::main]\n\nfn main(x: i32, y: i32) {\n\n #[env_or_default]\n\n let z: i32 = 0;\n\n #[env_or_default]\n\n let w: i32 = 0;\n\n\n\n println!(\"{}\", x + y + z + w);\n\n}\n", "file_path": "kommand/examples/add.rs", "rank": 46, "score": 38495.15277957885 }, { "content": "fn acquire_stdin_stdout() -> anyhow::Result<Interactive> {\n\n let duplexer = StreamDuplexer::stdin_stdout()?;\n\n Ok(Interactive {\n\n name: \"-\".to_owned(),\n\n duplexer,\n\n })\n\n}\n\n\n", "file_path": "src/open_interactive.rs", "rank": 47, "score": 36469.25986490055 }, { "content": "#[kommand::main]\n\nfn main(io: InteractiveTextStream) -> anyhow::Result<()> {\n\n let io = BufReaderLineWriter::new(io);\n\n let color =\n\n io.color_support() != TerminalColorSupport::Monochrome && std::env::var(\"NOCOLOR\").is_err();\n\n\n\n match repl(io, color) {\n\n Ok(()) => Ok(()),\n\n Err(e) => match e.kind() {\n\n io::ErrorKind::BrokenPipe => Ok(()),\n\n _ => Err(e.into()),\n\n },\n\n }\n\n}\n\n\n", "file_path": "examples/repl.rs", "rank": 48, "score": 35635.60712195061 }, { "content": "#[kommand::main]\n\nfn main(io: InteractiveTextStream) -> anyhow::Result<()> {\n\n let mut io = BufReaderLineWriter::new(io);\n\n let mut v = [0_u8; PROMPT.len()];\n\n let mut s = String::new();\n\n\n\n // Read the \"prompt> \".\n\n io.read_exact(&mut v)?;\n\n if str::from_utf8(&v).unwrap() != PROMPT {\n\n panic!(\"missed prompt\");\n\n }\n\n\n\n // Write \"hello\".\n\n writeln!(io, \"hello\")?;\n\n\n\n io.read_line(&mut s)?;\n\n if s != \"[received \\\"hello\\\"]\\n\" {\n\n panic!(\"missed response: '{}'\", s);\n\n }\n\n\n\n // Read another \"prompt> \".\n", "file_path": "examples/repl-client.rs", "rank": 49, "score": 34815.35166404485 }, { "content": "fn open_url(url: Url) -> anyhow::Result<Interactive> {\n\n match url.scheme() {\n\n \"connect\" => open_connect_url(url),\n\n \"accept\" => open_accept_url(url),\n\n scheme @ \"http\" | scheme @ \"https\" | scheme @ \"file\" | scheme @ \"data\" => {\n\n Err(anyhow!(\"non-interactive URL scheme \\\"{}\\\"\", scheme))\n\n }\n\n other => Err(anyhow!(\"unsupported URL scheme \\\"{}\\\"\", 
other)),\n\n }\n\n}\n\n\n", "file_path": "src/open_interactive.rs", "rank": 50, "score": 34082.59910599492 }, { "content": "fn open_path(path: &Path) -> anyhow::Result<Interactive> {\n\n let name = path_to_name(\"file\", path)?;\n\n let duplexer = CharDevice::open(path)?;\n\n let duplexer = StreamDuplexer::char_device(duplexer);\n\n Ok(Interactive { name, duplexer })\n\n}\n\n\n", "file_path": "src/open_interactive.rs", "rank": 51, "score": 34082.59910599492 }, { "content": "fn open_connect_url(url: Url) -> anyhow::Result<Interactive> {\n\n if !url.username().is_empty()\n\n || url.password().is_some()\n\n || url.query().is_some()\n\n || url.fragment().is_some()\n\n {\n\n return Err(anyhow!(\"connect URL should only contain a socket address\"));\n\n }\n\n\n\n if url.path().is_empty() {\n\n let port = match url.port() {\n\n Some(port) => port,\n\n None => return Err(anyhow!(\"TCP connect URL should have a port\")),\n\n };\n\n let host_str = match url.host_str() {\n\n Some(host_str) => host_str,\n\n None => return Err(anyhow!(\"TCP connect URL should have a host\")),\n\n };\n\n\n\n let duplexer = TcpStream::connect((host_str, port))?;\n", "file_path": "src/open_interactive.rs", "rank": 52, "score": 33317.61135723488 }, { "content": "fn open_accept_url(url: Url) -> anyhow::Result<Interactive> {\n\n if !url.username().is_empty()\n\n || url.password().is_some()\n\n || url.query().is_some()\n\n || url.fragment().is_some()\n\n {\n\n return Err(anyhow!(\"accept URL should only contain a socket address\"));\n\n }\n\n\n\n if url.path().is_empty() {\n\n let port = match url.port() {\n\n Some(port) => port,\n\n None => return Err(anyhow!(\"accept URL should have a port\")),\n\n };\n\n let host_str = match url.host_str() {\n\n Some(host_str) => host_str,\n\n None => return Err(anyhow!(\"accept URL should have a host\")),\n\n };\n\n\n\n let listener = TcpListener::bind((host_str, port))?;\n", "file_path": "src/open_interactive.rs", "rank": 53, "score": 33317.61135723488 }, { "content": "/// This struct encapsulates the name of an entity whose name is being\n\n/// hidden in the `nameless` API. 
It can be written to an `OutputByteStream`\n\n/// but it's otherwise entirely opaque.\n\npub struct Pseudonym {\n\n pub(crate) name: String,\n\n}\n\n\n\nimpl Pseudonym {\n\n pub(crate) fn new(name: String) -> Self {\n\n Self { name }\n\n }\n\n}\n", "file_path": "src/pseudonym.rs", "rank": 54, "score": 33236.46259168441 }, { "content": "use anyhow::anyhow;\n\nuse std::path::Path;\n\nuse std::str;\n\n#[cfg(not(windows))]\n\nuse {\n\n percent_encoding::{percent_encode, NON_ALPHANUMERIC},\n\n std::path::Component,\n\n};\n\n\n\n#[cfg(not(windows))]\n\npub(crate) fn path_to_name(scheme: &str, path: &Path) -> anyhow::Result<String> {\n\n #[cfg(unix)]\n\n use std::os::unix::ffi::OsStrExt;\n\n if path.is_absolute() {\n\n let mut result = String::new();\n\n let mut components = path.components();\n\n assert!(components.next().unwrap() == Component::RootDir);\n\n if let Some(component) = components.next() {\n\n result += \"/\";\n\n result +=\n", "file_path": "src/path_to_name.rs", "rank": 55, "score": 31576.545973259043 }, { "content": " return Err(anyhow!(\"not supported yet: strings contains `:`\"));\n\n }\n\n let display = path.display().to_string();\n\n if result == display {\n\n Ok(result)\n\n } else {\n\n Err(anyhow!(\n\n \"not supported yet: \\\"interesting\\\" strings: {}\",\n\n result\n\n ))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(windows)]\n\npub(crate) fn path_to_name(_scheme: &str, path: &Path) -> anyhow::Result<String> {\n\n if path.is_absolute() {\n\n Ok(url::Url::from_file_path(path)\n\n .map_err(|_| {\n\n anyhow!(\n", "file_path": "src/path_to_name.rs", "rank": 56, "score": 31572.255498515042 }, { "content": " \"not supported yet: \\\"interesting\\\" strings: {}\",\n\n path.display()\n\n )\n\n })?\n\n .into())\n\n } else {\n\n Err(anyhow!(\"not supported yet: non-UTF-8 relative paths\",))\n\n }\n\n}\n\n\n\n#[test]\n\n#[cfg_attr(windows, ignore)] // TODO: Improve path handling on Windows.\n", "file_path": "src/path_to_name.rs", "rank": 57, "score": 31569.748214464005 }, { "content": " &percent_encode(component.as_os_str().as_bytes(), NON_ALPHANUMERIC).to_string();\n\n for component in components {\n\n result += \"/\";\n\n result +=\n\n &percent_encode(component.as_os_str().as_bytes(), NON_ALPHANUMERIC).to_string();\n\n }\n\n } else {\n\n result += \"/\";\n\n }\n\n if result == path.display().to_string() {\n\n Ok(result)\n\n } else {\n\n Ok(format!(\"{}://{}\", scheme, result))\n\n }\n\n } else {\n\n let result = str::from_utf8(path.as_os_str().as_bytes())\n\n .map_err(|_| anyhow!(\"not supported yet: non-UTF-8 relative paths\",))?\n\n .escape_default()\n\n .to_string();\n\n if result.contains(':') {\n", "file_path": "src/path_to_name.rs", "rank": 58, "score": 31567.801047567973 }, { "content": "/// Replace with `ops::Bound::cloned` once that's stable:\n\n/// https://github.com/rust-lang/rust/issues/61356\n\nfn clone_bound<T: Clone>(bound: Bound<&T>) -> Bound<T> {\n\n match bound {\n\n Bound::Included(offset) => Bound::Included(offset.clone()),\n\n Bound::Excluded(offset) => Bound::Excluded(offset.clone()),\n\n Bound::Unbounded => Bound::Unbounded,\n\n }\n\n}\n\n\n", "file_path": "kommand/src/lib.rs", "rank": 59, "score": 31374.320115094415 }, { "content": "use mime::Mime;\n\nuse std::ffi::OsStr;\n\nuse std::str::FromStr;\n\n\n\n/// The type of content in a stream. 
This can be either a Media Type\n\n/// (aka Mime Type) or a filename extension, both, or neither if nothing\n\n/// is known.\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub struct MediaType {\n\n mime: Mime,\n\n extension: String,\n\n}\n\n\n\nimpl MediaType {\n\n /// Construct a type representing completely unknown contents.\n\n pub fn unknown() -> Self {\n\n Self {\n\n mime: mime::STAR_STAR,\n\n extension: String::new(),\n\n }\n", "file_path": "src/media_type.rs", "rank": 60, "score": 31318.623693953432 }, { "content": " extension: s.to_string(),\n\n });\n\n }\n\n merged\n\n } else {\n\n Self::unknown()\n\n }\n\n } else {\n\n Self::unknown()\n\n }\n\n } else {\n\n Self::unknown()\n\n }\n\n }\n\n\n\n /// Return the Media Type, which is \"*/*\" if unknown.\n\n #[inline]\n\n pub fn mime(&self) -> &Mime {\n\n &self.mime\n\n }\n", "file_path": "src/media_type.rs", "rank": 61, "score": 31315.358549596272 }, { "content": " }\n\n\n\n /// Construct a type representing plain text (UTF-8) contents.\n\n pub fn text() -> Self {\n\n Self {\n\n mime: mime::TEXT_PLAIN_UTF_8,\n\n extension: String::new(),\n\n }\n\n }\n\n\n\n /// Construct a type representing the given Media Type.\n\n pub fn from_mime(mime: Mime) -> Self {\n\n let extension = match mime_guess::get_mime_extensions(&mime) {\n\n Some(exts) => {\n\n if exts.len() == 1 {\n\n exts[0].to_string()\n\n } else {\n\n String::new()\n\n }\n\n }\n", "file_path": "src/media_type.rs", "rank": 62, "score": 31311.928535871983 }, { "content": "\n\n /// Return the filename extension, which is empty if unknown.\n\n #[inline]\n\n pub fn extension(&self) -> &str {\n\n &self.extension\n\n }\n\n\n\n /// Return a type which is the generalization of `self` and `other`. Falls\n\n /// back to `MediaType::unknown()` if it cannot be determined.\n\n pub fn union(self, other: Self) -> Self {\n\n if self == other {\n\n self\n\n } else if other == MediaType::unknown() {\n\n self\n\n } else if self == MediaType::unknown() {\n\n other\n\n } else if self.mime.type_() == other.mime.type_()\n\n && self.mime.suffix() == other.mime.suffix()\n\n && self.mime.params().eq(other.mime.params())\n\n {\n", "file_path": "src/media_type.rs", "rank": 63, "score": 31311.862126399195 }, { "content": " None => String::new(),\n\n };\n\n\n\n Self { mime, extension }\n\n }\n\n\n\n /// Construct a type representing the given filename extension.\n\n pub fn from_extension(extension: Option<&OsStr>) -> Self {\n\n if let Some(ext) = extension {\n\n if let Some(s) = ext.to_str() {\n\n let mut guesses = mime_guess::from_ext(s).iter();\n\n\n\n if let Some(first) = guesses.next() {\n\n let mut merged = Self {\n\n mime: first,\n\n extension: s.to_string(),\n\n };\n\n for guess in guesses {\n\n merged = merged.union(Self {\n\n mime: guess,\n", "file_path": "src/media_type.rs", "rank": 64, "score": 31311.467524342137 }, { "content": " } else {\n\n MediaType::unknown()\n\n }\n\n } else if self == MediaType::text() {\n\n if self.mime.type_() == other.mime.type_() {\n\n other\n\n } else {\n\n MediaType::unknown()\n\n }\n\n } else {\n\n MediaType::unknown()\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/media_type.rs", "rank": 65, "score": 31307.011021064816 }, { "content": " if other.mime.subtype().as_str() == mime::STAR && other.mime.suffix().is_none() {\n\n self\n\n } else if self.mime.subtype().as_str() == mime::STAR && self.mime.suffix().is_none() {\n\n other\n\n } else {\n\n // Create a new mime value with the subtype replaced by star.\n\n let mut s = format!(\"{}/{}\", self.mime.type_(), 
mime::STAR);\n\n if let Some(suffix) = self.mime.suffix() {\n\n s += &format!(\"+{}\", suffix);\n\n }\n\n if self.mime.params().next().is_some() {\n\n for param in self.mime.params() {\n\n s += &format!(\"; {}={}\", param.0, param.1);\n\n }\n\n }\n\n MediaType::from_mime(Mime::from_str(&s).unwrap())\n\n }\n\n } else if other == MediaType::text() {\n\n if self.mime.type_() == other.mime.type_() {\n\n self\n", "file_path": "src/media_type.rs", "rank": 66, "score": 31304.41785760932 }, { "content": "use crate::path_to_name::path_to_name;\n\nuse crate::{MediaType, Mime};\n\nuse anyhow::anyhow;\n\nuse data_url::DataUrl;\n\nuse flate2::read::GzDecoder;\n\nuse io_streams::StreamReader;\n\nuse std::convert::TryInto;\n\nuse std::ffi::OsStr;\n\nuse std::fs::File;\n\nuse std::path::Path;\n\nuse std::str::FromStr;\n\nuse url::Url;\n\n#[cfg(feature = \"ssh2\")]\n\nuse {percent_encoding::percent_decode, ssh2::Session, std::net::TcpStream};\n\n\n\npub(crate) struct Input {\n\n pub(crate) name: String,\n\n pub(crate) reader: StreamReader,\n\n pub(crate) media_type: MediaType,\n\n pub(crate) initial_size: Option<u64>,\n", "file_path": "src/open_input.rs", "rank": 67, "score": 30554.764242650926 }, { "content": " } else {\n\n scp_url.username().to_owned()\n\n };\n\n let username = percent_decode(username.as_bytes()).decode_utf8()?;\n\n\n\n if let Some(password) = scp_url.password() {\n\n let password = percent_decode(password.as_bytes()).decode_utf8()?;\n\n sess.userauth_password(&username, &password)?;\n\n } else {\n\n sess.userauth_agent(&username)?;\n\n }\n\n\n\n assert!(sess.authenticated());\n\n\n\n let path = Path::new(scp_url.path());\n\n let (channel, stat) = sess.scp_recv(path)?;\n\n let reader = StreamReader::piped_thread(Box::new(channel))?;\n\n let media_type = MediaType::from_extension(path.extension());\n\n Ok(Input {\n\n name: scp_url.as_str().to_owned(),\n\n reader,\n\n media_type,\n\n initial_size: Some(stat.size()),\n\n })\n\n}\n\n\n", "file_path": "src/open_input.rs", "rank": 68, "score": 30543.696197493246 }, { "content": "}\n\n\n\npub(crate) fn open_input(os: &OsStr) -> anyhow::Result<Input> {\n\n if let Some(s) = os.to_str() {\n\n // If we can parse it as a URL, treat it as such.\n\n if let Ok(url) = Url::parse(s) {\n\n return open_url(url);\n\n }\n\n\n\n // Special-case \"-\" to mean stdin.\n\n if s == \"-\" {\n\n return acquire_stdin();\n\n }\n\n }\n\n\n\n #[cfg(not(windows))]\n\n {\n\n let lossy = os.to_string_lossy();\n\n\n\n // Strings beginning with \"$(\" are commands.\n\n if lossy.starts_with(\"$(\") {\n\n return spawn_child(os, &lossy);\n\n }\n\n }\n\n\n\n // Otherwise try opening it as a path in the filesystem namespace.\n\n open_path(Path::new(os))\n\n}\n\n\n", "file_path": "src/open_input.rs", "rank": 69, "score": 30541.92911378933 }, { "content": "impl ReadTextLayered for InputTextStream {\n\n #[inline]\n\n fn read_text_substr_with_status(\n\n &mut self,\n\n buf: &mut TextSubstr,\n\n ) -> io::Result<(usize, Status)> {\n\n self.reader.read_text_substr_with_status(buf)\n\n }\n\n\n\n #[inline]\n\n fn read_exact_text_substr_using_status(&mut self, buf: &mut TextSubstr) -> io::Result<Status> {\n\n self.reader.read_exact_text_substr_using_status(buf)\n\n }\n\n}\n\n\n\nimpl Debug for InputTextStream {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n // Don't print the name here, as that's an implementation detail.\n\n let mut b = f.debug_struct(\"InputTextStream\");\n\n b.field(\"media_type\", &self.media_type);\n\n b.field(\"initial_size\", &self.initial_size);\n\n 
b.finish()\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/input_text_stream.rs", "rank": 71, "score": 29222.439674681664 }, { "content": "\n\n #[inline]\n\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n\n self.reader.read_exact(buf)\n\n }\n\n}\n\n\n\nimpl Bufferable for InputTextStream {\n\n #[inline]\n\n fn abandon(&mut self) {\n\n self.reader.abandon()\n\n }\n\n}\n\n\n\nimpl ReadStr for InputTextStream {\n\n #[inline]\n\n fn read_str(&mut self, buf: &mut str) -> io::Result<usize> {\n\n self.reader.read_str(buf)\n\n }\n\n}\n", "file_path": "src/input_text_stream.rs", "rank": 72, "score": 29217.579295103325 }, { "content": " #[inline]\n\n fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {\n\n self.reader.read_vectored(bufs)\n\n }\n\n\n\n #[cfg(can_vector)]\n\n #[inline]\n\n fn is_read_vectored(&self) -> bool {\n\n self.reader.is_read_vectored()\n\n }\n\n\n\n #[inline]\n\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n\n self.reader.read_to_end(buf)\n\n }\n\n\n\n #[inline]\n\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n\n self.reader.read_to_string(buf)\n\n }\n", "file_path": "src/input_text_stream.rs", "rank": 73, "score": 29214.91650722145 }, { "content": " #[inline]\n\n fn read_with_status(&mut self, buf: &mut [u8]) -> io::Result<(usize, Status)> {\n\n self.reader.read_with_status(buf)\n\n }\n\n\n\n #[inline]\n\n fn read_vectored_with_status(\n\n &mut self,\n\n bufs: &mut [IoSliceMut<'_>],\n\n ) -> io::Result<(usize, Status)> {\n\n self.reader.read_vectored_with_status(bufs)\n\n }\n\n}\n\n\n\nimpl Read for InputTextStream {\n\n #[inline]\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n self.reader.read(buf)\n\n }\n\n\n", "file_path": "src/input_text_stream.rs", "rank": 76, "score": 29214.23946542224 }, { "content": "use crate::open_input::{open_input, Input};\n\nuse crate::{MediaType, Pseudonym};\n\nuse basic_text::{ReadText, ReadTextLayered, TextReader, TextSubstr};\n\nuse clap::TryFromOsArg;\n\nuse io_streams::StreamReader;\n\nuse layered_io::{Bufferable, LayeredReader, ReadLayered, Status};\n\nuse std::ffi::OsStr;\n\nuse std::fmt::{self, Debug, Formatter};\n\nuse std::io::{self, IoSliceMut, Read};\n\nuse terminal_io::TerminalReader;\n\nuse utf8_io::{ReadStr, ReadStrLayered, Utf8Reader};\n\n\n\n/// In input stream for plain text input.\n\n///\n\n/// An `InputTextStream` implements `Read` so it supports `read`,\n\n/// `read_to_end`, `read_to_str`, etc. and can be used anywhere a\n\n/// `Read`-implementing object is needed.\n\n///\n\n/// `InputTextStream` is unbuffered (even when it is stdin), so wrapping\n\n/// it in a [`std::io::BufReader`] is recommended for performance and\n", "file_path": "src/input_text_stream.rs", "rank": 79, "score": 29212.629707387045 }, { "content": " pub fn initial_size(&self) -> Option<u64> {\n\n self.initial_size\n\n }\n\n\n\n /// Return a `Pseudonym` which encapsulates this stream's name (typically\n\n /// its filesystem path or its URL). 
This allows it to be written to an\n\n /// `OutputByteStream` while otherwise remaining entirely opaque.\n\n pub fn pseudonym(&self) -> Pseudonym {\n\n Pseudonym::new(self.name.clone())\n\n }\n\n\n\n fn from_input(input: Input) -> Self {\n\n let reader = TerminalReader::with_handle(input.reader);\n\n let reader = TextReader::new(reader);\n\n let media_type = input.media_type.union(MediaType::text());\n\n Self {\n\n name: input.name,\n\n reader,\n\n media_type,\n\n initial_size: input.initial_size,\n", "file_path": "src/input_text_stream.rs", "rank": 80, "score": 29211.72854176306 }, { "content": "\n\nimpl ReadStrLayered for InputTextStream {\n\n #[inline]\n\n fn read_str_with_status(&mut self, buf: &mut str) -> io::Result<(usize, Status)> {\n\n self.reader.read_str_with_status(buf)\n\n }\n\n}\n\n\n\nimpl ReadText for InputTextStream {\n\n #[inline]\n\n fn read_text_substr(&mut self, buf: &mut TextSubstr) -> io::Result<usize> {\n\n self.reader.read_text_substr(buf)\n\n }\n\n\n\n #[inline]\n\n fn read_exact_text_substr(&mut self, buf: &mut TextSubstr) -> io::Result<()> {\n\n self.reader.read_exact_text_substr(buf)\n\n }\n\n}\n\n\n", "file_path": "src/input_text_stream.rs", "rank": 81, "score": 29211.56163540133 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Implement `TryFromOsArg` so that `clap_derive` can parse `InputTextStream`\n\n/// arguments automatically.\n\n///\n\n/// This is hidden from the documentation as it opens resources from\n\n/// strings using ambient authorities.\n\n#[doc(hidden)]\n\nimpl TryFromOsArg for InputTextStream {\n\n type Error = anyhow::Error;\n\n\n\n #[inline]\n\n fn try_from_os_str_arg(os: &OsStr) -> anyhow::Result<Self> {\n\n open_input(os).map(Self::from_input)\n\n }\n\n}\n\n\n\nimpl ReadLayered for InputTextStream {\n", "file_path": "src/input_text_stream.rs", "rank": 83, "score": 29208.544543370677 }, { "content": " reader: TextReader<Utf8Reader<LayeredReader<TerminalReader<StreamReader>>>>,\n\n media_type: MediaType,\n\n initial_size: Option<u64>,\n\n}\n\n\n\nimpl InputTextStream {\n\n /// If the input stream metadata implies a particular media type, also\n\n /// known as MIME type, return it. Many input streams know their type,\n\n /// though some do not. This is strictly based on available metadata, and\n\n /// not on examining any of the contents of the stream, and there's no\n\n /// guarantee the contents are valid.\n\n pub fn media_type(&self) -> &MediaType {\n\n &self.media_type\n\n }\n\n\n\n /// Return the initial size of the stream, in bytes. This is strictly based\n\n /// on available metadata, and not on examining any of the contents of the\n\n /// stream, and the stream could end up being shorter or longer if the\n\n /// source is concurrently modified or it produces content which must be\n\n /// adapted to meet the \"plain text\" requirements.\n", "file_path": "src/input_text_stream.rs", "rank": 86, "score": 29201.64252196404 }, { "content": "/// ease of use.\n\n///\n\n/// The primary way to construct an `InputTextStream` is to use it as\n\n/// a type in a `kommand` argument or `clap_derive` struct. 
Command-line\n\n/// arguments will then be automatically converted into input streams.\n\n/// Currently supported syntaxes include:\n\n/// - Names starting with `https:` or `http:`, which are interpreted as URLs\n\n/// to open.\n\n/// - Names starting with `data:` are interpreted as data URLs proving the\n\n/// data in their payload.\n\n/// - Names starting with `file:` are interpreted as local filesystem URLs\n\n/// providing paths to files to open.\n\n/// - \"-\" is interpreted as standard input.\n\n/// - \"(...)\" runs a command with a pipe from the child process' stdout, on\n\n/// platforms whch support it.\n\n/// - Names which don't parse as URLs are interpreted as plain local\n\n/// filesystem paths. To force a string to be interpreted as a plain local\n\n/// path, arrange for it to begin with `./` or `/`.\n\npub struct InputTextStream {\n\n name: String,\n", "file_path": "src/input_text_stream.rs", "rank": 87, "score": 29200.69905553736 }, { "content": "/// arguments automatically.\n\n///\n\n/// This is hidden from the documentation as it opens resources from\n\n/// strings using ambient authorities.\n\n#[doc(hidden)]\n\nimpl TryFromOsArg for InteractiveTextStream {\n\n type Error = anyhow::Error;\n\n\n\n #[inline]\n\n fn try_from_os_str_arg(os: &OsStr) -> anyhow::Result<Self> {\n\n open_interactive(os).map(Self::from_interactive)\n\n }\n\n}\n\n\n\nimpl ReadLayered for InteractiveTextStream {\n\n #[inline]\n\n fn read_with_status(&mut self, buf: &mut [u8]) -> io::Result<(usize, Status)> {\n\n self.duplexer.read_with_status(buf)\n\n }\n\n\n", "file_path": "src/interactive_text_stream.rs", "rank": 88, "score": 25.787685687522668 }, { "content": " exit(rustix::process::EXIT_FAILURE);\n\n #[cfg(windows)]\n\n exit(libc::EXIT_FAILURE);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl FromLazyOutput for OutputTextStream {\n\n type Err = anyhow::Error;\n\n\n\n fn from_lazy_output(name: OsString, media_type: MediaType) -> Result<Self, anyhow::Error> {\n\n open_output(&name, media_type).map(Self::from_output)\n\n }\n\n}\n\n\n\nimpl Debug for OutputTextStream {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n // Don't print the name here, as that's an implementation detail.\n\n let mut b = f.debug_struct(\"OutputTextStream\");\n\n b.field(\"media_type\", &self.media_type);\n\n b.finish()\n\n }\n\n}\n", "file_path": "src/output_text_stream.rs", "rank": 89, "score": 25.325617331418567 }, { "content": " #[inline]\n\n fn write_fmt(&mut self, fmt: Arguments<'_>) -> io::Result<()> {\n\n self.writer.write_fmt(fmt)\n\n }\n\n}\n\n\n\nimpl Bufferable for OutputByteStream {\n\n #[inline]\n\n fn abandon(&mut self) {\n\n self.writer.abandon()\n\n }\n\n}\n\n\n\nimpl FromLazyOutput for OutputByteStream {\n\n type Err = anyhow::Error;\n\n\n\n fn from_lazy_output(name: OsString, media_type: MediaType) -> Result<Self, anyhow::Error> {\n\n open_output(&name, media_type).and_then(Self::from_output)\n\n }\n\n}\n", "file_path": "src/output_byte_stream.rs", "rank": 90, "score": 24.85425574225604 }, { "content": " #[cfg(can_vector)]\n\n #[inline]\n\n fn is_read_vectored(&self) -> bool {\n\n self.duplexer.is_read_vectored()\n\n }\n\n\n\n #[inline]\n\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n\n self.duplexer.read_to_end(buf)\n\n }\n\n\n\n #[inline]\n\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n\n self.duplexer.read_to_string(buf)\n\n }\n\n\n\n #[inline]\n\n fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {\n\n self.duplexer.read_exact(buf)\n\n 
}\n", "file_path": "src/interactive_text_stream.rs", "rank": 91, "score": 24.107454214586806 }, { "content": "impl TryFromOsArg for InteractiveByteStream {\n\n type Error = anyhow::Error;\n\n\n\n #[inline]\n\n fn try_from_os_str_arg(os: &OsStr) -> anyhow::Result<Self> {\n\n open_interactive(os).map(Self::from_interactive)\n\n }\n\n}\n\n\n\nimpl ReadLayered for InteractiveByteStream {\n\n #[inline]\n\n fn read_with_status(&mut self, buf: &mut [u8]) -> io::Result<(usize, Status)> {\n\n self.duplexer.read_with_status(buf)\n\n }\n\n\n\n #[inline]\n\n fn read_vectored_with_status(\n\n &mut self,\n\n bufs: &mut [IoSliceMut<'_>],\n\n ) -> io::Result<(usize, Status)> {\n", "file_path": "src/interactive_byte_stream.rs", "rank": 92, "score": 23.862480789160376 }, { "content": "\n\n #[inline]\n\n fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {\n\n default_read_to_end(self, buf)\n\n }\n\n\n\n #[inline]\n\n fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {\n\n default_read_to_string(self, buf)\n\n }\n\n}\n\n\n\nimpl WriteLayered for InteractiveByteStream {\n\n #[inline]\n\n fn close(&mut self) -> io::Result<()> {\n\n self.duplexer.close()\n\n }\n\n}\n\n\n\nimpl Write for InteractiveByteStream {\n", "file_path": "src/interactive_byte_stream.rs", "rank": 93, "score": 23.228518167786934 }, { "content": "use crate::MediaType;\n\nuse clap::TryFromOsArg;\n\nuse std::error::Error;\n\nuse std::ffi::{OsStr, OsString};\n\nuse std::fmt;\n\nuse std::marker::PhantomData;\n\n\n\n#[doc(hidden)]\n\n#[derive(Debug)]\n\npub struct Never {}\n\n\n\nimpl Error for Never {}\n\n\n\nimpl fmt::Display for Never {\n\n fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n panic!()\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n", "file_path": "src/lazy_output.rs", "rank": 94, "score": 23.121612364027083 }, { "content": " self.duplexer.read_vectored_with_status(bufs)\n\n }\n\n}\n\n\n\nimpl Read for InteractiveByteStream {\n\n #[inline]\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n default_read(self, buf)\n\n }\n\n\n\n #[inline]\n\n fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {\n\n default_read_vectored(self, bufs)\n\n }\n\n\n\n #[cfg(can_vector)]\n\n #[inline]\n\n fn is_read_vectored(&self) -> bool {\n\n self.duplexer.is_read_vectored()\n\n }\n", "file_path": "src/interactive_byte_stream.rs", "rank": 95, "score": 21.85561590722109 }, { "content": " #[inline]\n\n fn color_support(&self) -> TerminalColorSupport {\n\n self.duplexer.color_support()\n\n }\n\n\n\n #[inline]\n\n fn color_preference(&self) -> bool {\n\n self.duplexer.color_preference()\n\n }\n\n\n\n #[inline]\n\n fn is_output_terminal(&self) -> bool {\n\n self.duplexer.is_output_terminal()\n\n }\n\n}\n\n\n\nimpl DuplexTerminal for InteractiveByteStream {}\n\n\n\nimpl Duplex for InteractiveByteStream {}\n\n\n\nimpl Debug for InteractiveByteStream {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n // Don't print the name here, as that's an implementation detail.\n\n let mut b = f.debug_struct(\"InteractiveByteStream\");\n\n b.finish()\n\n }\n\n}\n", "file_path": "src/interactive_byte_stream.rs", "rank": 96, "score": 21.102536441228928 }, { "content": "///\n\n/// Programs using `OutputTextStream` as an argument should avoid using\n\n/// `std::io::stdout`, `std::println`, or anything else which uses standard\n\n/// output implicitly.\n\npub struct OutputTextStream {\n\n name: String,\n\n writer: TextWriter<Utf8Writer<LayeredWriter<TerminalWriter<StreamWriter>>>>,\n\n 
media_type: MediaType,\n\n helper_child: Option<(Child, StreamWriter)>,\n\n}\n\n\n\nimpl OutputTextStream {\n\n /// Write the given `Pseudonym` to the output stream.\n\n #[inline]\n\n pub fn write_pseudonym(&mut self, pseudonym: &Pseudonym) -> io::Result<()> {\n\n Write::write_all(self, pseudonym.name.as_bytes())\n\n }\n\n\n\n /// Write the name of the given output stream to the output stream. This is\n\n /// needed because the name of an `OutputTextStream` is not available in\n", "file_path": "src/output_text_stream.rs", "rank": 97, "score": 20.910278506100777 }, { "content": "\n\nimpl Debug for OutputByteStream {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n // Don't print the name here, as that's an implementation detail.\n\n let mut b = f.debug_struct(\"OutputByteStream\");\n\n b.field(\"media_type\", &self.media_type);\n\n b.finish()\n\n }\n\n}\n", "file_path": "src/output_byte_stream.rs", "rank": 98, "score": 20.881320711475354 }, { "content": " #[inline]\n\n fn is_output_terminal(&self) -> bool {\n\n self.duplexer.is_output_terminal()\n\n }\n\n}\n\n\n\nimpl DuplexTerminal for InteractiveTextStream {}\n\n\n\nimpl Duplex for InteractiveTextStream {}\n\n\n\nimpl Bufferable for InteractiveTextStream {\n\n #[inline]\n\n fn abandon(&mut self) {\n\n self.duplexer.abandon()\n\n }\n\n}\n\n\n\nimpl ReadStr for InteractiveTextStream {\n\n #[inline]\n\n fn read_str(&mut self, buf: &mut str) -> io::Result<usize> {\n", "file_path": "src/interactive_text_stream.rs", "rank": 99, "score": 20.433573766796805 } ]
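The src/media_type.rs snippets quoted above explain how `MediaType::union` generalizes two media types, falling back to the unknown `*/*` type when they cannot be reconciled. As a quick illustration (a hypothetical check, not part of any dataset record, assuming the quoted `MediaType` type is in scope), uniting a known type with `MediaType::unknown()` keeps the known type:

#[test]
fn union_with_unknown_keeps_known_type() {
    // `union` consumes both operands; `MediaType` derives `Clone`, `Debug` and
    // `PartialEq`, so the result can be compared against a clone of the original.
    let text = MediaType::text();
    assert_eq!(text.clone().union(MediaType::unknown()), text);
}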
Rust
plumber_vdf/src/parsers.rs
lasa01/plumber_core
056b42a7f0b29793407153cf8b52c6a0ca2e6076
use nom::{ branch::alt, bytes::complete::{escaped, is_a, is_not, tag, take_till, take_until}, character::complete::{anychar, char, multispace1, none_of, one_of, space0, space1}, combinator::{all_consuming, cut, eof, not, opt, peek, recognize, value}, error::{ErrorKind, ParseError}, sequence::{delimited, preceded, terminated}, Err, IResult, Parser, }; fn unit<I, O, E, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, (), E> where F: Parser<I, O, E>, { move |input: I| parser.parse(input).map(|(i, _)| (i, ())) } fn ignore_many0<I, O, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, (), E> where I: Clone + PartialEq, F: Parser<I, O, E>, E: ParseError<I>, { move |mut input: I| { loop { match f.parse(input.clone()) { Ok((parsed_input, _)) => { if parsed_input == input { return Err(Err::Error(E::from_error_kind(input, ErrorKind::Many0))); } input = parsed_input; } Err(Err::Error(_)) => return Ok((input, ())), Err(e) => { return Err(e); } } } } } fn ignore_many1<I, O, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, (), E> where I: Clone + PartialEq, F: Parser<I, O, E>, E: ParseError<I>, { move |mut input: I| { match f.parse(input.clone()) { Err(Err::Error(_)) => Err(Err::Error(E::from_error_kind(input, ErrorKind::Many1))), Err(e) => Err(e), Ok((parsed_input, _)) => { input = parsed_input; loop { match f.parse(input.clone()) { Ok((parsed_input, _)) => { if parsed_input == input { return Err(Err::Error(E::from_error_kind( input, ErrorKind::Many1, ))); } input = parsed_input; } Err(Err::Error(_)) => return Ok((input, ())), Err(e) => { return Err(e); } } } } } } } fn multiline_comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { delimited(tag(b"/*"), take_until(b"*/".as_ref()), tag(b"*/"))(i) } fn singleline_comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { preceded(tag(b"//"), take_till(|c| c == b'\r' || c == b'\n'))(i) } fn comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { alt((singleline_comment, multiline_comment))(i) } fn multispace_comment0<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { ignore_many0(alt((multispace1, comment)))(i) } fn trash<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], Option<&'a [u8]>, E> { opt(is_not(b"\r\n{}".as_ref()))(i) } fn space_comment_trash0<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { delimited(space0, unit(opt(comment)), trash)(i) } fn quoted_token<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { delimited(char('"'), take_till(|c| c == b'"'), char('"'))(i) } fn escaped_quoted_token<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { alt(( delimited( char('"'), escaped(is_not(b"\"\\".as_ref()), '\\', one_of(b"nt\\\"".as_ref())), char('"'), ), value(b"".as_ref(), tag(b"\"\"")), ))(i) } fn unquoted_char_nonspace<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], char, E> { alt(( none_of(b"{}\"\r\n/ \t".as_ref()), terminated(char('/'), not(char('/'))), ))(i) } fn unquoted_key<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { recognize(ignore_many1(unquoted_char_nonspace))(i) } fn unquoted_value<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { recognize(ignore_many1(alt(( unit(unquoted_char_nonspace), unit(terminated(space1, unquoted_char_nonspace)), ))))(i) } fn specific_token<'a: 'b, 'b, E: ParseError<&'a [u8]> + 'a>( key: &'b [u8], ) -> impl FnMut(&'a [u8]) -> IResult<&'a 
[u8], &'a [u8], E> + 'b { preceded( multispace_comment0, alt(( preceded(char('"'), cut(terminated(tag(key), char('"')))), tag(key), )), ) } pub(crate) fn any_key<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, alt((quoted_token, unquoted_key)))(i) } pub(crate) fn any_escaped_key<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded( multispace_comment0, alt((escaped_quoted_token, unquoted_key)), )(i) } pub(crate) fn empty_token<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, tag(b"\"\"".as_ref()))(i) } pub(crate) fn any_value<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, alt((quoted_token, unquoted_value)))(i) } pub(crate) fn any_escaped_value<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded( multispace_comment0, alt((escaped_quoted_token, unquoted_value)), )(i) } pub(crate) fn block_start<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { preceded(multispace_comment0, unit(char('{')))(i) } pub(crate) fn block_end<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { preceded( preceded(space_comment_trash0, multispace_comment0), unit(alt((eof, tag(b"}")))), )(i) } pub(crate) fn block_end_early<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], (), E> { preceded(multispace_comment0, unit(alt((eof, tag(b"}")))))(i) } pub(crate) fn block_sep<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { unit(preceded(space_comment_trash0, is_a(b"\r\n".as_ref())))(i) } pub(crate) fn peeked_char<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], char, E> { preceded(multispace_comment0, peek(anychar))(i) } pub(crate) fn comment_eof<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { all_consuming(multispace_comment0)(i) } pub(crate) fn peeked_block_end<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], (), E> { peek(block_end)(i) } pub(crate) fn block_sep_and_token<'a: 'b, 'b, E: ParseError<&'a [u8]> + 'a>( token: &'b [u8], ) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], &'a [u8], E> + 'b { preceded(block_sep, specific_token(token)) } #[cfg(test)] mod tests { use super::*; use nom::error::VerboseError; #[test] fn quoted_key() { assert_eq!( any_key::<VerboseError<&[u8]>>(b"\r\n\t \"a quoted key\" value".as_ref()), IResult::Ok((b" value".as_ref(), b"a quoted key".as_ref())) ); } #[test] fn unquoted_key() { assert_eq!( any_key::<VerboseError<&[u8]>>(b"\r\n\t $unquotedKey remaining".as_ref()), IResult::Ok((b" remaining".as_ref(), b"$unquotedKey".as_ref())) ); } #[test] fn quoted_value() { assert_eq!( any_value::<VerboseError<&[u8]>>(b" \"quoted value\"".as_ref()), IResult::Ok((b"".as_ref(), b"quoted value".as_ref())) ); } #[test] fn unquoted_value() { assert_eq!( any_value::<VerboseError<&[u8]>>(b"\tcsgo\\models\\stuff.mdl".as_ref()), IResult::Ok((b"".as_ref(), b"csgo\\models\\stuff.mdl".as_ref())) ); } #[test] fn unquoted_value_comment_terminated() { assert_eq!( any_value::<VerboseError<&[u8]>>( b" unquoted value with spaces/shit // and a comment too".as_ref() ), IResult::Ok(( b" // and a comment too".as_ref(), b"unquoted value with spaces/shit".as_ref() )) ); } #[test] fn comment_preceded_key() { assert_eq!( any_key::<VerboseError<&[u8]>>( b"\t//this is a comment\r\n\tNotComment A Value".as_ref() ), IResult::Ok((b" A 
Value".as_ref(), b"NotComment".as_ref())) ); } #[test] fn empty_comment() { assert_eq!( multispace_comment0::<VerboseError<&[u8]>>(b"\r\n\t//\r\n".as_ref()), IResult::Ok((b"".as_ref(), ())) ); } #[test] fn escaped() { assert_eq!( any_escaped_value::<VerboseError<&[u8]>>(b" \"escaped \\\" value\"".as_ref()), IResult::Ok((b"".as_ref(), b"escaped \\\" value".as_ref())) ); assert_eq!( any_escaped_key::<VerboseError<&[u8]>>(b"\"\"".as_ref()), IResult::Ok((b"".as_ref(), b"".as_ref())) ); } }
use nom::{ branch::alt, bytes::complete::{escaped, is_a, is_not, tag, take_till, take_until}, character::complete::{anychar, char, multispace1, none_of, one_of, space0, space1}, combinator::{all_consuming, cut, eof, not, opt, peek, recognize, value}, error::{ErrorKind, ParseError}, sequence::{delimited, preceded, terminated}, Err, IResult, Parser, }; fn unit<I, O, E, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, (), E> where F: Parser<I, O, E>, { move |input: I| parser.parse(input).map(|(i, _)| (i, ())) } fn ignore_many0<I, O, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, (), E> where I: Clone + PartialEq, F: Parser<I, O, E>, E: ParseError<I>, { move |mut input: I| { loop { match f.parse(input.clone()) { Ok((parsed_input, _)) => { if parsed_input == input { return Err(Err::Error(E::from_error_kind(input, ErrorKind::Many0))); } input = parsed_input; } Err(Err::Error(_)) => return Ok((input, ())), Err(e) => { return Err(e); } } } } } fn ignore_many1<I, O, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, (), E> where I: Clone + PartialEq, F: Parser<I, O, E>, E: ParseError<I>, { move |mut input: I| { match f.parse(input.clone()) { Err(Err::Error(_)) => Err(Err::Error(E::from_error_kind(input, ErrorKind::Many1))), Err(e) => Err(e), Ok((parsed_input, _)) => { input = parsed_input; loop { match f.parse(input.clone()) { Ok((parsed_input, _)) => { if parsed_input == input { return Err(Err::Error(E::from_error_kind( input, ErrorKind::Many1, ))); } input = parsed_input; } Err(Err::Error(_)) => return Ok((input, ())), Err(e) => { return Err(e); } } } } } } } fn multiline_comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { delimited(tag(b"/*"), take_until(b"*/".as_ref()), tag(b"*/"))(i) } fn singleline_comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { preceded(tag(b"//"), take_till(|c| c == b'\r' || c == b'\n'))(i) } fn comment<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { alt((singleline_comment, multiline_comment))(i) } fn multispace_comment0<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { ignore_many0(alt((multispace1, comment)))(i) } fn trash<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], Option<&'a [u8]>, E> { opt(is_not(b"\r\n{}".as_ref()))(i) } fn space_comment_trash0<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { delimited(space0, unit(opt(comment)), trash)(i) } fn quoted_token<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { delimited(char('"'), take_till(|c| c == b'"'), char('"'))(i) } fn escaped_quoted_token<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { alt(( delimited( char('"'), escaped(is_not(b"\"\\".as_ref()), '\\', one_of(b"nt\\\"".as_ref())), char('"'), ), value(b"".as_ref(), tag(b"\"\"")), ))(i) } fn unquoted_char_nonspace<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], char, E> { alt(( none_of(b"{}\"\r\n/ \t".as_ref()), terminated(char('/'), not(char('/'))), ))(i) } fn unquoted_key<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { recognize(ignore_many1(unquoted_char_nonspace))(i) } fn unquoted_value<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { recognize(
)(i) } fn specific_token<'a: 'b, 'b, E: ParseError<&'a [u8]> + 'a>( key: &'b [u8], ) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], &'a [u8], E> + 'b { preceded( multispace_comment0, alt(( preceded(char('"'), cut(terminated(tag(key), char('"')))), tag(key), )), ) } pub(crate) fn any_key<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, alt((quoted_token, unquoted_key)))(i) } pub(crate) fn any_escaped_key<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded( multispace_comment0, alt((escaped_quoted_token, unquoted_key)), )(i) } pub(crate) fn empty_token<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, tag(b"\"\"".as_ref()))(i) } pub(crate) fn any_value<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded(multispace_comment0, alt((quoted_token, unquoted_value)))(i) } pub(crate) fn any_escaped_value<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], &'a [u8], E> { preceded( multispace_comment0, alt((escaped_quoted_token, unquoted_value)), )(i) } pub(crate) fn block_start<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { preceded(multispace_comment0, unit(char('{')))(i) } pub(crate) fn block_end<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { preceded( preceded(space_comment_trash0, multispace_comment0), unit(alt((eof, tag(b"}")))), )(i) } pub(crate) fn block_end_early<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], (), E> { preceded(multispace_comment0, unit(alt((eof, tag(b"}")))))(i) } pub(crate) fn block_sep<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { unit(preceded(space_comment_trash0, is_a(b"\r\n".as_ref())))(i) } pub(crate) fn peeked_char<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], char, E> { preceded(multispace_comment0, peek(anychar))(i) } pub(crate) fn comment_eof<'a, E: ParseError<&'a [u8]>>(i: &'a [u8]) -> IResult<&'a [u8], (), E> { all_consuming(multispace_comment0)(i) } pub(crate) fn peeked_block_end<'a, E: ParseError<&'a [u8]>>( i: &'a [u8], ) -> IResult<&'a [u8], (), E> { peek(block_end)(i) } pub(crate) fn block_sep_and_token<'a: 'b, 'b, E: ParseError<&'a [u8]> + 'a>( token: &'b [u8], ) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], &'a [u8], E> + 'b { preceded(block_sep, specific_token(token)) } #[cfg(test)] mod tests { use super::*; use nom::error::VerboseError; #[test] fn quoted_key() { assert_eq!( any_key::<VerboseError<&[u8]>>(b"\r\n\t \"a quoted key\" value".as_ref()), IResult::Ok((b" value".as_ref(), b"a quoted key".as_ref())) ); } #[test] fn unquoted_key() { assert_eq!( any_key::<VerboseError<&[u8]>>(b"\r\n\t $unquotedKey remaining".as_ref()), IResult::Ok((b" remaining".as_ref(), b"$unquotedKey".as_ref())) ); } #[test] fn quoted_value() { assert_eq!( any_value::<VerboseError<&[u8]>>(b" \"quoted value\"".as_ref()), IResult::Ok((b"".as_ref(), b"quoted value".as_ref())) ); } #[test] fn unquoted_value() { assert_eq!( any_value::<VerboseError<&[u8]>>(b"\tcsgo\\models\\stuff.mdl".as_ref()), IResult::Ok((b"".as_ref(), b"csgo\\models\\stuff.mdl".as_ref())) ); } #[test] fn unquoted_value_comment_terminated() { assert_eq!( any_value::<VerboseError<&[u8]>>( b" unquoted value with spaces/shit // and a comment too".as_ref() ), IResult::Ok(( b" // and a comment too".as_ref(), b"unquoted value with spaces/shit".as_ref() )) ); } #[test] fn comment_preceded_key() { assert_eq!( 
any_key::<VerboseError<&[u8]>>( b"\t//this is a comment\r\n\tNotComment A Value".as_ref() ), IResult::Ok((b" A Value".as_ref(), b"NotComment".as_ref())) ); } #[test] fn empty_comment() { assert_eq!( multispace_comment0::<VerboseError<&[u8]>>(b"\r\n\t//\r\n".as_ref()), IResult::Ok((b"".as_ref(), ())) ); } #[test] fn escaped() { assert_eq!( any_escaped_value::<VerboseError<&[u8]>>(b" \"escaped \\\" value\"".as_ref()), IResult::Ok((b"".as_ref(), b"escaped \\\" value".as_ref())) ); assert_eq!( any_escaped_key::<VerboseError<&[u8]>>(b"\"\"".as_ref()), IResult::Ok((b"".as_ref(), b"".as_ref())) ); } }
ignore_many1(alt(( unit(unquoted_char_nonspace), unit(terminated(space1, unquoted_char_nonspace)), )))
call_expression
[ { "content": "fn read_animation_value(bytes: &mut &[u8]) -> Result<AnimationValue> {\n\n let value_bytes = bytes\n\n .get(..2)\n\n .ok_or_else(|| corrupted(\"animation values out of bounds\"))?\n\n .try_into()\n\n .expect(\"slice must have correct length\");\n\n\n\n *bytes = &bytes[2..];\n\n\n\n Ok(AnimationValue::from_bytes(value_bytes))\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 15, "score": 190333.95876179193 }, { "content": "fn unescape(char: u8) -> Option<u8> {\n\n match char {\n\n b't' => Some(b'\\t'),\n\n b'n' => Some(b'\\n'),\n\n b'\\\\' => Some(b'\\\\'),\n\n b'\"' => Some(b'\"'),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "plumber_vdf/src/escape.rs", "rank": 16, "score": 187107.9529455876 }, { "content": "fn parse_nul_str<'a>(bytes: &mut &'a [u8]) -> Option<&'a [u8]> {\n\n if bytes.is_empty() {\n\n return None;\n\n }\n\n let mut split = bytes.splitn(2, |&b| b == 0);\n\n let str_bytes = split.next()?;\n\n *bytes = split.next().unwrap_or_default();\n\n Some(str_bytes)\n\n}\n\n\n", "file_path": "plumber_vpk/src/lib.rs", "rank": 17, "score": 163047.24720416532 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the deserialization fails.\n\npub fn from_bytes(input: &[u8]) -> vdf::Result<Vmt> {\n\n Vmt::from_bytes(input)\n\n}\n\n\n", "file_path": "plumber_core/src/vmt/mod.rs", "rank": 18, "score": 149579.84939075797 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the deserialization fails.\n\npub fn from_bytes(input: &[u8]) -> vdf::Result<Vmf> {\n\n Vmf::from_bytes(input)\n\n}\n\n\n", "file_path": "plumber_core/src/vmf/mod.rs", "rank": 19, "score": 149579.84939075797 }, { "content": "pub fn null_terminated_prefix(bytes: &[u8]) -> Option<&[u8]> {\n\n if bytes.is_empty() {\n\n return None;\n\n }\n\n bytes.splitn(2, |&b| b == 0).next()\n\n}\n\n\n", "file_path": "plumber_core/src/model/binary_utils.rs", "rank": 20, "score": 149001.15578181404 }, { "content": "fn parse<'a, T: Unaligned>(bytes: &mut &'a [u8]) -> Option<LayoutVerified<&'a [u8], T>> {\n\n let (verified, remaining) = LayoutVerified::new_unaligned_from_prefix(*bytes)?;\n\n *bytes = remaining;\n\n Some(verified)\n\n}\n\n\n\nconst IN_DIRECTORY: u16 = 0x7fff;\n\n\n", "file_path": "plumber_vpk/src/lib.rs", "rank": 21, "score": 145938.89831493798 }, { "content": "fn escape(char: char) -> Option<char> {\n\n match char {\n\n '\\t' => Some('\\t'),\n\n '\\n' => Some('\\n'),\n\n '\\\\' => Some('\\\\'),\n\n '\"' => Some('\"'),\n\n _ => None,\n\n }\n\n}\n\n\n\npub(crate) fn maybe_unescape_str(input: &[u8]) -> Cow<[u8]> {\n\n let mut char_iter = input.iter().enumerate();\n\n while let Some((i, &ch)) = char_iter.next() {\n\n if ch == b'\\\\' {\n\n if let Some(escaped) = char_iter.next().and_then(|(_, &ch)| unescape(ch)) {\n\n let mut escaped_string = Vec::with_capacity(input.len() + 1);\n\n escaped_string.extend_from_slice(&input[..i]);\n\n escaped_string.push(escaped);\n\n while let Some((_, &ch)) = char_iter.next() {\n\n if ch == b'\\\\' {\n", "file_path": "plumber_vdf/src/escape.rs", "rank": 22, "score": 142124.79936825647 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the deserialization fails.\n\npub fn from_bytes<'de, T>(input: &'de [u8]) -> Result<T>\n\nwhere\n\n T: Deserialize<'de>,\n\n{\n\n let mut deserializer = Deserializer::from_bytes(input);\n\n let t = T::deserialize(&mut deserializer).map_err(|err| err.with_position(&deserializer))?;\n\n Ok(t)\n\n}\n\n\n", "file_path": "plumber_vdf/src/de.rs", "rank": 23, "score": 140545.54507638837 }, { "content": "/// # 
Errors\n\n///\n\n/// Returns `Err` if the deserialization fails.\n\npub fn escaped_from_bytes<'de, T>(input: &'de [u8]) -> Result<T>\n\nwhere\n\n T: Deserialize<'de>,\n\n{\n\n let mut deserializer = Deserializer::escaped_from_bytes(input);\n\n let t = T::deserialize(&mut deserializer).map_err(|err| err.with_position(&deserializer))?;\n\n Ok(t)\n\n}\n\n\n\n#[must_use]\n\npub struct Deserializer<'de> {\n\n original_input: &'de [u8],\n\n input: &'de [u8],\n\n remaining_depth: u8,\n\n last_key: Option<Cow<'de, [u8]>>,\n\n escaped: bool,\n\n}\n\n\n\nimpl<'de> Deserializer<'de> {\n\n pub fn from_str(input: &'de str) -> Self {\n", "file_path": "plumber_vdf/src/de.rs", "rank": 24, "score": 138064.66181633732 }, { "content": "pub fn parse_mut<'a, T: FromBytes>(bytes: &mut &'a [u8]) -> Option<&'a T> {\n\n LayoutVerified::<_, T>::new_from_prefix(*bytes).map(|(res, remaining)| {\n\n *bytes = remaining;\n\n res.into_ref()\n\n })\n\n}\n\n\n", "file_path": "plumber_core/src/model/binary_utils.rs", "rank": 25, "score": 137940.18431867537 }, { "content": "pub fn parse_slice_mut<'a, T: FromBytes>(bytes: &mut &'a [u8], count: usize) -> Option<&'a [T]> {\n\n LayoutVerified::new_slice_from_prefix(*bytes, count).map(|(res, remaining)| {\n\n *bytes = remaining;\n\n res.into_slice()\n\n })\n\n}\n", "file_path": "plumber_core/src/model/binary_utils.rs", "rank": 26, "score": 128167.47446750398 }, { "content": "pub fn read_file_aligned<A: maligned::Alignment>(mut file: GameFile) -> io::Result<Vec<u8>> {\n\n let size = file.size().unwrap_or_default();\n\n\n\n let mut bytes = maligned::align_first::<u8, A>(size);\n\n file.read_to_end(&mut bytes)?;\n\n\n\n if bytes.as_ptr() as usize % align_of::<A>() != 0 {\n\n // vector reallocated, no longer aligned\n\n let mut new_bytes = maligned::align_first::<u8, A>(bytes.len());\n\n new_bytes.append(&mut bytes);\n\n bytes = new_bytes;\n\n }\n\n\n\n assert!(bytes.as_ptr() as usize % align_of::<A>() == 0);\n\n\n\n Ok(bytes)\n\n}\n\n\n", "file_path": "plumber_core/src/model/binary_utils.rs", "rank": 27, "score": 118236.75447419292 }, { "content": "fn make_animation_quats_compatible(animation_data: &mut BTreeMap<usize, BoneAnimationData>) {\n\n for data in animation_data.values_mut() {\n\n if let AnimationData::Animated(values) = &mut data.rotation {\n\n let mut previous = None;\n\n\n\n for value in values {\n\n if let Some(previous) = previous {\n\n make_quat_compatible(value, previous);\n\n }\n\n previous = Some(*value);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 28, "score": 118205.41499891004 }, { "content": "fn quat_from_bytes_64(bytes: [u8; 8]) -> Quat {\n\n let x = u32::from(bytes[0]) | u32::from(bytes[1]) << 8 | u32::from(bytes[2] & 0x1f) << 16;\n\n let x = (x as f32 - 1_048_576.0) / 1_048_576.5;\n\n\n\n let y = u32::from(bytes[2] & 0xe0) >> 5\n\n | u32::from(bytes[3]) << 3\n\n | u32::from(bytes[4]) << 11\n\n | u32::from(bytes[5] & 0x3) << 19;\n\n let y = (y as f32 - 1_048_576.0) / 1_048_576.5;\n\n\n\n let z = u32::from(bytes[5] & 0xfc) >> 2\n\n | u32::from(bytes[6]) << 6\n\n | u32::from(bytes[7] & 0x7f) << 14;\n\n let z = (z as f32 - 1_048_576.0) / 1_048_576.5;\n\n\n\n let w_sign = if bytes[7] & 0x80 > 0 { -1.0 } else { 1.0 };\n\n let w = (1.0 - x * x - y * y - z * z).sqrt() * w_sign;\n\n\n\n Quat::from_xyzw(x, y, z, w)\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 29, "score": 108158.27647883564 }, { "content": "fn quat_from_bytes_48(bytes: [u8; 6]) -> Quat {\n\n let a = (u16::from(bytes[1] & 0x7f) << 8) | 
u16::from(bytes[0]);\n\n let b = (u16::from(bytes[3] & 0x7f) << 8) | u16::from(bytes[2]);\n\n let c = (u16::from(bytes[5] & 0x7f) << 8) | u16::from(bytes[4]);\n\n\n\n let missing_component_index = ((bytes[1] & 0x80) >> 6) | ((bytes[3] & 0x80) >> 7);\n\n let missing_component_sign = if bytes[5] & 0x80 > 0 { -1.0 } else { 1.0 };\n\n\n\n let a = (f32::from(a) - 16384.0) / 23168.0;\n\n let b = (f32::from(b) - 16384.0) / 23168.0;\n\n let c = (f32::from(c) - 16384.0) / 23168.0;\n\n\n\n let missing_component = (1.0 - a * a - b * b - c * c).sqrt() * missing_component_sign;\n\n\n\n match missing_component_index {\n\n 1 => Quat::from_xyzw(missing_component, a, b, c),\n\n 2 => Quat::from_xyzw(c, missing_component, a, b),\n\n 3 => Quat::from_xyzw(b, c, missing_component, a),\n\n 0 => Quat::from_xyzw(a, b, c, missing_component),\n\n 4.. => {\n\n unreachable!(\"missing component index has only 2 nonzero bits, so maximum value is 3\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 30, "score": 108158.27647883564 }, { "content": "fn decompress_hdr(data: &[u8]) -> Vec<f32> {\n\n data.iter()\n\n .copied()\n\n .tuples()\n\n .flat_map(|(b, g, r, a)| {\n\n let a = f32::from(a);\n\n\n\n let r = f32::from(r) * a * 16.0 / 262_144.0;\n\n let g = f32::from(g) * a * 16.0 / 262_144.0;\n\n let b = f32::from(b) * a * 16.0 / 262_144.0;\n\n\n\n let a = 1.0;\n\n\n\n [r, g, b, a]\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "plumber_core/src/vmt/loader.rs", "rank": 31, "score": 106205.54523386105 }, { "content": "fn f16s_to_f32s(data: &[u8]) -> Vec<f32> {\n\n let floats: &[f16] = LayoutVerified::new_slice(data)\n\n .expect(\"vtflib should return properly aligned images\")\n\n .into_slice();\n\n floats.iter().copied().map(f32::from).collect()\n\n}\n\n\n", "file_path": "plumber_core/src/vmt/loader.rs", "rank": 32, "score": 106205.54523386105 }, { "content": "fn make_quat_compatible(quat: &mut Quat, previous: Quat) {\n\n if previous.dot(*quat) < 0.0 {\n\n *quat = -*quat;\n\n }\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 33, "score": 100071.75370181643 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the deserialization fails.\n\npub fn from_str<'de, T>(input: &'de str) -> Result<T>\n\nwhere\n\n T: Deserialize<'de>,\n\n{\n\n let mut deserializer = Deserializer::from_str(input);\n\n let t = T::deserialize(&mut deserializer).map_err(|err| err.with_position(&deserializer))?;\n\n Ok(t)\n\n}\n\n\n", "file_path": "plumber_vdf/src/de.rs", "rank": 34, "score": 93440.48609685495 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the deserialization fails.\n\npub fn escaped_from_str<'de, T>(input: &'de str) -> Result<T>\n\nwhere\n\n T: Deserialize<'de>,\n\n{\n\n let mut deserializer = Deserializer::escaped_from_str(input);\n\n let t = T::deserialize(&mut deserializer).map_err(|err| err.with_position(&deserializer))?;\n\n Ok(t)\n\n}\n\n\n", "file_path": "plumber_vdf/src/de.rs", "rank": 35, "score": 91787.72969714004 }, { "content": "fn extract_animation_value(frame: usize, values: &[AnimationValue], scale: f32) -> f32 {\n\n let mut k = frame;\n\n let mut i = 0;\n\n\n\n loop {\n\n match values.get(i) {\n\n Some(v) if v.total() as usize > k => break,\n\n Some(v) if v.total() == 0 => return 0.0,\n\n Some(v) => {\n\n k -= v.total() as usize;\n\n i += v.valid() as usize + 1;\n\n }\n\n None => return 0.0,\n\n }\n\n }\n\n\n\n values\n\n .get(i)\n\n .map(|&v| {\n\n if v.valid() as usize > k {\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 36, "score": 
91482.52839420567 }, { "content": "fn recurse_readdir(readdir: ReadDir, encountered: &mut HashSet<(StdPathBuf, GamePathBuf)>) {\n\n // check that recursing yields no duplicates\n\n for entry in readdir.map(Result::unwrap) {\n\n let search_path = entry.search_path().to_path_buf();\n\n let entry_path = entry.path().to_path_buf();\n\n if entry.entry_type().is_directory() {\n\n recurse_readdir(entry.read_dir(), encountered);\n\n }\n\n if let Some(old) = encountered.replace((search_path, entry_path)) {\n\n panic!(\"readdir encountered duplicate: {:?}\", old);\n\n }\n\n }\n\n}\n", "file_path": "plumber_core/tests/steam.rs", "rank": 37, "score": 86497.95740339247 }, { "content": "pub fn parse<T: FromBytes>(bytes: &[u8], offset: usize) -> Option<&T> {\n\n bytes\n\n .get(offset..)\n\n .and_then(LayoutVerified::<_, T>::new_from_prefix)\n\n .map(|(res, _)| res.into_ref())\n\n}\n\n\n", "file_path": "plumber_core/src/model/binary_utils.rs", "rank": 38, "score": 86476.72164505447 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the serialization fails.\n\npub fn to_string<T>(value: &T) -> Result<String>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut serializer = Serializer {\n\n output: String::new(),\n\n last_key: None,\n\n indentation: 0,\n\n escaped: false,\n\n };\n\n value.serialize(&mut serializer)?;\n\n Ok(serializer.output)\n\n}\n\n\n", "file_path": "plumber_vdf/src/ser.rs", "rank": 39, "score": 82656.16092473798 }, { "content": "fn merge_animated_constant<T: Copy>(prev: &mut Vec<T>, next: T, next_n: usize) {\n\n prev.reserve(next_n);\n\n\n\n for _ in 0..next_n {\n\n prev.push(next);\n\n }\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 40, "score": 82474.71062099392 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the serialization fails.\n\npub fn escaped_to_string<T>(value: &T) -> Result<String>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut serializer = Serializer {\n\n output: String::new(),\n\n last_key: None,\n\n indentation: 0,\n\n escaped: true,\n\n };\n\n value.serialize(&mut serializer)?;\n\n Ok(serializer.output)\n\n}\n\n\n\npub struct Serializer {\n\n output: String,\n\n last_key: Option<String>,\n\n indentation: usize,\n\n escaped: bool,\n\n}\n", "file_path": "plumber_vdf/src/ser.rs", "rank": 41, "score": 80810.98295006757 }, { "content": "pub fn parse_slice<T: FromBytes>(bytes: &[u8], offset: usize, count: usize) -> Option<&[T]> {\n\n bytes\n\n .get(offset..)\n\n .and_then(|bytes| LayoutVerified::new_slice_from_prefix(bytes, count))\n\n .map(|(res, _)| res.into_slice())\n\n}\n\n\n", "file_path": "plumber_core/src/model/binary_utils.rs", "rank": 42, "score": 79485.9954976849 }, { "content": "fn accumulate_animation<T: Copy + PartialEq>(\n\n acc: &mut AnimationData<T>,\n\n next: AnimationData<T>,\n\n bone_data: T,\n\n accumulated_frames: usize,\n\n next_frames: usize,\n\n) {\n\n use AnimationData::{Animated, Constant, None};\n\n\n\n match (&mut *acc, next) {\n\n (None, None) => {}\n\n (None, Constant(next)) => {\n\n *acc = Animated(merge_constant_constant(\n\n bone_data,\n\n accumulated_frames,\n\n next,\n\n next_frames,\n\n ));\n\n }\n\n (None, Animated(next)) => {\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 43, "score": 77403.8415292754 }, { "content": "pub fn dump_animation(opts: DumpAnimation, file_system: &FileSystem) {\n\n let file_system = file_system.open().unwrap();\n\n\n\n let model = Model::read(&GamePathBuf::from(opts.mdl_path), &file_system).unwrap();\n\n let verified = model.verify().unwrap();\n\n\n\n if opts.bones 
{\n\n for bone in verified.bones().unwrap() {\n\n eprintln!(\"{:#?}\", bone);\n\n }\n\n }\n\n\n\n for res in verified.animations().unwrap() {\n\n let animation = match res {\n\n Ok(a) => a,\n\n Err(err) => {\n\n eprintln!(\"Error reading animation: {}\", err);\n\n continue;\n\n }\n\n };\n", "file_path": "plumber_cli/src/dump_animation.rs", "rank": 44, "score": 77131.60768294302 }, { "content": "pub fn euler_to_quat(e: Vec3) -> Quat {\n\n Quat::from_euler(EulerRot::ZYX, e.z, e.y, e.x)\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 45, "score": 74544.18263154234 }, { "content": "fn main() {\n\n let opts = Opts::parse();\n\n\n\n let libraries = Libraries::discover().unwrap();\n\n\n\n let app = libraries\n\n .apps()\n\n .map(Result::unwrap)\n\n .find(|app| app.app_id == opts.app_id)\n\n .unwrap();\n\n let file_system = FileSystem::from_app(&app).unwrap();\n\n\n\n match opts.subcommand {\n\n SubCommand::DumpAnimation(opts) => dump_animation(opts, &file_system),\n\n }\n\n}\n", "file_path": "plumber_cli/src/main.rs", "rank": 46, "score": 68123.5782487968 }, { "content": "#[test]\n\nfn no_deadlocks() {\n\n let (sender, receiver) = mpsc::channel();\n\n\n\n let vmf = include_bytes!(\"./vmf/build_scene_test.vmf\");\n\n let root_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"test_filesystem\");\n\n let game_info_path = root_path.join(\"game\").join(\"gameinfo.txt\");\n\n\n\n let file_system = FileSystem::from_paths(root_path, game_info_path).unwrap();\n\n\n\n let importer = Importer::new(\n\n file_system.open().unwrap(),\n\n TestHandler { _sender: sender },\n\n 2,\n\n );\n\n\n\n importer\n\n .import_vmf_blocking(vmf, &Settings::default(), || {\n\n // make sure channel disconnects\n", "file_path": "plumber_core/tests/asset.rs", "rank": 47, "score": 68123.5782487968 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_filesystem_discovery() {\n\n let libraries = Libraries::discover().unwrap();\n\n for filesystem in libraries.apps().source().filesystems().map(Result::unwrap) {\n\n eprintln!(\"filesystem: {:?}\", filesystem);\n\n }\n\n}\n\n\n\n/// Fails if steam is not installed\n", "file_path": "plumber_core/tests/steam.rs", "rank": 48, "score": 65540.1257835372 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_library_discovery() {\n\n let libraries = Libraries::discover().unwrap();\n\n eprintln!(\"discovered libraries: {:?}\", libraries.paths);\n\n let apps: Vec<App> = libraries.apps().map(Result::unwrap).collect();\n\n eprintln!(\"discovered apps: {:?}\", apps);\n\n let source_apps: Vec<App> = libraries.apps().source().map(Result::unwrap).collect();\n\n eprintln!(\"discovered source apps: {:?}\", source_apps);\n\n}\n\n\n\n/// Fails if steam is not installed\n", "file_path": "plumber_core/tests/steam.rs", "rank": 49, "score": 65540.1257835372 }, { "content": "#[test]\n\n#[ignore]\n\nfn open_discovered_filesystems() {\n\n let libraries = Libraries::discover().unwrap();\n\n for filesystem in libraries.apps().source().filesystems().map(Result::unwrap) {\n\n eprintln!(\"filesystem: {:?}\", filesystem);\n\n filesystem.open().unwrap();\n\n }\n\n}\n\n\n\n/// Fails if steam is not installed\n", "file_path": "plumber_core/tests/steam.rs", "rank": 50, "score": 65540.1257835372 }, { "content": "fn get_shader(\n\n material_path: &PathBuf,\n\n file_system: &OpenFileSystem,\n\n) -> Result<Shader, MaterialLoadError> {\n\n let material_path = material_path.with_extension(\"vmt\");\n\n let material_contents = file_system\n\n .read(&material_path)\n\n .map_err(|err| 
MaterialLoadError::from_io(&err, &material_path))?;\n\n let material = Vmt::from_bytes(&material_contents)?;\n\n Ok(material.resolve_shader(file_system)?)\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct MaterialInfo {\n\n width: u32,\n\n height: u32,\n\n no_draw: bool,\n\n}\n\n\n\nimpl MaterialInfo {\n", "file_path": "plumber_core/src/vmt/loader.rs", "rank": 51, "score": 65540.1257835372 }, { "content": "#[test]\n\nfn test_vmf_roundtrip() {\n\n let input = include_str!(\"vmf/test.vmf\");\n\n let first_vmf = Vmf::from_bytes(input.as_bytes()).unwrap();\n\n let serialized_vmf = first_vmf.to_string().unwrap();\n\n let second_vmf = Vmf::from_bytes(serialized_vmf.as_bytes()).unwrap();\n\n assert_eq!(first_vmf, second_vmf)\n\n}\n", "file_path": "plumber_core/tests/vmf.rs", "rank": 52, "score": 65540.1257835372 }, { "content": "#[test]\n\nfn test_vdf_parse() {\n\n let input = include_str!(\"test.vdf\");\n\n assert_eq!(\n\n plumber_vdf::from_str::<Test>(input).unwrap(),\n\n Test {\n\n key1: \"value1\".into(),\n\n key2: \"value2\".into(),\n\n key3: \"value3\".into(),\n\n key5: (),\n\n key6: btreemap! {\n\n \"key1\".into() => \"value1\".into(),\n\n \"key2\".into() => \"value2\".into(),\n\n },\n\n key7: Key7 {},\n\n key8: Key8 {\n\n key1: \"value1\".into()\n\n },\n\n seq1: vec![\n\n \"hello\".into(),\n\n \"this is kind of a hacky seq\".into(),\n", "file_path": "plumber_vdf/tests/vdf.rs", "rank": 53, "score": 65540.1257835372 }, { "content": "#[test]\n\n#[ignore]\n\nfn opened_discovered_filesystems_readdir() {\n\n let libraries = Libraries::discover().unwrap();\n\n for filesystem in libraries.apps().source().filesystems().map(Result::unwrap) {\n\n eprintln!(\"filesystem: {:?}\", filesystem.name);\n\n let opened = filesystem.open().unwrap();\n\n let mut encountered = HashSet::new();\n\n recurse_readdir(\n\n opened.read_dir(GamePath::try_from_str(\"\").unwrap()),\n\n &mut encountered,\n\n );\n\n }\n\n}\n\n\n\nuse plumber_core::fs::ReadDir;\n\n\n", "file_path": "plumber_core/tests/steam.rs", "rank": 54, "score": 64370.97859233138 }, { "content": "#[test]\n\nfn test_vpk_single_file() {\n\n let path = StdPath::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"test.vpk\");\n\n let vpk = Directory::read(path).unwrap();\n\n let mut files: Vec<&Path> = vpk.files().collect();\n\n files.sort_unstable();\n\n assert_eq!(files, vec![\"test.vdf\", \"test.vmf\"]);\n\n let mut file = vpk\n\n .open_file(Path::try_from_str(\"test.vdf\").unwrap())\n\n .unwrap();\n\n let contents = String::from_utf8(file.verify_contents().unwrap()).unwrap().replace(\"\\r\\n\", \"\\n\");\n\n assert_eq!(contents, include_str!(\"test.vdf\").replace(\"\\r\\n\", \"\\n\"));\n\n}\n\n\n", "file_path": "plumber_vpk/tests/vpk.rs", "rank": 55, "score": 64370.97859233138 }, { "content": "#[test]\n\nfn test_vpk_multi_part() {\n\n let path = StdPath::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"test_dir.vpk\");\n\n let vpk = Directory::read(path).unwrap();\n\n let mut files: Vec<&Path> = vpk.files().collect();\n\n files.sort_unstable();\n\n assert_eq!(files, vec![\"test.txt\", \"test/test2\"]);\n\n let test_contents: Vec<&DirectoryContent> = vpk\n\n .directory_contents(Path::try_from_str(\"test\").unwrap())\n\n .unwrap()\n\n .collect();\n\n assert_eq!(test_contents, vec![&DirectoryContent::File(\"test2\".into())]);\n\n let mut file = vpk\n\n .open_file(Path::try_from_str(\"test/test2\").unwrap())\n\n .unwrap();\n\n let contents = String::from_utf8(file.verify_contents().unwrap()).unwrap();\n\n 
assert_eq!(&contents, \"test 2\");\n\n}\n", "file_path": "plumber_vpk/tests/vpk.rs", "rank": 56, "score": 64370.97859233138 }, { "content": "fn find_material<'a>(\n\n texture: mdl::TextureRef,\n\n texture_paths: &[&str],\n\n file_system: &'a OpenFileSystem,\n\n) -> Result<GamePathBuf> {\n\n let name = GamePathBuf::from(texture.name()?);\n\n\n\n for &path in texture_paths {\n\n let mut candidate = GamePathBuf::from(\"materials\");\n\n candidate.push(GamePathBuf::from(path));\n\n candidate.push(&name);\n\n candidate.set_extension(\"vmt\");\n\n\n\n match file_system.open_file(&candidate) {\n\n Ok(_) => return Ok(candidate),\n\n Err(err) => {\n\n if err.kind() != io::ErrorKind::NotFound {\n\n return Err(Error::from_io(&err, &candidate));\n\n }\n\n }\n", "file_path": "plumber_core/src/model/mod.rs", "rank": 57, "score": 62601.14634570874 }, { "content": "fn find_vtx<'a>(\n\n mdl_path: Path,\n\n file_system: &'a OpenFileSystem,\n\n) -> Result<(PathBuf, GameFile<'a>)> {\n\n for &extension in VTX_EXTENSIONS {\n\n let path = mdl_path.with_extension(extension);\n\n match file_system.open_file(&path) {\n\n Ok(file) => return Ok((path, file)),\n\n Err(err) => {\n\n if err.kind() == io::ErrorKind::NotFound {\n\n continue;\n\n }\n\n return Err(Error::from_io(&err, &path));\n\n }\n\n }\n\n }\n\n Err(Error::Io {\n\n path: mdl_path.with_extension(\"*.vtx\").to_string(),\n\n error: \"could not find a supported vtx file\".to_owned(),\n\n })\n", "file_path": "plumber_core/src/model/mod.rs", "rank": 58, "score": 62601.14634570874 }, { "content": "fn open_texture<'a>(\n\n vtf_lib: &'a mut (VtfLib, VtfGuard),\n\n texture_path: &GamePathBuf,\n\n file_system: &OpenFileSystem,\n\n) -> Result<BoundVtfFile<'a, 'a>, TextureLoadError> {\n\n let (vtf_lib, guard) = vtf_lib;\n\n\n\n let vtf_bytes = file_system\n\n .read(&texture_path.with_extension(\"vtf\"))\n\n .map_err(|err| TextureLoadError::from_io(&err, &texture_path))?;\n\n\n\n let mut vtf = vtf_lib.new_vtf_file().bind(guard);\n\n vtf.load(&vtf_bytes)?;\n\n\n\n Ok(vtf)\n\n}\n\n\n", "file_path": "plumber_core/src/vmt/loader.rs", "rank": 59, "score": 62601.14634570874 }, { "content": "fn vtf_data<'a>(\n\n vtf: &'a BoundVtfFile,\n\n) -> Result<(&'a [u8], vtflib::ImageFormat, u32, u32), TextureLoadError> {\n\n let data = vtf.data(0, 0, 0, 0).ok_or(vtflib::Error::ImageNotLoaded)?;\n\n let format = vtf.format().ok_or(vtflib::Error::InvalidFormat)?;\n\n let width = vtf.width();\n\n let height = vtf.height();\n\n\n\n Ok((data, format, width, height))\n\n}\n", "file_path": "plumber_core/src/vmt/loader.rs", "rank": 60, "score": 62601.14634570874 }, { "content": "fn merge_animation_sections<'a>(\n\n sections: impl Iterator<Item = AnimationSectionRef<'a>>,\n\n bones: &[Bone],\n\n frame_animation: bool,\n\n total_frames: usize,\n\n) -> Result<BTreeMap<usize, BoneAnimationData>> {\n\n let mut acc_data = BTreeMap::new();\n\n let mut first_section = true;\n\n let mut accumulated_frames = 0;\n\n\n\n for section in sections {\n\n if accumulated_frames == total_frames {\n\n break;\n\n }\n\n\n\n let section_data = section.data(frame_animation)?;\n\n let current_frames = section.frame_count.min(total_frames - accumulated_frames);\n\n\n\n if first_section {\n\n acc_data = section_data;\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 61, "score": 61431.99915450292 }, { "content": "struct KeySerializer<'a>(&'a mut Serializer);\n\n\n\nimpl<'a> ser::Serializer for KeySerializer<'a> {\n\n type Ok = ();\n\n type Error = Error;\n\n\n\n type SerializeSeq = Impossible<(), Error>;\n\n 
type SerializeTuple = Impossible<(), Error>;\n\n type SerializeTupleStruct = Impossible<(), Error>;\n\n type SerializeTupleVariant = Impossible<(), Error>;\n\n type SerializeMap = Impossible<(), Error>;\n\n type SerializeStruct = Impossible<(), Error>;\n\n type SerializeStructVariant = Impossible<(), Error>;\n\n\n\n fn serialize_bool(self, v: bool) -> Result<Self::Ok> {\n\n ser::Serializer::serialize_bool(self.0, v)\n\n }\n\n\n\n fn serialize_i8(self, v: i8) -> Result<Self::Ok> {\n\n ser::Serializer::serialize_i8(self.0, v)\n", "file_path": "plumber_vdf/src/ser.rs", "rank": 62, "score": 57493.17893929013 }, { "content": "pub trait Handler: Clone + Send + 'static {\n\n type MaterialData: Send + 'static;\n\n\n\n fn handle_error(&mut self, error: Error) {\n\n error!(\"{}\", error);\n\n }\n\n\n\n /// # Errors\n\n ///\n\n /// Returns `Err` if an unrecoverable error was encountered during the material building.\n\n fn build_material(&mut self, vmt: LoadedVmt) -> Result<Self::MaterialData, MaterialLoadError>;\n\n\n\n fn handle_material(&mut self, material: LoadedMaterial<Self::MaterialData>);\n\n fn handle_texture(&mut self, texture: LoadedTexture);\n\n\n\n fn handle_skybox(&mut self, skybox: SkyBox);\n\n\n\n fn handle_model(&mut self, model: LoadedModel);\n\n fn handle_entity(&mut self, entity: TypedEntity);\n\n fn handle_brush(&mut self, brush: BuiltBrushEntity);\n", "file_path": "plumber_core/src/asset.rs", "rank": 63, "score": 56728.79854450592 }, { "content": "fn is_acf_file(filename: &str) -> bool {\n\n filename\n\n .rsplit('.')\n\n .next()\n\n .map(|ext| ext.eq_ignore_ascii_case(\"acf\"))\n\n == Some(true)\n\n}\n\n\n", "file_path": "plumber_core/src/steam.rs", "rank": 64, "score": 55960.65569687699 }, { "content": "fn is_vpk_file(filename: &str) -> bool {\n\n filename\n\n .rsplit('.')\n\n .next()\n\n .map(|ext| ext.eq_ignore_ascii_case(\"vpk\"))\n\n == Some(true)\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum ParseError {\n\n #[error(\"io error reading `{path}`: {inner}\")]\n\n Io { path: String, inner: io::Error },\n\n #[error(\"could not find gameinfo.txt in `{path}`\")]\n\n NoGameInfo { path: String },\n\n #[error(\"error deserializing `{path}`: {inner}\")]\n\n Deserialization { path: String, inner: vdf::Error },\n\n}\n\n\n\nimpl ParseError {\n\n fn from_io(err: io::Error, path: &StdPath) -> Self {\n", "file_path": "plumber_core/src/fs.rs", "rank": 65, "score": 55960.65569687699 }, { "content": "fn f16_to_f32(f16: u16) -> f32 {\n\n let mantissa = u32::from(f16 & 0x3ff);\n\n let biased_exponent = u32::from((f16 & 0x7c00) >> 10);\n\n let sign = u32::from((f16 & 0x8000) >> 15);\n\n\n\n let float_sign = if sign == 1 { -1.0 } else { 1.0 };\n\n\n\n if biased_exponent == 31 {\n\n if mantissa == 0 {\n\n // Infinity\n\n return 65504.0 * float_sign;\n\n }\n\n // NaN\n\n return 0.0;\n\n }\n\n\n\n if biased_exponent == 0 && mantissa != 0 {\n\n let float_mantissa = mantissa as f32 / 1024.0;\n\n float_sign * float_mantissa / 16384.0\n\n } else {\n\n f32::from_bits(sign << 31 | (biased_exponent + 127 - 15) << 23 | mantissa << (23 - 10))\n\n }\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 66, "score": 54928.502952947354 }, { "content": "fn initial_buffer_size(file: &GameFile) -> usize {\n\n // Allocate one extra byte so the buffer doesn't need to grow before the\n\n // final `read` call at the end of the file.\n\n file.size().map_or(0, |s| s + 1)\n\n}\n\n\n\nimpl OpenFileSystem {\n\n /// Opens the specified file if it exists.\n\n /// The path is case-insensitive even when the 
underlying filesystem is not.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns `Err` if `file_path` doesn't exist or if the file can't be opened.\n\n pub fn open_file<'a>(&self, file_path: impl Into<Path<'a>>) -> io::Result<GameFile> {\n\n let file_path = file_path.into();\n\n match file_path {\n\n Path::Game(file_path) => {\n\n for path in &self.search_paths {\n\n if let Some(file) = path.try_open_file(file_path)? {\n\n return Ok(file);\n", "file_path": "plumber_core/src/fs.rs", "rank": 67, "score": 53955.98858001402 }, { "content": "fn quat_from_u16s(u16s: [u16; 3]) -> Quat {\n\n let x = (f32::from(u16s[0]) - 32768.0) / 32768.0;\n\n let y = (f32::from(u16s[1]) - 32768.0) / 32768.0;\n\n let z = (f32::from(u16s[2] & 0x7fff) - 16384.0) / 16384.0;\n\n\n\n let w_sign = if u16s[2] & 0x8000 > 0 { -1.0 } else { 1.0 };\n\n\n\n let w = (1.0 - x * x - y * y - z * z).sqrt() * w_sign;\n\n\n\n Quat::from_xyzw(x, y, z, w)\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 68, "score": 53257.17801295605 }, { "content": "fn corrupted(error: &'static str) -> Error {\n\n Error::Corrupted {\n\n ty: FileType::Mdl,\n\n error,\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Mdl {\n\n bytes: Vec<u8>,\n\n}\n\n\n\nimpl Mdl {\n\n pub fn read(file: GameFile) -> io::Result<Self> {\n\n let bytes = read_file_aligned::<A4>(file)?;\n\n Ok(Self { bytes })\n\n }\n\n\n\n pub fn check_signature(&self) -> Result<()> {\n\n let signature = self\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 69, "score": 53257.17801295605 }, { "content": "pub fn vec3_from_u16s(u16s: [u16; 3]) -> Vec3 {\n\n Vec3::new(\n\n f16_to_f32(u16s[0]),\n\n f16_to_f32(u16s[1]),\n\n f16_to_f32(u16s[2]),\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nuse serde::Deserialize;\n\n\n\n// Generic animation data of a bone.\n\n#[derive(Debug, Clone, PartialEq)]\n\n#[cfg_attr(test, derive(Deserialize))]\n\npub enum AnimationData<T> {\n\n /// The data of the bone stays constant during the animation.\n\n Constant(T),\n\n /// The data of the bone is animated. 
Contains one value for each frame.\n\n Animated(Vec<T>),\n\n /// The animation has no data for the bone.\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 70, "score": 50856.89861590954 }, { "content": "fn build_thread_pool(num_threads: usize) -> rayon::ThreadPool {\n\n ThreadPoolBuilder::new()\n\n .num_threads(num_threads)\n\n .thread_name(|index| format!(\"asset loader {}\", index))\n\n .build()\n\n .expect(\"thread pool building shouldn't fail\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::Path;\n\n\n\n use crate::fs::FileSystem;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn scene_loading() {\n\n let vmf = include_bytes!(\"../tests/vmf/build_scene_test.vmf\");\n\n let root_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n", "file_path": "plumber_core/src/asset.rs", "rank": 71, "score": 50499.0037665877 }, { "content": "fn get_dimension_reference(shader: &Shader) -> Option<GamePathBuf> {\n\n DIMENSION_REFERENCE_TEXTURES.iter().find_map(|parameter| {\n\n let path: TexturePath = shader.try_extract_param(parameter).ok().flatten()?;\n\n Some(path.absolute_path())\n\n })\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct LoadedMaterial<D: Send + 'static> {\n\n pub name: GamePathBuf,\n\n pub info: MaterialInfo,\n\n pub data: D,\n\n}\n\n\n\nimpl<D: Send + Sync + 'static> Debug for LoadedMaterial<D> {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n f.debug_struct(\"LoadedMaterial\")\n\n .field(\"name\", &self.name)\n\n .field(\"info\", &self.info)\n\n .finish_non_exhaustive()\n\n }\n\n}\n\n\n", "file_path": "plumber_core/src/vmt/loader.rs", "rank": 72, "score": 49677.384289040936 }, { "content": "fn is_nodraw(material_path: &PathBuf, shader: &Shader) -> bool {\n\n let no_draw = NODRAW_MATERIALS.iter().any(|m| material_path == *m)\n\n || NODRAW_PARAMS.iter().any(|p| {\n\n shader\n\n .parameters\n\n .get(p.as_uncased())\n\n .map_or(false, |v| v == \"1\")\n\n });\n\n no_draw\n\n}\n\n\n", "file_path": "plumber_core/src/vmt/loader.rs", "rank": 73, "score": 49071.23874247453 }, { "content": "#[must_use]\n\npub fn path_to_absolute(path: &GamePathBuf) -> GamePathBuf {\n\n GamePath::try_from_str(\"materials\")\n\n .expect(\"cannot fail\")\n\n .join(path)\n\n}\n\n\n\n#[derive(Debug, Clone, Error, Hash, PartialEq, Eq)]\n\npub enum ShaderResolveError {\n\n #[error(\"io error reading `{path}`: {error}\")]\n\n Io { path: String, error: String },\n\n #[error(\"error deserializing included material: {0}\")]\n\n Deserialization(#[from] vdf::Error),\n\n}\n\n\n\nimpl ShaderResolveError {\n\n fn from_io(err: &io::Error, path: &GamePath) -> Self {\n\n Self::Io {\n\n path: path.to_string(),\n\n error: err.to_string(),\n\n }\n", "file_path": "plumber_core/src/vmt/mod.rs", "rank": 74, "score": 48903.24515156707 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the serialization fails.\n\npub fn to_string(vmt: &Vmt) -> vdf::Result<String> {\n\n vmt.to_string()\n\n}\n\n\n", "file_path": "plumber_core/src/vmt/mod.rs", "rank": 75, "score": 48714.440903992 }, { "content": "/// # Errors\n\n///\n\n/// Returns `Err` if the serialization fails.\n\npub fn to_string(vmf: &Vmf) -> vdf::Result<String> {\n\n vmf.to_string()\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize, PartialEq)]\n\n#[serde(expecting = \"a vmf file\")]\n\npub struct Vmf {\n\n #[serde(default, rename = \"versioninfo\")]\n\n pub version_info: VersionInfo,\n\n #[serde(default, rename = \"visgroups\")]\n\n pub vis_groups: VisGroups,\n\n #[serde(default, rename = \"viewsettings\")]\n\n pub view_settings: ViewSettings,\n\n pub 
world: World,\n\n #[serde(default, rename = \"entity\", skip_serializing_if = \"Vec::is_empty\")]\n\n pub entities: Vec<Entity>,\n\n}\n\n\n\nimpl Vmf {\n\n /// # Errors\n", "file_path": "plumber_core/src/vmf/mod.rs", "rank": 76, "score": 48714.440903992 }, { "content": "fn discover_test_files(path: &Path, extension: &str) -> Vec<TestFile> {\n\n let mut files = Vec::new();\n\n\n\n for result in WalkDir::new(path) {\n\n let entry = result.unwrap();\n\n\n\n let file_name = entry.path().strip_prefix(path).unwrap();\n\n let name_with_ext = file_name.to_string_lossy();\n\n let name = match name_with_ext.strip_suffix(extension) {\n\n None => continue,\n\n Some(name) => name,\n\n };\n\n\n\n files.push(TestFile {\n\n name: name.to_owned(),\n\n path: entry.into_path(),\n\n });\n\n }\n\n\n\n files\n\n}\n", "file_path": "plumber_core/src/test_utils.rs", "rank": 77, "score": 46236.72589775946 }, { "content": "use std::collections::BTreeMap;\n\n\n\nuse serde::{\n\n de::{self, MapAccess, Visitor},\n\n Deserialize, Serialize,\n\n};\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum Value {\n\n String(String),\n\n Class(BTreeMap<String, Value>),\n\n}\n\n\n\nimpl From<BTreeMap<String, Value>> for Value {\n\n fn from(v: BTreeMap<String, Value>) -> Self {\n\n Self::Class(v)\n\n }\n\n}\n\n\n\nimpl From<String> for Value {\n", "file_path": "plumber_vdf/src/value.rs", "rank": 78, "score": 41415.62347865471 }, { "content": " fn from(v: String) -> Self {\n\n Self::String(v)\n\n }\n\n}\n\n\n\nimpl PartialEq<String> for Value {\n\n fn eq(&self, other: &String) -> bool {\n\n if let Self::String(string) = self {\n\n string == other\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\nimpl Value {\n\n /// Returns `true` if the value is a string.\n\n #[must_use]\n\n pub fn is_string(&self) -> bool {\n\n matches!(self, Self::String(..))\n", "file_path": "plumber_vdf/src/value.rs", "rank": 79, "score": 41413.99751929369 }, { "content": " }\n\n\n\n /// Returns `true` if the value is a class.\n\n #[must_use]\n\n pub fn is_class(&self) -> bool {\n\n matches!(self, Self::Class(..))\n\n }\n\n\n\n #[must_use]\n\n pub fn as_string(&self) -> Option<&String> {\n\n if let Self::String(v) = self {\n\n Some(v)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n #[must_use]\n\n pub fn as_class(&self) -> Option<&BTreeMap<String, Value>> {\n\n if let Self::Class(v) = self {\n", "file_path": "plumber_vdf/src/value.rs", "rank": 80, "score": 41413.97611797599 }, { "content": "\n\n fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>\n\n where\n\n A: MapAccess<'de>,\n\n {\n\n let mut values = BTreeMap::new();\n\n while let Some((key, value)) = map.next_entry()? 
{\n\n values.insert(key, value);\n\n }\n\n Ok(Value::Class(values))\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(Value::String(v.into()))\n\n }\n\n\n\n fn visit_string<E>(self, v: String) -> Result<Self::Value, E>\n", "file_path": "plumber_vdf/src/value.rs", "rank": 81, "score": 41412.28767911791 }, { "content": " where\n\n E: de::Error,\n\n {\n\n Ok(Value::String(v))\n\n }\n\n }\n\n\n\n deserializer.deserialize_any(ValueVisitor)\n\n }\n\n}\n\n\n\nimpl Serialize for Value {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n match self {\n\n Value::String(str) => str.serialize(serializer),\n\n Value::Class(cls) => cls.serialize(serializer),\n\n }\n\n }\n\n}\n", "file_path": "plumber_vdf/src/value.rs", "rank": 82, "score": 41409.41159365318 }, { "content": " Some(v)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Value {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n struct ValueVisitor;\n\n\n\n impl<'de> Visitor<'de> for ValueVisitor {\n\n type Value = Value;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"any valid vdf value\")\n\n }\n", "file_path": "plumber_vdf/src/value.rs", "rank": 83, "score": 41407.20719955176 }, { "content": "use nom::{\n\n bytes::complete::is_not,\n\n character::complete::{char, multispace0},\n\n sequence::{delimited, preceded},\n\n IResult,\n\n};\n\n\n\npub(crate) fn space_separated(input: &str) -> IResult<&str, &str> {\n\n preceded(multispace0, is_not(\" \\t\\r\\n[](){}\"))(input)\n\n}\n\n\n\npub(crate) fn bracketed<'a, O>(\n\n parser: impl FnMut(&'a str) -> IResult<&'a str, O>,\n\n) -> impl FnMut(&'a str) -> IResult<&'a str, O> {\n\n preceded(multispace0, delimited(char('['), parser, char(']')))\n\n}\n\n\n\npub(crate) fn parenthesed<'a, O>(\n\n parser: impl FnMut(&'a str) -> IResult<&'a str, O>,\n\n) -> impl FnMut(&'a str) -> IResult<&'a str, O> {\n\n preceded(multispace0, delimited(char('('), parser, char(')')))\n\n}\n\n\n\npub(crate) fn braced<'a, O>(\n\n parser: impl FnMut(&'a str) -> IResult<&'a str, O>,\n\n) -> impl FnMut(&'a str) -> IResult<&'a str, O> {\n\n preceded(multispace0, delimited(char('{'), parser, char('}')))\n\n}\n", "file_path": "plumber_core/src/parsers.rs", "rank": 85, "score": 41366.05625850973 }, { "content": "#[cfg(not(unix))]\n\nfn open_fs_file(path: &StdPath, file_path: &GamePath) -> Result<fs::File, io::Error> {\n\n fs::File::open(path.join(file_path.as_str()))\n\n}\n\n\n\n#[cfg(unix)]\n", "file_path": "plumber_core/src/fs.rs", "rank": 94, "score": 40619.977565601315 }, { "content": "#[derive(Parser)]\n\n#[clap(version = \"0.1.0\")]\n\nstruct Opts {\n\n #[clap(short, long)]\n\n app_id: u32,\n\n #[clap(subcommand)]\n\n subcommand: SubCommand,\n\n}\n\n\n", "file_path": "plumber_cli/src/main.rs", "rank": 95, "score": 40031.92199808271 }, { "content": "// \"manual\" case-insensitive file opening on Linux\n\nfn open_fs_file(root_path: &StdPath, file_path: &GamePath) -> Result<fs::File, io::Error> {\n\n use std::io::ErrorKind;\n\n\n\n match fs::File::open(root_path.join(file_path.as_str())) {\n\n Ok(f) => Ok(f),\n\n Err(err) => {\n\n if err.kind() != ErrorKind::NotFound {\n\n return Err(err);\n\n }\n\n\n\n let mut target_path = root_path.to_path_buf();\n\n for path_part in file_path.as_str().split('/') {\n\n let target_part = 
fs::read_dir(&target_path)?.find_map(|res| {\n\n res.ok().and_then(|entry| {\n\n let file_name = entry.file_name();\n\n if file_name.eq_ignore_ascii_case(path_part) {\n\n Some(file_name)\n\n } else {\n\n None\n\n }\n", "file_path": "plumber_core/src/fs.rs", "rank": 96, "score": 40011.15846478521 }, { "content": "fn merge_constant_animated<T: Copy>(prev: T, prev_n: usize, next: &[T], next_n: usize) -> Vec<T> {\n\n let mut data = Vec::with_capacity(prev_n + next_n);\n\n\n\n for _ in 0..prev_n {\n\n data.push(prev);\n\n }\n\n\n\n data.extend_from_slice(next.get(0..next_n).unwrap_or(next));\n\n\n\n data\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 97, "score": 37974.82058138099 }, { "content": "fn merge_constant_constant<T: Copy>(prev: T, prev_n: usize, next: T, next_n: usize) -> Vec<T> {\n\n let mut data = Vec::with_capacity(prev_n + next_n);\n\n\n\n for _ in 0..prev_n {\n\n data.push(prev);\n\n }\n\n\n\n for _ in 0..next_n {\n\n data.push(next);\n\n }\n\n\n\n data\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 98, "score": 37974.82058138099 }, { "content": "#[derive(Debug, Clone, Copy)]\n\n#[repr(transparent)]\n\nstruct AnimationValue(i16);\n\n\n\nimpl AnimationValue {\n\n fn from_bytes(bytes: [u8; 2]) -> Self {\n\n Self(i16::from_ne_bytes(bytes))\n\n }\n\n\n\n fn valid(self) -> u8 {\n\n self.0.to_ne_bytes()[0]\n\n }\n\n\n\n fn total(self) -> u8 {\n\n self.0.to_ne_bytes()[1]\n\n }\n\n}\n\n\n", "file_path": "plumber_core/src/model/mdl.rs", "rank": 99, "score": 36162.202385269346 } ]
Rust
src/bin/day22/swiss_box.rs
mbikovitsky/aoc2021
0d952a7184edefcb6ab1859608daf74ed3fd4a43
use std::ops::{Sub, SubAssign}; use itertools::Itertools; use num::{CheckedAdd, CheckedMul, CheckedSub, Integer}; use petgraph::{ graph::NodeIndex, stable_graph::StableDiGraph, visit::{depth_first_search, Control, Dfs, DfsEvent, Reversed}, EdgeDirection::{Incoming, Outgoing}, }; use crate::r#box::Box; #[derive(Debug, Clone)] pub struct SwissBox<T: Integer> { tree: StableDiGraph<Box<T>, ()>, } impl<T: Integer> SwissBox<T> { pub fn new(initial: Box<T>) -> Self { if initial.is_empty() { Default::default() } else { let mut tree = StableDiGraph::new(); tree.add_node(initial); Self { tree } } } pub fn is_empty(&self) -> bool { self.tree.node_count() == 0 } fn root(&self) -> Option<NodeIndex> { if self.is_empty() { return None; } Some( self.tree .externals(Incoming) .exactly_one() .map_err(|_| "More than one root in the tree") .unwrap(), ) } fn is_leaf(&self, node: NodeIndex) -> bool { self.tree .neighbors_directed(node, Outgoing) .next() .is_none() } } impl<T: Integer> Default for SwissBox<T> { fn default() -> Self { Self { tree: Default::default(), } } } impl<T: Integer> From<Box<T>> for SwissBox<T> { fn from(value: Box<T>) -> Self { Self::new(value) } } impl<T: Integer + Clone> Sub<Box<T>> for SwissBox<T> { type Output = Self; fn sub(self, rhs: Box<T>) -> Self::Output { self - &rhs } } impl<'a, T: Integer + Clone> Sub<&'a Box<T>> for SwissBox<T> { type Output = Self; fn sub(mut self, rhs: &'a Box<T>) -> Self::Output { self -= rhs; self } } impl<T: Integer + Clone> Sub<SwissBox<T>> for SwissBox<T> { type Output = Self; fn sub(self, rhs: SwissBox<T>) -> Self::Output { self - &rhs } } impl<'a, T: Integer + Clone> Sub<&'a SwissBox<T>> for SwissBox<T> { type Output = Self; fn sub(mut self, rhs: &'a SwissBox<T>) -> Self::Output { self -= rhs; self } } impl<T: Integer + Clone> SubAssign<Box<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: Box<T>) { *self -= &rhs; } } impl<'a, T: Integer + Clone> SubAssign<&'a Box<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: &'a Box<T>) { if self.is_empty() { return; } let mut to_add = vec![]; let mut to_delete = vec![]; depth_first_search( &self.tree, Some(self.root().unwrap()), |event| -> Control<()> { if let DfsEvent::Discover(node, _) = event { let intersection = self.tree[node].intersect(rhs); if intersection.is_empty() { return Control::Prune; } if !self.is_leaf(node) { return Control::Continue; } if intersection == self.tree[node] { to_delete.push(node); return Control::Continue; } for slice in self.tree[node].subtract_split(rhs) { to_add.push((node, slice)); } } Control::Continue }, ); for (parent, slice) in to_add { let child = self.tree.add_node(slice); self.tree.add_edge(parent, child, ()); } for node in to_delete { let mut ancestors = vec![]; let reversed = Reversed(&self.tree); let mut dfs = Dfs::new(&reversed, node); while let Some(ancestor) = dfs.next(&reversed) { if self.tree.neighbors_directed(ancestor, Outgoing).count() <= 1 { ancestors.push(ancestor); } else { break; } } for ancestor in ancestors { self.tree.remove_node(ancestor); } } } } impl<T: Integer + Clone> SubAssign<SwissBox<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: SwissBox<T>) { *self -= &rhs; } } impl<'a, T: Integer + Clone> SubAssign<&'a SwissBox<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: &'a SwissBox<T>) { for node in rhs.tree.externals(Outgoing) { *self -= &rhs.tree[node]; } } } impl<T: Integer + CheckedAdd + CheckedSub + CheckedMul> SwissBox<T> { pub fn volume(&self) -> Option<T> { let mut volume: T = T::zero(); for node in self.tree.externals(Outgoing) { 
let r#box = &self.tree[node]; volume = volume.checked_add(&r#box.volume()?)?; } Some(volume) } } #[cfg(test)] mod tests { use super::{Box, SwissBox}; #[test] fn empty_swiss_box_has_no_volume() { assert_eq!(SwissBox::<i32>::default().volume().unwrap(), 0); assert_eq!(SwissBox::new(Box::<i32>::default()).volume().unwrap(), 0); } #[test] fn swiss_box_has_volume_of_initial_box() { assert_eq!( SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }) .volume() .unwrap(), 27 ); } #[test] fn rubik_center() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let center = Box { x: (1..2).into(), y: (1..2).into(), z: (1..2).into(), }; cube -= center; assert_eq!(cube.volume().unwrap(), 27 - 1); } #[test] fn rubik_corner() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let corner = Box { x: (2..3).into(), y: (2..3).into(), z: (2..3).into(), }; cube -= corner; assert_eq!(cube.volume().unwrap(), 27 - 1); } #[test] fn rubik_bar() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let bar = Box { x: (0..3).into(), y: (1..2).into(), z: (1..2).into(), }; cube -= bar; assert_eq!(cube.volume().unwrap(), 27 - 3); } #[test] fn death_by_a_thousand_cuts() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); for x in 0..3 { for y in 0..3 { for z in 0..3 { cube -= Box { x: (x..x + 1).into(), y: (y..y + 1).into(), z: (z..z + 1).into(), }; } } } assert_eq!(cube.volume().unwrap(), 0); assert!(cube.is_empty()); } }
use std::ops::{Sub, SubAssign}; use itertools::Itertools; use num::{CheckedAdd, CheckedMul, CheckedSub, Integer}; use petgraph::{ graph::NodeIndex, stable_graph::StableDiGraph, visit::{depth_first_search, Control, Dfs, DfsEvent, Reversed}, EdgeDirection::{Incoming, Outgoing}, }; use crate::r#box::Box; #[derive(Debug, Clone)] pub struct SwissBox<T: Integer> { tree: StableDiGraph<Box<T>, ()>, } impl<T: Integer> SwissBox<T> { pub fn new(initial: Box<T>) -> Self { if initial.is_empty() { Default::default() } else { let mut tree = StableDiGraph::new(); tree.add_node(initial); Self { tree } } } pub fn is_empty(&self) -> bool { self.tree.node_count() == 0 } fn root(&self) -> Option<NodeIndex> { if self.is_empty() { return None; }
} fn is_leaf(&self, node: NodeIndex) -> bool { self.tree .neighbors_directed(node, Outgoing) .next() .is_none() } } impl<T: Integer> Default for SwissBox<T> { fn default() -> Self { Self { tree: Default::default(), } } } impl<T: Integer> From<Box<T>> for SwissBox<T> { fn from(value: Box<T>) -> Self { Self::new(value) } } impl<T: Integer + Clone> Sub<Box<T>> for SwissBox<T> { type Output = Self; fn sub(self, rhs: Box<T>) -> Self::Output { self - &rhs } } impl<'a, T: Integer + Clone> Sub<&'a Box<T>> for SwissBox<T> { type Output = Self; fn sub(mut self, rhs: &'a Box<T>) -> Self::Output { self -= rhs; self } } impl<T: Integer + Clone> Sub<SwissBox<T>> for SwissBox<T> { type Output = Self; fn sub(self, rhs: SwissBox<T>) -> Self::Output { self - &rhs } } impl<'a, T: Integer + Clone> Sub<&'a SwissBox<T>> for SwissBox<T> { type Output = Self; fn sub(mut self, rhs: &'a SwissBox<T>) -> Self::Output { self -= rhs; self } } impl<T: Integer + Clone> SubAssign<Box<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: Box<T>) { *self -= &rhs; } } impl<'a, T: Integer + Clone> SubAssign<&'a Box<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: &'a Box<T>) { if self.is_empty() { return; } let mut to_add = vec![]; let mut to_delete = vec![]; depth_first_search( &self.tree, Some(self.root().unwrap()), |event| -> Control<()> { if let DfsEvent::Discover(node, _) = event { let intersection = self.tree[node].intersect(rhs); if intersection.is_empty() { return Control::Prune; } if !self.is_leaf(node) { return Control::Continue; } if intersection == self.tree[node] { to_delete.push(node); return Control::Continue; } for slice in self.tree[node].subtract_split(rhs) { to_add.push((node, slice)); } } Control::Continue }, ); for (parent, slice) in to_add { let child = self.tree.add_node(slice); self.tree.add_edge(parent, child, ()); } for node in to_delete { let mut ancestors = vec![]; let reversed = Reversed(&self.tree); let mut dfs = Dfs::new(&reversed, node); while let Some(ancestor) = dfs.next(&reversed) { if self.tree.neighbors_directed(ancestor, Outgoing).count() <= 1 { ancestors.push(ancestor); } else { break; } } for ancestor in ancestors { self.tree.remove_node(ancestor); } } } } impl<T: Integer + Clone> SubAssign<SwissBox<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: SwissBox<T>) { *self -= &rhs; } } impl<'a, T: Integer + Clone> SubAssign<&'a SwissBox<T>> for SwissBox<T> { fn sub_assign(&mut self, rhs: &'a SwissBox<T>) { for node in rhs.tree.externals(Outgoing) { *self -= &rhs.tree[node]; } } } impl<T: Integer + CheckedAdd + CheckedSub + CheckedMul> SwissBox<T> { pub fn volume(&self) -> Option<T> { let mut volume: T = T::zero(); for node in self.tree.externals(Outgoing) { let r#box = &self.tree[node]; volume = volume.checked_add(&r#box.volume()?)?; } Some(volume) } } #[cfg(test)] mod tests { use super::{Box, SwissBox}; #[test] fn empty_swiss_box_has_no_volume() { assert_eq!(SwissBox::<i32>::default().volume().unwrap(), 0); assert_eq!(SwissBox::new(Box::<i32>::default()).volume().unwrap(), 0); } #[test] fn swiss_box_has_volume_of_initial_box() { assert_eq!( SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }) .volume() .unwrap(), 27 ); } #[test] fn rubik_center() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let center = Box { x: (1..2).into(), y: (1..2).into(), z: (1..2).into(), }; cube -= center; assert_eq!(cube.volume().unwrap(), 27 - 1); } #[test] fn rubik_corner() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: 
(0..3).into(), z: (0..3).into(), }); let corner = Box { x: (2..3).into(), y: (2..3).into(), z: (2..3).into(), }; cube -= corner; assert_eq!(cube.volume().unwrap(), 27 - 1); } #[test] fn rubik_bar() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); let bar = Box { x: (0..3).into(), y: (1..2).into(), z: (1..2).into(), }; cube -= bar; assert_eq!(cube.volume().unwrap(), 27 - 3); } #[test] fn death_by_a_thousand_cuts() { let mut cube = SwissBox::new(Box { x: (0..3).into(), y: (0..3).into(), z: (0..3).into(), }); for x in 0..3 { for y in 0..3 { for z in 0..3 { cube -= Box { x: (x..x + 1).into(), y: (y..y + 1).into(), z: (z..z + 1).into(), }; } } } assert_eq!(cube.volume().unwrap(), 0); assert!(cube.is_empty()); } }
Some( self.tree .externals(Incoming) .exactly_one() .map_err(|_| "More than one root in the tree") .unwrap(), )
call_expression
[ { "content": "pub fn input_file() -> Result<File> {\n\n let input_filename = match env::args_os().nth(1) {\n\n Some(filename) => filename,\n\n None => {\n\n let mut path: PathBuf = [\n\n OsStr::new(INPUTS_DIRECTORY),\n\n env::current_exe()\n\n .context(\"Couldn't get executable filename\")?\n\n .file_stem()\n\n .context(\"No executable filename\")?,\n\n ]\n\n .iter()\n\n .collect();\n\n\n\n path.set_extension(\"txt\");\n\n\n\n path.into_os_string()\n\n }\n\n };\n\n\n\n Ok(File::open(input_filename)?)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 0, "score": 36446.16354436148 }, { "content": "pub fn input_lines() -> Result<impl Iterator<Item = Result<String>>> {\n\n Ok(BufReader::new(input_file()?).lines().map(|line| Ok(line?)))\n\n}\n", "file_path": "src/util.rs", "rank": 1, "score": 27727.39332588056 }, { "content": "fn main() {\n\n lalrpop::process_root().unwrap();\n\n}\n", "file_path": "build.rs", "rank": 2, "score": 26957.181845083534 }, { "content": "struct LinePointsIterator {\n\n current: Option<Point>,\n\n step: Vector,\n\n end: Point,\n\n}\n\n\n\nimpl Iterator for LinePointsIterator {\n\n type Item = Point;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if let Some(current) = self.current {\n\n self.current = if current == self.end {\n\n None\n\n } else {\n\n Some(current + self.step)\n\n };\n\n\n\n Some(current)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/geometry.rs", "rank": 3, "score": 24619.05925310836 }, { "content": "use std::ops::Range;\n\n\n\nuse anyhow::bail;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct BitMap {\n\n data: Vec<u8>,\n\n}\n\n\n\nimpl BitMap {\n\n pub fn new(data: Vec<u8>) -> Self {\n\n Self { data }\n\n }\n\n\n\n pub fn get(&self, index: usize) -> bool {\n\n assert!(index < self.len());\n\n\n\n self.data[index / 8] & (1 << (7 - index % 8)) != 0\n\n }\n\n\n", "file_path": "src/bitmap.rs", "rank": 4, "score": 7.0770559468377625 }, { "content": "const BINGO_ROWS: usize = 5;\n\nconst BINGO_COLS: usize = 5;\n\nconst BINGO_CELLS: usize = BINGO_ROWS * BINGO_COLS;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct Board {\n\n numbers: [u32; BINGO_CELLS],\n\n marked: [bool; BINGO_CELLS],\n\n}\n\n\n\nimpl Board {\n\n pub fn new(numbers: &[u32; BINGO_CELLS]) -> Self {\n\n Self {\n\n numbers: *numbers,\n\n marked: Default::default(),\n\n }\n\n }\n\n\n\n pub fn mark_number(&self, number: u32) -> Self {\n\n let mut new_board = *self;\n", "file_path": "src/bingo.rs", "rank": 5, "score": 6.050535555146924 }, { "content": "use std::{\n\n ops::{Add, Sub},\n\n str::FromStr,\n\n};\n\n\n\nuse anyhow::Result;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Vector {\n\n pub x: i32,\n\n pub y: i32,\n\n}\n\n\n\nimpl AsRef<Vector> for Vector {\n\n fn as_ref(&self) -> &Vector {\n\n self\n\n }\n\n}\n\n\n\nimpl Sub<Self> for Vector {\n", "file_path": "src/geometry.rs", "rank": 6, "score": 5.96767513601102 }, { "content": " y: self.y + rhs.y,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Line {\n\n pub start: Point,\n\n pub end: Point,\n\n}\n\n\n\nimpl Line {\n\n pub fn is_horizontal(&self) -> bool {\n\n self.start.y == self.end.y\n\n }\n\n\n\n pub fn is_vertical(&self) -> bool {\n\n self.start.x == self.end.x\n\n }\n\n\n", "file_path": "src/geometry.rs", "rank": 7, "score": 5.598986419905981 }, { "content": " cols: self.cols.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: PartialEq> PartialEq for Matrix<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.data == other.data && 
self.cols == other.cols\n\n }\n\n}\n\n\n\nimpl<T: Eq> Eq for Matrix<T> {}\n\n\n\nimpl<T: Hash> Hash for Matrix<T> {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.data.hash(state);\n\n self.cols.hash(state);\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Position {\n\n pub row: usize,\n\n pub col: usize,\n\n}\n", "file_path": "src/matrix.rs", "rank": 8, "score": 5.583972068769695 }, { "content": "\n\n (min_row..=max_row)\n\n .flat_map(move |row| (min_col..=max_col).map(move |col| Position { row, col }))\n\n .filter(move |neighbour| neighbour != &pos)\n\n }\n\n}\n\n\n\nimpl<T: Debug> Debug for Matrix<T> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"Matrix\")\n\n .field(\"data\", &self.data)\n\n .field(\"cols\", &self.cols)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<T: Clone> Clone for Matrix<T> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n data: self.data.clone(),\n", "file_path": "src/matrix.rs", "rank": 9, "score": 5.194252632782852 }, { "content": " if marked {\n\n None\n\n } else {\n\n Some(self.numbers[index])\n\n }\n\n })\n\n .sum()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Game {\n\n pub numbers: Vec<u32>,\n\n pub boards: Vec<Board>,\n\n}\n", "file_path": "src/bingo.rs", "rank": 10, "score": 5.0770026737077805 }, { "content": "\n\n pub fn slice(&self, range: Range<usize>) -> Self {\n\n Self {\n\n bitmap: self.bitmap,\n\n start: self.start + range.start.clamp(0, self.len()),\n\n end: self.start + range.end.clamp(0, self.len()),\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n if self.start >= self.end {\n\n 0\n\n } else {\n\n self.end - self.start\n\n }\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n", "file_path": "src/bitmap.rs", "rank": 11, "score": 4.890930440711469 }, { "content": " }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.data.is_empty()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct BitMapRef<'a> {\n\n bitmap: &'a BitMap,\n\n start: usize,\n\n end: usize,\n\n}\n\n\n\nimpl<'a> BitMapRef<'a> {\n\n pub fn get(&self, index: usize) -> bool {\n\n assert!(index < self.len());\n\n\n\n self.bitmap.get(self.start + index)\n\n }\n", "file_path": "src/bitmap.rs", "rank": 12, "score": 4.4673998243724355 }, { "content": "use std::{\n\n fmt::Debug,\n\n hash::{Hash, Hasher},\n\n};\n\n\n\npub struct Matrix<T> {\n\n data: Vec<T>,\n\n cols: usize,\n\n}\n\n\n\nimpl<T> Matrix<T> {\n\n pub fn new(data: Vec<T>, cols: usize) -> Self {\n\n assert_eq!(data.len() % cols, 0);\n\n Self { data, cols }\n\n }\n\n\n\n pub fn cols(&self) -> usize {\n\n self.cols\n\n }\n\n\n", "file_path": "src/matrix.rs", "rank": 13, "score": 4.435225352829743 }, { "content": "\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Point {\n\n pub x: i32,\n\n pub y: i32,\n\n}\n\n\n\nimpl FromStr for Point {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(string: &str) -> Result<Self, Self::Err> {\n\n let coordinates: Result<Vec<i32>> = string\n\n .split(',')\n\n .map(|coordinate| Ok(coordinate.parse()?))\n\n .collect();\n\n let coordinates = coordinates?;\n\n\n\n assert_eq!(coordinates.len(), 2);\n\n\n\n let x = coordinates[0];\n", "file_path": "src/geometry.rs", "rank": 14, "score": 4.0786292535746025 }, { "content": " pub fn set(&mut self, index: usize, value: bool) {\n\n assert!(index < self.len());\n\n\n\n if value {\n\n self.data[index / 8] |= 1 << (7 - index % 8);\n\n } else {\n\n self.data[index / 8] &= !(1 << (7 
- index % 8));\n\n }\n\n }\n\n\n\n pub fn slice(&self, range: Range<usize>) -> BitMapRef {\n\n BitMapRef {\n\n bitmap: self,\n\n start: range.start.clamp(0, self.len()),\n\n end: range.end.clamp(0, self.len()),\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.data.len() * 8\n", "file_path": "src/bitmap.rs", "rank": 15, "score": 3.862144107848423 }, { "content": " pub fn rows(&self) -> usize {\n\n self.data.len() / self.cols()\n\n }\n\n\n\n pub fn get(&self, pos: &Position) -> &T {\n\n assert!(pos.row < self.rows());\n\n assert!(pos.col < self.cols());\n\n\n\n &self.data[pos.row * self.cols() + pos.col]\n\n }\n\n\n\n pub fn get_mut(&mut self, pos: &Position) -> &mut T {\n\n assert!(pos.row < self.rows());\n\n assert!(pos.col < self.cols());\n\n\n\n let cols = self.cols();\n\n\n\n &mut self.data[pos.row * cols + pos.col]\n\n }\n\n\n", "file_path": "src/matrix.rs", "rank": 16, "score": 3.6334932202938415 }, { "content": "\n\n for index in 0..new_board.numbers.len() {\n\n if new_board.numbers[index] == number {\n\n new_board.marked[index] = true;\n\n }\n\n }\n\n\n\n new_board\n\n }\n\n\n\n pub fn is_marked(&self, row: usize, col: usize) -> bool {\n\n assert!(row < BINGO_ROWS);\n\n assert!(col < BINGO_COLS);\n\n\n\n self.marked[row * BINGO_ROWS + col]\n\n }\n\n\n\n pub fn is_winning(&self) -> bool {\n\n // Check rows\n\n for row in 0..BINGO_ROWS {\n", "file_path": "src/bingo.rs", "rank": 17, "score": 2.8990675644373938 }, { "content": " pub fn is_diagonal(&self) -> bool {\n\n let delta = self.end - self.start;\n\n delta.x.unsigned_abs() == delta.y.unsigned_abs()\n\n }\n\n\n\n pub fn points(&self) -> impl Iterator<Item = Point> {\n\n if !(self.is_horizontal() || self.is_vertical() || self.is_diagonal()) {\n\n todo!(\"Only horizontal, vertical, and 45-degree diagonal lines are implemented\")\n\n }\n\n\n\n let dx = if self.start.x < self.end.x {\n\n 1\n\n } else if self.start.x > self.end.x {\n\n -1\n\n } else {\n\n 0\n\n };\n\n\n\n let dy = if self.start.y < self.end.y {\n\n 1\n", "file_path": "src/geometry.rs", "rank": 18, "score": 2.7589729915360754 }, { "content": " type Output = Self;\n\n\n\n fn sub(self, rhs: Self) -> Self::Output {\n\n Self {\n\n x: self.x - rhs.x,\n\n y: self.y - rhs.y,\n\n }\n\n }\n\n}\n\n\n\nimpl Add<Self> for Vector {\n\n type Output = Self;\n\n\n\n fn add(self, rhs: Self) -> Self::Output {\n\n Self {\n\n x: self.x + rhs.x,\n\n y: self.y + rhs.y,\n\n }\n\n }\n\n}\n", "file_path": "src/geometry.rs", "rank": 19, "score": 2.5631959655147813 }, { "content": " }\n\n}\n\n\n\nimpl Sub<Vector> for Point {\n\n type Output = Self;\n\n\n\n fn sub(self, rhs: Vector) -> Self::Output {\n\n Self {\n\n x: self.x - rhs.x,\n\n y: self.y - rhs.y,\n\n }\n\n }\n\n}\n\n\n\nimpl Add<Vector> for Point {\n\n type Output = Self;\n\n\n\n fn add(self, rhs: Vector) -> Self::Output {\n\n Self {\n\n x: self.x + rhs.x,\n", "file_path": "src/geometry.rs", "rank": 20, "score": 2.3894444629325515 }, { "content": " let y = coordinates[1];\n\n\n\n Ok(Point { x, y })\n\n }\n\n}\n\n\n\nimpl AsRef<Point> for Point {\n\n fn as_ref(&self) -> &Point {\n\n self\n\n }\n\n}\n\n\n\nimpl Sub<Self> for Point {\n\n type Output = Self;\n\n\n\n fn sub(self, rhs: Self) -> Self::Output {\n\n Self {\n\n x: self.x - rhs.x,\n\n y: self.y - rhs.y,\n\n }\n", "file_path": "src/geometry.rs", "rank": 21, "score": 2.362961552317286 }, { "content": "pub mod bingo;\n\npub mod bitmap;\n\npub mod geometry;\n\npub mod matrix;\n\npub mod util;\n", "file_path": "src/lib.rs", "rank": 22, "score": 2.199677202762702 }, { "content": 
"}\n\n\n\nimpl<'a> From<&'a BitMap> for BitMapRef<'a> {\n\n fn from(bitmap: &'a BitMap) -> Self {\n\n bitmap.slice(0..bitmap.len())\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<BitMapRef<'a>> for u64 {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(value: BitMapRef<'a>) -> Result<Self, Self::Error> {\n\n Self::try_from(&value)\n\n }\n\n}\n\n\n\nimpl<'a, 'b> TryFrom<&'a BitMapRef<'b>> for u64 {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(value: &'a BitMapRef<'b>) -> Result<Self, Self::Error> {\n", "file_path": "src/bitmap.rs", "rank": 23, "score": 2.146950466841441 }, { "content": "use std::{\n\n env,\n\n ffi::OsStr,\n\n fs::File,\n\n io::{BufRead, BufReader},\n\n path::PathBuf,\n\n};\n\n\n\nuse anyhow::{Context, Result};\n\n\n\nconst INPUTS_DIRECTORY: &str = \"inputs\";\n\n\n", "file_path": "src/util.rs", "rank": 24, "score": 2.091212039264523 }, { "content": " } else if self.start.y > self.end.y {\n\n -1\n\n } else {\n\n 0\n\n };\n\n\n\n let step = Vector { x: dx, y: dy };\n\n\n\n LinePointsIterator {\n\n current: Some(self.start),\n\n step,\n\n end: self.end,\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Line {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(string: &str) -> Result<Self, Self::Err> {\n", "file_path": "src/geometry.rs", "rank": 25, "score": 1.8329642405995927 }, { "content": " if value.len() > u64::BITS.try_into().unwrap() {\n\n bail!(\"Value has too many bits\");\n\n }\n\n\n\n let mut result = 0u64;\n\n\n\n for index in 0..value.len() {\n\n result <<= 1;\n\n result |= if value.get(index) { 1 } else { 0 };\n\n }\n\n\n\n Ok(result)\n\n }\n\n}\n", "file_path": "src/bitmap.rs", "rank": 26, "score": 1.5455387200406911 }, { "content": " pub fn all_points(&self) -> impl Iterator<Item = Position> {\n\n let rows = self.rows();\n\n let cols = self.cols();\n\n (0..rows).flat_map(move |row| (0..cols).map(move |col| Position { row, col }))\n\n }\n\n\n\n pub fn neighbours(&self, pos: &Position) -> impl Iterator<Item = Position> {\n\n let pos = *pos;\n\n\n\n assert!(pos.row < self.rows());\n\n assert!(pos.col < self.cols());\n\n\n\n let min_row = pos.row.saturating_sub(1);\n\n let max_row = (pos.row + 1).min(self.rows() - 1);\n\n\n\n let up_down = (min_row..=max_row)\n\n .filter(move |row| *row != pos.row)\n\n .map(move |row| Position { row, col: pos.col });\n\n\n\n let min_col = pos.col.saturating_sub(1);\n", "file_path": "src/matrix.rs", "rank": 27, "score": 1.0246177714611409 }, { "content": " if self.marked[row * BINGO_ROWS..row * BINGO_ROWS + BINGO_COLS] == [true; BINGO_COLS] {\n\n return true;\n\n }\n\n }\n\n\n\n // Check columns\n\n for col in 0..BINGO_COLS {\n\n if (0..BINGO_ROWS).all(|row| self.is_marked(row, col)) {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n pub fn sum_unmarked(&self) -> u32 {\n\n self.marked\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(index, &marked)| {\n", "file_path": "src/bingo.rs", "rank": 28, "score": 0.9798995685868723 }, { "content": " let max_col = (pos.col + 1).min(self.cols() - 1);\n\n\n\n let left_right = (min_col..=max_col)\n\n .filter(move |col| *col != pos.col)\n\n .map(move |col| Position { col, row: pos.row });\n\n\n\n up_down.chain(left_right)\n\n }\n\n\n\n pub fn neighbours_with_diagonals(&self, pos: &Position) -> impl Iterator<Item = Position> {\n\n let pos = *pos;\n\n\n\n assert!(pos.row < self.rows());\n\n assert!(pos.col < self.cols());\n\n\n\n let min_row = pos.row.saturating_sub(1);\n\n let max_row = (pos.row + 1).min(self.rows() - 1);\n\n\n\n let min_col = pos.col.saturating_sub(1);\n\n let max_col = (pos.col + 
1).min(self.cols() - 1);\n", "file_path": "src/matrix.rs", "rank": 29, "score": 0.6636517996190794 } ]
Rust
src/fs.rs
autokrator-uog/actix-web
8590eca7f83cf835b458c7e57d6f6ccffc8a58f4
use std::io; use std::io::Read; use std::fmt::Write; use std::fs::{File, DirEntry}; use std::path::{Path, PathBuf}; use std::ops::{Deref, DerefMut}; use mime_guess::get_mime_type; use param::FromParam; use handler::{Handler, Responder}; use headers::ContentEncoding; use httprequest::HttpRequest; use httpresponse::HttpResponse; use httpcodes::{HTTPOk, HTTPFound}; #[derive(Debug)] pub struct NamedFile(PathBuf, File); impl NamedFile { pub fn open<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> { let file = File::open(path.as_ref())?; Ok(NamedFile(path.as_ref().to_path_buf(), file)) } #[inline] pub fn file(&self) -> &File { &self.1 } #[inline] pub fn path(&self) -> &Path { self.0.as_path() } } impl Deref for NamedFile { type Target = File; fn deref(&self) -> &File { &self.1 } } impl DerefMut for NamedFile { fn deref_mut(&mut self) -> &mut File { &mut self.1 } } impl Responder for NamedFile { type Item = HttpResponse; type Error = io::Error; fn respond_to(mut self, _: HttpRequest) -> Result<HttpResponse, io::Error> { let mut resp = HTTPOk.build(); resp.content_encoding(ContentEncoding::Identity); if let Some(ext) = self.path().extension() { let mime = get_mime_type(&ext.to_string_lossy()); resp.content_type(format!("{}", mime).as_str()); } let mut data = Vec::new(); let _ = self.1.read_to_end(&mut data); Ok(resp.body(data).unwrap()) } } #[derive(Debug)] pub struct Directory{ base: PathBuf, path: PathBuf } impl Directory { pub fn new(base: PathBuf, path: PathBuf) -> Directory { Directory { base: base, path: path } } fn can_list(&self, entry: &io::Result<DirEntry>) -> bool { if let Ok(ref entry) = *entry { if let Some(name) = entry.file_name().to_str() { if name.starts_with('.') { return false } } if let Ok(ref md) = entry.metadata() { let ft = md.file_type(); return ft.is_dir() || ft.is_file() || ft.is_symlink() } } false } } impl Responder for Directory { type Item = HttpResponse; type Error = io::Error; fn respond_to(self, req: HttpRequest) -> Result<HttpResponse, io::Error> { let index_of = format!("Index of {}", req.path()); let mut body = String::new(); let base = Path::new(req.path()); for entry in self.path.read_dir()? 
{ if self.can_list(&entry) { let entry = entry.unwrap(); let p = match entry.path().strip_prefix(&self.path) { Ok(p) => base.join(p), Err(_) => continue }; let file_url = format!("{}", p.to_string_lossy()); if let Ok(metadata) = entry.metadata() { if metadata.is_dir() { let _ = write!(body, "<li><a href=\"{}\">{}/</a></li>", file_url, entry.file_name().to_string_lossy()); } else { let _ = write!(body, "<li><a href=\"{}\">{}</a></li>", file_url, entry.file_name().to_string_lossy()); } } else { continue } } } let html = format!("<html>\ <head><title>{}</title></head>\ <body><h1>{}</h1>\ <ul>\ {}\ </ul></body>\n</html>", index_of, index_of, body); Ok(HTTPOk.build() .content_type("text/html; charset=utf-8") .body(html).unwrap()) } } pub enum FilesystemElement { File(NamedFile), Directory(Directory), Redirect(HttpResponse), } impl Responder for FilesystemElement { type Item = HttpResponse; type Error = io::Error; fn respond_to(self, req: HttpRequest) -> Result<HttpResponse, io::Error> { match self { FilesystemElement::File(file) => file.respond_to(req), FilesystemElement::Directory(dir) => dir.respond_to(req), FilesystemElement::Redirect(resp) => Ok(resp), } } } pub struct StaticFiles { directory: PathBuf, accessible: bool, index: Option<String>, show_index: bool, _chunk_size: usize, _follow_symlinks: bool, } impl StaticFiles { pub fn new<T: Into<PathBuf>>(dir: T, index: bool) -> StaticFiles { let dir = dir.into(); let (dir, access) = match dir.canonicalize() { Ok(dir) => { if dir.is_dir() { (dir, true) } else { warn!("Is not directory `{:?}`", dir); (dir, false) } }, Err(err) => { warn!("Static files directory `{:?}` error: {}", dir, err); (dir, false) } }; StaticFiles { directory: dir, accessible: access, index: None, show_index: index, _chunk_size: 0, _follow_symlinks: false, } } pub fn index_file<T: Into<String>>(mut self, index: T) -> StaticFiles { self.index = Some(index.into()); self } } impl<S> Handler<S> for StaticFiles { type Result = Result<FilesystemElement, io::Error>; fn handle(&mut self, req: HttpRequest<S>) -> Self::Result { if !self.accessible { Err(io::Error::new(io::ErrorKind::NotFound, "not found")) } else { let path = if let Some(path) = req.match_info().get("tail") { path } else { return Err(io::Error::new(io::ErrorKind::NotFound, "not found")) }; let relpath = PathBuf::from_param(path) .map_err(|_| io::Error::new(io::ErrorKind::NotFound, "not found"))?; let path = self.directory.join(&relpath).canonicalize()?; if path.is_dir() { if let Some(ref redir_index) = self.index { let mut base = Path::new(req.path()).join(relpath); base.push(redir_index); Ok(FilesystemElement::Redirect( HTTPFound .build() .header("LOCATION", base.to_string_lossy().as_ref()) .finish().unwrap())) } else if self.show_index { Ok(FilesystemElement::Directory(Directory::new(self.directory.clone(), path))) } else { Err(io::Error::new(io::ErrorKind::NotFound, "not found")) } } else { Ok(FilesystemElement::File(NamedFile::open(path)?)) } } } } #[cfg(test)] mod tests { use super::*; use http::{header, StatusCode}; #[test] fn test_named_file() { assert!(NamedFile::open("test--").is_err()); let mut file = NamedFile::open("Cargo.toml").unwrap(); { file.file(); let _f: &File = &file; } { let _f: &mut File = &mut file; } let resp = file.respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.headers().get(header::CONTENT_TYPE).unwrap(), "text/x-toml") } #[test] fn test_static_files() { let mut st = StaticFiles::new(".", true); st.accessible = false; assert!(st.handle(HttpRequest::default()).is_err()); 
st.accessible = true; st.show_index = false; assert!(st.handle(HttpRequest::default()).is_err()); let mut req = HttpRequest::default(); req.match_info_mut().add("tail", ""); st.show_index = true; let resp = st.handle(req).respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.headers().get(header::CONTENT_TYPE).unwrap(), "text/html; charset=utf-8"); assert!(resp.body().is_binary()); assert!(format!("{:?}", resp.body()).contains("README.md")); } #[test] fn test_redirec_to_index() { let mut st = StaticFiles::new(".", false).index_file("index.html"); let mut req = HttpRequest::default(); req.match_info_mut().add("tail", "guide"); let resp = st.handle(req).respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.status(), StatusCode::FOUND); assert_eq!(resp.headers().get(header::LOCATION).unwrap(), "/guide/index.html"); } }
use std::io; use std::io::Read; use std::fmt::Write; use std::fs::{File, DirEntry}; use std::path::{Path, PathBuf}; use std::ops::{Deref, DerefMut}; use mime_guess::get_mime_type; use param::FromParam; use handler::{Handler, Responder}; use headers::ContentEncoding; use httprequest::HttpRequest; use httpresponse::HttpResponse; use httpcodes::{HTTPOk, HTTPFound}; #[derive(Debug)] pub struct NamedFile(PathBuf, File); impl NamedFile { pub fn open<P: AsRef<Path>>(path: P) -> io::Result<NamedFile> { let file = File::open(path.as_ref())?; Ok(NamedFile(path.as_ref().to_path_buf(), file)) } #[inline] pub fn file(&self) -> &File { &self.1 } #[inline] pub fn path(&self) -> &Path { self.0.as_path() } } impl Deref for NamedFile { type Target = File; fn deref(&self) -> &File { &self.1 } } impl DerefMut for NamedFile { fn deref_mut(&mut self) -> &mut File { &mut self.1 } } impl Responder for NamedFile { type Item = HttpResponse; type Error = io::Error; fn respond_to(mut self, _: HttpRequest) -> Result<HttpResponse, io::Error> { let mut resp = HTTPOk.build(); resp.content_encoding(ContentEncoding::Identity); if let Some(ext) = self.path().extension() { let mime = get_mime_type(&ext.to_string_lossy()); resp.content_type(format!("{}", mime).as_str()); } let mut data = Vec::new(); let _ = self.1.read_to_end(&mut data); Ok(resp.body(data).unwrap()) } } #[derive(Debug)] pub struct Directory{ base: PathBuf, path: PathBuf } impl Directory { pub fn new(base: PathBuf, path: PathBuf) -> Directory { Directory { base: base, path: path } } fn can_list(&self, entry: &io::Result<DirEntry>) -> bool { if let Ok(ref entry) = *entry { if let Some(name) = entry.file_name().to_str() { if name.starts_with('.') { return false } } if let Ok(ref md) = entry.metadata() { let ft = md.file_type(); return ft.is_dir() || ft.is_file() || ft.is_symlink() } } false } } impl Responder for Directory { type Item = HttpResponse; type Error = io::Error; fn respond_to(self, req: HttpRequest) -> Result<HttpResponse, io::Error> { let index_of = format!("Index of {}", req.path()); let mut body = String::new(); let base = Path::new(req.path()); for entry in self.path.read_dir()? 
{ if self.can_list(&entry) { let entry = entry.unwrap(); let p = match entry.path().strip_prefix(&self.path) { Ok(p) => base.join(p), Err(_) => continue }; let file_url = format!("{}", p.to_string_lossy()); if let Ok(metadata) = entry.metadata() { if metadata.is_dir() { let _ = write!(body, "<li><a href=\"{}\">{}/</a></li>", file_url, entry.file_name().to_string_lossy()); } else { let _ = write!(body, "<li><a href=\"{}\">{}</a></li>", file_url, entry.file_name().to_string_lossy()); } } else { continue } } } let html = format!("<html>\ <head><title>{}</title></head>\ <body><h1>{}</h1>\ <ul>\ {}\ </ul></body>\n</html>", index_of, index_of, body); Ok(HTTPOk.build() .content_type("text/html; charset=utf-8") .body(html).unwrap()) } } pub enum FilesystemElement { File(NamedFile), Directory(Directory), Redirect(HttpResponse), } impl Responder for FilesystemElement { type Item = HttpResponse; type Error = io::Error; fn respond_to(self, req: HttpRequest) -> Result<HttpResponse, io::Error> { match self { FilesystemElement::File(file) => file.respond_to(req), FilesystemElement::Directory(dir) => dir.respond_to(req), FilesystemElement::Redirect(resp) => Ok(resp), } } } pub struct StaticFiles { directory: PathBuf, accessible: bool, index: Option<String>, show_index: bool, _chunk_size: usize, _follow_symlinks: bool, } impl StaticFiles { pub fn new<T: Into<PathBuf>>(dir: T, index: bool) -> StaticFiles { let dir = dir.into(); let (dir, access) = match dir.canonicalize() { Ok(dir) => { if dir.is_dir() { (dir, true) } else { warn!("Is not directory `{:?}`", dir); (dir, false) } }, Err(err) => { warn!("Static files directory `{:?}` error: {}", dir, err); (dir, false) } }; StaticFiles { directory: dir, accessible: access, index: None, show_index: index, _chunk_size: 0, _follow_symlinks: false, } } pub fn index_file<T: Into<String>>(mut self, index: T) -> StaticFiles { self.index = Some(index.into()); self } } impl<S> Handler<S> for StaticFiles { type Result = Result<FilesystemElement, io::Error>; fn handle(&mut self, req: HttpRequest<S>) -> Self::Result { if !self.accessible { Err(io::Error::new(io::ErrorKind::NotFound, "not found")) } else { let path = if let Some(path) = req.match_info().get("tail") { path } else { return Err(io::Error::new(io::ErrorKind::NotFound, "not found")) }; let relpath = PathBuf::from_param(path) .map_err(|_| io::Error::new(io::ErrorKind::NotFound, "not found"))?; let path = self.directory.join(&relpath).canonicalize()?; if path.is_dir() { if let Some(ref redir_index) = self.index { let mut base = Path::new(req.path()).join(relpath); base.push(redir_index); Ok(FilesystemElement::Redirect( HTTPFound .build() .header("LOCATION", base.to_string_lossy().as_ref()) .finish().unwrap())) } else if self.show_index { Ok(FilesystemElement::Directory(Directory::new(self.directory.clone(), path))) } else { Err(io::Error::new(io::ErrorKind::NotFound, "not found")) } } else { Ok(FilesystemElement::File(NamedFile::open(path)?)) } } } } #[cfg(test)] mod tests { use super::*; use http::{header, StatusCode}; #[test] fn test_named_file() { assert!(NamedFile::open("test--").is_err()); let mut file = NamedFile::open("Cargo.toml").unwrap(); { file.file(); let _f: &File = &file; } { let _f: &mut File = &mut file; } let resp = file.respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.headers().get(header::CONTENT_TYPE).unwrap(), "text/x-toml") } #[test] fn test_static_files() { let mut st = StaticFiles::new(".", true); st.accessible = false; assert!(st.handle(HttpRequest::default()).is_err()); 
st.accessible = true; st.show_index = false; assert!(st.handle(HttpRequest::default()).is_err()); let mut req = HttpRequest::default(); req.match_info_mut().add("tail", ""); st.show_index = true; let resp = st.handle(req).respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.headers().get(header::CONTENT_TYPE).unwrap(), "text/html; charset=utf-8"); assert!(resp.body().is_binary()); assert!(format!("{:?}", resp.body()).contains("README.md")); } #[test] fn test_redirec_to_index() {
}
let mut st = StaticFiles::new(".", false).index_file("index.html"); let mut req = HttpRequest::default(); req.match_info_mut().add("tail", "guide"); let resp = st.handle(req).respond_to(HttpRequest::default()).unwrap(); assert_eq!(resp.status(), StatusCode::FOUND); assert_eq!(resp.headers().get(header::LOCATION).unwrap(), "/guide/index.html"); }
function_block-function_prefix_line
[ { "content": "fn index(mut req: HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>>\n\n{\n\n println!(\"{:?}\", req);\n\n\n\n req.multipart() // <- get multipart stream for current request\n\n .from_err() // <- convert multipart errors\n\n .and_then(|item| { // <- iterate over multipart items\n\n match item {\n\n // Handle multipart Field\n\n multipart::MultipartItem::Field(field) => {\n\n println!(\"==== FIELD ==== {:?}\", field);\n\n\n\n // Field in turn is stream of *Bytes* object\n\n Either::A(\n\n field.map_err(Error::from)\n\n .map(|chunk| {\n\n println!(\"-- CHUNK: \\n{}\",\n\n std::str::from_utf8(&chunk).unwrap());})\n\n .finish())\n\n },\n", "file_path": "examples/multipart/src/main.rs", "rank": 0, "score": 314799.4749427085 }, { "content": "/// Do websocket handshake and start actor\n\npub fn start<A, S>(mut req: HttpRequest<S>, actor: A) -> Result<HttpResponse, Error>\n\n where A: Actor<Context=WebsocketContext<A, S>> + Handler<Message>,\n\n S: 'static\n\n{\n\n let mut resp = handshake(&req)?;\n\n let stream = WsStream::new(req.payload_mut().readany());\n\n\n\n let mut ctx = WebsocketContext::new(req, actor);\n\n ctx.add_message_stream(stream);\n\n\n\n Ok(resp.body(ctx)?)\n\n}\n\n\n", "file_path": "src/ws/mod.rs", "rank": 1, "score": 304133.0922020558 }, { "content": "/// This handler manually load request payload and parse json-rust\n\nfn index_mjsonrust(mut req: HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {\n\n req.payload_mut().readany().concat2()\n\n .from_err()\n\n .and_then(|body| {\n\n // body is loaded, now we can deserialize json-rust\n\n let result = json::parse(std::str::from_utf8(&body).unwrap()); // return Result\n\n let injson: JsonValue = match result { Ok(v) => v, Err(e) => object!{\"err\" => e.to_string() } };\n\n Ok(HttpResponse::build(StatusCode::OK)\n\n .content_type(\"application/json\")\n\n .body(injson.dump()).unwrap())\n\n\n\n })\n\n .responder()\n\n}\n\n\n", "file_path": "examples/json/src/main.rs", "rank": 2, "score": 291840.6065746303 }, { "content": "/// This handler manually load request payload and parse serde json\n\nfn index_manual(mut req: HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {\n\n // readany() returns asynchronous stream of Bytes objects\n\n req.payload_mut().readany()\n\n // `Future::from_err` acts like `?` in that it coerces the error type from\n\n // the future into the final error type\n\n .from_err()\n\n\n\n // `fold` will asynchronously read each chunk of the request body and\n\n // call supplied closure, then it resolves to result of closure\n\n .fold(BytesMut::new(), move |mut body, chunk| {\n\n // limit max size of in-memory payload\n\n if (body.len() + chunk.len()) > MAX_SIZE {\n\n Err(error::ErrorBadRequest(\"overflow\"))\n\n } else {\n\n body.extend_from_slice(&chunk);\n\n Ok(body)\n\n }\n\n })\n\n // `Future::and_then` can be used to merge an asynchronous workflow with a\n\n // synchronous workflow\n\n .and_then(|body| {\n\n // body is loaded, now we can deserialize serde-json\n\n let obj = serde_json::from_slice::<MyObj>(&body)?;\n\n Ok(httpcodes::HTTPOk.build().json(obj)?) 
// <- send response\n\n })\n\n .responder()\n\n}\n\n\n", "file_path": "examples/json/src/main.rs", "rank": 3, "score": 291840.6065746303 }, { "content": "/// simple index handler\n\nfn index(mut req: HttpRequest) -> Result<HttpResponse> {\n\n println!(\"{:?}\", req);\n\n\n\n // example of ...\n\n if let Ok(ch) = req.payload_mut().readany().poll() {\n\n if let futures::Async::Ready(Some(d)) = ch {\n\n println!(\"{}\", String::from_utf8_lossy(d.as_ref()));\n\n }\n\n }\n\n\n\n // session\n\n let mut counter = 1;\n\n if let Some(count) = req.session().get::<i32>(\"counter\")? {\n\n println!(\"SESSION value: {}\", count);\n\n counter = count + 1;\n\n req.session().set(\"counter\", counter)?;\n\n } else {\n\n req.session().set(\"counter\", counter)?;\n\n }\n\n\n", "file_path": "examples/basics/src/main.rs", "rank": 4, "score": 291707.83704273775 }, { "content": "/// This handler uses `HttpRequest::json()` for loading serde json object.\n\nfn index(req: HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {\n\n req.json()\n\n .from_err() // convert all errors into `Error`\n\n .and_then(|val: MyObj| {\n\n println!(\"model: {:?}\", val);\n\n Ok(httpcodes::HTTPOk.build().json(val)?) // <- send response\n\n })\n\n .responder()\n\n}\n\n\n\n\n\nconst MAX_SIZE: usize = 262_144; // max payload size is 256k\n\n\n", "file_path": "examples/json/src/main.rs", "rank": 5, "score": 278264.96466652624 }, { "content": "/// Async request handler\n\nfn index(req: HttpRequest<State>) -> Box<Future<Item=HttpResponse, Error=Error>> {\n\n let name = &req.match_info()[\"name\"];\n\n\n\n req.state().db.call_fut(CreateUser{name: name.to_owned()})\n\n .from_err()\n\n .and_then(|res| {\n\n match res {\n\n Ok(user) => Ok(httpcodes::HTTPOk.build().json(user)?),\n\n Err(_) => Ok(httpcodes::HTTPInternalServerError.into())\n\n }\n\n })\n\n .responder()\n\n}\n\n\n", "file_path": "examples/diesel/src/main.rs", "rank": 6, "score": 268546.21768206847 }, { "content": "pub fn info(req: HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {\n\n req.json() \n\n .from_err()\n\n .and_then(|res: Info| {\n\n Ok(httpcodes::HTTPOk.build().json(res)?)\n\n }).responder()\n\n}\n", "file_path": "examples/web-cors/backend/src/user.rs", "rank": 7, "score": 260392.67552907253 }, { "content": "/// Prepare `WebSocket` handshake response.\n\n///\n\n/// This function returns handshake `HttpResponse`, ready to send to peer.\n\n/// It does not perform any IO.\n\n///\n\n// /// `protocols` is a sequence of known protocols. 
On successful handshake,\n\n// /// the returned response headers contain the first protocol in this list\n\n// /// which the server also knows.\n\npub fn handshake<S>(req: &HttpRequest<S>) -> Result<HttpResponseBuilder, WsHandshakeError> {\n\n // WebSocket accepts only GET\n\n if *req.method() != Method::GET {\n\n return Err(WsHandshakeError::GetMethodRequired)\n\n }\n\n\n\n // Check for \"UPGRADE\" to websocket header\n\n let has_hdr = if let Some(hdr) = req.headers().get(header::UPGRADE) {\n\n if let Ok(s) = hdr.to_str() {\n\n s.to_lowercase().contains(\"websocket\")\n\n } else {\n\n false\n\n }\n\n } else {\n\n false\n\n };\n\n if !has_hdr {\n\n return Err(WsHandshakeError::NoWebsocketUpgrade)\n\n }\n\n\n", "file_path": "src/ws/mod.rs", "rank": 8, "score": 256136.44996501325 }, { "content": "/// simple handle\n\nfn index(req: HttpRequest) -> Result<HttpResponse> {\n\n println!(\"{:?}\", req);\n\n Ok(httpcodes::HTTPOk\n\n .build()\n\n .content_type(\"text/plain\")\n\n .body(\"Welcome!\")?)\n\n}\n\n\n", "file_path": "examples/tls/src/main.rs", "rank": 9, "score": 248864.87480878018 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorNotFound<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::NOT_FOUND)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *METHOD NOT ALLOWED* response.\n", "file_path": "src/error.rs", "rank": 10, "score": 241280.67669084124 }, { "content": "/// Check if request has chunked transfer encoding\n\npub fn chunked(headers: &HeaderMap) -> Result<bool, ParseError> {\n\n if let Some(encodings) = headers.get(header::TRANSFER_ENCODING) {\n\n if let Ok(s) = encodings.to_str() {\n\n Ok(s.to_lowercase().contains(\"chunked\"))\n\n } else {\n\n Err(ParseError::Header)\n\n }\n\n } else {\n\n Ok(false)\n\n }\n\n}\n\n\n\n/// Decoders to handle different Transfer-Encodings.\n\n///\n\n/// If a message body does not include a Transfer-Encoding, it *should*\n\n/// include a Content-Length header.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Decoder {\n\n kind: Kind,\n\n}\n", "file_path": "src/server/h1.rs", "rank": 11, "score": 239789.701143487 }, { "content": "/// async handler\n\nfn index_async(req: HttpRequest) -> FutureResult<HttpResponse, Error>\n\n{\n\n println!(\"{:?}\", req);\n\n\n\n result(HttpResponse::Ok()\n\n .content_type(\"text/html\")\n\n .body(format!(\"Hello {}!\", req.match_info().get(\"name\").unwrap()))\n\n .map_err(|e| e.into()))\n\n}\n\n\n", "file_path": "examples/basics/src/main.rs", "rank": 12, "score": 236388.62002944853 }, { "content": "fn index(req: HttpRequest<State>) -> Result<HttpResponse> {\n\n let s = if let Some(name) = req.query().get(\"name\") { // <- submitted form\n\n let mut ctx = tera::Context::new();\n\n ctx.add(\"name\", &name.to_owned());\n\n ctx.add(\"text\", &\"Welcome!\".to_owned());\n\n req.state().template.render(\"user.html\", &ctx)\n\n .map_err(|_| error::ErrorInternalServerError(\"Template error\"))?\n\n } else {\n\n req.state().template.render(\"index.html\", &tera::Context::new())\n\n .map_err(|_| error::ErrorInternalServerError(\"Template error\"))?\n\n };\n\n Ok(httpcodes::HTTPOk.build()\n\n .content_type(\"text/html\")\n\n .body(s)?)\n\n}\n\n\n", "file_path": "examples/template_tera/src/main.rs", "rank": 13, "score": 233703.68814558882 }, { "content": "/// favicon handler\n\nfn favicon(req: HttpRequest) -> Result<fs::NamedFile> {\n\n Ok(fs::NamedFile::open(\"../static/favicon.ico\")?)\n\n}\n\n\n", "file_path": "examples/basics/src/main.rs", "rank": 14, "score": 
214381.1146270152 }, { "content": "pub fn read_from_io<T: IoStream>(io: &mut T, buf: &mut BytesMut) -> Poll<usize, io::Error> {\n\n unsafe {\n\n if buf.remaining_mut() < LW_BUFFER_SIZE {\n\n buf.reserve(HW_BUFFER_SIZE);\n\n }\n\n match io.read(buf.bytes_mut()) {\n\n Ok(n) => {\n\n buf.advance_mut(n);\n\n Ok(Async::Ready(n))\n\n },\n\n Err(e) => {\n\n if e.kind() == io::ErrorKind::WouldBlock {\n\n Ok(Async::NotReady)\n\n } else {\n\n Err(e)\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/server/utils.rs", "rank": 15, "score": 211999.54124336067 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorGone<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::GONE)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *PRECONDITION FAILED* response.\n", "file_path": "src/error.rs", "rank": 16, "score": 208982.80459733683 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorForbidden<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::FORBIDDEN)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *NOT FOUND* response.\n", "file_path": "src/error.rs", "rank": 17, "score": 208982.80459733683 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorUnauthorized<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::UNAUTHORIZED)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *FORBIDDEN* response.\n", "file_path": "src/error.rs", "rank": 18, "score": 208982.80459733683 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorConflict<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::CONFLICT)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *GONE* response.\n", "file_path": "src/error.rs", "rank": 19, "score": 208982.80459733683 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorInternalServerError<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::INTERNAL_SERVER_ERROR)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::error::Error as StdError;\n\n use std::io;\n\n use httparse;\n\n use http::{StatusCode, Error as HttpError};\n\n use cookie::ParseError as CookieParseError;\n\n use super::*;\n\n\n\n #[test]\n\n #[cfg(actix_nightly)]\n\n fn test_nightly() {\n\n let resp: HttpResponse = IoError::new(io::ErrorKind::Other, \"test\").error_response();\n\n assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n\n", "file_path": "src/error.rs", "rank": 20, "score": 205722.29339443022 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorBadRequest<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::BAD_REQUEST)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *UNAUTHORIZED* response.\n", "file_path": "src/error.rs", "rank": 21, "score": 205189.090475919 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorExpectationFailed<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::EXPECTATION_FAILED)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *INTERNAL SERVER ERROR* response.\n", "file_path": "src/error.rs", "rank": 22, "score": 205189.090475919 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorMethodNotAllowed<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::METHOD_NOT_ALLOWED)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *REQUEST TIMEOUT* response.\n", "file_path": "src/error.rs", 
"rank": 23, "score": 205189.090475919 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorRequestTimeout<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::REQUEST_TIMEOUT)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *CONFLICT* response.\n", "file_path": "src/error.rs", "rank": 24, "score": 205189.090475919 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorPreconditionFailed<T>(err: T) -> InternalError<T> {\n\n InternalError::new(err, StatusCode::PRECONDITION_FAILED)\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *EXPECTATION FAILED* response.\n", "file_path": "src/error.rs", "rank": 25, "score": 205189.090475919 }, { "content": "fn cors<'a>(parts: &'a mut Option<Cors>, err: &Option<http::Error>) -> Option<&'a mut Cors> {\n\n if err.is_some() {\n\n return None\n\n }\n\n parts.as_mut()\n\n}\n\n\n\nimpl CorsBuilder {\n\n\n\n /// Add an origin that are allowed to make requests.\n\n /// Will be verified against the `Origin` request header.\n\n ///\n\n /// When `All` is set, and `send_wildcard` is set, \"*\" will be sent in\n\n /// the `Access-Control-Allow-Origin` response header. Otherwise, the client's `Origin` request\n\n /// header will be echoed back in the `Access-Control-Allow-Origin` response header.\n\n ///\n\n /// When `Some` is set, the client's `Origin` request header will be checked in a\n\n /// case-sensitive manner.\n\n ///\n\n /// This is the `list of origins` in the\n", "file_path": "src/middleware/cors.rs", "rank": 26, "score": 192208.3881908944 }, { "content": "/// handler with path parameters like `/user/{name}/`\n\nfn with_param(req: HttpRequest) -> Result<HttpResponse>\n\n{\n\n println!(\"{:?}\", req);\n\n\n\n Ok(HttpResponse::Ok()\n\n .content_type(\"test/plain\")\n\n .body(format!(\"Hello {}!\", req.match_info().get(\"name\").unwrap()))?)\n\n}\n\n\n", "file_path": "examples/basics/src/main.rs", "rank": 27, "score": 191994.24934235186 }, { "content": "/// 404 handler\n\nfn p404(req: HttpRequest) -> Result<HttpResponse> {\n\n\n\n // html\n\n let html = r#\"<!DOCTYPE html><html><head><title>actix - basics</title><link rel=\"shortcut icon\" type=\"image/x-icon\" href=\"/favicon.ico\" /></head>\n\n<body>\n\n <a href=\"index.html\">back to home</a>\n\n <h1>404</h1>\n\n</body>\n\n</html>\"#;\n\n\n\n // response\n\n Ok(HttpResponse::build(StatusCode::NOT_FOUND)\n\n .content_type(\"text/html; charset=utf-8\")\n\n .body(html).unwrap())\n\n}\n\n\n\n\n", "file_path": "examples/basics/src/main.rs", "rank": 28, "score": 191989.41659381986 }, { "content": "/// somple handle\n\nfn index(req: HttpRequest<AppState>) -> HttpResponse {\n\n println!(\"{:?}\", req);\n\n req.state().counter.set(req.state().counter.get() + 1);\n\n\n\n httpcodes::HTTPOk.with_body(\n\n format!(\"Num of requests: {}\", req.state().counter.get()))\n\n}\n\n\n", "file_path": "examples/state/src/main.rs", "rank": 29, "score": 189111.70592568186 }, { "content": "type Fut = Box<Future<Item=Option<HttpResponse>, Error=Error>>;\n\n\n", "file_path": "src/pipeline.rs", "rank": 30, "score": 179443.85927206 }, { "content": "type Fut = Box<Future<Item=Option<HttpResponse>, Error=Error>>;\n\n\n\nimpl<S: 'static> StartMiddlewares<S> {\n\n\n\n fn init(info: &mut ComposeInfo<S>) -> ComposeState<S> {\n\n let len = info.mws.len();\n\n loop {\n\n if info.count == len {\n\n let reply = info.handler.handle(info.req.clone());\n\n return WaitingResponse::init(info, reply)\n\n } else {\n\n match info.mws[info.count].start(&mut info.req) 
{\n\n Ok(MiddlewareStarted::Done) =>\n\n info.count += 1,\n\n Ok(MiddlewareStarted::Response(resp)) =>\n\n return RunMiddlewares::init(info, resp),\n\n Ok(MiddlewareStarted::Future(mut fut)) =>\n\n match fut.poll() {\n\n Ok(Async::NotReady) =>\n\n return ComposeState::Starting(StartMiddlewares {\n", "file_path": "src/route.rs", "rank": 31, "score": 179443.85927206 }, { "content": "#[inline]\n\nfn parts<'a>(parts: &'a mut Option<ClientRequest>, err: &Option<HttpError>)\n\n -> Option<&'a mut ClientRequest>\n\n{\n\n if err.is_some() {\n\n return None\n\n }\n\n parts.as_mut()\n\n}\n", "file_path": "src/client/request.rs", "rank": 32, "score": 174381.0329207012 }, { "content": "/// Entry point for our route\n\nfn chat_route(req: HttpRequest<WsChatSessionState>) -> Result<HttpResponse> {\n\n ws::start(\n\n req,\n\n WsChatSession {\n\n id: 0,\n\n hb: Instant::now(),\n\n room: \"Main\".to_owned(),\n\n name: None})\n\n}\n\n\n", "file_path": "examples/websocket-chat/src/main.rs", "rank": 33, "score": 170611.61322331848 }, { "content": "#[inline]\n\n#[cfg_attr(feature = \"cargo-clippy\", allow(borrowed_box))]\n\nfn parts<'a>(parts: &'a mut Option<Box<InnerHttpResponse>>, err: &Option<HttpError>)\n\n -> Option<&'a mut Box<InnerHttpResponse>>\n\n{\n\n if err.is_some() {\n\n return None\n\n }\n\n parts.as_mut()\n\n}\n\n\n\n/// Helper converters\n\nimpl<I: Into<HttpResponse>, E: Into<Error>> From<Result<I, E>> for HttpResponse {\n\n fn from(res: Result<I, E>) -> Self {\n\n match res {\n\n Ok(val) => val.into(),\n\n Err(err) => err.into().into(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<HttpResponseBuilder> for HttpResponse {\n", "file_path": "src/httpresponse.rs", "rank": 34, "score": 168337.67045052536 }, { "content": "fn index(_req: HttpRequest) -> &'static str {\n\n \"Hello world!\"\n\n}\n\n\n", "file_path": "examples/hello-world/src/main.rs", "rank": 35, "score": 167690.0805003995 }, { "content": "/// do websocket handshake and start `MyWebSocket` actor\n\nfn ws_index(r: HttpRequest) -> Result<HttpResponse> {\n\n ws::start(r, MyWebSocket)\n\n}\n\n\n", "file_path": "examples/websocket/src/main.rs", "rank": 36, "score": 167619.5763789966 }, { "content": "#[test]\n\nfn test_body() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(|_| httpcodes::HTTPOk.build().body(STR)));\n\n let mut res = reqwest::get(&srv.url(\"/\")).unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert_eq!(bytes, Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 37, "score": 165813.69717245828 }, { "content": "#[test]\n\nfn test_body_length() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(|_| {\n\n let body = once(Ok(Bytes::from_static(STR.as_ref())));\n\n httpcodes::HTTPOk.build()\n\n .content_length(STR.len() as u64)\n\n .body(Body::Streaming(Box::new(body)))}));\n\n\n\n let mut res = reqwest::get(&srv.url(\"/\")).unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert_eq!(bytes, Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 38, "score": 161798.20749747814 }, { "content": "#[test]\n\nfn test_body_gzip() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(\n\n |_| httpcodes::HTTPOk.build()\n\n 
.content_encoding(headers::ContentEncoding::Gzip)\n\n .body(STR)));\n\n let mut res = reqwest::get(&srv.url(\"/\")).unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert_eq!(bytes, Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 39, "score": 161798.20749747814 }, { "content": "#[test]\n\nfn test_body_deflate() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(\n\n |_| httpcodes::HTTPOk\n\n .build()\n\n .content_encoding(headers::ContentEncoding::Deflate)\n\n .body(STR)));\n\n let mut res = reqwest::get(&srv.url(\"/\")).unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n\n\n let mut e = DeflateDecoder::new(Vec::new());\n\n e.write_all(bytes.as_ref()).unwrap();\n\n let dec = e.finish().unwrap();\n\n assert_eq!(Bytes::from(dec), Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 40, "score": 161798.20749747814 }, { "content": "#[test]\n\nfn test_body_brotli() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(\n\n |_| httpcodes::HTTPOk\n\n .build()\n\n .content_encoding(headers::ContentEncoding::Br)\n\n .body(STR)));\n\n let mut res = reqwest::get(&srv.url(\"/\")).unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n\n\n let mut e = BrotliDecoder::new(Vec::with_capacity(2048));\n\n e.write_all(bytes.as_ref()).unwrap();\n\n let dec = e.finish().unwrap();\n\n assert_eq!(Bytes::from(dec), Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 41, "score": 161798.20749747814 }, { "content": "/// Return predicate that matches if all of supplied predicate matches.\n\n///\n\n/// ```rust\n\n/// # extern crate actix_web;\n\n/// # extern crate http;\n\n/// # use actix_web::*;\n\n/// # use actix_web::httpcodes::*;\n\n/// use actix_web::pred;\n\n///\n\n/// fn main() {\n\n/// Application::new()\n\n/// .resource(\"/index.html\", |r| r.route()\n\n/// .p(pred::All(pred::Get())\n\n/// .and(pred::Header(\"content-type\", \"plain/text\")))\n\n/// .h(HTTPMethodNotAllowed));\n\n/// }\n\n/// ```\n\npub fn All<S: 'static, P: Predicate<S> + 'static>(pred: P) -> AllPredicate<S> {\n\n AllPredicate(vec![Box::new(pred)])\n\n}\n\n\n\n/// Matches if all of supplied predicate matches.\n\npub struct AllPredicate<S>(Vec<Box<Predicate<S>>>);\n\n\n\nimpl<S> AllPredicate<S> {\n\n /// Add new predicate to list of predicates to check\n\n pub fn and<P: Predicate<S> + 'static>(mut self, pred: P) -> Self {\n\n self.0.push(Box::new(pred));\n\n self\n\n }\n\n}\n\n\n\nimpl<S: 'static> Predicate<S> for AllPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n for p in &self.0 {\n\n if !p.check(req) {\n\n return false\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 42, "score": 159896.33000473306 }, { "content": "/// Return predicate that matches if any of supplied predicate matches.\n\n///\n\n/// ```rust\n\n/// # extern crate actix_web;\n\n/// # extern crate http;\n\n/// # use actix_web::*;\n\n/// # use actix_web::httpcodes::*;\n\n/// use actix_web::pred;\n\n///\n\n/// fn main() {\n\n/// Application::new()\n\n/// .resource(\"/index.html\", |r| 
r.route()\n\n/// .p(pred::Any(pred::Get()).or(pred::Post()))\n\n/// .h(HTTPMethodNotAllowed));\n\n/// }\n\n/// ```\n\npub fn Any<S: 'static, P: Predicate<S> + 'static>(pred: P) -> AnyPredicate<S>\n\n{\n\n AnyPredicate(vec![Box::new(pred)])\n\n}\n\n\n\n/// Matches if any of supplied predicate matches.\n\npub struct AnyPredicate<S>(Vec<Box<Predicate<S>>>);\n\n\n\nimpl<S> AnyPredicate<S> {\n\n /// Add new predicate to list of predicates to check\n\n pub fn or<P: Predicate<S> + 'static>(mut self, pred: P) -> Self {\n\n self.0.push(Box::new(pred));\n\n self\n\n }\n\n}\n\n\n\nimpl<S: 'static> Predicate<S> for AnyPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n for p in &self.0 {\n\n if p.check(req) {\n\n return true\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 43, "score": 159893.22194570635 }, { "content": "/// Return predicate that matches if supplied predicate does not match.\n\npub fn Not<S: 'static, P: Predicate<S> + 'static>(pred: P) -> NotPredicate<S>\n\n{\n\n NotPredicate(Box::new(pred))\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct NotPredicate<S>(Box<Predicate<S>>);\n\n\n\nimpl<S: 'static> Predicate<S> for NotPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n !self.0.check(req)\n\n }\n\n}\n\n\n\n/// Http method predicate\n\n#[doc(hidden)]\n\npub struct MethodPredicate<S>(http::Method, PhantomData<S>);\n\n\n\nimpl<S: 'static> Predicate<S> for MethodPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n *req.method() == self.0\n\n }\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 44, "score": 159878.58076145558 }, { "content": "#[test]\n\nfn test_body_br_streaming() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(|_| {\n\n let body = once(Ok(Bytes::from_static(STR.as_ref())));\n\n httpcodes::HTTPOk.build()\n\n .content_encoding(headers::ContentEncoding::Br)\n\n .body(Body::Streaming(Box::new(body)))}));\n\n\n\n let mut res = reqwest::get(&srv.url(\"/\")).unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n\n\n let mut e = BrotliDecoder::new(Vec::with_capacity(2048));\n\n e.write_all(bytes.as_ref()).unwrap();\n\n let dec = e.finish().unwrap();\n\n assert_eq!(Bytes::from(dec), Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 45, "score": 158036.91269172024 }, { "content": "#[test]\n\nfn test_body_streaming_implicit() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(|_| {\n\n let body = once(Ok(Bytes::from_static(STR.as_ref())));\n\n httpcodes::HTTPOk.build()\n\n .content_encoding(headers::ContentEncoding::Gzip)\n\n .body(Body::Streaming(Box::new(body)))}));\n\n\n\n let mut res = reqwest::get(&srv.url(\"/\")).unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert_eq!(bytes, Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 46, "score": 158036.91269172024 }, { "content": "#[test]\n\nfn test_body_streaming_explicit() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(|_| {\n\n let body = once(Ok(Bytes::from_static(STR.as_ref())));\n\n httpcodes::HTTPOk.build()\n\n .chunked()\n\n .content_encoding(headers::ContentEncoding::Gzip)\n\n .body(Body::Streaming(Box::new(body)))}));\n\n\n\n let mut res = 
reqwest::get(&srv.url(\"/\")).unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert_eq!(bytes, Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 47, "score": 158036.91269172024 }, { "content": "#[inline]\n\npub fn apply_mask(buf: &mut [u8], mask: &[u8; 4]) {\n\n apply_mask_fast32(buf, mask)\n\n}\n\n\n\n/// A safe unoptimized mask application.\n", "file_path": "src/ws/mask.rs", "rank": 48, "score": 155058.54030912812 }, { "content": "/// Trait implemented by types that generate responses for clients.\n\n///\n\n/// Types that implement this trait can be used as the return type of a handler.\n\npub trait Responder {\n\n /// The associated item which can be returned.\n\n type Item: Into<Reply>;\n\n\n\n /// The associated error which can be returned.\n\n type Error: Into<Error>;\n\n\n\n /// Convert itself to `Reply` or `Error`.\n\n fn respond_to(self, req: HttpRequest) -> Result<Self::Item, Self::Error>;\n\n}\n\n\n\n#[doc(hidden)]\n", "file_path": "src/handler.rs", "rank": 49, "score": 153704.58525023513 }, { "content": "/// Conversion helper trait\n\npub trait IntoHttpHandler {\n\n /// The associated type which is result of conversion.\n\n type Handler: HttpHandler;\n\n\n\n /// Convert into `HttpHandler` object.\n\n fn into_handler(self, settings: ServerSettings) -> Self::Handler;\n\n}\n\n\n\nimpl<T: HttpHandler> IntoHttpHandler for T {\n\n type Handler = T;\n\n\n\n fn into_handler(self, _: ServerSettings) -> Self::Handler {\n\n self\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum WriterState {\n\n Done,\n\n Pause,\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 50, "score": 144181.208194581 }, { "content": "#[derive(Clone)]\n\n#[doc(hidden)]\n\nstruct Format(Vec<FormatText>);\n\n\n\nimpl Default for Format {\n\n /// Return the default formatting style for the `Logger`:\n\n fn default() -> Format {\n\n Format::new(r#\"%a %t \"%r\" %s %b \"%{Referer}i\" \"%{User-Agent}i\" %T\"#)\n\n }\n\n}\n\n\n\nimpl Format {\n\n /// Create a `Format` from a format string.\n\n ///\n\n /// Returns `None` if the format string syntax is incorrect.\n\n pub fn new(s: &str) -> Format {\n\n trace!(\"Access log format: {}\", s);\n\n let fmt = Regex::new(r\"%(\\{([A-Za-z0-9\\-_]+)\\}([ioe])|[atPrsbTD]?)\").unwrap();\n\n\n\n let mut idx = 0;\n\n let mut results = Vec::new();\n\n for cap in fmt.captures_iter(s) {\n", "file_path": "src/middleware/logger.rs", "rank": 51, "score": 140984.17459127243 }, { "content": "pub trait HttpHandlerTask {\n\n\n\n fn poll(&mut self) -> Poll<(), Error>;\n\n\n\n fn poll_io(&mut self, io: &mut Writer) -> Poll<bool, Error>;\n\n\n\n fn disconnected(&mut self);\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 52, "score": 140010.05007778163 }, { "content": "#[allow(unused_variables)]\n\npub trait Handler<S>: 'static {\n\n\n\n /// The type of value that handler will return.\n\n type Result: Responder;\n\n\n\n /// Handle request\n\n fn handle(&mut self, req: HttpRequest<S>) -> Self::Result;\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 53, "score": 139802.86389955843 }, { "content": "#[allow(unused_variables)]\n\npub trait HttpHandler: 'static {\n\n\n\n /// Handle request\n\n fn handle(&mut self, req: HttpRequest) -> Result<Box<HttpHandlerTask>, HttpRequest>;\n\n}\n\n\n\nimpl HttpHandler for Box<HttpHandler> {\n\n fn handle(&mut self, req: HttpRequest) -> Result<Box<HttpHandlerTask>, HttpRequest> 
{\n\n self.as_mut().handle(req)\n\n }\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 54, "score": 137754.60002152895 }, { "content": "/// Convenience trait that convert `Future` object into `Boxed` future\n\npub trait AsyncResponder<I, E>: Sized {\n\n fn responder(self) -> Box<Future<Item=I, Error=E>>;\n\n}\n\n\n\nimpl<F, I, E> AsyncResponder<I, E> for F\n\n where F: Future<Item=I, Error=E> + 'static,\n\n I: Responder + 'static,\n\n E: Into<Error> + 'static,\n\n{\n\n fn responder(self) -> Box<Future<Item=I, Error=E>> {\n\n Box::new(self)\n\n }\n\n}\n\n\n\n/// Handler<S> for Fn()\n\nimpl<F, R, S> Handler<S> for F\n\n where F: Fn(HttpRequest<S>) -> R + 'static,\n\n R: Responder + 'static\n\n{\n\n type Result = R;\n", "file_path": "src/handler.rs", "rank": 55, "score": 134265.91229787434 }, { "content": "struct MiddlewareTest {\n\n start: Arc<AtomicUsize>,\n\n response: Arc<AtomicUsize>,\n\n finish: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl<S> middleware::Middleware<S> for MiddlewareTest {\n\n fn start(&self, _: &mut HttpRequest<S>) -> Result<middleware::Started> {\n\n self.start.store(self.start.load(Ordering::Relaxed) + 1, Ordering::Relaxed);\n\n Ok(middleware::Started::Done)\n\n }\n\n\n\n fn response(&self, _: &mut HttpRequest<S>, resp: HttpResponse) -> Result<middleware::Response> {\n\n self.response.store(self.response.load(Ordering::Relaxed) + 1, Ordering::Relaxed);\n\n Ok(middleware::Response::Done(resp))\n\n }\n\n\n\n fn finish(&self, _: &mut HttpRequest<S>, _: &HttpResponse) -> middleware::Finished {\n\n self.finish.store(self.finish.load(Ordering::Relaxed) + 1, Ordering::Relaxed);\n\n middleware::Finished::Done\n\n }\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 56, "score": 127630.7745267762 }, { "content": "#[test]\n\nfn test_h2() {\n\n let srv = test::TestServer::new(|app| app.handler(|_|{\n\n httpcodes::HTTPOk.build().body(STR)\n\n }));\n\n let addr = srv.addr();\n\n\n\n let mut core = Core::new().unwrap();\n\n let handle = core.handle();\n\n let tcp = TcpStream::connect(&addr, &handle);\n\n\n\n let tcp = tcp.then(|res| {\n\n client::handshake(res.unwrap())\n\n }).then(move |res| {\n\n let (mut client, h2) = res.unwrap();\n\n\n\n let request = Request::builder()\n\n .uri(format!(\"https://{}/\", addr).as_str())\n\n .body(())\n\n .unwrap();\n\n let (response, _) = client.send_request(request, false).unwrap();\n", "file_path": "tests/test_server.rs", "rank": 57, "score": 126019.04197540172 }, { "content": "#[test]\n\nfn test_middlewares() {\n\n let num1 = Arc::new(AtomicUsize::new(0));\n\n let num2 = Arc::new(AtomicUsize::new(0));\n\n let num3 = Arc::new(AtomicUsize::new(0));\n\n\n\n let act_num1 = Arc::clone(&num1);\n\n let act_num2 = Arc::clone(&num2);\n\n let act_num3 = Arc::clone(&num3);\n\n\n\n let srv = test::TestServer::new(\n\n move |app| app.middleware(MiddlewareTest{start: Arc::clone(&act_num1),\n\n response: Arc::clone(&act_num2),\n\n finish: Arc::clone(&act_num3)})\n\n .handler(httpcodes::HTTPOk)\n\n );\n\n \n\n assert!(reqwest::get(&srv.url(\"/\")).unwrap().status().is_success());\n\n assert_eq!(num1.load(Ordering::Relaxed), 1);\n\n assert_eq!(num2.load(Ordering::Relaxed), 1);\n\n assert_eq!(num3.load(Ordering::Relaxed), 1);\n\n}\n\n\n\n\n", "file_path": "tests/test_server.rs", "rank": 58, "score": 126019.04197540172 }, { "content": "#[test]\n\nfn test_simple() {\n\n let srv = test::TestServer::new(|app| app.handler(httpcodes::HTTPOk));\n\n assert!(reqwest::get(&srv.url(\"/\")).unwrap().status().is_success());\n\n}\n\n\n", "file_path": 
"tests/test_server.rs", "rank": 59, "score": 126019.04197540172 }, { "content": "#[test]\n\nfn test_simple() {\n\n let mut srv = test::TestServer::new(\n\n |app| app.handler(|req| ws::start(req, Ws)));\n\n let (reader, mut writer) = srv.ws().unwrap();\n\n\n\n writer.text(\"text\");\n\n let (item, reader) = srv.execute(reader.into_future()).unwrap();\n\n assert_eq!(item, Some(ws::Message::Text(\"text\".to_owned())));\n\n\n\n writer.binary(b\"text\".as_ref());\n\n let (item, reader) = srv.execute(reader.into_future()).unwrap();\n\n assert_eq!(item, Some(ws::Message::Binary(Bytes::from_static(b\"text\").into())));\n\n\n\n writer.ping(\"ping\");\n\n let (item, reader) = srv.execute(reader.into_future()).unwrap();\n\n assert_eq!(item, Some(ws::Message::Pong(\"ping\".to_owned())));\n\n\n\n writer.close(ws::CloseCode::Normal, \"\");\n\n let (item, _) = srv.execute(reader.into_future()).unwrap();\n\n assert!(item.is_none())\n\n}\n", "file_path": "tests/test_ws.rs", "rank": 60, "score": 126019.04197540172 }, { "content": "#[test]\n\nfn test_application() {\n\n let srv = test::TestServer::with_factory(\n\n || Application::new().resource(\"/\", |r| r.h(httpcodes::HTTPOk)));\n\n assert!(reqwest::get(&srv.url(\"/\")).unwrap().status().is_success());\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 61, "score": 126019.04197540172 }, { "content": "#[test]\n\nfn test_start() {\n\n let _ = test::TestServer::unused_addr();\n\n let (tx, rx) = mpsc::channel();\n\n\n\n thread::spawn(move || {\n\n let sys = System::new(\"test\");\n\n let srv = HttpServer::new(\n\n || vec![Application::new()\n\n .resource(\"/\", |r| r.method(Method::GET).h(httpcodes::HTTPOk))]);\n\n\n\n let srv = srv.bind(\"127.0.0.1:0\").unwrap();\n\n let addr = srv.addrs()[0];\n\n let srv_addr = srv.start();\n\n let _ = tx.send((addr, srv_addr));\n\n sys.run();\n\n });\n\n let (addr, srv_addr) = rx.recv().unwrap();\n\n assert!(reqwest::get(&format!(\"http://{}/\", addr)).unwrap().status().is_success());\n\n\n\n // pause\n\n let _ = srv_addr.call_fut(server::PauseServer).wait();\n\n thread::sleep(time::Duration::from_millis(100));\n\n assert!(net::TcpStream::connect(addr).is_err());\n\n\n\n // resume\n\n let _ = srv_addr.call_fut(server::ResumeServer).wait();\n\n assert!(reqwest::get(&format!(\"http://{}/\", addr)).unwrap().status().is_success());\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 62, "score": 126019.04197540172 }, { "content": "struct Ws;\n\n\n\nimpl Actor for Ws {\n\n type Context = ws::WebsocketContext<Self>;\n\n}\n\n\n\nimpl Handler<ws::Message> for Ws {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: ws::Message, ctx: &mut Self::Context) {\n\n match msg {\n\n ws::Message::Ping(msg) => ctx.pong(&msg),\n\n ws::Message::Text(text) => ctx.text(&text),\n\n ws::Message::Binary(bin) => ctx.binary(bin),\n\n _ => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/test_ws.rs", "rank": 63, "score": 125245.97577713671 }, { "content": "#[test]\n\nfn test_resource_middlewares() {\n\n let num1 = Arc::new(AtomicUsize::new(0));\n\n let num2 = Arc::new(AtomicUsize::new(0));\n\n let num3 = Arc::new(AtomicUsize::new(0));\n\n\n\n let act_num1 = Arc::clone(&num1);\n\n let act_num2 = Arc::clone(&num2);\n\n let act_num3 = Arc::clone(&num3);\n\n\n\n let srv = test::TestServer::new(\n\n move |app| app.handler2(\n\n httpcodes::HTTPOk,\n\n MiddlewareTest{start: Arc::clone(&act_num1),\n\n response: Arc::clone(&act_num2),\n\n finish: Arc::clone(&act_num3)})\n\n );\n\n\n\n 
assert!(reqwest::get(&srv.url(\"/\")).unwrap().status().is_success());\n\n assert_eq!(num1.load(Ordering::Relaxed), 1);\n\n assert_eq!(num2.load(Ordering::Relaxed), 1);\n\n // assert_eq!(num3.load(Ordering::Relaxed), 1);\n\n}\n", "file_path": "tests/test_server.rs", "rank": 64, "score": 123505.31407051883 }, { "content": "#[test]\n\nfn test_brotli_encoding() {\n\n let srv = test::TestServer::new(|app| app.handler(|req: HttpRequest| {\n\n req.body()\n\n .and_then(|bytes: Bytes| {\n\n Ok(httpcodes::HTTPOk\n\n .build()\n\n .content_encoding(headers::ContentEncoding::Identity)\n\n .body(bytes))\n\n }).responder()}\n\n ));\n\n\n\n let mut e = BrotliEncoder::new(Vec::new(), 5);\n\n e.write_all(STR.as_ref()).unwrap();\n\n let enc = e.finish().unwrap();\n\n\n\n let client = reqwest::Client::new();\n\n let mut res = client.post(&srv.url(\"/\"))\n\n .header(ContentEncoding(vec![Encoding::Brotli]))\n\n .body(enc.clone()).send().unwrap();\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert_eq!(bytes, Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 65, "score": 123505.31407051883 }, { "content": "#[test]\n\nfn test_head_empty() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(|_| {\n\n httpcodes::HTTPOk.build()\n\n .content_length(STR.len() as u64).finish()}));\n\n\n\n let client = reqwest::Client::new();\n\n let mut res = client.head(&srv.url(\"/\")).send().unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let len = res.headers()\n\n .get::<reqwest::header::ContentLength>().map(|ct_len| **ct_len).unwrap();\n\n assert_eq!(len, STR.len() as u64);\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert!(bytes.is_empty());\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 66, "score": 123505.31407051883 }, { "content": "#[test]\n\nfn test_head_binary2() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(|_| {\n\n httpcodes::HTTPOk.build()\n\n .content_encoding(headers::ContentEncoding::Identity)\n\n .body(STR)\n\n }));\n\n\n\n let client = reqwest::Client::new();\n\n let mut res = client.head(&srv.url(\"/\")).send().unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let len = res.headers()\n\n .get::<reqwest::header::ContentLength>().map(|ct_len| **ct_len).unwrap();\n\n assert_eq!(len, STR.len() as u64);\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert!(bytes.is_empty());\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 67, "score": 123505.31407051883 }, { "content": "#[test]\n\nfn test_head_binary() {\n\n let srv = test::TestServer::new(\n\n |app| app.handler(|_| {\n\n httpcodes::HTTPOk.build()\n\n .content_encoding(headers::ContentEncoding::Identity)\n\n .content_length(100).body(STR)}));\n\n\n\n let client = reqwest::Client::new();\n\n let mut res = client.head(&srv.url(\"/\")).send().unwrap();\n\n assert!(res.status().is_success());\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let len = res.headers()\n\n .get::<reqwest::header::ContentLength>().map(|ct_len| **ct_len).unwrap();\n\n assert_eq!(len, STR.len() as u64);\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert!(bytes.is_empty());\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 68, "score": 123505.31407051883 }, { "content": 
"#[test]\n\nfn test_gzip_encoding() {\n\n let srv = test::TestServer::new(|app| app.handler(|req: HttpRequest| {\n\n req.body()\n\n .and_then(|bytes: Bytes| {\n\n Ok(httpcodes::HTTPOk\n\n .build()\n\n .content_encoding(headers::ContentEncoding::Identity)\n\n .body(bytes))\n\n }).responder()}\n\n ));\n\n\n\n let mut e = GzEncoder::new(Vec::new(), Compression::default());\n\n e.write_all(STR.as_ref()).unwrap();\n\n let enc = e.finish().unwrap();\n\n\n\n let client = reqwest::Client::new();\n\n let mut res = client.post(&srv.url(\"/\"))\n\n .header(ContentEncoding(vec![Encoding::Gzip]))\n\n .body(enc.clone()).send().unwrap();\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert_eq!(bytes, Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 69, "score": 123505.31407051883 }, { "content": "#[test]\n\nfn test_deflate_encoding() {\n\n let srv = test::TestServer::new(|app| app.handler(|req: HttpRequest| {\n\n req.body()\n\n .and_then(|bytes: Bytes| {\n\n Ok(httpcodes::HTTPOk\n\n .build()\n\n .content_encoding(headers::ContentEncoding::Identity)\n\n .body(bytes))\n\n }).responder()}\n\n ));\n\n\n\n let mut e = DeflateEncoder::new(Vec::new(), Compression::default());\n\n e.write_all(STR.as_ref()).unwrap();\n\n let enc = e.finish().unwrap();\n\n\n\n let client = reqwest::Client::new();\n\n let mut res = client.post(&srv.url(\"/\"))\n\n .header(ContentEncoding(vec![Encoding::Deflate]))\n\n .body(enc.clone()).send().unwrap();\n\n let mut bytes = BytesMut::with_capacity(2048).writer();\n\n let _ = res.copy_to(&mut bytes);\n\n let bytes = bytes.into_inner();\n\n assert_eq!(bytes, Bytes::from_static(STR.as_ref()));\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 70, "score": 123505.31407051883 }, { "content": "#[cfg(not(unix))]\n\nfn main() {\n\n match version_check::is_nightly() {\n\n Some(true) => println!(\"cargo:rustc-cfg=actix_nightly\"),\n\n Some(false) => (),\n\n None => (),\n\n };\n\n}\n", "file_path": "build.rs", "rank": 71, "score": 120237.75248090137 }, { "content": "/// Error that can be converted to `HttpResponse`\n\npub trait ResponseError: Fail {\n\n\n\n /// Create response for error\n\n ///\n\n /// Internal server error is generated by default.\n\n fn error_response(&self) -> HttpResponse {\n\n HttpResponse::new(StatusCode::INTERNAL_SERVER_ERROR, Body::Empty)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Display::fmt(&self.cause, f)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Some(bt) = self.cause.backtrace() {\n\n write!(f, \"{:?}\\n\\n{:?}\", &self.cause, bt)\n", "file_path": "src/error.rs", "rank": 72, "score": 115018.98739508005 }, { "content": "struct Entry {\n\n pipe: Box<HttpHandlerTask>,\n\n flags: EntryFlags,\n\n}\n\n\n\nimpl<T, H> Http1<T, H>\n\n where T: IoStream, H: HttpHandler + 'static\n\n{\n\n pub fn new(h: Rc<WorkerSettings<H>>, stream: T, addr: Option<SocketAddr>, buf: BytesMut)\n\n -> Self\n\n {\n\n let bytes = h.get_shared_bytes();\n\n Http1{ flags: Flags::KEEPALIVE,\n\n settings: h,\n\n addr: addr,\n\n stream: H1Writer::new(stream, bytes),\n\n reader: Reader::new(),\n\n read_buf: buf,\n\n tasks: VecDeque::new(),\n\n keepalive_timer: None }\n", "file_path": "src/server/h1.rs", "rank": 73, "score": 114526.31218293756 }, { "content": "struct Entry {\n\n task: Box<HttpHandlerTask>,\n\n payload: 
PayloadType,\n\n recv: RecvStream,\n\n stream: H2Writer,\n\n capacity: usize,\n\n flags: EntryFlags,\n\n}\n\n\n\nimpl Entry {\n\n fn new<H>(parts: Parts,\n\n recv: RecvStream,\n\n resp: SendResponse<Bytes>,\n\n addr: Option<SocketAddr>,\n\n settings: &Rc<WorkerSettings<H>>) -> Entry\n\n where H: HttpHandler + 'static\n\n {\n\n // Payload and Content-Encoding\n\n let (psender, payload) = Payload::new(false);\n\n\n", "file_path": "src/server/h2.rs", "rank": 74, "score": 114526.31218293756 }, { "content": "#[derive(Debug)]\n\nenum InnerMultipartItem {\n\n None,\n\n Field(Rc<RefCell<InnerField>>),\n\n Multipart(Rc<RefCell<InnerMultipart>>),\n\n}\n\n\n", "file_path": "src/multipart.rs", "rank": 75, "score": 112810.66761475723 }, { "content": "#[derive(Debug)]\n\nenum ReaderError {\n\n Disconnect,\n\n Payload,\n\n Error(ParseError),\n\n}\n\n\n\nimpl Reader {\n\n pub fn new() -> Reader {\n\n Reader {\n\n payload: None,\n\n }\n\n }\n\n\n\n fn decode(&mut self, buf: &mut BytesMut) -> std::result::Result<Decoding, ReaderError> {\n\n if let Some(ref mut payload) = self.payload {\n\n if payload.tx.capacity() > DEFAULT_BUFFER_SIZE {\n\n return Ok(Decoding::Paused)\n\n }\n\n loop {\n\n match payload.decoder.decode(buf) {\n", "file_path": "src/server/h1.rs", "rank": 76, "score": 112562.404489885 }, { "content": "/// Predicate to match *TRACE* http method\n\npub fn Trace<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::TRACE, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 77, "score": 111446.9175330381 }, { "content": "/// Predicate to match *GET* http method\n\npub fn Get<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::GET, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 78, "score": 111446.9175330381 }, { "content": "/// Predicate to match *PUT* http method\n\npub fn Put<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::PUT, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 79, "score": 111446.9175330381 }, { "content": "/// Predicate to match *HEAD* http method\n\npub fn Head<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::HEAD, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 80, "score": 111446.9175330381 }, { "content": "/// Predicate to match *POST* http method\n\npub fn Post<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::POST, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 81, "score": 111446.9175330381 }, { "content": "/// Predicate to match *DELETE* http method\n\npub fn Delete<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::DELETE, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 82, "score": 111446.9175330381 }, { "content": "/// Predicate to match *OPTIONS* http method\n\npub fn Options<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::OPTIONS, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 83, "score": 111446.9175330381 }, { "content": "/// Predicate to match *CONNECT* http method\n\npub fn Connect<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::CONNECT, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 84, "score": 111446.9175330381 }, { "content": "/// Predicate to match *PATCH* http method\n\npub fn Patch<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::PATCH, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 85, "score": 111446.9175330381 }, { "content": "struct 
WrapHandler<S, H, R>\n\n where H: Handler<S, Result=R>,\n\n R: Responder,\n\n S: 'static,\n\n{\n\n h: H,\n\n s: PhantomData<S>,\n\n}\n\n\n\nimpl<S, H, R> WrapHandler<S, H, R>\n\n where H: Handler<S, Result=R>,\n\n R: Responder,\n\n S: 'static,\n\n{\n\n pub fn new(h: H) -> Self {\n\n WrapHandler{h: h, s: PhantomData}\n\n }\n\n}\n\n\n\nimpl<S, H, R> RouteHandler<S> for WrapHandler<S, H, R>\n", "file_path": "src/handler.rs", "rank": 86, "score": 110320.01884968145 }, { "content": "/// Stream writer\n\npub trait Writer {\n\n fn written(&self) -> u64;\n\n\n\n fn start(&mut self, req: &mut HttpMessage, resp: &mut HttpResponse) -> io::Result<WriterState>;\n\n\n\n fn write(&mut self, payload: Binary) -> io::Result<WriterState>;\n\n\n\n fn write_eof(&mut self) -> io::Result<WriterState>;\n\n\n\n fn poll_completed(&mut self, shutdown: bool) -> Poll<(), io::Error>;\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 87, "score": 108925.22765788421 }, { "content": "/// Dummy session impl, does not do anything\n\nstruct DummySessionImpl;\n\n\n\nstatic DUMMY: DummySessionImpl = DummySessionImpl;\n\n\n\nimpl SessionImpl for DummySessionImpl {\n\n\n\n fn get(&self, key: &str) -> Option<&str> {\n\n None\n\n }\n\n fn set(&mut self, key: &str, value: String) {}\n\n fn remove(&mut self, key: &str) {}\n\n fn clear(&mut self) {}\n\n fn write(&self, resp: HttpResponse) -> Result<Response> {\n\n Ok(Response::Done(resp))\n\n }\n\n}\n\n\n\n/// Session that uses signed cookies as session storage\n\npub struct CookieSession {\n\n changed: bool,\n", "file_path": "src/middleware/session.rs", "rank": 88, "score": 108279.01843123343 }, { "content": "struct SessionImplBox(Box<SessionImpl>);\n\n\n\n#[doc(hidden)]\n\nunsafe impl Send for SessionImplBox {}\n\n#[doc(hidden)]\n\nunsafe impl Sync for SessionImplBox {}\n\n\n\n/// Session storage middleware\n\n///\n\n/// ```rust\n\n/// # extern crate actix;\n\n/// # extern crate actix_web;\n\n/// # use actix_web::middleware::{SessionStorage, CookieSessionBackend};\n\n/// use actix_web::*;\n\n///\n\n/// fn main() {\n\n/// let app = Application::new().middleware(\n\n/// SessionStorage::new( // <- create session middleware\n\n/// CookieSessionBackend::build(&[0; 32]) // <- create cookie session backend\n\n/// .secure(false)\n", "file_path": "src/middleware/session.rs", "rank": 89, "score": 107672.84792816697 }, { "content": "#[inline]\n\n#[allow(dead_code)]\n\nfn apply_mask_fast32(buf: &mut [u8], mask: &[u8; 4]) {\n\n // TODO replace this with read_unaligned() as it stabilizes.\n\n let mask_u32 = unsafe {\n\n let mut m: u32 = uninitialized();\n\n #[allow(trivial_casts)]\n\n copy_nonoverlapping(mask.as_ptr(), &mut m as *mut _ as *mut u8, 4);\n\n m\n\n };\n\n\n\n let mut ptr = buf.as_mut_ptr();\n\n let mut len = buf.len();\n\n\n\n // Possible first unaligned block.\n\n let head = min(len, (4 - (ptr as usize & 3)) & 3);\n\n let mask_u32 = if head > 0 {\n\n unsafe {\n\n xor_mem(ptr, mask_u32, head);\n\n ptr = ptr.offset(head as isize);\n\n }\n\n len -= head;\n", "file_path": "src/ws/mask.rs", "rank": 90, "score": 105524.99305646526 }, { "content": "#[inline]\n\n#[allow(dead_code)]\n\nfn apply_mask_fallback(buf: &mut [u8], mask: &[u8; 4]) {\n\n for (i, byte) in buf.iter_mut().enumerate() {\n\n *byte ^= mask[i & 3];\n\n }\n\n}\n\n\n\n/// Faster version of `apply_mask()` which operates on 4-byte blocks.\n", "file_path": "src/ws/mask.rs", "rank": 91, "score": 105524.99305646526 }, { "content": "struct AsyncHandler<S, H, F, R, E>\n\n where H: Fn(HttpRequest<S>) -> F + 'static,\n\n F: 
Future<Item=R, Error=E> + 'static,\n\n R: Responder + 'static,\n\n E: Into<Error> + 'static,\n\n S: 'static,\n\n{\n\n h: Box<H>,\n\n s: PhantomData<S>,\n\n}\n\n\n\nimpl<S, H, F, R, E> AsyncHandler<S, H, F, R, E>\n\n where H: Fn(HttpRequest<S>) -> F + 'static,\n\n F: Future<Item=R, Error=E> + 'static,\n\n R: Responder + 'static,\n\n E: Into<Error> + 'static,\n\n S: 'static,\n\n{\n\n pub fn new(h: H) -> Self {\n\n AsyncHandler{h: Box::new(h), s: PhantomData}\n", "file_path": "src/handler.rs", "rank": 92, "score": 102497.2480461802 }, { "content": "#[doc(hidden)]\n\npub trait SessionImpl: 'static {\n\n\n\n fn get(&self, key: &str) -> Option<&str>;\n\n\n\n fn set(&mut self, key: &str, value: String);\n\n\n\n fn remove(&mut self, key: &str);\n\n\n\n fn clear(&mut self);\n\n\n\n /// Write session to storage backend.\n\n fn write(&self, resp: HttpResponse) -> Result<Response>;\n\n}\n\n\n\n/// Session's storage backend trait definition.\n", "file_path": "src/middleware/session.rs", "rank": 93, "score": 101011.61364571899 }, { "content": "/// `RouteHandler` wrapper. This struct is required because it needs to be shared\n\n/// for resource level middlewares.\n\nstruct InnerHandler<S>(Rc<Box<RouteHandler<S>>>);\n\n\n\nimpl<S: 'static> InnerHandler<S> {\n\n\n\n #[inline]\n\n fn new<H: Handler<S>>(h: H) -> Self {\n\n InnerHandler(Rc::new(Box::new(WrapHandler::new(h))))\n\n }\n\n\n\n #[inline]\n\n fn async<H, R, F, E>(h: H) -> Self\n\n where H: Fn(HttpRequest<S>) -> F + 'static,\n\n F: Future<Item=R, Error=E> + 'static,\n\n R: Responder + 'static,\n\n E: Into<Error> + 'static\n\n {\n\n InnerHandler(Rc::new(Box::new(AsyncHandler::new(h))))\n\n }\n\n\n\n #[inline]\n", "file_path": "src/route.rs", "rank": 94, "score": 100346.99650954557 }, { "content": "#[allow(unused_variables)]\n\npub trait Middleware<S>: 'static {\n\n\n\n /// Method is called when request is ready. 
It may return\n\n /// future, which should resolve before next middleware get called.\n\n fn start(&self, req: &mut HttpRequest<S>) -> Result<Started> {\n\n Ok(Started::Done)\n\n }\n\n\n\n /// Method is called when handler returns response,\n\n /// but before sending http message to peer.\n\n fn response(&self, req: &mut HttpRequest<S>, resp: HttpResponse) -> Result<Response> {\n\n Ok(Response::Done(resp))\n\n }\n\n\n\n /// Method is called after body stream get sent to peer.\n\n fn finish(&self, req: &mut HttpRequest<S>, resp: &HttpResponse) -> Finished {\n\n Finished::Done\n\n }\n\n}\n", "file_path": "src/middleware/mod.rs", "rank": 95, "score": 98956.8000629823 }, { "content": "/// Predicate to match specified http method\n\npub fn Method<S: 'static>(method: http::Method) -> MethodPredicate<S> {\n\n MethodPredicate(method, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 96, "score": 98364.57378596725 }, { "content": "#[derive(Debug)]\n\nstruct Completed<S, H>(PhantomData<S>, PhantomData<H>);\n\n\n\nimpl<S, H> Completed<S, H> {\n\n\n\n #[inline]\n\n fn init(info: &mut PipelineInfo<S>) -> PipelineState<S, H> {\n\n if let Some(ref err) = info.error {\n\n error!(\"Error occurred during request handling: {}\", err);\n\n }\n\n\n\n if info.context.is_none() {\n\n PipelineState::None\n\n } else {\n\n PipelineState::Completed(Completed(PhantomData, PhantomData))\n\n }\n\n }\n\n\n\n #[inline]\n\n fn poll(&mut self, info: &mut PipelineInfo<S>) -> Option<PipelineState<S, H>> {\n\n match info.poll_context() {\n", "file_path": "src/pipeline.rs", "rank": 97, "score": 97172.72591869357 }, { "content": "fn create_tcp_listener(addr: net::SocketAddr, backlog: i32) -> io::Result<net::TcpListener> {\n\n let builder = match addr {\n\n net::SocketAddr::V4(_) => TcpBuilder::new_v4()?,\n\n net::SocketAddr::V6(_) => TcpBuilder::new_v6()?,\n\n };\n\n builder.bind(addr)?;\n\n builder.reuse_address(true)?;\n\n Ok(builder.listen(backlog)?)\n\n}\n", "file_path": "src/server/srv.rs", "rank": 98, "score": 89067.97164670918 } ]
Rust
src/simple_control.rs
magiclen/gitlab-deploy
9c8e0d0a765d2db598a618c719dcef3b9c837c86
use std::error::Error; use std::fmt::Write as FmtWrite; use execute::Execute; use clap::{ArgMatches, Values}; use crate::constants::*; use crate::functions::*; use crate::parse::*; #[inline] fn handle_command(values: Option<Values>) -> Result<Vec<&str>, &'static str> { match values { Some(values) => Ok(values.collect()), None => Err("A command is needed."), } } pub(crate) fn simple_control(matches: &ArgMatches) -> Result<(), Box<dyn Error>> { check_ssh()?; let project_id = parse_parse_id(matches); let commit_sha = parse_commit_sha(matches); let project_name = parse_project_name(matches); let reference_name = parse_reference_name(matches); let phase = parse_phase(matches); let command = handle_command(matches.values_of("COMMAND"))?; let command_string: String = command.join(" "); let inject_project_directory = matches.is_present("INJECT_PROJECT_DIRECTORY"); let ssh_user_hosts = find_ssh_user_hosts(phase, project_id)?; if ssh_user_hosts.is_empty() { warn!("No hosts to control!"); return Ok(()); } for ssh_user_host in ssh_user_hosts.iter() { info!("Controlling to {} ({})", ssh_user_host, command_string); let ssh_root = { let mut ssh_home = get_ssh_home(ssh_user_host)?; ssh_home.write_fmt(format_args!( "/{PROJECT_DIRECTORY}", PROJECT_DIRECTORY = PROJECT_DIRECTORY, ))?; ssh_home }; let ssh_project = format!( "{SSH_ROOT}/{PROJECT_NAME}-{PROJECT_ID}/{REFERENCE_NAME}-{SHORT_SHA}", SSH_ROOT = ssh_root, PROJECT_NAME = project_name.as_ref(), PROJECT_ID = project_id, REFERENCE_NAME = reference_name.as_ref(), SHORT_SHA = commit_sha.get_short_sha(), ); { let command_in_ssh = if inject_project_directory { let mut command_in_ssh = String::with_capacity(command_string.len() + ssh_project.len() + 1); if command[0] == "sudo" { command_in_ssh.push_str("sudo "); if command.len() > 1 { command_in_ssh.push_str(command[1]); command_in_ssh.write_fmt(format_args!(" {:?} ", ssh_project))?; command_in_ssh.push_str(&command[2..].join(" ")); } } else { command_in_ssh.push_str(command[0]); command_in_ssh.write_fmt(format_args!(" {:?} ", ssh_project))?; command_in_ssh.push_str(&command[1..].join(" ")); } command_in_ssh } else { command_string.clone() }; let mut command = create_ssh_command(ssh_user_host, command_in_ssh); let output = command.execute_output()?; if !output.status.success() { return Err("Control failed!".into()); } } { let mut command = create_ssh_command(ssh_user_host, format!("cd {SSH_PROJECT:?} && echo \"{TIMESTAMP} {COMMAND:?} {REFERENCE_NAME}-{SHORT_SHA}\" >> {SSH_PROJECT:?}/../control.log", SSH_PROJECT = ssh_project, REFERENCE_NAME = reference_name.as_ref(), TIMESTAMP = current_timestamp(), SHORT_SHA = commit_sha.get_short_sha(), COMMAND = command_string, )); let output = command.execute_output()?; if !output.status.success() { return Err("Control failed!".into()); } } } info!("Successfully!"); Ok(()) }
use std::error::Error; use std::fmt::Write as FmtWrite; use execute::Execute; use clap::{ArgMatches, Values}; use crate::constants::*; use crate::functions::*; use crate::parse::*; #[inline] fn handle_command(values: Option<Values>) -> Result<Vec<&str>, &'static str> { match values { Some(values) => Ok(values.collect()), None => Err("A command is needed."), } } pub(crate) fn simple_control(matches: &ArgMatches) -> Result<(), Box<dyn Error>> { check_ssh()?; let project_id = parse_parse_id(matches); let commit_sha = parse_commit_sha(matches)
rol failed!".into()); } } { let mut command = create_ssh_command(ssh_user_host, format!("cd {SSH_PROJECT:?} && echo \"{TIMESTAMP} {COMMAND:?} {REFERENCE_NAME}-{SHORT_SHA}\" >> {SSH_PROJECT:?}/../control.log", SSH_PROJECT = ssh_project, REFERENCE_NAME = reference_name.as_ref(), TIMESTAMP = current_timestamp(), SHORT_SHA = commit_sha.get_short_sha(), COMMAND = command_string, )); let output = command.execute_output()?; if !output.status.success() { return Err("Control failed!".into()); } } } info!("Successfully!"); Ok(()) }
; let project_name = parse_project_name(matches); let reference_name = parse_reference_name(matches); let phase = parse_phase(matches); let command = handle_command(matches.values_of("COMMAND"))?; let command_string: String = command.join(" "); let inject_project_directory = matches.is_present("INJECT_PROJECT_DIRECTORY"); let ssh_user_hosts = find_ssh_user_hosts(phase, project_id)?; if ssh_user_hosts.is_empty() { warn!("No hosts to control!"); return Ok(()); } for ssh_user_host in ssh_user_hosts.iter() { info!("Controlling to {} ({})", ssh_user_host, command_string); let ssh_root = { let mut ssh_home = get_ssh_home(ssh_user_host)?; ssh_home.write_fmt(format_args!( "/{PROJECT_DIRECTORY}", PROJECT_DIRECTORY = PROJECT_DIRECTORY, ))?; ssh_home }; let ssh_project = format!( "{SSH_ROOT}/{PROJECT_NAME}-{PROJECT_ID}/{REFERENCE_NAME}-{SHORT_SHA}", SSH_ROOT = ssh_root, PROJECT_NAME = project_name.as_ref(), PROJECT_ID = project_id, REFERENCE_NAME = reference_name.as_ref(), SHORT_SHA = commit_sha.get_short_sha(), ); { let command_in_ssh = if inject_project_directory { let mut command_in_ssh = String::with_capacity(command_string.len() + ssh_project.len() + 1); if command[0] == "sudo" { command_in_ssh.push_str("sudo "); if command.len() > 1 { command_in_ssh.push_str(command[1]); command_in_ssh.write_fmt(format_args!(" {:?} ", ssh_project))?; command_in_ssh.push_str(&command[2..].join(" ")); } } else { command_in_ssh.push_str(command[0]); command_in_ssh.write_fmt(format_args!(" {:?} ", ssh_project))?; command_in_ssh.push_str(&command[1..].join(" ")); } command_in_ssh } else { command_string.clone() }; let mut command = create_ssh_command(ssh_user_host, command_in_ssh); let output = command.execute_output()?; if !output.status.success() { return Err("Cont
random
[ { "content": "fn get_matches() -> ArgMatches {\n\n let app = Command::new(APP_NAME)\n\n .term_width(terminal_size().map(|(width, _)| width.0 as usize).unwrap_or(0))\n\n .version(CARGO_PKG_VERSION)\n\n .author(CARGO_PKG_AUTHORS)\n\n .about(concat!(\"GitLab Deploy is used for deploying software projects to multiple hosts during different phases\\n\\nEXAMPLES:\\n\", concat_line!(prefix \"gitlab-deploy \",\n\n \"frontend-develop --gitlab-project-id 123 --commit-sha 0b14cd4fdec3bdffffdaf1de6fe13aaa01c4827f --build-target develop\",\n\n \"frontend-deploy --gitlab-project-id 123 --commit-sha 0b14cd4fdec3bdffffdaf1de6fe13aaa01c4827f --project-name website --reference-name pre-release --phase test --build-target test\",\n\n \"frontend-control --gitlab-project-id 123 --commit-sha 0b14cd4fdec3bdffffdaf1de6fe13aaa01c4827f --project-name website --reference-name pre-release --phase test\",\n\n \"backend-develop --gitlab-project-id 123 --gitlab-project-path website-api --project-name website --reference develop\",\n\n \"backend-deploy --gitlab-project-id 123 --commit-sha 0b14cd4fdec3bdffffdaf1de6fe13aaa01c4827f --project-name website --reference-name pre-release --phase test\",\n\n \"backend-control --gitlab-project-id 123 --commit-sha 0b14cd4fdec3bdffffdaf1de6fe13aaa01c4827f --project-name website --reference-name pre-release --phase test --command up\",\n\n \"simple-deploy --gitlab-project-id 123 --commit-sha 0b14cd4fdec3bdffffdaf1de6fe13aaa01c4827f --project-name website --reference-name pre-release --phase test\",\n\n \"simple-control --gitlab-project-id 123 --commit-sha 0b14cd4fdec3bdffffdaf1de6fe13aaa01c4827f --project-name website --reference-name pre-release --phase test sudo /usr/local/bin/apply-nginx.sh dev.env\",\n\n )));\n\n\n\n let arg_gitlab_project_id = Arg::new(\"GITLAB_PROJECT_ID\")\n\n .display_order(0)\n\n .required(true)\n\n .long(\"gitlab-project-id\")\n", "file_path": "src/main.rs", "rank": 1, "score": 59363.16290544738 }, { "content": "fn main() {\n\n let mut log_config = simplelog::ConfigBuilder::new();\n\n\n\n log_config.set_time_level(simplelog::LevelFilter::Debug);\n\n\n\n simplelog::TermLogger::init(\n\n simplelog::LevelFilter::Info,\n\n log_config.build(),\n\n simplelog::TerminalMode::Mixed,\n\n simplelog::ColorChoice::Auto,\n\n )\n\n .unwrap();\n\n\n\n let matches = get_matches();\n\n\n\n if let Some(sub_matches) = matches.subcommand_matches(\"frontend-develop\") {\n\n info!(\"Running {} {} for front-end development\", APP_NAME, CARGO_PKG_VERSION);\n\n\n\n if let Err(err) = front_develop(sub_matches) {\n\n err.to_string().split('\\n').for_each(|line| {\n", "file_path": "src/main.rs", "rank": 2, "score": 31734.99972160455 }, { "content": " _ => return Err(()),\n\n };\n\n\n\n Ok(command)\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn as_str(&self) -> &'static str {\n\n match self {\n\n Self::Up => \"up\",\n\n Self::Stop => \"stop\",\n\n Self::Down => \"down\",\n\n Self::Logs => \"logs\",\n\n Self::DownAndUp => \"down_up\",\n\n }\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn get_command_str(&self) -> &'static str {\n\n match self {\n", "file_path": "src/parse/command.rs", "rank": 3, "score": 27509.30485634396 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum Command {\n\n Up,\n\n Stop,\n\n Down,\n\n Logs,\n\n DownAndUp,\n\n}\n\n\n\nimpl Command {\n\n #[inline]\n\n pub(crate) fn parse_str<S: AsRef<str>>(s: S) -> Result<Self, ()> {\n\n let s = s.as_ref();\n\n\n\n let command = match s.to_ascii_lowercase().as_str() {\n\n \"start\" | \"up\" => Command::Up,\n\n \"stop\" 
=> Command::Stop,\n\n \"down\" => Command::Down,\n\n \"log\" | \"logs\" => Command::Logs,\n\n \"down_up\" | \"restart\" => Command::DownAndUp,\n", "file_path": "src/parse/command.rs", "rank": 4, "score": 27506.29611048718 }, { "content": " Self::Up | Self::DownAndUp => {\n\n \"docker-compose up -d --build && (timeout 10 docker-compose logs -f || true)\"\n\n }\n\n Self::Stop => \"docker-compose stop\",\n\n Self::Down => \"docker-compose down\",\n\n Self::Logs => \"docker-compose logs\",\n\n }\n\n }\n\n}\n", "file_path": "src/parse/command.rs", "rank": 5, "score": 27493.41522933621 }, { "content": " process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n\n error!(\"`--phase` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_command(matches: &ArgMatches) -> Command {\n\n match matches.value_of(\"COMMAND\") {\n\n Some(command) => {\n\n match Command::parse_str(command) {\n\n Ok(command) => command,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct command.\", command);\n\n process::exit(-2);\n\n }\n", "file_path": "src/parse/mod.rs", "rank": 6, "score": 19.061557044263157 }, { "content": "}\n\n\n\npub(crate) fn parse_reference(matches: &ArgMatches) -> Reference {\n\n match matches.value_of(\"REFERENCE\") {\n\n Some(reference) => {\n\n match Reference::parse_str(reference) {\n\n Ok(reference) => reference,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct reference.\", reference);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n\n error!(\"`--reference` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parse/mod.rs", "rank": 7, "score": 15.447244196702616 }, { "content": " None => {\n\n error!(\"`--gitlab-api-url-prefix` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_api_token(matches: &ArgMatches) -> ApiToken {\n\n match matches.value_of(\"GITLAB_API_TOKEN\") {\n\n Some(api_token) => {\n\n match ApiToken::parse_str(api_token) {\n\n Ok(api_token) => api_token,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct GitLab API token.\", api_token);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n\n error!(\"`--gitlab-api-token` needs to be set.\");\n", "file_path": "src/parse/mod.rs", "rank": 8, "score": 14.981402390136944 }, { "content": " }\n\n None => {\n\n error!(\"`--commit-short-sha` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_project_name(matches: &ArgMatches) -> Name {\n\n match matches.value_of(\"PROJECT_NAME\") {\n\n Some(project_name) => {\n\n match Name::parse_str(project_name) {\n\n Ok(project_name) => project_name,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct project name.\", project_name);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n", "file_path": "src/parse/mod.rs", "rank": 9, "score": 14.923706859943648 }, { "content": " error!(\"`--project-name` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_reference_name(matches: &ArgMatches) -> Name {\n\n match matches.value_of(\"REFERENCE_NAME\") {\n\n Some(reference_name) => {\n\n match Name::parse_str(reference_name) {\n\n Ok(reference_name) => reference_name,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct reference name.\", reference_name);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n\n error!(\"`--reference-name` needs to be set.\");\n\n process::exit(-2);\n", "file_path": "src/parse/mod.rs", "rank": 10, "score": 14.829145360112932 }, { "content": " }\n\n }\n\n }\n\n None => {\n\n 
error!(\"`--gitlab-project-id` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_commit_sha(matches: &ArgMatches) -> CommitSha {\n\n match matches.value_of(\"COMMIT_SHA\") {\n\n Some(sha) => {\n\n match CommitSha::parse_str(sha) {\n\n Ok(sha) => sha,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct commit sha.\", sha);\n\n process::exit(-2);\n\n }\n\n }\n", "file_path": "src/parse/mod.rs", "rank": 11, "score": 14.554489109180437 }, { "content": " }\n\n }\n\n None => {\n\n error!(\"`--command` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_api_url_prefix(matches: &ArgMatches) -> ApiUrlPrefix {\n\n match matches.value_of(\"GITLAB_API_URL_PREFIX\") {\n\n Some(api_url_prefix) => {\n\n match ApiUrlPrefix::parse_str(api_url_prefix) {\n\n Ok(api_url_prefix) => api_url_prefix,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct GitLab API URL prefix.\", api_url_prefix);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n", "file_path": "src/parse/mod.rs", "rank": 12, "score": 14.49778722356061 }, { "content": "pub(crate) fn parse_build_target(matches: &ArgMatches) -> BuildTarget {\n\n match matches.value_of(\"BUILD_TARGET\") {\n\n Some(target) => {\n\n match BuildTarget::parse_str(target) {\n\n Ok(target) => target,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct build target.\", target);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n\n error!(\"`--build-target` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_build_target_allow_null(matches: &ArgMatches) -> Option<BuildTarget> {\n\n match matches.value_of(\"BUILD_TARGET\") {\n", "file_path": "src/parse/mod.rs", "rank": 14, "score": 14.044896854599417 }, { "content": " }\n\n }\n\n}\n\n\n\npub(crate) fn parse_project_path(matches: &ArgMatches) -> ProjectPath {\n\n match matches.value_of(\"GITLAB_PROJECT_PATH\") {\n\n Some(project_path) => {\n\n match ProjectPath::parse_str(project_path) {\n\n Ok(project_path) => project_path,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct project path.\", project_path);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n\n error!(\"`--gitlab-project-path` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n", "file_path": "src/parse/mod.rs", "rank": 15, "score": 13.87550593988949 }, { "content": " Some(target) => {\n\n match BuildTarget::parse_str(target) {\n\n Ok(target) => Some(target),\n\n Err(_) => {\n\n error!(\"{:?} is not a correct build target.\", target);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => None,\n\n }\n\n}\n\n\n\npub(crate) fn parse_phase(matches: &ArgMatches) -> Phase {\n\n match matches.value_of(\"PHASE\") {\n\n Some(target) => {\n\n match Phase::parse_str(target) {\n\n Ok(target) => target,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct parse.\", target);\n", "file_path": "src/parse/mod.rs", "rank": 16, "score": 13.811642601624357 }, { "content": "use std::error::Error;\n\nuse std::fmt::Write as FmtWrite;\n\n\n\nuse execute::{command_args, Execute};\n\n\n\nuse clap::ArgMatches;\n\n\n\nuse tempfile::tempdir;\n\n\n\nuse crate::constants::*;\n\nuse crate::functions::*;\n\nuse crate::parse::*;\n\n\n\npub(crate) fn front_develop(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {\n\n check_zstd()?;\n\n check_ssh()?;\n\n check_wget()?;\n\n check_tar()?;\n\n check_bash()?;\n\n\n", "file_path": "src/front_develop.rs", "rank": 17, "score": 13.488667259486316 }, { "content": "use std::error::Error;\n\nuse std::fmt::Write as FmtWrite;\n\n\n\nuse 
execute::{command_args, Execute};\n\n\n\nuse clap::ArgMatches;\n\n\n\nuse tempfile::tempdir;\n\n\n\nuse crate::constants::*;\n\nuse crate::functions::*;\n\nuse crate::parse::*;\n\n\n\npub(crate) fn back_deploy(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {\n\n check_zstd()?;\n\n check_ssh()?;\n\n check_wget()?;\n\n check_tar()?;\n\n check_bash()?;\n\n check_docker()?;\n", "file_path": "src/back_deploy.rs", "rank": 18, "score": 13.340385823355463 }, { "content": " let host = result.get(2).unwrap().as_str();\n\n let port = match result.get(3) {\n\n Some(port) => Some(port.as_str().parse::<u16>().map_err(|_| ())?),\n\n None => None,\n\n };\n\n\n\n Ok(SshUserHost {\n\n user: String::from(user),\n\n host: String::from(host),\n\n port: port.unwrap_or(22),\n\n })\n\n }\n\n}\n\n\n\nimpl SshUserHost {\n\n #[allow(dead_code)]\n\n #[inline]\n\n pub(crate) fn get_user(&self) -> &str {\n\n self.user.as_str()\n\n }\n", "file_path": "src/parse/ssh_user_host.rs", "rank": 19, "score": 12.846086252040928 }, { "content": " }\n\n}\n\n\n\npub(crate) fn parse_ssh_user_host(matches: &ArgMatches) -> SshUserHost {\n\n match matches.value_of(\"DEVELOP_SSH_USR_HOST\") {\n\n Some(ssh_user_host) => {\n\n match SshUserHost::parse_str(ssh_user_host) {\n\n Ok(ssh_user_host) => ssh_user_host,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct SSH user and host.\", ssh_user_host);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n\n error!(\"`--develop-ssh-user-host` needs to be set.\");\n\n process::exit(-2);\n\n }\n\n }\n\n}\n", "file_path": "src/parse/mod.rs", "rank": 20, "score": 12.796334179616618 }, { "content": " process::exit(-2);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_ssh_url_prefix(matches: &ArgMatches) -> SshUrlPrefix {\n\n match matches.value_of(\"GITLAB_SSH_URL_PREFIX\") {\n\n Some(ssh_url_prefix) => {\n\n match SshUrlPrefix::parse_str(ssh_url_prefix) {\n\n Ok(ssh_url_prefix) => ssh_url_prefix,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct SSH URL prefix.\", ssh_url_prefix);\n\n process::exit(-2);\n\n }\n\n }\n\n }\n\n None => {\n\n error!(\"`--gitlab-ssh-url-prefix` needs to be set.\");\n\n process::exit(-2);\n\n }\n", "file_path": "src/parse/mod.rs", "rank": 21, "score": 12.619607354365096 }, { "content": "use std::error::Error;\n\nuse std::path::PathBuf;\n\nuse std::process::Stdio;\n\n\n\nuse execute::Execute;\n\n\n\nuse clap::ArgMatches;\n\n\n\nuse trim_in_place::TrimInPlace;\n\n\n\nuse crate::constants::*;\n\nuse crate::functions::*;\n\nuse crate::parse::*;\n\n\n\npub(crate) fn front_control(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {\n\n check_ssh()?;\n\n\n\n let project_id = parse_parse_id(matches);\n\n\n\n let commit_sha = parse_commit_sha(matches);\n", "file_path": "src/front_control.rs", "rank": 22, "score": 11.958079962323207 }, { "content": "use std::error::Error;\n\nuse std::fmt::Write as FmtWrite;\n\n\n\nuse execute::Execute;\n\n\n\nuse clap::ArgMatches;\n\n\n\nuse crate::constants::*;\n\nuse crate::functions::*;\n\nuse crate::parse::*;\n\n\n\npub(crate) fn back_develop(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {\n\n check_ssh()?;\n\n check_bash()?;\n\n\n\n let project_id = parse_parse_id(matches);\n\n\n\n let project_name = parse_project_name(matches);\n\n\n\n let project_path = parse_project_path(matches);\n", "file_path": "src/back_develop.rs", "rank": 23, "score": 11.934019377855993 }, { "content": "use std::error::Error;\n\nuse std::fmt::Write as FmtWrite;\n\nuse std::fs::File;\n\n\n\nuse execute::Execute;\n\n\n\nuse clap::ArgMatches;\n\n\n\nuse 
tempfile::tempdir;\n\n\n\nuse crate::constants::*;\n\nuse crate::functions::*;\n\nuse crate::parse::*;\n\n\n\npub(crate) fn simple_deploy(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {\n\n check_ssh()?;\n\n check_wget()?;\n\n check_docker()?;\n\n\n\n let project_id = parse_parse_id(matches);\n", "file_path": "src/simple_deploy.rs", "rank": 24, "score": 11.915401652585073 }, { "content": "use std::error::Error;\n\nuse std::fmt::Write as FmtWrite;\n\nuse std::process::Stdio;\n\n\n\nuse execute::Execute;\n\nuse trim_in_place::TrimInPlace;\n\n\n\nuse clap::ArgMatches;\n\n\n\nuse crate::constants::*;\n\nuse crate::functions::*;\n\nuse crate::parse::*;\n\n\n\npub(crate) fn back_control(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {\n\n check_ssh()?;\n\n\n\n let project_id = parse_parse_id(matches);\n\n\n\n let commit_sha = parse_commit_sha(matches);\n\n\n", "file_path": "src/back_control.rs", "rank": 25, "score": 11.90725961787651 }, { "content": "use std::error::Error;\n\nuse std::fmt::Write as FmtWrite;\n\n\n\nuse execute::Execute;\n\n\n\nuse clap::ArgMatches;\n\n\n\nuse tempfile::tempdir;\n\n\n\nuse crate::constants::*;\n\nuse crate::functions::*;\n\nuse crate::parse::*;\n\n\n\npub(crate) fn front_deploy(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {\n\n check_zstd()?;\n\n check_ssh()?;\n\n check_wget()?;\n\n check_tar()?;\n\n check_bash()?;\n\n\n", "file_path": "src/front_deploy.rs", "rank": 26, "score": 11.906067510859078 }, { "content": "\n\nuse validators::prelude::*;\n\n\n\nuse crate::constants::*;\n\nuse crate::parse::*;\n\n\n\npub(crate) fn check_zstd() -> Result<(), Box<dyn Error>> {\n\n let mut command = command!(\"zstd --version\");\n\n\n\n if command.execute_check_exit_status_code(0).is_err() {\n\n return Err(\"Cannot find zstd.\".into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn check_ssh() -> Result<(), Box<dyn Error>> {\n\n // scp should also be checked implicitly\n\n let mut command = command!(\"ssh -V\");\n\n\n", "file_path": "src/functions.rs", "rank": 27, "score": 11.784513420060836 }, { "content": "pub(crate) use api_url_prefix::*;\n\npub(crate) use build_target::*;\n\npub(crate) use command::*;\n\npub(crate) use commit_sha::*;\n\npub(crate) use image_name::*;\n\npub(crate) use name::*;\n\npub(crate) use phase::*;\n\npub(crate) use project_path::*;\n\npub(crate) use reference::*;\n\npub(crate) use ssh_url_prefix::*;\n\npub(crate) use ssh_user_host::*;\n\n\n\npub(crate) fn parse_parse_id(matches: &ArgMatches) -> u64 {\n\n match matches.value_of(\"GITLAB_PROJECT_ID\") {\n\n Some(project_id) => {\n\n match project_id.parse::<u64>() {\n\n Ok(project_id) => project_id,\n\n Err(_) => {\n\n error!(\"{:?} is not a correct GitLab project ID\", project_id);\n\n process::exit(-2);\n", "file_path": "src/parse/mod.rs", "rank": 28, "score": 10.748659979346224 }, { "content": "use std::borrow::Cow;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::env;\n\nuse std::error::Error;\n\nuse std::fs::{self, File};\n\nuse std::io::{BufRead, BufReader, ErrorKind};\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\nuse std::process::{Command, Stdio};\n\n\n\nuse tempfile::TempDir;\n\n\n\nuse execute::{command, command_args, Execute};\n\n\n\nuse chrono::format::{DelayedFormat, StrftimeItems};\n\nuse chrono::Local;\n\nuse regex::Regex;\n\nuse scanner_rust::{ScannerError, ScannerStr};\n\nuse slash_formatter::delete_end_slash_in_place;\n\nuse trim_in_place::TrimInPlace;\n", "file_path": "src/functions.rs", "rank": 29, "score": 10.727052831030663 }, { "content": "\n\n 
let mut sc = ScannerStr::new(&line);\n\n\n\n let project_id = match sc.next_u64() {\n\n Ok(r) => {\n\n match r {\n\n Some(r) => r,\n\n None => continue,\n\n }\n\n }\n\n Err(err) => {\n\n match err {\n\n ScannerError::ParseIntError(_) => {\n\n return Err(format!(\n\n \"In {PHASE_PATH:?} at line {LINE}, cannot read the project id: {}\",\n\n err,\n\n PHASE_PATH = phase_path,\n\n LINE = line_number\n\n )\n\n .into())\n", "file_path": "src/functions.rs", "rank": 30, "score": 9.933726968585962 }, { "content": "\n\n #[allow(dead_code)]\n\n #[inline]\n\n pub(crate) fn get_host(&self) -> &str {\n\n self.host.as_str()\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn get_port(&self) -> u16 {\n\n self.port\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn user_host(&self) -> String {\n\n format!(\"{}@{}\", self.user, self.host)\n\n }\n\n}\n\n\n\nimpl Display for SshUserHost {\n\n #[inline]\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {\n\n if self.port != 22 {\n\n f.write_fmt(format_args!(\"{}@{}:{}\", self.user, self.host, self.port))\n\n } else {\n\n f.write_fmt(format_args!(\"{}@{}\", self.user, self.host))\n\n }\n\n }\n\n}\n", "file_path": "src/parse/ssh_user_host.rs", "rank": 31, "score": 9.582922767813036 }, { "content": "\n\n Err(format!(\"Cannot check the existence of {:?} of {}\", path.as_ref(), ssh_user_host).into())\n\n}\n\n\n\npub(crate) fn check_directory_exist<S: AsRef<str>>(\n\n ssh_user_host: &SshUserHost,\n\n path: S,\n\n) -> Result<bool, Box<dyn Error>> {\n\n let mut command =\n\n create_ssh_command(ssh_user_host, format!(\"test -d {PATH:?}\", PATH = path.as_ref(),));\n\n\n\n command.stdout(Stdio::piped());\n\n command.stderr(Stdio::piped());\n\n\n\n let output = command.execute_output()?;\n\n\n\n if let Some(code) = output.status.code() {\n\n match code {\n\n 0 => return Ok(true),\n\n 1 => return Ok(false),\n", "file_path": "src/functions.rs", "rank": 32, "score": 9.494738492140206 }, { "content": "use validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(regex(\"^[a-zA-Z0-9]{40}$\"))]\n\npub(crate) struct CommitSha(String);\n\n\n\nimpl CommitSha {\n\n #[inline]\n\n pub(crate) fn get_sha(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n\n\n #[inline]\n\n pub(crate) fn get_short_sha(&self) -> &str {\n\n &self.get_sha()[..8]\n\n }\n\n}\n\n\n\nimpl AsRef<str> for CommitSha {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.get_sha()\n\n }\n\n}\n", "file_path": "src/parse/commit_sha.rs", "rank": 33, "score": 8.923489151136597 }, { "content": " if command.execute_check_exit_status_code(0).is_err() {\n\n return Err(\"Cannot find ssh.\".into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn check_wget() -> Result<(), Box<dyn Error>> {\n\n let mut command = command!(\"wget --version\");\n\n\n\n if command.execute_check_exit_status_code(0).is_err() {\n\n return Err(\"Cannot find wget.\".into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn check_tar() -> Result<(), Box<dyn Error>> {\n\n let mut command = command!(\"tar --version\");\n\n\n", "file_path": "src/functions.rs", "rank": 34, "score": 8.865524202037744 }, { "content": " if command.execute_check_exit_status_code(0).is_err() {\n\n return Err(\"Cannot find tar.\".into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn check_bash() -> Result<(), Box<dyn Error>> {\n\n let mut command = command!(\"bash --version\");\n\n\n\n if command.execute_check_exit_status_code(0).is_err() {\n\n return Err(\"Cannot find bash.\".into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn check_docker() -> Result<(), Box<dyn 
Error>> {\n\n let mut command = command!(\"docker --version\");\n\n\n", "file_path": "src/functions.rs", "rank": 35, "score": 8.865524202037744 }, { "content": "use validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(regex(r\"^[a-zA-Z0-9\\-_.]{1,80}$\"))]\n\npub(crate) struct Phase(String);\n\n\n\nimpl AsRef<str> for Phase {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "src/parse/phase.rs", "rank": 36, "score": 8.84827485288747 }, { "content": "use validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(regex(r\"^[a-zA-Z0-9\\-_.]{1,80}$\"))]\n\npub(crate) struct Name(String);\n\n\n\nimpl AsRef<str> for Name {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "src/parse/name.rs", "rank": 37, "score": 8.84827485288747 }, { "content": "use validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(regex(r\"^\\S+$\"))]\n\npub(crate) struct ApiToken(String);\n\n\n\nimpl AsRef<str> for ApiToken {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "src/parse/api_token.rs", "rank": 38, "score": 8.666495729278992 }, { "content": "use validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(regex(r\"^[a-z0-9\\-_]{1,80}$\"))]\n\npub(crate) struct BuildTarget(String);\n\n\n\nimpl AsRef<str> for BuildTarget {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "src/parse/build_target.rs", "rank": 39, "score": 8.666495729278992 }, { "content": "use validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(regex(r\"^[a-z0-9\\-_]{1,80}$\"))]\n\npub(crate) struct ImageName(String);\n\n\n\nimpl AsRef<str> for ImageName {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "src/parse/image_name.rs", "rank": 40, "score": 8.666495729278992 }, { "content": " command.stderr(Stdio::piped());\n\n\n\n let output = command.execute_output()?;\n\n\n\n if !output.status.success() {\n\n String::from_utf8_lossy(output.stderr.as_slice()).split('\\n').for_each(|line| {\n\n if !line.is_empty() {\n\n warn!(\"{}\", line);\n\n }\n\n });\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn check_file_exist<S: AsRef<str>>(\n\n ssh_user_host: &SshUserHost,\n\n path: S,\n\n) -> Result<bool, Box<dyn Error>> {\n\n let mut command =\n", "file_path": "src/functions.rs", "rank": 41, "score": 8.615990686624531 }, { "content": "use validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(line(char_length(trimmed_min = 1)))]\n\npub(crate) struct Reference(String);\n\n\n\nimpl AsRef<str> for Reference {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "src/parse/reference.rs", "rank": 42, "score": 8.57866025921993 }, { "content": "use std::fmt::{self, Display, Formatter};\n\n\n\nuse regex::Regex;\n\n\n\n#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]\n\npub(crate) struct SshUserHost {\n\n user: String,\n\n host: String,\n\n port: u16,\n\n}\n\n\n\nimpl SshUserHost {\n\n pub(crate) fn parse_str<S: AsRef<str>>(s: S) -> Result<Self, ()> {\n\n let s = s.as_ref();\n\n\n\n let regex = Regex::new(r\"^([^/\\s]+)@([^/\\s:]+)(?::([0-9]{1,5}))?$\").unwrap();\n\n\n\n let result = regex.captures(s).ok_or(())?;\n\n\n\n let user = result.get(1).unwrap().as_str();\n", "file_path": "src/parse/ssh_user_host.rs", "rank": 43, "score": 8.52490196233939 }, { "content": "use 
validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(regex(r\"^(ssh://)?[^/\\s]+@[^/\\s:]+(?::[^/\\s]+)?$\"))]\n\npub(crate) struct SshUrlPrefix(String);\n\n\n\nimpl AsRef<str> for SshUrlPrefix {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "src/parse/ssh_url_prefix.rs", "rank": 44, "score": 8.492764932218977 }, { "content": "use validators::prelude::*;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(line(char_length(trimmed_min = 1)))]\n\npub(crate) struct ProjectPath(String);\n\n\n\nimpl AsRef<str> for ProjectPath {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n", "file_path": "src/parse/project_path.rs", "rank": 45, "score": 8.408742143018241 }, { "content": "use slash_formatter::delete_end_slash;\n\nuse validators::prelude::*;\n\n\n\nuse validators_prelude::url;\n\n\n\n#[derive(Debug, Validator)]\n\n#[validator(http_url(local(Allow)))]\n\npub(crate) struct ApiUrlPrefix {\n\n url: url::Url,\n\n #[allow(dead_code)]\n\n is_https: bool,\n\n}\n\n\n\nimpl AsRef<str> for ApiUrlPrefix {\n\n #[inline]\n\n fn as_ref(&self) -> &str {\n\n delete_end_slash(self.url.as_str())\n\n }\n\n}\n", "file_path": "src/parse/api_url_prefix.rs", "rank": 46, "score": 8.187485329178115 }, { "content": " \"-p\",\n\n ssh_user_host.get_port().to_string(),\n\n ssh_user_host.user_host(),\n\n command.as_ref()\n\n )\n\n}\n\n\n\n#[inline]\n\npub(crate) fn create_scp_command<F: AsRef<str>, T: AsRef<str>>(\n\n ssh_user_host: &SshUserHost,\n\n from: F,\n\n to: T,\n\n) -> Command {\n\n command_args!(\n\n \"scp\",\n\n \"-o\",\n\n \"StrictHostKeyChecking=no\",\n\n \"-o\",\n\n \"BatchMode=yes\",\n\n \"-P\",\n", "file_path": "src/functions.rs", "rank": 47, "score": 7.919790800802911 }, { "content": " create_ssh_command(ssh_user_host, format!(\"cd {SSH_PROJECT:?} && echo \\\"{TIMESTAMP} {COMMAND} {REFERENCE_NAME}-{SHORT_SHA}\\\" >> {SSH_PROJECT:?}/../control.log && {COMMAND_STR}\",\n\n SSH_PROJECT = ssh_project,\n\n REFERENCE_NAME = reference_name.as_ref(),\n\n TIMESTAMP = current_timestamp(),\n\n SHORT_SHA = commit_sha.get_short_sha(),\n\n COMMAND = command.as_str(),\n\n COMMAND_STR = command_str,\n\n ));\n\n\n\n let output = command.execute_output()?;\n\n\n\n if !output.status.success() {\n\n return Err(\"Control failed!\".into());\n\n }\n\n }\n\n\n\n if matches!(command, Command::Up | Command::DownAndUp) {\n\n let mut command =\n\n create_ssh_command(ssh_user_host, format!(\"cd {SSH_PROJECT:?} && echo \\\"{REFERENCE_NAME}-{SHORT_SHA}\\\" > {SSH_PROJECT:?}/../last-up\",\n\n SSH_PROJECT = ssh_project,\n", "file_path": "src/back_control.rs", "rank": 48, "score": 7.828944056942804 }, { "content": " docker_compose\n\n }\n\n Err(ref error) if error.kind() == ErrorKind::NotFound => {\n\n return Err(\n\n format!(\"deploy/{} cannot be found in the project.\", docker_compose_name).into()\n\n );\n\n }\n\n Err(error) => return Err(error.into()),\n\n };\n\n\n\n let regex =\n\n Regex::new(&format!(\"(?m)^( *image: +{IMAGE_NAME}) *$\", IMAGE_NAME = image_name.as_ref()))\n\n .unwrap();\n\n\n\n if !regex.is_match(docker_compose.as_str()) {\n\n return Err(format!(\n\n \"deploy/{} or deploy/image-name.txt cannot match\",\n\n docker_compose_name\n\n )\n\n .into());\n", "file_path": "src/functions.rs", "rank": 49, "score": 7.824232533924824 }, { "content": " let output = command.execute_output()?;\n\n\n\n if !output.status.success() {\n\n return Err(\"Build failed\".into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[inline]\n\npub(crate) fn 
remaining context_items for this row (retrieved code excerpts), listed as file_path · rank · score:
  src/functions.rs · rank 50 · 7.650374498322606
  src/main.rs · rank 51 · 7.564426505643021
  src/back_control.rs · rank 52 · 7.420455806479604
  src/functions.rs · rank 53 · 7.407771440742219
  src/main.rs · rank 56 · 7.127731207685633
  src/functions.rs · rank 57 · 7.087462694349429
  src/main.rs · rank 58 · 7.0244698697278185
  src/parse/mod.rs · rank 59 · 6.976862404798069
  src/functions.rs · rank 60 · 6.974338024861204
  src/functions.rs · rank 61 · 6.934857659943834
  src/functions.rs · rank 62 · 6.770400594982847
  src/functions.rs · rank 63 · 6.727049530214953
  src/functions.rs · rank 64 · 6.4638600222523355
  src/main.rs · rank 65 · 6.455112807179345
  src/main.rs · rank 66 · 6.455112807179345
  src/main.rs · rank 67 · 6.147965663468462
  src/front_develop.rs · rank 68 · 6.0399893570610645
  src/main.rs · rank 69 · 5.91460240923048
  src/back_control.rs · rank 70 · 5.7957625082154856
  src/functions.rs · rank 71 · 5.781100421342595
  src/front_deploy.rs · rank 72 · 5.738217523489858
  src/back_deploy.rs · rank 73 · 5.706852982273143
  src/back_deploy.rs · rank 74 · 5.517817248823276
  src/main.rs · rank 75 · 5.363921015092254
  src/functions.rs · rank 76 · 5.327336139093452
  src/functions.rs · rank 77 · 5.293466961913474
  src/back_control.rs · rank 78 · 5.221532504092516
  src/front_develop.rs · rank 79 · 5.047093220471062
  src/functions.rs · rank 80 · 4.83067647406282
  src/functions.rs · rank 81 · 4.795009706582486
  src/main.rs · rank 82 · 4.776743788027951
  src/functions.rs · rank 83 · 4.411696957124261
  src/main.rs · rank 84 · 4.372223540928648
  src/back_develop.rs · rank 85 · 4.338308226753772
  src/functions.rs · rank 86 · 4.287722319964693
  src/back_develop.rs · rank 87 · 4.230773935602397
  src/front_deploy.rs · rank 88 · 3.8746009346082855
  src/back_control.rs · rank 89 · 3.6882010448401665
  src/back_deploy.rs · rank 90 · 3.682643067033633
  src/back_deploy.rs · rank 91 · 3.676036439894347
  src/front_deploy.rs · rank 92 · 3.6670651175648907
  src/constants.rs · rank 93 · 3.6336481166650594
  src/simple_deploy.rs · rank 94 · 3.569891310951519
  src/front_develop.rs · rank 96 · 3.3976828135600545
  src/functions.rs · rank 97 · 3.338070304371424
  README.md · rank 98 · 3.208329785047306
  src/back_develop.rs · rank 99 · 3.109615507723848
lang: Rust
file_path: src/bin/bevy_client.rs
repo_name: qkniep/moonshot
commit: e5b37cf15d6f31cb422a1954f86fc443dfb52063
file_code:
use bevy::{
    input::{keyboard::KeyboardInput, ElementState, Input},
    log::{Level, LogSettings},
    prelude::*,
    render::{camera::Camera, pass::ClearColor},
    ui::camera::UI_CAMERA,
};

use moonshot::building::*;
use moonshot::combat::*;
use moonshot::components::*;
use moonshot::cursor_world_coords::*;
use moonshot::network::{NetworkPlugin, PlayerAction, Transport};

struct GamePlugin;

impl Plugin for GamePlugin {
    fn build(&self, app: &mut AppBuilder) {
        app.add_resource(ClearColor(Color::hex("22265A").unwrap()))
            .add_resource(CursorInWorld::default())
            .add_resource(PlayerResources { pink: 30, green: 0 })
            .add_startup_system(game_setup)
            .add_system(cursor_world_coords)
            .add_system(camera_motion)
            .add_system(kepler_motion)
            .add_system(building)
            .add_system(planet_auras)
            .add_system(combat)
            .add_system(resource_mining);
    }
}

fn main() {
    App::build()
        .add_resource(WindowDescriptor {
            title: "Moonshot!".to_string(),
            width: 1920,
            height: 1080,
            ..Default::default()
        })
        .add_resource(LogSettings {
            level: Level::DEBUG,
            ..Default::default()
        })
        .add_plugins(DefaultPlugins)
        .add_plugin(GamePlugin)
        .add_plugin(NetworkPlugin)
        .run();
}

fn game_setup(
    commands: &mut Commands,
    asset_server: Res<AssetServer>,
    mut texture_atlases: ResMut<Assets<TextureAtlas>>,
) {
    let texture_handle = asset_server.load("sprites/sprite_sheet.png");
    let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(256.0, 256.0), 4, 4);
    let texture_atlas_handle = texture_atlases.set("SPRITE_SHEET", texture_atlas);

    commands
        .spawn(Camera2dBundle::default())
        .spawn(UiCameraBundle::default())
        .spawn(TextBundle {
            style: Style {
                align_self: AlignSelf::FlexStart,
                ..Default::default()
            },
            text: Text {
                value: "0, 0".to_string(),
                font: asset_server.load("fonts/Nunito-Regular.ttf"),
                style: TextStyle {
                    font_size: 60.0,
                    color: Color::WHITE,
                    alignment: TextAlignment::default(),
                },
            },
            ..Default::default()
        })
        .with(ResourcesText)
        .spawn(SpriteSheetBundle {
            sprite: TextureAtlasSprite::new(0),
            texture_atlas: texture_atlas_handle.clone(),
            transform: Transform::from_scale(Vec3::splat(1.0)),
            ..Default::default()
        })
        .with(Planet::default())
        .with_children(|parent| {
            parent
                .spawn(SpriteSheetBundle {
                    sprite: TextureAtlasSprite::new(1),
                    texture_atlas: texture_atlas_handle.clone(),
                    transform: Transform::from_scale(Vec3::splat(0.5)),
                    ..Default::default()
                })
                .with(Moon {
                    orbit_radius: 300.0,
                    speed: 1.0,
                    building: None,
                })
                .spawn(SpriteSheetBundle {
                    sprite: TextureAtlasSprite::new(1),
                    texture_atlas: texture_atlas_handle.clone(),
                    transform: Transform::from_scale(Vec3::splat(0.5)),
                    ..Default::default()
                })
                .with(Moon {
                    orbit_radius: 500.0,
                    speed: 0.5,
                    building: None,
                });
        })
        .spawn(SpriteSheetBundle {
            sprite: TextureAtlasSprite::new(0),
            texture_atlas: texture_atlas_handle.clone(),
            transform: Transform::from_translation(Vec3::splat(700.0)),
            ..Default::default()
        })
        .with(Planet::default())
        .with_children(|parent| {
            parent
                .spawn(SpriteSheetBundle {
                    sprite: TextureAtlasSprite::new(1),
                    texture_atlas: texture_atlas_handle.clone(),
                    transform: Transform::from_scale(Vec3::splat(0.5)),
                    ..Default::default()
                })
                .with(Moon {
                    orbit_radius: 300.0,
                    speed: 1.0,
                    building: None,
                })
                .spawn(SpriteSheetBundle {
                    sprite: TextureAtlasSprite::new(1),
                    texture_atlas: texture_atlas_handle.clone(),
                    transform: Transform::from_scale(Vec3::splat(0.5)),
                    ..Default::default()
                })
                .with(Moon {
                    orbit_radius: 500.0,
                    speed: 0.5,
                    building: None,
                });
        });
}

fn camera_motion(
    time: Res<Time>,
    keyboard_input: Res<Input<KeyCode>>,
    mut query: Query<(&Camera, Mut<Transform>)>,
) {
    for (camera, mut trans) in query.iter_mut() {
        if camera.name == Some(UI_CAMERA.to_string()) {
            continue;
        }

        let mut direction = Vec3::splat(0.0);
        if keyboard_input.pressed(KeyCode::Up) {
            direction += Vec3::new(0.0, 1.0, 0.0)
        }
        if keyboard_input.pressed(KeyCode::Down) {
            direction += Vec3::new(0.0, -1.0, 0.0)
        }
        if keyboard_input.pressed(KeyCode::Left) {
            direction += Vec3::new(-1.0, 0.0, 0.0)
        }
        if keyboard_input.pressed(KeyCode::Right) {
            direction += Vec3::new(1.0, 0.0, 0.0)
        }

        let camera_speed = 500.0;
        let ds = camera_speed * time.delta_seconds;
        if direction.length() > 0.0 {
            trans.translation += direction.normalize() * ds;
        }
    }
}

fn kepler_motion(time: Res<Time>, mut query: Query<(&Moon, Mut<Transform>)>) {
    for (moon, mut trans) in query.iter_mut() {
        let ds = moon.speed * time.seconds_since_startup;
        let x = moon.orbit_radius * ds.cos() as f32;
        let y = moon.orbit_radius * ds.sin() as f32;
        trans.translation = Vec3::new(x, y, 0.0);
    }
}

struct ResourceMiningState {
    timer: Timer,
}

impl Default for ResourceMiningState {
    fn default() -> Self {
        Self {
            timer: Timer::from_seconds(1.0, true),
        }
    }
}

fn resource_mining(
    mut state: Local<ResourceMiningState>,
    time: Res<Time>,
    mut resources: ResMut<PlayerResources>,
    moon_query: Query<&Moon>,
    mut text_query: Query<(&mut Text, &ResourcesText)>,
) {
    if state.timer.tick(time.delta_seconds).just_finished() {
        for moon in moon_query.iter() {
            if let Some(BuildingType::Mining) = moon.building {
                resources.pink += 1;
            }
        }
    }

    for (mut text, _) in text_query.iter_mut() {
        text.value = format!("{}, {}", resources.pink, resources.green);
    }
}

#[derive(Default)]
pub struct PlanetAuraState {
    keyboard_event_reader: EventReader<KeyboardInput>,
    current_planet: Option<Entity>,
}

pub fn planet_auras(
    mut state: Local<PlanetAuraState>,
    cursor_in_world: Res<CursorInWorld>,
    keyboard_inputs: Res<Events<KeyboardInput>>,
    mouse_input: Res<Input<MouseButton>>,
    mut resources: ResMut<PlayerResources>,
    mut transport: ResMut<Transport>,
    mut planet_query: Query<(Entity, Mut<Planet>, &GlobalTransform)>,
) {
    let world_coords = cursor_in_world.position;

    for event in state.keyboard_event_reader.iter(&keyboard_inputs) {
        if let Some(entity) = state.current_planet {
            if event.state == ElementState::Pressed {
                let (_, mut planet, _) = planet_query.get_mut(entity).unwrap();
                planet.current_aura = match event.key_code {
                    Some(KeyCode::P) => Some(Aura::ProductionSpeed),
                    Some(KeyCode::R) => Some(Aura::RocketSpeed),
                    Some(KeyCode::D) => Some(Aura::RocketDamage),
                    Some(KeyCode::M) => Some(Aura::MoonSpeed),
                    Some(KeyCode::S) => Some(Aura::Shield),
                    _ => planet.current_aura,
                };

                let aura_change = PlayerAction::ChangeAura {
                    aura: planet.current_aura,
                    planet: entity.id(),
                };
                let serialized = bincode::serialize(&aura_change).unwrap();
                transport.send(serialized);

                state.current_planet = None;
            }
        }
    }

    if mouse_input.pressed(MouseButton::Left) {
        for (entity, _, trans) in planet_query.iter_mut() {
            if trans.translation.x - 128.0 * trans.scale.x <= world_coords.x
                && trans.translation.x + 128.0 * trans.scale.x >= world_coords.x
                && trans.translation.y - 128.0 * trans.scale.y <= world_coords.y
                && trans.translation.y + 128.0 * trans.scale.y >= world_coords.y
            {
                state.current_planet = Some(entity);
            }
        }
    }
}
prefix, suffix, middle: verbatim splits of the file_code above (fill-in-the-middle task fields)
strategy: function_block-function_prefixed
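The kepler_motion system in the file_code above reduces each moon's orbit to plain circular motion: x = radius * cos(speed * t), y = radius * sin(speed * t). Below is a minimal, framework-free sketch of that same arithmetic, assuming nothing beyond the numbers already used in the file; the helper name, the standalone main, and the test values are illustrative only and are not part of qkniep/moonshot.

// Illustrative sketch of the circular-orbit arithmetic used by kepler_motion.
// `orbit_position` is a hypothetical helper, not a function from the repository.
fn orbit_position(orbit_radius: f32, speed: f64, seconds_since_startup: f64) -> (f32, f32) {
    let ds = speed * seconds_since_startup;
    let x = orbit_radius * ds.cos() as f32;
    let y = orbit_radius * ds.sin() as f32;
    (x, y)
}

fn main() {
    // A moon with radius 300.0 and angular speed 1.0 stays on its orbit circle at all times.
    let (x, y) = orbit_position(300.0, 1.0, 2.5);
    let distance = (x * x + y * y).sqrt();
    assert!((distance - 300.0).abs() < 1e-3);
    println!("moon at ({:.1}, {:.1}), distance from planet {:.1}", x, y, distance);
}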
[ { "content": "pub fn update_simulation_time(mut sim_time: ResMut<NetworkSimulationTime>, time: Res<Time>) {\n\n sim_time.update_elapsed(time.delta_seconds);\n\n sim_time.reset_frame_lag();\n\n while sim_time.elapsed_duration() > sim_time.per_frame_duration() {\n\n sim_time.increment_frame_number();\n\n }\n\n}\n", "file_path": "src/network/time.rs", "rank": 0, "score": 87387.59690682282 }, { "content": "fn send_messages(mut transport: ResMut<Transport>, mut stream: ResMut<TcpStream>) {\n\n let messages = transport.drain_messages();\n\n for message in messages {\n\n if let Err(e) = stream.write_all(&message.payload) {\n\n error!(\"Failed to send network message: {}\", e);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/network/mod.rs", "rank": 1, "score": 75171.17899556115 }, { "content": "pub fn building_moon_texture_index(building: BuildingType) -> u32 {\n\n match building {\n\n BuildingType::Mining => 9,\n\n BuildingType::Production => 8,\n\n }\n\n}\n\n\n", "file_path": "src/building.rs", "rank": 2, "score": 73120.35212612341 }, { "content": "pub fn building(\n\n commands: &mut Commands,\n\n mut state: Local<BuildingState>,\n\n cursor_in_world: Res<CursorInWorld>,\n\n keyboard_inputs: Res<Events<KeyboardInput>>,\n\n mouse_input: Res<Input<MouseButton>>,\n\n texture_atlases: Res<Assets<TextureAtlas>>,\n\n mut resources: ResMut<PlayerResources>,\n\n mut transport: ResMut<Transport>,\n\n mut moon_query: Query<(Entity, &Moon, &GlobalTransform)>,\n\n) {\n\n let world_coords = cursor_in_world.position;\n\n\n\n // change to building mode on button press\n\n for event in state.keyboard_event_reader.iter(&keyboard_inputs) {\n\n if state.currently_building.is_none() && event.state == ElementState::Pressed {\n\n state.currently_building = match event.key_code {\n\n Some(KeyCode::B) => Some(BuildingType::Mining),\n\n Some(KeyCode::R) => Some(BuildingType::Production),\n\n _ => state.currently_building,\n", "file_path": "src/building.rs", "rank": 3, "score": 72505.24068095897 }, { "content": "fn building_cursor_texture(building: BuildingType) -> TextureAtlasSprite {\n\n match building {\n\n BuildingType::Mining => TextureAtlasSprite::new(5),\n\n BuildingType::Production => TextureAtlasSprite::new(12),\n\n }\n\n}\n\n\n", "file_path": "src/building.rs", "rank": 4, "score": 52881.110890953154 }, { "content": "/// System for shooting rockets in mouse cursor direction.\n\npub fn combat(\n\n commands: &mut Commands,\n\n mut state: Local<CombatState>,\n\n time: Res<Time>,\n\n keyboard_inputs: Res<Events<KeyboardInput>>,\n\n mouse_input: Res<Input<MouseButton>>,\n\n mut resources: ResMut<PlayerResources>,\n\n cursor_in_world: Res<CursorInWorld>,\n\n mut transport: ResMut<Transport>,\n\n moon_query: Query<(Entity, &Moon, &GlobalTransform)>,\n\n mut rocket_query: Query<(Entity, &Rocket, Mut<Transform>)>,\n\n) {\n\n for event in state.keyboard_event_reader.iter(&keyboard_inputs) {\n\n if event.key_code == Some(KeyCode::A) && event.state == ElementState::Pressed {\n\n if resources.pink < 3 || state.current_rocket_base.is_none() {\n\n continue;\n\n }\n\n resources.pink -= 3;\n\n\n\n let base_moon = state.current_rocket_base.unwrap();\n", "file_path": "src/combat.rs", "rank": 5, "score": 50290.50739157346 }, { "content": "pub fn cursor_world_coords(\n\n mut state: Local<CursorState>,\n\n mut cursor_in_world: ResMut<CursorInWorld>,\n\n cursor_inputs: Res<Events<CursorMoved>>,\n\n camera_query: Query<(&Camera, &Transform, &OrthographicProjection)>,\n\n mut cursorfollowing_query: Query<(&CursorFollowing, 
Mut<Transform>)>,\n\n) {\n\n if let Some(event) = state.cursor_event_reader.iter(&cursor_inputs).last() {\n\n let cursor_position = event.position;\n\n\n\n // get the releveant attributes of the 2D orth. projection\n\n let mut camera_pos = Vec2::splat(0.0);\n\n let mut camera_width = 0.0;\n\n let mut camera_height = 0.0;\n\n for (camera, trans, orth) in camera_query.iter() {\n\n if camera.name == Some(UI_CAMERA.to_string()) {\n\n continue;\n\n }\n\n\n\n camera_pos = Vec2::new(trans.translation.x, trans.translation.y);\n", "file_path": "src/cursor_world_coords.rs", "rank": 6, "score": 44243.26329408083 }, { "content": "fn building_cost(building: BuildingType) -> u32 {\n\n match building {\n\n BuildingType::Mining => 20,\n\n BuildingType::Production => 15,\n\n }\n\n}\n", "file_path": "src/building.rs", "rank": 7, "score": 42855.543542296145 }, { "content": "fn handle_messages(\n\n commands: &mut Commands,\n\n mut stream: ResMut<TcpStream>,\n\n mut event_channel: ResMut<Events<NetworkSimulationEvent>>,\n\n mut moon_query: Query<(Mut<Moon>, Mut<TextureAtlasSprite>)>,\n\n texture_atlases: Res<Assets<TextureAtlas>>,\n\n) {\n\n let peer_addr = stream.peer_addr().unwrap();\n\n\n\n if let Ok(turn) = bincode::deserialize_from::<&mut TcpStream, ServerTurn>(&mut *stream) {\n\n trace!(\"Received msg: {:?}\", turn);\n\n for action in turn.actions {\n\n match action {\n\n PlayerAction::Build { building, moon } => {\n\n let (mut moon, mut sprite) = moon_query.get_mut(Entity::new(moon)).unwrap();\n\n sprite.index = building_moon_texture_index(building);\n\n moon.building = Some(building);\n\n },\n\n PlayerAction::ShootRocket { pos, dir } => {\n\n let angle = dir.y.atan2(dir.x);\n", "file_path": "src/network/mod.rs", "rank": 8, "score": 22276.472314900042 }, { "content": " if let Some(building) = state.currently_building {\n\n if mouse_input.pressed(MouseButton::Left) {\n\n // check if cursor is inside of a moon\n\n // TODO: use actual sprite size instead of magic number\n\n for (entity, _, trans) in moon_query.iter_mut() {\n\n if trans.translation.x - 128.0 * trans.scale.x <= world_coords.x\n\n && trans.translation.x + 128.0 * trans.scale.x >= world_coords.x\n\n && trans.translation.y - 128.0 * trans.scale.y <= world_coords.y\n\n && trans.translation.y + 128.0 * trans.scale.y >= world_coords.y\n\n && resources.pink >= building_cost(building)\n\n {\n\n let build = PlayerAction::Build {\n\n building,\n\n moon: entity.id(),\n\n };\n\n let serialized = bincode::serialize(&build).unwrap();\n\n transport.send(serialized);\n\n resources.pink -= building_cost(building);\n\n }\n\n }\n\n commands.despawn(state.cursor_follower.unwrap());\n\n state.currently_building = None;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/building.rs", "rank": 9, "score": 18940.072289961467 }, { "content": "// Copyright (C) 2020 Quentin M. 
Kniep <hello@quentinkniep.com>\n\n// Distributed under terms of the MIT license.\n\n\n\nuse bevy::{\n\n input::{keyboard::KeyboardInput, ElementState, Input},\n\n prelude::*,\n\n};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::components::{Moon, PlayerResources};\n\nuse crate::cursor_world_coords::*;\n\nuse crate::network::{PlayerAction, Transport};\n\n\n\n#[derive(Deserialize, Serialize, Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum BuildingType {\n\n Mining,\n\n Production,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct BuildingState {\n\n keyboard_event_reader: EventReader<KeyboardInput>,\n\n cursor_follower: Option<Entity>,\n\n currently_building: Option<BuildingType>,\n\n}\n\n\n", "file_path": "src/building.rs", "rank": 10, "score": 18938.358818598754 }, { "content": " };\n\n\n\n if let Some(building) = state.currently_building {\n\n state.cursor_follower = commands\n\n .spawn(SpriteSheetBundle {\n\n sprite: building_cursor_texture(building),\n\n texture_atlas: texture_atlases.get_handle(\"SPRITE_SHEET\"),\n\n transform: Transform {\n\n translation: world_coords.extend(0.0),\n\n rotation: Quat::default(),\n\n scale: Vec3::splat(0.25),\n\n },\n\n ..Default::default()\n\n })\n\n .with(CursorFollowing)\n\n .current_entity();\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/building.rs", "rank": 11, "score": 18930.42312649742 }, { "content": " pub fn per_frame_duration(&self) -> f32 {\n\n self.per_frame_duration\n\n }\n\n\n\n /// Returns the number of frames the game lags behind the server simulation.\n\n pub fn frame_lag(&self) -> u32 {\n\n self.frame_lag\n\n }\n\n}\n\n\n\nimpl Default for NetworkSimulationTime {\n\n fn default() -> Self {\n\n Self {\n\n frame_number: 0,\n\n elapsed_duration: 0.0,\n\n // 30 frames / second\n\n per_frame_duration: 1.0 / 30.0,\n\n frame_lag: 1,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/network/time.rs", "rank": 12, "score": 18241.301479910064 }, { "content": "// Copyright (C) 2020 Quentin M. 
Kniep <hello@quentinkniep.com>\n\n// Distributed under terms of the MIT license.\n\n\n\nuse std::ops::RangeInclusive;\n\n\n\nuse bevy::prelude::*;\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct NetworkSimulationTime {\n\n /// The current simulation frame\n\n frame_number: u32,\n\n /// Accumulated duration since last simulation frame (in seconds)\n\n elapsed_duration: f32,\n\n /// Duration per frame (in seconds)\n\n per_frame_duration: f32,\n\n /// Number of frames the game lags behind the server simulation\n\n frame_lag: u32,\n\n}\n\n\n\nimpl NetworkSimulationTime {\n", "file_path": "src/network/time.rs", "rank": 13, "score": 18241.08283600108 }, { "content": " /// Returns the simulation frame numbers needed to be run this game frame.\n\n pub fn sim_frames_to_run(&self) -> RangeInclusive<u32> {\n\n (self.frame_number + 1 - self.frame_lag)..=self.frame_number\n\n }\n\n\n\n /// Bumps the frame number\n\n pub fn increment_frame_number(&mut self) {\n\n self.frame_number += 1;\n\n self.elapsed_duration -= self.per_frame_duration;\n\n self.frame_lag += 1;\n\n }\n\n\n\n /// Resets the frame lag\n\n pub fn reset_frame_lag(&mut self) {\n\n self.frame_lag = 0;\n\n }\n\n\n\n /// Increases the `elapsed_duration` by the given duration in seconds\n\n pub fn update_elapsed(&mut self, seconds: f32) {\n\n self.elapsed_duration += seconds;\n", "file_path": "src/network/time.rs", "rank": 14, "score": 18238.778118212504 }, { "content": " }\n\n\n\n /// Returns the current simulation frame number\n\n pub fn frame_number(&self) -> u32 {\n\n self.frame_number\n\n }\n\n\n\n /// Sets the frame number to the given frame number. This is useful when synchronizing frames\n\n /// with a server for example.\n\n pub fn set_frame_number(&mut self, new_frame: u32) {\n\n self.frame_number = new_frame;\n\n }\n\n\n\n /// Returns the total duration since the last simulation frame\n\n pub fn elapsed_duration(&self) -> f32 {\n\n self.elapsed_duration\n\n }\n\n\n\n /// Returns the duration between each simulation frame. This number is calculated when a frame rate\n\n /// is set\n", "file_path": "src/network/time.rs", "rank": 15, "score": 18237.385902851365 }, { "content": "// Copyright (C) 2020 Quentin M. Kniep <hello@quentinkniep.com>\n\n// Distributed under terms of the MIT license.\n\n\n\nuse bevy::prelude::*;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::building::*;\n\n\n\n#[derive(Default)]\n\npub struct Planet {\n\n pub current_aura: Option<Aura>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum Aura {\n\n MoonSpeed,\n\n ProductionSpeed,\n\n RocketDamage,\n\n RocketSpeed,\n\n Shield,\n", "file_path": "src/components.rs", "rank": 16, "score": 18.002098223999848 }, { "content": "// Copyright (C) 2020 Quentin M. 
Kniep <hello@quentinkniep.com>\n\n// Distributed under terms of the MIT license.\n\n\n\nuse bevy::{\n\n input::{keyboard::KeyboardInput, ElementState, Input},\n\n prelude::*,\n\n};\n\n\n\nuse crate::building::*;\n\nuse crate::components::*;\n\nuse crate::cursor_world_coords::*;\n\nuse crate::network::{PlayerAction, Transport};\n\n\n\n#[derive(Default)]\n\npub struct CombatState {\n\n keyboard_event_reader: EventReader<KeyboardInput>,\n\n current_rocket_base: Option<Entity>,\n\n}\n\n\n\n/// System for shooting rockets in mouse cursor direction.\n", "file_path": "src/combat.rs", "rank": 17, "score": 17.315789944582566 }, { "content": "}\n\n\n\npub struct Moon {\n\n pub orbit_radius: f32,\n\n pub speed: f64,\n\n pub building: Option<BuildingType>,\n\n}\n\n\n\npub struct Rocket {\n\n pub velocity: Vec2,\n\n}\n\n\n\npub struct ResourcesText;\n\npub struct PlayerResources {\n\n pub pink: u32,\n\n pub green: u32,\n\n}\n", "file_path": "src/components.rs", "rank": 18, "score": 16.97122854376448 }, { "content": "pub enum NetworkSimulationEvent {\n\n Message(SocketAddr, Vec<u8>),\n\n Connect(SocketAddr),\n\n Disconnect(SocketAddr),\n\n}\n\n\n\n/// This plugin can be added into a Bevy app to add network functionality.\n\npub struct NetworkPlugin;\n\n\n\nimpl Plugin for NetworkPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n let stream = TcpStream::connect(\"127.0.0.1:7777\").unwrap();\n\n stream.set_nonblocking(true).unwrap();\n\n app.add_resource(stream)\n\n .add_resource(Events::<NetworkSimulationEvent>::default())\n\n .add_resource(Transport::default())\n\n .add_resource(NetworkSimulationTime::default())\n\n .add_system(update_simulation_time)\n\n .add_system(send_messages)\n\n .add_system(handle_messages);\n", "file_path": "src/network/mod.rs", "rank": 19, "score": 16.684966609802128 }, { "content": " && trans.translation.x + 128.0 * trans.scale.x >= world_coords.x\n\n && trans.translation.y - 128.0 * trans.scale.y <= world_coords.y\n\n && trans.translation.y + 128.0 * trans.scale.y >= world_coords.y\n\n && moon.building == Some(BuildingType::Production)\n\n {\n\n //sprite.index = ...;\n\n state.current_rocket_base = Some(entity);\n\n }\n\n }\n\n }\n\n\n\n // move rockets according to their current velocity\n\n for (entity, rocket, mut trans) in rocket_query.iter_mut() {\n\n trans.translation += rocket.velocity.extend(0.0) * time.delta_seconds;\n\n // despawn if out of bounds\n\n if trans.translation.length() > 2000.0 {\n\n commands.despawn(entity);\n\n }\n\n }\n\n}\n", "file_path": "src/combat.rs", "rank": 20, "score": 14.589074851203579 }, { "content": " let (_, _, trans) = moon_query.get(base_moon).unwrap();\n\n let rocket_position = trans.translation;\n\n let rocket_direction =\n\n (cursor_in_world.position - trans.translation.truncate()).normalize();\n\n\n\n let launch = PlayerAction::ShootRocket {\n\n pos: rocket_position.truncate(),\n\n dir: rocket_direction,\n\n };\n\n let serialized = bincode::serialize(&launch).unwrap();\n\n transport.send(serialized);\n\n }\n\n }\n\n\n\n let world_coords = cursor_in_world.position;\n\n if mouse_input.pressed(MouseButton::Left) {\n\n // check if cursor is inside of a moon\n\n // TODO: use actual sprite size instead of magic number\n\n for (entity, moon, trans) in moon_query.iter() {\n\n if trans.translation.x - 128.0 * trans.scale.x <= world_coords.x\n", "file_path": "src/combat.rs", "rank": 21, "score": 14.362556602900776 }, { "content": "// Copyright (C) 2020 Quentin M. 
Kniep <hello@quentinkniep.com>\n\n// Distributed under terms of the MIT license.\n\n\n\nuse bevy::{\n\n prelude::*,\n\n render::camera::{Camera, OrthographicProjection},\n\n ui::camera::UI_CAMERA,\n\n};\n\n\n\npub struct CursorFollowing;\n\n\n\n#[derive(Debug, Default, Copy, Clone)]\n\npub struct CursorInWorld {\n\n pub position: Vec2,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct CursorState {\n\n cursor_event_reader: EventReader<CursorMoved>,\n\n}\n\n\n", "file_path": "src/cursor_world_coords.rs", "rank": 22, "score": 12.466613415713669 }, { "content": "pub enum PlayerAction {\n\n Build { building: BuildingType, moon: u32 },\n\n ChangeAura { aura: Option<Aura>, planet: u32 },\n\n ShootRocket { pos: Vec2, dir: Vec2 },\n\n}\n\n\n\n/// A single frame of the server's simulation.\n\n/// Contains a set of player issued actions which are executed on that frame of the simulation.\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct ServerTurn {\n\n actions: Vec<PlayerAction>,\n\n}\n\n\n\nimpl ServerTurn {\n\n pub fn new(actions: Vec<PlayerAction>) -> Self {\n\n ServerTurn { actions }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/network/mod.rs", "rank": 23, "score": 12.350735521324912 }, { "content": " camera_width = orth.right - orth.left;\n\n camera_height = orth.top - orth.bottom;\n\n }\n\n\n\n // convert cursor position in window to world coordinates\n\n let x = cursor_position.x;\n\n let y = cursor_position.y;\n\n let screen_coords = Vec2::new(x - camera_width / 2.0, y - camera_height / 2.0);\n\n let world_coords = camera_pos + screen_coords;\n\n\n\n // assign the new world coords to the gloabl resource\n\n cursor_in_world.position = world_coords;\n\n debug!(\"Cursor move in world, new position: {}\", cursor_in_world.position);\n\n\n\n // move CursorFollowing entities to mouse cursor\n\n for (_, mut trans) in cursorfollowing_query.iter_mut() {\n\n trans.translation = world_coords.extend(0.0);\n\n }\n\n }\n\n}\n", "file_path": "src/cursor_world_coords.rs", "rank": 24, "score": 11.820150295100417 }, { "content": "// Copyright (C) 2020 Quentin M. 
Kniep <hello@quentinkniep.com>\n\n// Distributed under terms of the MIT license.\n\n\n\nmod time;\n\n\n\nuse std::{\n\n collections::VecDeque,\n\n io::Write,\n\n net::{SocketAddr, TcpStream},\n\n};\n\n\n\nuse bevy::prelude::*;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::building::*;\n\nuse crate::components::{Aura, Moon, Rocket};\n\nuse self::time::*;\n\n\n\n/// Player issued actions in the game which need to be processed through the server.\n\n#[derive(Deserialize, Serialize, Debug)]\n", "file_path": "src/network/mod.rs", "rank": 25, "score": 11.41533301344136 }, { "content": " commands.spawn(SpriteSheetBundle {\n\n sprite: TextureAtlasSprite::new(7),\n\n texture_atlas: texture_atlases.get_handle(\"SPRITE_SHEET\"),\n\n transform: Transform {\n\n translation: pos.extend(0.0),\n\n rotation: Quat::from_rotation_z(angle),\n\n scale: Vec3::splat(0.25),\n\n },\n\n ..Default::default()\n\n })\n\n .with(Rocket {\n\n velocity: 300.0 * dir,\n\n });\n\n }\n\n _ => {}\n\n }\n\n }\n\n //event_channel.send(NetworkSimulationEvent::Message(peer_addr, msg_payload));\n\n }\n\n}\n", "file_path": "src/network/mod.rs", "rank": 26, "score": 8.6172555106637 }, { "content": " }\n\n}\n\n\n\npub struct Message {\n\n length: u16,\n\n pub payload: Vec<u8>,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Transport {\n\n messages: VecDeque<Message>,\n\n}\n\n\n\nimpl Transport {\n\n pub fn send(&mut self, payload: Vec<u8>) {\n\n if payload.len() >= 65536 {\n\n panic!(\"Payload to large for u16 length field!\");\n\n }\n\n\n\n self.messages.push_back(Message {\n", "file_path": "src/network/mod.rs", "rank": 27, "score": 8.028474383752709 }, { "content": "# Moonshot\n\n\n\nThis will be my contribution to GitHub's 2020 Game Off.\n\n\n\nI am writing a strategy game in Rust using the [Bevy](https://bevyengine.org/) game engine.\n\nThe game plays in space and will probably have something to do with mining moons for minerals,\n\ndestroying your opponent's moons, and flying around with rockets.\n\n\n\n![Screenshot](screenshot.png)\n", "file_path": "README.md", "rank": 28, "score": 4.543510790218367 }, { "content": "// Copyright (C) 2020 Quentin M. Kniep <hello@quentinkniep.com>\n\n// Distributed under terms of the MIT license.\n\n\n\npub mod building;\n\npub mod combat;\n\npub mod components;\n\npub mod cursor_world_coords;\n\npub mod network;\n", "file_path": "src/lib.rs", "rank": 29, "score": 3.410262914597474 }, { "content": " length: payload.len() as u16,\n\n payload,\n\n });\n\n }\n\n\n\n pub fn drain_messages(&mut self) -> Vec<Message> {\n\n self.messages.drain(0..).collect()\n\n }\n\n}\n\n\n", "file_path": "src/network/mod.rs", "rank": 30, "score": 3.280795013280664 } ]
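The src/network/time.rs excerpts indexed above implement NetworkSimulationTime, a fixed-step network clock that accumulates real frame time and advances the simulation in 1/30 s increments. The sketch below shows that accumulator pattern in plain Rust; the struct and method names are hypothetical stand-ins, and only the 1/30 s step is taken from the source.

// Illustrative fixed-step accumulator in the spirit of NetworkSimulationTime.
// `SimClock` and `tick` are invented names; they are not the repository's API.
struct SimClock {
    elapsed: f32,   // real time accumulated since the last simulation frame
    per_frame: f32, // duration of one simulation frame (1/30 s in the source)
    frame: u32,     // current simulation frame number
}

impl SimClock {
    fn tick(&mut self, dt: f32) -> u32 {
        self.elapsed += dt;
        let mut advanced = 0;
        // Convert whole frame intervals into simulation frames, carrying the remainder over.
        while self.elapsed > self.per_frame {
            self.elapsed -= self.per_frame;
            self.frame += 1;
            advanced += 1;
        }
        advanced
    }
}

fn main() {
    let mut clock = SimClock { elapsed: 0.0, per_frame: 1.0 / 30.0, frame: 0 };
    // Two 40 ms render frames each advance the 30 Hz simulation by exactly one frame.
    assert_eq!(clock.tick(0.04), 1);
    assert_eq!(clock.tick(0.04), 1);
    assert_eq!(clock.frame, 2);
    println!("simulation is at frame {}", clock.frame);
}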
lang: Rust
file_path: src/day11.rs
repo_name: jjcomer/advent2019
commit: b6fa95f1ffb6ce56ed5ab47bacd3d0377322c69a
file_code:
use crate::intcode;
use crate::intcode::{run_program, IntCodeResult, Program};
use std::collections::HashMap;

#[aoc_generator(day11)]
pub fn input_generator(input: &str) -> Program {
    intcode::input_generator(input)
}

enum Direction {
    Up,
    Down,
    Left,
    Right,
}

fn change_direction(current_direction: &Direction, turn: i64) -> Direction {
    match turn {
        0 => match current_direction {
            Direction::Up => Direction::Left,
            Direction::Left => Direction::Down,
            Direction::Down => Direction::Right,
            Direction::Right => Direction::Up,
        },
        1 => match current_direction {
            Direction::Up => Direction::Right,
            Direction::Right => Direction::Down,
            Direction::Down => Direction::Left,
            Direction::Left => Direction::Up,
        },
        _ => panic!("Unknown direction {}", turn),
    }
}

enum Colour {
    Black,
    White,
}

type Coord = (i64, i64);
type Map = HashMap<Coord, Colour>;

fn move_bot(current_direction: &Direction, (x, y): &Coord) -> Coord {
    match current_direction {
        Direction::Up => (*x, y + 1),
        Direction::Left => (x - 1, *y),
        Direction::Down => (*x, y - 1),
        Direction::Right => (x + 1, *y),
    }
}

fn check_colour<'a>(map: &'a Map, current_position: &Coord) -> &'a Colour {
    map.get(current_position).unwrap_or(&Colour::Black)
}

fn gen_colour(input: i64) -> Colour {
    match input {
        0 => Colour::Black,
        1 => Colour::White,
        _ => panic!("Unexpected colour {}", input),
    }
}

#[aoc(day11, part1)]
pub fn solve_part1(input: &Program) -> usize {
    let mut program = input.to_owned();
    let mut pointer = 0;
    let mut relative_index = 0;
    let mut current_position = (0, 0);
    let mut current_direction = Direction::Up;
    let mut map: Map = HashMap::new();

    loop {
        let current_colour = match check_colour(&map, &current_position) {
            Colour::Black => 0,
            Colour::White => 1,
        };
        let result = run_program(&mut program, pointer, relative_index, vec![current_colour]);
        match result {
            IntCodeResult::Halt(_) => {
                break;
            }
            IntCodeResult::Input(new_pointer, new_relative_index, output) => {
                let new_colour = gen_colour(output[0]);
                map.insert(current_position, new_colour);
                current_direction = change_direction(&current_direction, output[1]);
                current_position = move_bot(&current_direction, &current_position);
                pointer = new_pointer;
                relative_index = new_relative_index;
            }
        };
    }

    map.len()
}

#[aoc(day11, part2)]
pub fn solve_part2(input: &Program) -> usize {
    let mut program = input.to_owned();
    let mut pointer = 0;
    let mut relative_index = 0;
    let mut current_position = (0, 5);
    let mut current_direction = Direction::Up;
    let mut map: Map = HashMap::new();
    map.insert(current_position, Colour::White);

    loop {
        let current_colour = match check_colour(&map, &current_position) {
            Colour::Black => 0,
            Colour::White => 1,
        };
        let result = run_program(&mut program, pointer, relative_index, vec![current_colour]);
        match result {
            IntCodeResult::Halt(_) => {
                break;
            }
            IntCodeResult::Input(new_pointer, new_relative_index, output) => {
                let new_colour = gen_colour(output[0]);
                map.insert(current_position, new_colour);
                current_direction = change_direction(&current_direction, output[1]);
                current_position = move_bot(&current_direction, &current_position);
                pointer = new_pointer;
                relative_index = new_relative_index;
            }
        };
    }

    let max_x = map.keys().map(|x| x.0).max().unwrap();
    let max_y = map.keys().map(|x| x.1).max().unwrap();
    println!("X {} .. Y {}", max_x, max_y);
    for y in 0..=max_y {
        for x in 0..=max_x {
            match check_colour(&map, &(x, y)) {
                Colour::Black => print!(" "),
                Colour::White => print!("X"),
            }
        }
        println!();
    }

    map.len()
}
use crate::intcode; use crate::intcode::{run_program, IntCodeResult, Program}; use std::collections::HashMap; #[aoc_generator(day11)] pub fn input_generator(input: &str) -> Program { intcode::input_generator(input) } enum Direction { Up, Down, Left, Right, } fn change_direction(current_direction: &Direction, turn: i64) -> Direction { match turn { 0 => match current_directi
current_position = move_bot(&current_direction, &current_position); pointer = new_pointer; relative_index = new_relative_index; } }; } let max_x = map.keys().map(|x| x.0).max().unwrap(); let max_y = map.keys().map(|x| x.1).max().unwrap(); println!("X {} .. Y {}", max_x, max_y); for y in 0..=max_y { for x in 0..=max_x { match check_colour(&map, &(x, y)) { Colour::Black => print!(" "), Colour::White => print!("X"), } } println!(); } map.len() }
on { Direction::Up => Direction::Left, Direction::Left => Direction::Down, Direction::Down => Direction::Right, Direction::Right => Direction::Up, }, 1 => match current_direction { Direction::Up => Direction::Right, Direction::Right => Direction::Down, Direction::Down => Direction::Left, Direction::Left => Direction::Up, }, _ => panic!("Unknown direction {}", turn), } } enum Colour { Black, White, } type Coord = (i64, i64); type Map = HashMap<Coord, Colour>; fn move_bot(current_direction: &Direction, (x, y): &Coord) -> Coord { match current_direction { Direction::Up => (*x, y + 1), Direction::Left => (x - 1, *y), Direction::Down => (*x, y - 1), Direction::Right => (x + 1, *y), } } fn check_colour<'a>(map: &'a Map, current_position: &Coord) -> &'a Colour { map.get(current_position).unwrap_or(&Colour::Black) } fn gen_colour(input: i64) -> Colour { match input { 0 => Colour::Black, 1 => Colour::White, _ => panic!("Unexpected colour {}", input), } } #[aoc(day11, part1)] pub fn solve_part1(input: &Program) -> usize { let mut program = input.to_owned(); let mut pointer = 0; let mut relative_index = 0; let mut current_position = (0, 0); let mut current_direction = Direction::Up; let mut map: Map = HashMap::new(); loop { let current_colour = match check_colour(&map, &current_position) { Colour::Black => 0, Colour::White => 1, }; let result = run_program(&mut program, pointer, relative_index, vec![current_colour]); match result { IntCodeResult::Halt(_) => { break; } IntCodeResult::Input(new_pointer, new_relative_index, output) => { let new_colour = gen_colour(output[0]); map.insert(current_position, new_colour); current_direction = change_direction(&current_direction, output[1]); current_position = move_bot(&current_direction, &current_position); pointer = new_pointer; relative_index = new_relative_index; } }; } map.len() } #[aoc(day11, part2)] pub fn solve_part2(input: &Program) -> usize { let mut program = input.to_owned(); let mut pointer = 0; let mut relative_index = 0; let mut current_position = (0, 5); let mut current_direction = Direction::Up; let mut map: Map = HashMap::new(); map.insert(current_position, Colour::White); loop { let current_colour = match check_colour(&map, &current_position) { Colour::Black => 0, Colour::White => 1, }; let result = run_program(&mut program, pointer, relative_index, vec![current_colour]); match result { IntCodeResult::Halt(_) => { break; } IntCodeResult::Input(new_pointer, new_relative_index, output) => { let new_colour = gen_colour(output[0]); map.insert(current_position, new_colour); current_direction = change_direction(&current_direction, output[1]);
random
[ { "content": "#[aoc_generator(day19)]\n\npub fn input_generator(input: &str) -> Program {\n\n intcode::input_generator(input)\n\n}\n\n\n\n#[cached(\n\n convert = r#\"{format!(\"{},{}\",x,y)}\"#,\n\n create = \"{UnboundCache::new()}\",\n\n type = \"UnboundCache<String,bool>\"\n\n)]\n", "file_path": "src/day19.rs", "rank": 0, "score": 171258.99464076897 }, { "content": "#[aoc_generator(day13)]\n\npub fn input_generator(input: &str) -> Program {\n\n intcode::input_generator(input)\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 1, "score": 171258.99464076897 }, { "content": "#[aoc_generator(day9)]\n\npub fn input_generator(input: &str) -> Program {\n\n intcode::input_generator(input)\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 3, "score": 171258.99464076897 }, { "content": "pub fn input_generator(input: &str) -> Program {\n\n input\n\n .split(',')\n\n .map(|d| d.parse::<i64>().unwrap())\n\n .enumerate()\n\n .fold(HashMap::new(), |mut acc, instruction| {\n\n let (index, instruction) = instruction;\n\n acc.insert(index, instruction);\n\n acc\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn example1() {\n\n let input = \"109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99\";\n\n let expected = input\n", "file_path": "src/intcode.rs", "rank": 4, "score": 171258.99464076897 }, { "content": "#[aoc_generator(day17)]\n\npub fn input_generator(input: &str) -> Program {\n\n intcode::input_generator(input)\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 5, "score": 171258.99464076897 }, { "content": "#[aoc(day9, part2)]\n\npub fn solve_part2(input: &Program) -> i64 {\n\n let mut input = input.to_owned();\n\n match run_program(&mut input, 0, 0, vec![2]) {\n\n IntCodeResult::Halt(output) => {\n\n println!(\"{:?}\", output);\n\n output[0]\n\n }\n\n _ => panic!(\"Not expecting more input\"),\n\n }\n\n}\n", "file_path": "src/day9.rs", "rank": 6, "score": 170302.0779835194 }, { "content": "#[aoc(day9, part1)]\n\npub fn solve_part1(input: &Program) -> i64 {\n\n let mut input = input.to_owned();\n\n match run_program(&mut input, 0, 0, vec![1]) {\n\n IntCodeResult::Halt(output) => {\n\n println!(\"{:?}\", output);\n\n output[0]\n\n }\n\n _ => panic!(\"Not expecting more input\"),\n\n }\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 7, "score": 170302.0779835194 }, { "content": "#[aoc(day19, part2)]\n\npub fn solve_part2(input: &Program) -> i64 {\n\n for i in 1800 as i64.. 
{\n\n if i % 100 == 0 {\n\n println!(\"Round {}\", i);\n\n }\n\n for (x, y) in (0..=i).zip((0..=i).rev()) {\n\n //println!(\"Testing {} {}\", x, y);\n\n if check_box(input, x, y) {\n\n println!(\"Found it {} {}\", x, y);\n\n print_beam(\n\n input,\n\n 110,\n\n x - 5,\n\n y - 5,\n\n vec![(x, y), (x + 99, y), (x, y + 99)],\n\n );\n\n return x + (y * 10000);\n\n }\n\n }\n\n }\n\n -1\n\n}\n", "file_path": "src/day19.rs", "rank": 8, "score": 170302.07798351944 }, { "content": "#[aoc(day17, part2)]\n\npub fn solve_part2(input: &Program) -> i64 {\n\n let mut program = input.to_owned();\n\n program.insert(0, 2);\n\n let main_sequence = \"A,A,B,C,B,A,C,B,C,A\\n\".chars();\n\n let a_sequence = \"L,6,R,12,L,6,L,8,L,8\\n\".chars();\n\n let b_sequence = \"L,6,R,12,R,8,L,8\\n\".chars();\n\n let c_sequence = \"L,4,L,4,L,6\\n\".chars();\n\n let display = \"n\\n\".chars();\n\n\n\n let input: Vec<i64> = main_sequence\n\n .chain(a_sequence)\n\n .chain(b_sequence)\n\n .chain(c_sequence)\n\n .chain(display)\n\n .map(|x| x as i64)\n\n .rev()\n\n .collect();\n\n\n\n println!(\"INPUT: {:?}\", input);\n\n\n", "file_path": "src/day17.rs", "rank": 9, "score": 170302.0779835194 }, { "content": "#[aoc_generator(day7)]\n\npub fn input_generator(input: &str) -> intcode::Program {\n\n intcode::input_generator(input)\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 10, "score": 163835.81840905844 }, { "content": "#[aoc(day7, part1)]\n\npub fn solve_part1(input: &intcode::Program) -> i64 {\n\n let series = (0..5).collect::<Vec<i64>>();\n\n permutations_of(&series)\n\n .map(|x| run_series(input.to_owned(), x))\n\n .max()\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 11, "score": 162922.6896054765 }, { "content": "fn is_beam(program: &Program, x: i64, y: i64) -> bool {\n\n let mut program = program.to_owned();\n\n if let IntCodeResult::Halt(output) = run_program(&mut program, 0, 0, vec![x, y]) {\n\n 1 == *output.first().unwrap()\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 13, "score": 149156.95899622364 }, { "content": "fn get_value(value_type: i64, i: i64, program: &Program, relative_base: i64) -> i64 {\n\n //println!(\"LOOKUP: {} {} {}\", value_type, i, relative_base);\n\n match value_type {\n\n 1 => *program.get(&to_index(i)).unwrap_or(&0),\n\n 2 => {\n\n let relative_offset = *program.get(&to_index(i)).unwrap_or(&0);\n\n // println!(\n\n // \"RELATIVE: {} {} {}\",\n\n // relative_base,\n\n // relative_offset,\n\n // relative_base + relative_offset\n\n // );\n\n\n\n *program\n\n .get(&to_index(relative_offset + relative_base))\n\n .unwrap_or(&0)\n\n }\n\n 0 => {\n\n let lookup_index = to_index(*program.get(&to_index(i)).unwrap_or(&0));\n\n *program.get(&lookup_index).unwrap_or(&0)\n\n }\n\n _ => panic!(\"Unknown value type: {}\", value_type),\n\n }\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 14, "score": 147416.03869879653 }, { "content": "fn check_box(program: &Program, x: i64, y: i64) -> bool {\n\n is_beam(program, x, y) && is_beam(program, x + 99, y) && is_beam(program, x, y + 99)\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 15, "score": 146906.46752341266 }, { "content": "fn print_beam(program: &Program, size: i64, x: i64, y: i64, highlights: Vec<(i64, i64)>) {\n\n for x in x..=x + size {\n\n for y in y..=y + size {\n\n if highlights.contains(&(x, y)) && is_beam(program, x, y) {\n\n print!(\"1\");\n\n } else if highlights.contains(&(x, y)) && !is_beam(program, x, y) {\n\n print!(\"0\")\n\n } else if is_beam(program, x, y) {\n\n print!(\"x\");\n\n } else 
{\n\n print!(\".\");\n\n }\n\n }\n\n println!(\"\");\n\n }\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 16, "score": 146355.53963968848 }, { "content": "fn get_index(value_type: i64, i: i64, program: &Program, relative_base: i64) -> usize {\n\n match value_type {\n\n 2 => {\n\n let relative_offset = *program.get(&to_index(i)).unwrap_or(&0);\n\n // println!(\n\n // \"RELATIVE INDEX: {} {} {}\",\n\n // relative_base,\n\n // relative_offset,\n\n // relative_base + relative_offset\n\n // );\n\n to_index(relative_offset + relative_base)\n\n }\n\n 0 => to_index(*program.get(&to_index(i)).unwrap_or(&0)),\n\n _ => panic!(\"Unknown value type: {}\", value_type),\n\n }\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 17, "score": 141808.8948721147 }, { "content": "fn run_series<'a>(program: intcode::Program, series: impl Iterator<Item = &'a i64>) -> i64 {\n\n let mut output = 0;\n\n for phase in series {\n\n match intcode::run_program(&mut program.clone(), 0, 0, vec![output, *phase]) {\n\n intcode::IntCodeResult::Halt(o) => output = o[0],\n\n intcode::IntCodeResult::Input(_, _, _) => (),\n\n }\n\n }\n\n output\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 18, "score": 130955.44746392296 }, { "content": "#[aoc_generator(day6)]\n\npub fn input_generator(input: &str) -> Orbits {\n\n let mut nodes = HashMap::new();\n\n\n\n for x in input.lines() {\n\n let mut n = x.split(')');\n\n let node_a = n.next().unwrap().to_owned();\n\n let node_b = n.next().unwrap().to_owned();\n\n nodes.insert(node_b, node_a);\n\n }\n\n nodes\n\n}\n\n\n", "file_path": "src/day6.rs", "rank": 19, "score": 124708.07522320216 }, { "content": "#[aoc(day17, part1)]\n\npub fn solve_part1(input: &Program) -> usize {\n\n let mut program = input.to_owned();\n\n let result = run_program(&mut program, 0, 0, vec![]);\n\n\n\n match result {\n\n IntCodeResult::Halt(output) => {\n\n display_output(&output);\n\n let line_length = find_line_length(&output);\n\n println!(\"Size: {}\", line_length);\n\n println!(\n\n \"First Line: {:?}\",\n\n output\n\n .iter()\n\n .take(line_length)\n\n .enumerate()\n\n .collect::<Vec<(usize, &i64)>>()\n\n );\n\n\n\n let intersections = iproduct!(0..line_length, 0..(output.len() / line_length))\n\n .filter(move |(x, y)| is_intersection(&output, line_length, *x, *y))\n", "file_path": "src/day17.rs", "rank": 21, "score": 123974.26492410013 }, { "content": "#[aoc(day19, part1)]\n\npub fn solve_part1(input: &Program) -> usize {\n\n let mut beam = HashSet::new();\n\n\n\n for (x, y) in iproduct!(0..50 as i64, 0..50 as i64) {\n\n if is_beam(input, x, y) {\n\n beam.insert((x, y));\n\n }\n\n }\n\n beam.len()\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 22, "score": 123974.26492410013 }, { "content": "#[aoc(day13, part1)]\n\npub fn solve_part1(input: &Program) -> usize {\n\n let mut input = input.to_owned();\n\n match run_program(&mut input, 0, 0, vec![1]) {\n\n IntCodeResult::Halt(output) => {\n\n println!(\"{:?} {}\", output, output.len());\n\n count_blocks(&output)\n\n }\n\n _ => panic!(\"Not expecting more input\"),\n\n }\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 23, "score": 123974.26492410013 }, { "content": "#[aoc(day13, part2)]\n\npub fn solve_part2(input: &Program) -> usize {\n\n let mut input = input.to_owned();\n\n input.insert(0, 2);\n\n let mut pointer = 0;\n\n let mut relative_index = 0;\n\n let mut counter = 0;\n\n let mut screen = HashMap::new();\n\n\n\n loop {\n\n let joystick = find_move(&screen);\n\n println!(\"Move {} --> joystick {}\", counter, joystick);\n\n\n\n match 
run_program(&mut input, pointer, relative_index, vec![joystick]) {\n\n IntCodeResult::Halt(output) => {\n\n if 0 != count_blocks(&output) {\n\n println!(\"Hmm, expected no blocks... found {}\", count_blocks(&output));\n\n }\n\n render_screen(&mut screen, &output);\n\n return find_score(&output);\n\n }\n", "file_path": "src/day13.rs", "rank": 24, "score": 123974.26492410012 }, { "content": "#[aoc(day14, part1)]\n\npub fn solve_part1(input: &[Recipe]) -> i64 {\n\n let fuel_recipe = input.iter().find(|r| r.result.name == \"FUEL\").unwrap();\n\n let mut shopping_list = HashMap::new();\n\n let mut pantry = HashMap::new();\n\n find_needed_ore(input, &mut shopping_list, &mut pantry, fuel_recipe, 1);\n\n compute_ore(input, &shopping_list)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn example1() {\n\n let input = \"10 ORE => 10 A\\n1 ORE => 1 B\\n7 A, 1 B => 1 C\\n7 A, 1 C => 1 D\\n7 A, 1 D => 1 E\\n7 A, 1 E => 1 FUEL\";\n\n let result = solve_part1(&generate_input(&input));\n\n assert_eq!(\n\n result, 31,\n\n \"Expected fuel consumption to be 31 but got {}\",\n\n result\n\n );\n", "file_path": "src/day14.rs", "rank": 26, "score": 123751.15856595262 }, { "content": "#[aoc(day12, part1)]\n\npub fn solve_part1(input: &Coord) -> i64 {\n\n let mut planets = input.to_owned();\n\n process(1000, &mut planets)\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 27, "score": 123751.15856595262 }, { "content": "#[aoc_generator(day4)]\n\npub fn input_generator(input: &str) -> Vec<i32> {\n\n input\n\n .split('-')\n\n .map(|x| x.parse::<i32>().unwrap())\n\n .collect::<Vec<i32>>()\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 28, "score": 119317.63056453102 }, { "content": "#[aoc_generator(day8)]\n\npub fn input_generator(input: &str) -> Vec<i32> {\n\n input\n\n .chars()\n\n .map(|x| x.to_digit(10).unwrap() as i32)\n\n .collect()\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 29, "score": 119317.63056453102 }, { "content": "#[aoc_generator(day16)]\n\npub fn generate_input(input: &str) -> Vec<i32> {\n\n input\n\n .trim()\n\n .chars()\n\n .map(|c| c.to_digit(10).unwrap() as i32)\n\n .collect()\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 30, "score": 119317.63056453102 }, { "content": "#[aoc_generator(day1)]\n\npub fn input_generator(input: &str) -> Vec<i32> {\n\n input.lines().map(|s| s.parse::<i32>().unwrap()).collect()\n\n}\n\n\n", "file_path": "src/day1.rs", "rank": 31, "score": 119317.63056453102 }, { "content": "#[aoc_generator(day5)]\n\npub fn input_generator(input: &str) -> Vec<i32> {\n\n input\n\n .split(',')\n\n .map(|d| d.parse::<i32>().unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 32, "score": 119317.63056453102 }, { "content": "#[aoc_generator(day14)]\n\npub fn generate_input(input: &str) -> Vec<Recipe> {\n\n input.lines().map(parse_line).collect()\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 33, "score": 119317.63056453102 }, { "content": "#[aoc_generator(day2)]\n\npub fn input_generator(input: &str) -> Vec<usize> {\n\n input\n\n .split(',')\n\n .map(|d| d.parse::<usize>().unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 34, "score": 119317.63056453102 }, { "content": "#[aoc_generator(day12)]\n\npub fn generate_input(input: &str) -> Vec<Body> {\n\n let reg = Regex::new(r\"^<x=([-\\d]+), y=([-\\d]+), z=([-\\d]+)>$\").unwrap();\n\n\n\n input\n\n .lines()\n\n .map(|l| {\n\n let matches = reg.captures(l).unwrap();\n\n let position = [\n\n matches[1].parse::<i64>().unwrap(),\n\n 
matches[2].parse::<i64>().unwrap(),\n\n matches[3].parse::<i64>().unwrap(),\n\n ];\n\n Body {\n\n position,\n\n ..Default::default()\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 35, "score": 119317.63056453102 }, { "content": "pub fn run_program(\n\n program: &mut Program,\n\n pointer: i64,\n\n relative_base: i64,\n\n mut input: Vec<i64>,\n\n) -> IntCodeResult {\n\n let mut pointer = pointer;\n\n let mut output = Vec::new();\n\n let mut relative_base = relative_base;\n\n loop {\n\n let current_instruction = parse_instruction(program[&to_index(pointer)]);\n\n // println!(\n\n // \"Command {} {} {:?}\",\n\n // pointer, relative_base, current_instruction\n\n // );\n\n let opcode = (current_instruction[3] * 10) + current_instruction[4];\n\n match opcode {\n\n 1 => {\n\n let a = get_value(current_instruction[2], pointer + 1, program, relative_base);\n\n let b = get_value(current_instruction[1], pointer + 2, program, relative_base);\n", "file_path": "src/intcode.rs", "rank": 36, "score": 116124.53871987731 }, { "content": "#[aoc_generator(day3)]\n\npub fn input_generator(input: &str) -> Vec<(DistanceMap, PointSet)> {\n\n input\n\n .lines()\n\n .map(|x| parse_points(x))\n\n .map(build_points)\n\n .collect()\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 37, "score": 110393.35485309093 }, { "content": "fn parse_instruction(params: i64) -> Vec<i64> {\n\n format!(\"{:05}\", params)\n\n .chars()\n\n .map(|x| x.to_digit(10).unwrap() as i64)\n\n .collect()\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 38, "score": 94864.36176435329 }, { "content": "fn run_program(program: &mut [usize]) {\n\n let mut pointer = 0;\n\n loop {\n\n let current_instruction = program[pointer];\n\n match current_instruction {\n\n 1 => {\n\n let pos_a = program[pointer + 1];\n\n let pos_b = program[pointer + 2];\n\n let result = program[pointer + 3];\n\n program[result] = program[pos_a] + program[pos_b];\n\n }\n\n 2 => {\n\n let pos_a = program[pointer + 1];\n\n let pos_b = program[pointer + 2];\n\n let result = program[pointer + 3];\n\n program[result] = program[pos_a] * program[pos_b];\n\n }\n\n 99 => return,\n\n _ => {}\n\n };\n\n pointer += 4;\n\n }\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 39, "score": 91513.70386502016 }, { "content": "fn process(cycles: i64, planets: &mut Coord) -> i64 {\n\n //println!(\"Cycle: 0 Planets: {:?}\", planets);\n\n for _cycle in 0..cycles {\n\n let p_copy = planets.to_owned();\n\n for mut a in planets.iter_mut() {\n\n for b in p_copy.iter() {\n\n gravitate(&mut a, &b);\n\n }\n\n }\n\n for i in 0..4 {\n\n apply_velocity(planets.get_mut(i).unwrap());\n\n }\n\n //println!(\"Cycle: {} Planets: {:?}\", cycle, planets);\n\n }\n\n planets.iter().map(|x| x.compute_energy()).sum()\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 41, "score": 90055.7630395193 }, { "content": "fn display_output(input: &[i64]) {\n\n for c in input {\n\n print!(\"{}\", (*c as u8) as char);\n\n }\n\n}\n\n\n\nconst SCAFFOLD: i64 = 35;\n\nconst SPACE: i64 = 46;\n\nconst NEW_LINE: i64 = 10;\n\n\n", "file_path": "src/day17.rs", "rank": 42, "score": 87327.72345621118 }, { "content": "fn to_index(i: i64) -> usize {\n\n i.try_into().unwrap()\n\n}\n\n\n\npub type Program = HashMap<usize, i64>;\n\npub type Output = Vec<i64>;\n\n\n\npub enum IntCodeResult {\n\n Halt(Output),\n\n Input(i64, i64, Output),\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 43, "score": 86933.60123765963 }, { "content": "fn parse_line(line: &str) -> Recipe {\n\n let captures = 
LINE_REGEX.captures(line).unwrap();\n\n let result = Ingredient {\n\n amount: captures[2].parse().unwrap(),\n\n name: captures[3].to_owned(),\n\n };\n\n let ingredients = captures[1]\n\n .split(\", \")\n\n .map(|element| {\n\n let captures = INGREDIENT_REGEX.captures(element).unwrap();\n\n Ingredient {\n\n amount: captures[1].parse().unwrap(),\n\n name: captures[2].to_owned(),\n\n }\n\n })\n\n .collect();\n\n Recipe {\n\n result,\n\n ingredients,\n\n }\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 44, "score": 84190.53324630817 }, { "content": "fn count_blocks(output: &[i64]) -> usize {\n\n let mut iter = output.iter();\n\n let mut blocks = HashSet::new();\n\n while let Some(x) = iter.next() {\n\n let y = iter.next().unwrap();\n\n let t = iter.next().unwrap();\n\n if *t == 2 {\n\n blocks.insert((x, y));\n\n }\n\n }\n\n blocks.len()\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 46, "score": 83185.25967224227 }, { "content": "fn find_move(screen: &Screen) -> i64 {\n\n let (ball_x, _) = screen.iter().find(|e| *e.1 == 4).unwrap_or((&(0, 0), &0)).0;\n\n let (paddle_x, _) = screen.iter().find(|e| *e.1 == 3).unwrap_or((&(0, 0), &0)).0;\n\n\n\n println!(\"B: {}, P:{}\", ball_x, paddle_x);\n\n\n\n match ball_x.cmp(paddle_x) {\n\n Ordering::Equal => 0,\n\n Ordering::Greater => 1,\n\n Ordering::Less => -1,\n\n }\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 47, "score": 83185.25967224227 }, { "content": "fn find_score(output: &[i64]) -> usize {\n\n let mut iter = output.iter();\n\n loop {\n\n if let Some(x) = iter.next() {\n\n let y = iter.next().unwrap();\n\n let score = iter.next().unwrap();\n\n if *x == -1 && *y == 0 {\n\n return *score as usize;\n\n }\n\n } else {\n\n return 0;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 48, "score": 83185.25967224227 }, { "content": "fn get_location(input: &[i64], line_length: usize, x: usize, y: usize) -> i64 {\n\n let index = x + (y * line_length);\n\n //println!(\"Looking up {} {:?}\", index, input.get(index));\n\n *input.get(index).unwrap_or(&SPACE)\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 49, "score": 81993.6937454209 }, { "content": "fn find_line_length(input: &[i64]) -> usize {\n\n input.iter().take_while(|x| **x != NEW_LINE).count() + 1\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 50, "score": 81518.87548340633 }, { "content": "fn run_program(program: &mut [i32], input: i32) -> i32 {\n\n let mut pointer = 0;\n\n let mut output = 0;\n\n loop {\n\n let current_instruction = parse_instruction(program[to_index(pointer)]);\n\n //println!(\"Command {} {:?}\", pointer, current_instruction);\n\n let opcode = (current_instruction[3] * 10) + current_instruction[4];\n\n match opcode {\n\n 1 => {\n\n let a = get_value(current_instruction[2], pointer + 1, program);\n\n let b = get_value(current_instruction[1], pointer + 2, program);\n\n let r = program[to_index(pointer + 3)];\n\n //println!(\"ADD {} + {} = {} WRITE TO {}\", a, b, a + b, r);\n\n program[to_index(r)] = a + b;\n\n pointer += 4;\n\n }\n\n 2 => {\n\n let a = get_value(current_instruction[2], pointer + 1, program);\n\n let b = get_value(current_instruction[1], pointer + 2, program);\n\n let r = program[to_index(pointer + 3)];\n", "file_path": "src/day5.rs", "rank": 51, "score": 81395.83658578424 }, { "content": "fn parse_points(raw_points: &str) -> Vec<String> {\n\n raw_points.split(',').map(|s| s.to_owned()).collect()\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 52, "score": 78828.84947398944 }, { "content": "fn count_orbits(o: &str, orbits: &Orbits) -> i32 
{\n\n if let Some(o) = orbits.get(o) {\n\n 1 + count_orbits(o, orbits)\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "src/day6.rs", "rank": 53, "score": 78488.7488239583 }, { "content": "#[aoc(day2, part2)]\n\npub fn solve_part2(input: &[usize]) -> usize {\n\n for (i, j) in iproduct!(0..99, 0..99) {\n\n let mut input_copy = input.to_owned();\n\n input_copy[1] = i;\n\n input_copy[2] = j;\n\n run_program(&mut input_copy);\n\n if input_copy[0] == 19_690_720 {\n\n return (i * 100) + j;\n\n }\n\n }\n\n panic!(\"Unable to find answer\")\n\n}\n", "file_path": "src/day2.rs", "rank": 54, "score": 77423.34550653334 }, { "content": "#[aoc(day4, part2)]\n\npub fn solve_part2(input: &[i32]) -> usize {\n\n let low = input.get(0).unwrap();\n\n let high = input.get(1).unwrap();\n\n (*low..*high)\n\n .map(split_number)\n\n .filter(|x| check_long_pairs(x))\n\n .filter(|x| check_ascending(x))\n\n .count()\n\n}\n", "file_path": "src/day4.rs", "rank": 55, "score": 77423.34550653334 }, { "content": "#[aoc(day1, part1)]\n\npub fn solve_part1(input: &[i32]) -> i32 {\n\n input.iter().map(|n| (n / 3) - 2).sum()\n\n}\n\n\n", "file_path": "src/day1.rs", "rank": 56, "score": 77423.34550653334 }, { "content": "#[aoc(day4, part1)]\n\npub fn solve_part1(input: &[i32]) -> usize {\n\n let low = input.get(0).unwrap();\n\n let high = input.get(1).unwrap();\n\n (*low..*high)\n\n .map(split_number)\n\n .filter(|x| check_pairs(x))\n\n .filter(|x| check_ascending(x))\n\n .count()\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 57, "score": 77423.34550653334 }, { "content": "#[aoc(day5, part2)]\n\npub fn solve_part2(input: &[i32]) -> i32 {\n\n let mut input = input.to_owned();\n\n run_program(&mut input, 5)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn example1() {\n\n let input = \"3,0,4,0,99\";\n\n assert_eq!(solve_part1(&input_generator(&input)), 1);\n\n }\n\n\n\n #[test]\n\n fn example2() {\n\n let input = \"3,9,8,9,10,9,4,9,99,-1,8\";\n\n assert_eq!(run_program(&mut input_generator(&input), 8), 1);\n\n assert_eq!(run_program(&mut input_generator(&input), 7), 0);\n", "file_path": "src/day5.rs", "rank": 58, "score": 77423.34550653334 }, { "content": "#[aoc(day5, part1)]\n\npub fn solve_part1(input: &[i32]) -> i32 {\n\n let mut input = input.to_owned();\n\n run_program(&mut input, 1)\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 59, "score": 77423.34550653334 }, { "content": "pub fn solve_part2(input: &[i32]) -> i32 {\n\n let series = (5..10).collect::<Vec<i32>>();\n\n permutations_of(&series)\n\n .map(|x| run_series_2(input.to_owned(), x))\n\n .max()\n\n .unwrap()\n\n}\n", "file_path": "src/day7.rs", "rank": 60, "score": 77423.34550653334 }, { "content": "#[aoc(day1, part2)]\n\npub fn solve_part2(input: &[i32]) -> i32 {\n\n input.iter().map(|n| (n / 3) - 2).map(add_extra_fuel).sum()\n\n}\n", "file_path": "src/day1.rs", "rank": 61, "score": 77423.34550653334 }, { "content": "#[aoc(day12, part2)]\n\npub fn solve_part2(input: &Coord) -> usize {\n\n let mut planets = input.to_owned();\n\n let mut counter = 0;\n\n let mut x_cycles = HashMap::new();\n\n let mut y_cycles = HashMap::new();\n\n let mut z_cycles = HashMap::new();\n\n loop {\n\n let p_copy = planets.to_owned();\n\n for mut a in planets.iter_mut() {\n\n for b in p_copy.iter() {\n\n gravitate(&mut a, &b);\n\n }\n\n }\n\n for i in 0..4 {\n\n apply_velocity(planets.get_mut(i).unwrap());\n\n }\n\n counter += 1;\n\n check_for_periods(&mut x_cycles, input, &planets, 0, counter);\n\n check_for_periods(&mut y_cycles, input, &planets, 1, 
counter);\n\n check_for_periods(&mut z_cycles, input, &planets, 2, counter);\n", "file_path": "src/day12.rs", "rank": 62, "score": 77423.34550653334 }, { "content": "#[aoc(day8, part2)]\n\npub fn solve_part2(input: &[i32]) -> usize {\n\n let layer_size = 6 * 25;\n\n let layer_count = input.len() / layer_size;\n\n\n\n println!();\n\n\n\n for x in 0..layer_size {\n\n let pixel = gen_pixel(input, x, layer_size, layer_count);\n\n\n\n if pixel == 1 {\n\n print!(\"{}\", pixel);\n\n } else {\n\n print!(\" \");\n\n }\n\n if (x + 1) % 25 == 0 {\n\n println!();\n\n }\n\n }\n\n println!();\n\n 0\n\n}\n", "file_path": "src/day8.rs", "rank": 63, "score": 77423.34550653334 }, { "content": "#[aoc(day6, part1)]\n\npub fn solve_part1(input: &Orbits) -> i32 {\n\n let mut orbit_count = 0;\n\n for k in input.keys() {\n\n orbit_count += count_orbits(k, input);\n\n }\n\n orbit_count\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn example1() {\n\n let input = \"COM)B\\nB)C\\nC)D\\nD)E\\nE)F\\nB)G\\nG)H\\nD)I\\nE)J\\nJ)K\\nK)L\";\n\n assert_eq!(solve_part1(&input_generator(&input)), 42);\n\n }\n\n}\n", "file_path": "src/day6.rs", "rank": 64, "score": 77423.34550653334 }, { "content": "#[aoc(day2, part1)]\n\npub fn solve_part1(input: &[usize]) -> usize {\n\n let mut input = input.to_owned();\n\n input[1] = 12;\n\n input[2] = 2;\n\n\n\n run_program(&mut input);\n\n input[0]\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 65, "score": 77423.34550653334 }, { "content": "#[aoc(day8, part1)]\n\npub fn solve_part1(input: &[i32]) -> usize {\n\n let input = input.to_owned();\n\n let mut layers = Vec::new();\n\n let layer_size = 6 * 25;\n\n let layer_count = input.len() / layer_size;\n\n for x in 0..layer_count {\n\n let start = x * layer_size;\n\n layers.push(&input[start..start + layer_size]);\n\n }\n\n layers.sort_by(compare_images);\n\n let biggest = layers.first().unwrap();\n\n let ones = biggest.iter().filter(|x| **x == 1).count();\n\n let twos = biggest.iter().filter(|x| **x == 2).count();\n\n\n\n ones * twos\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 66, "score": 77423.34550653334 }, { "content": "fn render_screen(screen: &mut Screen, output: &[i64]) {\n\n let mut iter = output.iter();\n\n\n\n while let Some(x) = iter.next() {\n\n let y = iter.next().unwrap();\n\n let block_type = iter.next().unwrap();\n\n screen.insert((*x, *y), *block_type);\n\n }\n\n\n\n let max_x = screen.keys().map(|k| k.0).max().unwrap();\n\n let max_y = screen.keys().map(|k| k.1).max().unwrap();\n\n\n\n println!(\"Max X: {} Max Y: {}\", max_x, max_y);\n\n\n\n for y in 0..=19 {\n\n for x in 0..=36 {\n\n if let Some(block_type) = screen.get(&(x, y)) {\n\n match block_type {\n\n 0 => print!(\" \"),\n\n 1 => print!(\"W\"),\n", "file_path": "src/day13.rs", "rank": 67, "score": 76015.10037599536 }, { "content": "#[aoc(day16, part1)]\n\npub fn solve_part_1(input: &Vec<i32>) -> i32 {\n\n let result = (0..100).fold(input.clone(), |acc, _| process(&acc));\n\n let answer: String = result.iter().take(8).map(|d| format!(\"{}\", d)).collect();\n\n\n\n answer.parse().unwrap()\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 68, "score": 74098.44547183359 }, { "content": "#[aoc(day16, part2)]\n\npub fn solve_part_2(input: &Vec<i32>) -> i32 {\n\n let input: Vec<i32> = input\n\n .iter()\n\n .cycle()\n\n .take(input.len() * 10000)\n\n .cloned()\n\n .collect();\n\n\n\n let result = (0..100).fold(input, |acc, i| {\n\n println!(\"Processing: {}\", i);\n\n process(&acc)\n\n });\n\n let offset: usize = result\n\n .iter()\n\n 
.take(7)\n\n .map(|d| format!(\"{}\", d))\n\n .collect::<String>()\n\n .parse()\n\n .unwrap();\n\n\n", "file_path": "src/day16.rs", "rank": 69, "score": 74098.44547183359 }, { "content": "fn compute_ore(cook_book: &[Recipe], shopping_list: &Pantry) -> i64 {\n\n println!(\"Shopping List: {:?}\", shopping_list);\n\n let mut ore_needed = 0;\n\n for (ingredient_name, amount) in shopping_list {\n\n let recipe = cook_book\n\n .iter()\n\n .find(|r| *ingredient_name == r.result.name)\n\n .unwrap();\n\n\n\n let ore_ingredient = recipe.ingredients.get(0).unwrap();\n\n let batches = if 0 == amount % recipe.result.amount {\n\n amount / recipe.result.amount\n\n } else {\n\n (amount / recipe.result.amount) + 1\n\n };\n\n\n\n ore_needed += batches * ore_ingredient.amount;\n\n }\n\n ore_needed\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 70, "score": 73228.79891253781 }, { "content": "#[aoc(day3, part1)]\n\npub fn solve_part1(input: &[(DistanceMap, PointSet)]) -> i32 {\n\n let (_, wire_a) = &input[0];\n\n let (_, wire_b) = &input[1];\n\n wire_a\n\n .intersection(wire_b)\n\n .map(|x| calc_distance(*x))\n\n .min()\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 71, "score": 71358.91747423785 }, { "content": "#[aoc(day3, part2)]\n\npub fn solve_part2(input: &[(DistanceMap, PointSet)]) -> i32 {\n\n let (distance_a, wire_a) = &input[0];\n\n let (distance_b, wire_b) = &input[1];\n\n wire_a\n\n .intersection(wire_b)\n\n .map(|p| calc_distance_2(&distance_a, &distance_b, *p))\n\n .min()\n\n .unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn example1() {\n\n let input = \"R75,D30,R83,U83,L12,D49,R71,U7,L72\\nU62,R66,U55,R34,D71,R55,D58,R83\";\n\n assert_eq!(solve_part1(&input_generator(&input)), 159);\n\n }\n\n\n", "file_path": "src/day3.rs", "rank": 72, "score": 71358.91747423785 }, { "content": "fn get_value(value_type: i32, i: i32, program: &[i32]) -> i32 {\n\n if value_type == 1 {\n\n //println!(\"Direct {}\", i);\n\n program[to_index(i)]\n\n } else {\n\n let lookup_index = to_index(program[to_index(i)]);\n\n //println!(\"Indirect {} -> {}\", i, lookup_index);\n\n program[lookup_index]\n\n }\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 73, "score": 70210.20142760783 }, { "content": "type Screen = HashMap<(i64, i64), i64>;\n\n\n", "file_path": "src/day13.rs", "rank": 74, "score": 69479.40392585115 }, { "content": "fn is_intersection(input: &[i64], line_length: usize, x: usize, y: usize) -> bool {\n\n get_location(input, line_length, x, y) == SCAFFOLD\n\n && get_location(input, line_length, x + 1, y) == SCAFFOLD\n\n && get_location(input, line_length, x - 1, y) == SCAFFOLD\n\n && get_location(input, line_length, x, y + 1) == SCAFFOLD\n\n && get_location(input, line_length, x, y - 1) == SCAFFOLD\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 75, "score": 67357.63139496234 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum Action {\n\n Keep,\n\n Negate,\n\n Drop,\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 78, "score": 61079.716976992095 }, { "content": "fn build_points(directions: Vec<String>) -> (DistanceMap, HashSet<(i32, i32)>) {\n\n let mut current_location = (0, 0);\n\n let mut points = HashSet::new();\n\n let mut distance = HashMap::new();\n\n let mut counter = 0;\n\n for direction in directions {\n\n let d = &direction[..1];\n\n let n = direction[1..].parse::<usize>().unwrap();\n\n let (xo, yo) = match d {\n\n \"R\" => (1, 0),\n\n \"D\" => (0, -1),\n\n \"U\" => (0, 1),\n\n \"L\" => (-1, 0),\n\n _ => panic!(\"Unknown direction 
{}\", d),\n\n };\n\n for _ in 0..n {\n\n counter += 1;\n\n let (x1, y1) = current_location;\n\n let new_location = (x1 + xo, y1 + yo);\n\n current_location = new_location;\n\n points.insert(new_location);\n\n update_distance(&mut distance, new_location, counter);\n\n }\n\n }\n\n (distance, points)\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 80, "score": 48784.77817664684 }, { "content": "type Pantry = HashMap<String, i64>;\n\n\n", "file_path": "src/day14.rs", "rank": 81, "score": 48550.99016500078 }, { "content": "fn run_series_2<'a>(_program: Vec<i32>, _series: impl Iterator<Item = &'a i32>) -> i32 {\n\n 0\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 82, "score": 46260.41045539006 }, { "content": "fn check_for_periods(\n\n periods: &mut HashMap<usize, i64>,\n\n original: &Coord,\n\n current: &Coord,\n\n dimension: usize,\n\n cycle: i64,\n\n) {\n\n for (i, x) in original.iter().enumerate() {\n\n if let Entry::Vacant(v) = periods.entry(i) {\n\n let body = current.get(i).unwrap();\n\n if x.position[dimension] == body.position[dimension] && body.velocity[dimension] == 0 {\n\n // println!(\n\n // \"Found cycle for {:?} {} on cycle: {}\",\n\n // current.get(i).unwrap(),\n\n // dimension,\n\n // cycle\n\n // );\n\n v.insert(cycle);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 83, "score": 40265.76963656088 }, { "content": "fn find_needed_ore(\n\n cook_book: &[Recipe],\n\n shopping_list: &mut Pantry,\n\n pantry: &mut Pantry,\n\n recipe: &Recipe,\n\n amount: i64,\n\n) {\n\n let amount = match pantry.entry(recipe.result.name.clone()) {\n\n Entry::Occupied(mut e) => {\n\n let store_amount = e.get_mut();\n\n if *store_amount > 0 {\n\n let new_amount = amount - *store_amount;\n\n println!(\n\n \"Taking from the store {} - {} = {}\",\n\n amount, store_amount, new_amount\n\n );\n\n *store_amount -= amount;\n\n new_amount\n\n } else {\n\n amount\n", "file_path": "src/day14.rs", "rank": 84, "score": 39297.303081897655 }, { "content": "fn to_index(i: i32) -> usize {\n\n i.try_into().unwrap()\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 85, "score": 36348.21695582779 }, { "content": "fn check_pairs(password: &[i8]) -> bool {\n\n (0..5).zip(1..6).any(|x| pair_test(password, &x))\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 86, "score": 34634.26950724149 }, { "content": "fn check_ascending(password: &[i8]) -> bool {\n\n password\n\n .iter()\n\n .scan(-1 as i8, |state, x| {\n\n if *state <= *x {\n\n *state = *x;\n\n Some(true)\n\n } else {\n\n Some(false)\n\n }\n\n })\n\n .all(|x| x)\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 87, "score": 34634.26950724149 }, { "content": "fn apply_velocity(planet: &mut Body) {\n\n for i in 0..3 {\n\n planet.position[i] += planet.velocity[i];\n\n }\n\n}\n\n\n\npub type Coord = [Body];\n\n//<x=-1, y=0, z=2>\n\n\n", "file_path": "src/day12.rs", "rank": 88, "score": 34634.26950724149 }, { "content": "fn lcm(a: usize, b: usize) -> usize {\n\n let div = gcd(a, b);\n\n (a / div) * b\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 89, "score": 34001.43295393142 }, { "content": "fn gcd(a: usize, b: usize) -> usize {\n\n let mut a = a;\n\n let mut b = b;\n\n while b != 0 {\n\n let t = b;\n\n b = a % b;\n\n a = t\n\n }\n\n a\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn example1() {\n\n let input = \"<x=-1, y=0, z=2>\";\n\n assert_eq!(\n\n generate_input(&input),\n", "file_path": "src/day12.rs", "rank": 90, "score": 34001.43295393142 }, { "content": "fn check_long_pairs(password: &[i8]) -> bool {\n\n let 
mut count = 1;\n\n let mut last = -1;\n\n\n\n for p in password {\n\n if last == *p {\n\n count += 1;\n\n } else {\n\n if count == 2 {\n\n return true;\n\n }\n\n count = 1;\n\n }\n\n last = *p;\n\n }\n\n count == 2\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 91, "score": 33872.30721565884 }, { "content": "fn add_extra_fuel(fuel: i32) -> i32 {\n\n let mut total_fuel = fuel;\n\n let mut current_fuel = fuel;\n\n loop {\n\n current_fuel = (current_fuel / 3) - 2;\n\n if current_fuel <= 0 {\n\n return total_fuel;\n\n }\n\n total_fuel += current_fuel;\n\n }\n\n}\n\n\n", "file_path": "src/day1.rs", "rank": 92, "score": 33872.30721565884 }, { "content": "fn calc_distance((x, y): (i32, i32)) -> i32 {\n\n //println!(\"Intersection: {} {} {}\", x, y, x.abs() + y.abs());\n\n x.abs() + y.abs()\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 93, "score": 33178.657372870715 }, { "content": "fn gravitate(planet: &mut Body, other: &Body) {\n\n for i in 0..3 {\n\n // println!(\n\n // \"Comparing {:?} {:?} {:?}\",\n\n // planet,\n\n // other,\n\n // planet.position[i].cmp(&other.position[i])\n\n // );\n\n match planet.position[i].cmp(&other.position[i]) {\n\n Ordering::Equal => {}\n\n Ordering::Greater => {\n\n planet.velocity[i] -= 1;\n\n }\n\n Ordering::Less => planet.velocity[i] += 1,\n\n }\n\n //println!(\"Compared {:?} {:?}\", planet, other);\n\n }\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 94, "score": 33178.657372870715 }, { "content": "fn split_number(password: i32) -> Vec<i8> {\n\n let str_number = format!(\"{}\", password);\n\n str_number\n\n .chars()\n\n .map(|c| c.to_digit(10).unwrap() as i8)\n\n .collect()\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 95, "score": 33104.63940170162 }, { "content": "fn parse_instruction(params: i32) -> Vec<i32> {\n\n format!(\"{:05}\", params)\n\n .chars()\n\n .map(|x| x.to_digit(10).unwrap() as i32)\n\n .collect()\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 96, "score": 33104.63940170162 }, { "content": "fn process(signal: &Vec<i32>) -> Vec<i32> {\n\n (0..signal.len())\n\n .map(|i| process_digit(signal, i + 1))\n\n .collect()\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 97, "score": 32416.69508128806 }, { "content": "fn compare_images(a: &&[i32], b: &&[i32]) -> std::cmp::Ordering {\n\n let a_ones = a.iter().filter(|x| **x == 0).count();\n\n let b_ones = b.iter().filter(|x| **x == 0).count();\n\n\n\n a_ones.cmp(&b_ones)\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 98, "score": 30527.32309009659 }, { "content": "fn process_digit(signal: &Vec<i32>, position: usize) -> i32 {\n\n let cycle = iter::repeat(Action::Drop)\n\n .take(position)\n\n .chain(iter::repeat(Action::Keep).take(position))\n\n .chain(iter::repeat(Action::Drop).take(position))\n\n .chain(iter::repeat(Action::Negate).take(position))\n\n .cycle()\n\n .skip(1);\n\n signal\n\n .iter()\n\n .zip(cycle)\n\n .map(|(n, action)| match action {\n\n Action::Keep => *n,\n\n Action::Negate => -n,\n\n Action::Drop => 0,\n\n })\n\n .sum::<i32>()\n\n .abs()\n\n % 10\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 99, "score": 30430.070500678463 } ]
Rust
src/ecs/src/world.rs
lukebitts/Luck
aef25bc3442872789228e14989d203afe3cc8a52
use mopa::Any; use super::entity::Entities; use super::component::Components; use super::{Entity, System}; use std::any::TypeId; pub struct World { entities: Entities, components: Components, systems: Vec<Box<System>>, to_destroy: Vec<Entity>, } unsafe impl Send for World {} unsafe impl Sync for World {} pub struct WorldBuilder { systems: Vec<Box<System>>, } impl WorldBuilder { #[allow(unknown_lints)] #[allow(inline_always)] #[inline(always)] pub fn new() -> Self { WorldBuilder { systems: Vec::new() } } pub fn with_system<T: System>(mut self, system: T) -> Self { self.systems.push(Box::new(system)); self } pub fn build(self) -> World { World { entities: Entities::new(), components: Components::new(), systems: self.systems, to_destroy: Vec::new(), } } pub fn build_with_capacity(self, capacity: usize) -> World { World { entities: Entities::with_capacity(capacity), components: Components::with_capacity(capacity), systems: self.systems, to_destroy: Vec::new(), } } } fn match_entity_signature(system: &System, components: &Box<[TypeId]>) -> bool { let signature = system.signature(); let mut count = 0; for s in &*signature { if components.contains(&s) { count = count + 1; } } count == signature.len() } impl World { pub fn create_entity(&mut self) -> Entity { self.entities.create_entity() } pub fn destroy_entity(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity) && !self.to_destroy.contains(&entity)); self.to_destroy.push(entity); } #[allow(unknown_lints)] #[allow(inline_always)] #[inline(always)] pub fn is_valid(&self, entity: Entity) -> bool { self.entities.is_valid(entity) } pub fn add_component<T: Any>(&mut self, entity: Entity, component: T) -> &mut T { assert!(self.entities.is_valid(entity)); self.components.add_component::<T>(entity.id() as usize, component) } pub fn get_component<T: Any>(&self, entity: Entity) -> Option<&T> { assert!(self.entities.is_valid(entity)); self.components.get_component::<T>(entity.id() as usize) } pub fn get_component_mut<T: Any>(&mut self, entity: Entity) -> Option<&mut T> { assert!(self.entities.is_valid(entity)); self.components.get_component_mut::<T>(entity.id() as usize) } pub fn remove_component<T: Any>(&mut self, entity: Entity) -> Option<T> { assert!(self.entities.is_valid(entity)); self.components.remove_component::<T>(entity.id() as usize) } pub fn remove_all_components(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity)); self.components.remove_all_components(entity.id() as usize) } pub fn get_system_mut<T: System>(&mut self) -> Option<&mut T> { self.systems.iter_mut().filter_map(|s| s.downcast_mut::<T>()).next() } pub fn get_system<T: System>(&self) -> Option<&T> { self.systems.iter().filter_map(|s| s.downcast_ref::<T>()).next() } pub fn apply(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity)); let World { ref mut systems, ref mut components, .. 
} = *self; for system in systems.iter_mut() { if match_entity_signature(&**system, &components.generate_signature(entity.id() as usize)) { if !system.has_entity(entity) { system.on_entity_added(entity); } } else if system.has_entity(entity) { system.on_entity_removed(entity); } } } pub fn process(&mut self) { use rayon::par_iter::*; let mut callbacks = Vec::with_capacity(self.systems.len()); self.systems .par_iter() .map(|s| s.process(self)) .collect_into(&mut callbacks); for callback in &mut callbacks { (*callback)(self); } self.destroy_scheduled_entities(); } fn destroy_scheduled_entities(&mut self) { let to_destroy = self.to_destroy.clone(); for entity in to_destroy { self.remove_all_components(entity); self.apply(entity); self.entities.destroy_entity(entity); } self.to_destroy.clear(); } } impl Drop for World { fn drop(&mut self) { for entity in &self.entities { self.to_destroy.push(entity); } self.destroy_scheduled_entities(); } } #[cfg(test)] mod test { use super::WorldBuilder; use super::super::{Signature, Entity, System, World}; use std::ops::FnMut; use std::any::TypeId; use std; #[derive(Default, PartialEq, Debug)] struct PositionComponent(f32, f32, f32); #[derive(Default)] struct VelocityComponent(f32, f32, f32); #[derive(Default)] struct SpatialSystem { entities: Vec<Entity>, marker: bool, } impl_system!(SpatialSystem, (PositionComponent), { Box::new(move |w: &mut World|{ if !w.get_system::<SpatialSystem>().unwrap().marker { assert_eq!(w.get_system::<VelocitySystem>().unwrap().marker, false); w.get_system_mut::<SpatialSystem>().unwrap().marker = true; } }) }); impl Drop for SpatialSystem { fn drop(&mut self) { assert_eq!(self.entities.len(), 0); } } #[derive(Default)] struct VelocitySystem { entities: Vec<Entity>, marker: bool, } impl_system!(VelocitySystem, (PositionComponent, VelocityComponent), { let v1 = PositionComponent(0.0, 0.0, 0.0); Box::new(move |w: &mut World|{ if !w.get_system::<VelocitySystem>().unwrap().marker { assert_eq!(w.get_system::<SpatialSystem>().unwrap().marker, true); w.get_system_mut::<VelocitySystem>().unwrap().marker = true; assert_eq!(v1, v1); } }) }); impl Drop for VelocitySystem { fn drop(&mut self) { assert_eq!(self.entities.len(), 0); } } #[test] fn creation() { let w = WorldBuilder::new() .with_system(SpatialSystem::default()) .build(); assert!(w.get_system::<SpatialSystem>().is_some()); assert!(w.get_system::<VelocitySystem>().is_none()); assert_eq!(w.systems.len(), 1); let w = WorldBuilder::new() .with_system(SpatialSystem::default()) .with_system(VelocitySystem::default()) .build(); assert!(w.get_system::<SpatialSystem>().is_some()); assert!(w.get_system::<VelocitySystem>().is_some()); assert_eq!(w.systems.len(), 2); } #[test] fn component_system_operations() { let mut w = WorldBuilder::new() .with_system(SpatialSystem::default()) .with_system(VelocitySystem::default()) .build(); let e1 = w.create_entity(); w.add_component(e1, PositionComponent::default()); w.add_component(e1, VelocityComponent::default()); w.apply(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), true); w.remove_component::<VelocityComponent>(e1); w.apply(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); 
assert_eq!(w.get_system::<VelocitySystem>().unwrap().entities.len(), 0); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), false); w.destroy_entity(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), false); w.process(); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), false); w.process(); } }
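A note on the matching logic in match_entity_signature above: a system accepts an entity exactly when every TypeId in the system's signature also appears among the entity's component TypeIds. The sketch below shows that check in isolation; the Position, Velocity and matches names are placeholders invented for the example and are not items from this crate.

use std::any::TypeId;

struct Position;
struct Velocity;

// A system's signature matches when all of its required TypeIds are present
// in the entity's component signature.
fn matches(signature: &[TypeId], components: &[TypeId]) -> bool {
    signature.iter().all(|required| components.contains(required))
}

fn main() {
    let signature = [TypeId::of::<Position>(), TypeId::of::<Velocity>()];
    let full_entity = [TypeId::of::<Velocity>(), TypeId::of::<Position>()];
    let partial_entity = [TypeId::of::<Position>()];
    assert!(matches(&signature, &full_entity));
    assert!(!matches(&signature, &partial_entity));
}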
use mopa::Any; use super::entity::Entities; use super::component::Components; use super::{Entity, System}; use std::any::TypeId; pub struct World { entities: Entities, components: Components, systems: Vec<Box<System>>, to_destroy: Vec<Entity>, } unsafe impl Send for World {} unsafe impl Sync for World {} pub struct WorldBuilder { systems: Vec<Box<System>>, } impl WorldBuilder { #[allow(unknown_lints)] #[allow(inline_always)] #[inline(always)] pub fn new() -> Self { WorldBuilder { systems: Vec::new() } } pub fn with_system<T: System>(mut self, system: T) -> Self { self.systems.push(Box::new(system)); self } pub fn build(self) -> World { World { entities: Entities::new(), components: Components::new(), systems: self.systems, to_destroy: Vec::new(), } } pub fn build_with_capacity(self, capacity: usize) -> World { World { entities: Entities::with_capacity(capacity), components: Components::with_capacity(capacity), systems: self.systems, to_destroy: Vec::new(), } } } fn match_entity_signature(system: &System, components: &Box<[TypeId]>) -> bool { let signature = system.signature(); let mut count = 0; for s in &*signature { if components.contains(&s) { count = count + 1; } } count == signature.len() } impl World { pub fn create_entity(&mut self) -> Entity { self.entities.create_entity() } pub fn destroy_entity(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity) && !self.to_destroy.contains(&entity)); self.to_destroy.push(entity); } #[allow(unknown_lints)] #[allow(inline_always)] #[inline(always)] pub fn is_valid(&self, entity: Entity) -> bool { self.entities.is_valid(entity) } pub fn add_component<T: Any>(&mut self, entity: Entity, component: T) -> &mut T { assert!(self.entities.is_valid(entity)); self.components.add_component::<T>(entity.id() as usize, component) } pub fn get_component<T: Any>(&self, entity: Entity) -> Option<&T> { assert!(self.entities.is_valid(entity)); self.components.get_component::<T>(entity.id() as usize) } pub fn get_component_mut<T: Any>(&mut self, entity: Entity) -> Option<&mut T> { assert!(self.entities.is_valid(entity)); self.components.get_component_mut::<T>(entity.id() as usize) } pub fn remove_component<T: Any>(&mut self, entity: Entity) -> Option<T> { assert!(self.entities.is_valid(entity)); self.components.remove_component::<T>(entity.id() as usize) } pub fn remove_all_components(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity)); self.components.remove_all_components(entity.id() as usize) } pub fn get_system_mut<T: System>(&mut self) -> Option<&mut T> { self.systems.iter_mut().filter_map(|s| s.downcast_mut::<T>()).next() } pub fn get_system<T: System>(&self) -> Option<&T> { self.systems.iter().filter_map(|s| s.downcast_ref::<T>()).next() } pub fn apply(&mut self, entity: Entity) { assert!(self.entities.is_valid(entity)); let World { ref mut systems, ref mut components, .. } = *self; for system in systems.iter_mut() { if match_entity_signature(&**system, &components.generate_signature(entity.id() as usize)) { if !system.has_entity(entity) { system.on_entity_added(entity); } } else if system.has_entity(entity) { system.on_entity_removed(entity); } } } pub fn process(&mut self) { use rayon::par_iter::*; let mut callbacks = Vec::with_capacity(self.systems.len());
fn destroy_scheduled_entities(&mut self) { let to_destroy = self.to_destroy.clone(); for entity in to_destroy { self.remove_all_components(entity); self.apply(entity); self.entities.destroy_entity(entity); } self.to_destroy.clear(); } } impl Drop for World { fn drop(&mut self) { for entity in &self.entities { self.to_destroy.push(entity); } self.destroy_scheduled_entities(); } } #[cfg(test)] mod test { use super::WorldBuilder; use super::super::{Signature, Entity, System, World}; use std::ops::FnMut; use std::any::TypeId; use std; #[derive(Default, PartialEq, Debug)] struct PositionComponent(f32, f32, f32); #[derive(Default)] struct VelocityComponent(f32, f32, f32); #[derive(Default)] struct SpatialSystem { entities: Vec<Entity>, marker: bool, } impl_system!(SpatialSystem, (PositionComponent), { Box::new(move |w: &mut World|{ if !w.get_system::<SpatialSystem>().unwrap().marker { assert_eq!(w.get_system::<VelocitySystem>().unwrap().marker, false); w.get_system_mut::<SpatialSystem>().unwrap().marker = true; } }) }); impl Drop for SpatialSystem { fn drop(&mut self) { assert_eq!(self.entities.len(), 0); } } #[derive(Default)] struct VelocitySystem { entities: Vec<Entity>, marker: bool, } impl_system!(VelocitySystem, (PositionComponent, VelocityComponent), { let v1 = PositionComponent(0.0, 0.0, 0.0); Box::new(move |w: &mut World|{ if !w.get_system::<VelocitySystem>().unwrap().marker { assert_eq!(w.get_system::<SpatialSystem>().unwrap().marker, true); w.get_system_mut::<VelocitySystem>().unwrap().marker = true; assert_eq!(v1, v1); } }) }); impl Drop for VelocitySystem { fn drop(&mut self) { assert_eq!(self.entities.len(), 0); } } #[test] fn creation() { let w = WorldBuilder::new() .with_system(SpatialSystem::default()) .build(); assert!(w.get_system::<SpatialSystem>().is_some()); assert!(w.get_system::<VelocitySystem>().is_none()); assert_eq!(w.systems.len(), 1); let w = WorldBuilder::new() .with_system(SpatialSystem::default()) .with_system(VelocitySystem::default()) .build(); assert!(w.get_system::<SpatialSystem>().is_some()); assert!(w.get_system::<VelocitySystem>().is_some()); assert_eq!(w.systems.len(), 2); } #[test] fn component_system_operations() { let mut w = WorldBuilder::new() .with_system(SpatialSystem::default()) .with_system(VelocitySystem::default()) .build(); let e1 = w.create_entity(); w.add_component(e1, PositionComponent::default()); w.add_component(e1, VelocityComponent::default()); w.apply(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), true); w.remove_component::<VelocityComponent>(e1); w.apply(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().entities.len(), 1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().entities.len(), 0); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), false); w.destroy_entity(e1); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), true); assert_eq!(w.get_system::<VelocitySystem>().unwrap().has_entity(e1), false); w.process(); assert_eq!(w.get_system::<SpatialSystem>().unwrap().has_entity(e1), false); w.process(); } }
self.systems .par_iter() .map(|s| s.process(self)) .collect_into(&mut callbacks); for callback in &mut callbacks { (*callback)(self); } self.destroy_scheduled_entities(); }
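The process body in this slice follows a two-phase pattern: systems do read-only work in parallel and each returns a closure, and those closures are then applied to the world sequentially in a well-defined order. Below is a minimal sketch of that idea, assuming the rayon crate and using an invented Registry type in place of World; newer rayon releases expose the collection step as collect or collect_into_vec, while the snippet above uses the older collect_into name.

use rayon::prelude::*;

struct Registry {
    counter: i32,
}

fn main() {
    let mut registry = Registry { counter: 0 };

    // Phase 1: parallel, read-only. Each "system" only decides what it wants to change
    // and captures that decision in a callback.
    let mut callbacks: Vec<Box<dyn FnMut(&mut Registry) + Send>> = (0..4i32)
        .into_par_iter()
        .map(|i| {
            Box::new(move |r: &mut Registry| r.counter += i) as Box<dyn FnMut(&mut Registry) + Send>
        })
        .collect();

    // Phase 2: sequential. Apply the deferred mutations one at a time.
    for callback in &mut callbacks {
        callback(&mut registry);
    }

    assert_eq!(registry.counter, 6);
}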
function_block-function_prefix_line
[ { "content": "/// A trait that describes which components the system should process. It is split from the\n\n/// System trait to allow it to be implemented through the impl_signature macro.\n\npub trait Signature : mopa::Any + Send + Sync {\n\n /// Should return the components this system expects to process.\n\n fn signature(&self) -> Box<[TypeId]>;\n\n}\n\n\n", "file_path": "src/ecs/src/system.rs", "rank": 1, "score": 123332.91070441963 }, { "content": "/// A trait that every System struct should implement.\n\npub trait System : Signature {\n\n // TODO: Add a on_drop event? Implementing Drop for a system is useless since it is only\n\n // called after the World already cleaned it.\n\n\n\n /// Should return true if an entity add event has been received by this System.\n\n fn has_entity(&self, entity: Entity) -> bool;\n\n\n\n /// This event is fired everytime the signature of an entity matches the signature of the\n\n /// system and the system has not received this entity yet (checked through has_entity).\n\n fn on_entity_added(&mut self, entity: Entity);\n\n\n\n /// This event is fired everytime the signature of an entity doesn't match the signature of the\n\n /// system the system has a reference to this entity (checked through has_entity).\n\n fn on_entity_removed(&mut self, entity: Entity);\n\n\n\n /// This event is fired every frame. Only read only operations can be done during the proccess\n\n /// itself since this step is run concurrently. Multable changes have to be done inside the\n\n /// returning function witch will be run in order depending on the orther the systems were\n\n /// added to the World.\n\n fn process(&self, _: &World) -> Box<FnMut(&mut World) + Send + Sync> {\n", "file_path": "src/ecs/src/system.rs", "rank": 2, "score": 91302.64345194987 }, { "content": "/// Normalizes x and y. 
Also makes sure y is orthogonal to x.\n\npub fn orthonormalize(x: &mut Vector3<f32>, y: &mut Vector3<f32>) {\n\n *x = normalize(*x);\n\n *y = normalize(*x - *y * dot(*y, *x));\n\n}\n", "file_path": "src/math/src/extensions.rs", "rank": 3, "score": 82128.01477523068 }, { "content": "/// Returns a perspective matrix from the camera parameters.\n\npub fn frustum(left: f32,\n\n right: f32,\n\n bottom: f32,\n\n top: f32,\n\n near_val: f32,\n\n far_val: f32)\n\n -> Matrix4<f32> {\n\n let mut result = Matrix4::zero();\n\n\n\n result.c0.x = (2.0 * near_val) / (right - left);\n\n result.c1.y = (2.0 * near_val) / (top - bottom);\n\n result.c2.x = (right + left) / (right - left);\n\n result.c2.y = (top + bottom) / (top - bottom);\n\n result.c2.w = 1.0;\n\n\n\n result.c2.z = far_val / (far_val - near_val);\n\n result.c3.z = -(far_val * near_val) / (far_val - near_val);\n\n\n\n result\n\n}\n", "file_path": "src/math/src/extensions.rs", "rank": 4, "score": 55062.31172451815 }, { "content": "/// Returns an orthogonal matrix from the camera parameters.\n\npub fn ortho(left: f32,\n\n right: f32,\n\n bottom: f32,\n\n top: f32,\n\n near_val: f32,\n\n far_val: f32)\n\n -> Matrix4<f32> {\n\n let mut res = Matrix4::one();\n\n res.c0.x = 2.0 / (right - left);\n\n res.c1.y = 2.0 / (top - bottom);\n\n res.c3.x = -(right + left) / (right - left);\n\n res.c3.y = -(top + bottom) / (top - bottom);\n\n\n\n res.c2.z = 1.0 / (far_val - near_val);\n\n res.c3.z = -near_val / (far_val - near_val);\n\n\n\n res\n\n}\n\n\n", "file_path": "src/math/src/extensions.rs", "rank": 5, "score": 55062.31172451815 }, { "content": "/// Returns true if a box (or aabb) is inside the defined 6 plane frustrum.\n\npub fn is_box_in_frustum(origin: Vector3<f32>,\n\n half_dim: Vector3<f32>,\n\n planes: [Vector4<f32>; 6])\n\n -> FrustumTestResult {\n\n let corner_offsets = [Vector3::new(-1.0, -1.0, -1.0),\n\n Vector3::new(-1.0, -1.0, 1.0),\n\n Vector3::new(-1.0, 1.0, -1.0),\n\n Vector3::new(-1.0, 1.0, 1.0),\n\n Vector3::new(1.0, -1.0, -1.0),\n\n Vector3::new(1.0, -1.0, 1.0),\n\n Vector3::new(1.0, 1.0, -1.0),\n\n Vector3::new(1.0, 1.0, 1.0)];\n\n let mut ret = 1;\n\n for plane in &planes {\n\n let plane_normal = Vector3::new(plane.x, plane.y, plane.z);\n\n let mut idx = vector_to_index(plane_normal);\n\n\n\n let mut test_point = origin + half_dim * corner_offsets[idx as usize];\n\n\n\n if half_plane_test(test_point, plane_normal, plane.w) == 0 {\n", "file_path": "src/math/src/extensions.rs", "rank": 6, "score": 50808.842977349224 }, { "content": "/// Scales a matrix by a vector3.\n\npub fn scale(m: Matrix4<f32>, v: Vector3<f32>) -> Matrix4<f32> {\n\n let mut res = Matrix4::one();\n\n res.c0 = m[0] * v[0];\n\n res.c1 = m[1] * v[1];\n\n res.c2 = m[2] * v[2];\n\n res.c3 = m[3];\n\n res\n\n}\n\n\n", "file_path": "src/math/src/extensions.rs", "rank": 7, "score": 44209.35005659563 }, { "content": "/// Translates a matrix by a vector3.\n\npub fn translate(m: Matrix4<f32>, v: Vector3<f32>) -> Matrix4<f32> {\n\n let mut m = m;\n\n\n\n m.c3.x += v.x;\n\n m.c3.y += v.y;\n\n m.c3.z += v.z;\n\n\n\n m\n\n}\n\n\n", "file_path": "src/math/src/extensions.rs", "rank": 8, "score": 44209.35005659563 }, { "content": "/// Returns a look at matrix from the supplied parameters. 
Eye is the camera position, center is\n\n/// the location you want the camera to point, up is the up direction in whichever abstraction\n\n/// you are working with (usually `Vector3::new(0, 1, 0)`).\n\npub fn look_at(eye: Vector3<f32>, center: Vector3<f32>, up: Vector3<f32>) -> Matrix4<f32> {\n\n let f = normalize(center - eye);\n\n let s = normalize(cross(up, f));\n\n let u = cross(f, s);\n\n\n\n let mut result = Matrix4::one();\n\n result.c0.x = s.x;\n\n result.c1.x = s.y;\n\n result.c2.x = s.z;\n\n result.c0.y = u.x;\n\n result.c1.y = u.y;\n\n result.c2.y = u.z;\n\n result.c0.z = f.x;\n\n result.c1.z = f.y;\n\n result.c2.z = f.z;\n\n result.c3.x = -dot(s, eye);\n\n result.c3.y = -dot(u, eye);\n\n result.c3.z = -dot(f, eye);\n\n result\n\n}\n\n\n", "file_path": "src/math/src/extensions.rs", "rank": 9, "score": 38415.23752223341 }, { "content": "type EntityKey = u64;\n\n\n\n/// A type used to represent an entity. Objects of this type can be copied and `Entities::is_alive`\n\n/// is guaranteed to return false if the entity was destroyed, even taking in account id reuse.\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\npub struct Entity {\n\n id: EntityId,\n\n key: EntityKey,\n\n}\n\n\n\nimpl Entity {\n\n /// Returns the id of the entity in the Entities object (or the World). You can't\n\n /// differentiate dead or alive entities just by their id.\n\n pub fn id(&self) -> EntityId {\n\n self.id\n\n }\n\n}\n\n\n\n/// An object to hold entities and their ids. Entities are stored sequentially and\n\n/// when an entity is destroyed, it's id is reused and old instances of Entity objects that pointed\n", "file_path": "src/ecs/src/entity.rs", "rank": 10, "score": 27124.94274488366 }, { "content": "fn vector_to_index(v: Vector3<f32>) -> i32 {\n\n let mut idx = 0;\n\n if v.z >= 0.0 {\n\n idx |= 1;\n\n }\n\n if v.y >= 0.0 {\n\n idx |= 2;\n\n }\n\n if v.x >= 0.0 {\n\n idx |= 4;\n\n }\n\n idx\n\n}\n\n\n", "file_path": "src/math/src/extensions.rs", "rank": 11, "score": 25712.93111444486 }, { "content": "//! }\n\n//! }\n\n//! ```\n\n\n\nuse std::any::TypeId;\n\nuse std::ops::FnMut;\n\n\n\nuse super::Entity;\n\nuse super::World;\n\nuse mopa;\n\n\n\n/// A trait that describes which components the system should process. It is split from the\n\n/// System trait to allow it to be implemented through the impl_signature macro.\n", "file_path": "src/ecs/src/system.rs", "rank": 12, "score": 23333.900984677803 }, { "content": "//! }\n\n//!\n\n//! impl System for S1 {\n\n//! fn has_entity(&self, entity: Entity) -> bool {\n\n//! self.entities.iter().enumerate().find(|e| *e.1 == entity).is_some()\n\n//! }\n\n//! fn on_entity_added(&mut self, entity: Entity) {\n\n//! self.entities.push(entity);\n\n//! }\n\n//! fn on_entity_removed(&mut self, entity: Entity) {\n\n//! self.entities.retain(|&x| x != entity);\n\n//! }\n\n//! fn process(&self, _: &World) -> Box<FnMut(&mut World) + Send + Sync> {\n\n//! //[...]\n\n//! //Read only operations, like finding which entities need processing.\n\n//! Box::new(move |w: &mut World|{\n\n//! //[...]\n\n//! //Operations that mutate the world, you can access the system state through\n\n//! //w.get_system::<S1>()\n\n//! 
})\n", "file_path": "src/ecs/src/system.rs", "rank": 13, "score": 23331.78585856222 }, { "content": " }\n\n fn on_entity_added(&mut self, entity: Entity) {\n\n self.entities.push(entity);\n\n }\n\n fn on_entity_removed(&mut self, entity: Entity) {\n\n self.entities.retain(|&x| x != entity);\n\n }\n\n fn process(&self, _: &World) -> Box<FnMut(&mut World) + Send + Sync> {\n\n $process\n\n }\n\n }\n\n };\n\n\n\n ( $name:ty , ( $( $mask:path ),+ ) ) => {\n\n impl_system!($name, ( $($mask),+ ), {\n\n fn ret(_: &mut World) {}\n\n Box::new(ret)\n\n });\n\n };\n\n}\n", "file_path": "src/ecs/src/system.rs", "rank": 14, "score": 23331.415676724337 }, { "content": "//! System is the trait the must be implemented by every system.\n\n//! # Example\n\n//! ```\n\n//! use luck_ecs::{Entity, System, Signature, World};\n\n//! use std::any::TypeId;\n\n//! use std::ops::FnMut;\n\n//!\n\n//! struct S1 {\n\n//! entities: Vec<Entity>\n\n//! }\n\n//!\n\n//!\n\n//! //You can implement the signature yourself or use the `impl_signature!` macro.\n\n//! impl Signature for S1 {\n\n//! fn signature(&self) -> Box<[TypeId]> {\n\n//! Box::new([\n\n//! TypeId::of::<u32>(),\n\n//! TypeId::of::<i32>()\n\n//! ])\n\n//! }\n", "file_path": "src/ecs/src/system.rs", "rank": 15, "score": 23331.32343931033 }, { "content": " fn ret(_: &mut World) {}\n\n Box::new(ret)\n\n }\n\n}\n\n\n\nmopafy!(System);\n\n\n\n/// A macro to make it easier to implement the Signature trait.\n\n/// # Example\n\n/// ```\n\n/// #[macro_use] extern crate luck_ecs;\n\n///\n\n/// fn main() {\n\n/// use luck_ecs::{Entity, System, Signature, World};\n\n/// use std::any::TypeId;\n\n///\n\n/// struct S1 {\n\n/// entities: Vec<Entity>\n\n/// }\n\n///\n", "file_path": "src/ecs/src/system.rs", "rank": 16, "score": 23328.4136780977 }, { "content": " /// Constructs a new instance of `Components`. The internal vector is empty and will only\n\n /// allocate when a component is added.\n\n #[allow(unknown_lints)]\n\n #[allow(inline_always)]\n\n #[inline(always)]\n\n pub fn new() -> Self {\n\n Components {\n\n components: Vec::new(),\n\n signatures: Vec::new(),\n\n }\n\n }\n\n\n\n /// Constructs a new instance of `Components`. 
The internal vector is initialized with the\n\n /// specified capacity.\n\n #[allow(unknown_lints)]\n\n #[allow(inline_always)]\n\n #[inline(always)]\n\n pub fn with_capacity(capacity: usize) -> Self {\n\n Components {\n\n components: Vec::with_capacity(capacity),\n", "file_path": "src/ecs/src/component.rs", "rank": 17, "score": 23328.318485566397 }, { "content": "\n\n map.remove::<T>()\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Removes every component associated with the `index`.\n\n pub fn remove_all_components(&mut self, index: usize) {\n\n if self.components.get_mut(index).map(|map| *map = AnyMap::new()).is_some() {\n\n self.signatures[index] = Box::new([]);\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::Components;\n\n\n\n #[derive(Debug, Eq, PartialEq)]\n", "file_path": "src/ecs/src/component.rs", "rank": 18, "score": 23327.780419409693 }, { "content": " signatures: Vec::new(),\n\n }\n\n }\n\n\n\n /// Returns a list with every component associated with the `index`.\n\n pub fn generate_signature(&mut self, index: usize) -> Box<[TypeId]> {\n\n self.signatures.get(index).cloned().unwrap_or_default()\n\n }\n\n\n\n /// Adds the `component` to the internal component list associated with the number\n\n /// `index`.\n\n pub fn add_component<T: Any>(&mut self, index: usize, component: T) -> &mut T {\n\n while self.components.len() <= index {\n\n self.components.push(AnyMap::new());\n\n self.signatures.push(Box::new([]));\n\n }\n\n\n\n match self.components[index].insert(component) {\n\n Some(_) => (),\n\n None => {\n", "file_path": "src/ecs/src/component.rs", "rank": 19, "score": 23326.766970043 }, { "content": "/// impl_signature!(S1, (u32, i32));\n\n/// }\n\n/// ```\n\n#[macro_export]\n\nmacro_rules! impl_signature {\n\n ( $name:ty , ( $( $mask:path ),+ ) ) => {\n\n impl<'a> Signature for $name {\n\n fn signature(&self) -> Box<[TypeId]> {\n\n Box::new([ $(std::any::TypeId::of::<$mask>()),+ ])\n\n }\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! impl_system {\n\n ( $name:ty , ( $( $mask:path ),+ ) , $process:block ) => {\n\n impl_signature!($name, ( $($mask),+ ) );\n\n impl<'a> System for $name {\n\n fn has_entity(&self, entity: Entity) -> bool {\n\n self.entities.iter().enumerate().find(|e| *e.1 == entity).is_some()\n", "file_path": "src/ecs/src/system.rs", "rank": 20, "score": 23326.291626231294 }, { "content": " }\n\n\n\n /// If there is a component of type T associated with the number `index`, a mutable reference\n\n /// to this component is returned. 
If index is out of bounds or the number is not associated\n\n /// with the component type, None is returned.\n\n pub fn get_component_mut<T: Any>(&mut self, index: usize) -> Option<&mut T> {\n\n if let Some(map) = self.components.get_mut(index) {\n\n map.get_mut::<T>()\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Removes the component `T` associated with the number `index` and returns it.\n\n pub fn remove_component<T: Any>(&mut self, index: usize) -> Option<T> {\n\n if let Some(map) = self.components.get_mut(index) {\n\n let mut signature = Vec::new();\n\n signature.extend_from_slice(&*self.signatures[index]);\n\n signature.retain(|x| *x != TypeId::of::<T>());\n\n self.signatures[index] = signature.into_boxed_slice();\n", "file_path": "src/ecs/src/component.rs", "rank": 21, "score": 23325.733551889727 }, { "content": " struct FooComponent(u32);\n\n\n\n #[test]\n\n fn with_reference() {\n\n static INT_REF: &'static i32 = &15;\n\n\n\n #[derive(Debug, Eq, PartialEq)]\n\n struct RefHolder<'a> {\n\n r: &'a i32,\n\n }\n\n\n\n let mut comp_list = Components::new();\n\n comp_list.add_component(0usize, RefHolder { r: INT_REF });\n\n\n\n assert_eq!(comp_list.get_component_mut::<RefHolder>(0usize).unwrap().r, &15);\n\n }\n\n\n\n #[test]\n\n fn addition_and_recovery() {\n\n let mut comp_list = Components::new();\n", "file_path": "src/ecs/src/component.rs", "rank": 22, "score": 23325.07154938627 }, { "content": "//! A module for the `Components` type. Through a `Components` you can add and remove\n\n//! any type that implements `Any` and has no non-static references.\n\n//! Should be used through the `World` and not directly.\n\nextern crate anymap;\n\n\n\nuse self::anymap::AnyMap;\n\nuse std::any::{Any, TypeId};\n\n\n\n/// This type holds a `Vec<AnyMap>`. Entities are identified by their id (the 'key' of the\n\n/// vector) and AnyMap can hold one of each component type. An entity can only have either\n\n/// 0 or 1 component for a given component type. If you have entities 1 and 500 alive the\n\n/// vector will keep 500 `AnyMap`'s in memory. Even if you destroy every entity the memory\n\n/// of the components won't be freed. There's no way to \"drain\" the memory due to the\n\n/// way entity handles work.\n\npub struct Components {\n\n components: Vec<AnyMap>,\n\n signatures: Vec<Box<[TypeId]>>,\n\n}\n\n\n\nimpl Components {\n", "file_path": "src/ecs/src/component.rs", "rank": 23, "score": 23324.845000249312 }, { "content": " let mut signature = Vec::new();\n\n signature.extend_from_slice(&*self.signatures[index]);\n\n signature.push(TypeId::of::<T>());\n\n self.signatures[index] = signature.into_boxed_slice();\n\n }\n\n }\n\n\n\n self.get_component_mut::<T>(index)\n\n .expect(\"Component we just added was not found. This should never happen\")\n\n }\n\n\n\n /// If there is a component of type T associated with the number `index`, a reference to this\n\n /// component is returned. 
If index is out of bounds or the number is not associated with the\n\n /// component type, None is returned.\n\n pub fn get_component<T: Any>(&self, index: usize) -> Option<&T> {\n\n if let Some(map) = self.components.get(index) {\n\n map.get::<T>()\n\n } else {\n\n None\n\n }\n", "file_path": "src/ecs/src/component.rs", "rank": 24, "score": 23324.088609595412 }, { "content": " for index in 0usize..100_000usize {\n\n assert_eq!(*comp_list.add_component(index, FooComponent(0u32)), FooComponent(0u32));\n\n assert_eq!(*comp_list.get_component::<FooComponent>(index).unwrap(),\n\n FooComponent(0u32));\n\n assert_eq!(*comp_list.get_component_mut::<FooComponent>(index).unwrap(),\n\n FooComponent(0u32));\n\n }\n\n }\n\n\n\n #[test]\n\n fn removal() {\n\n let mut comp_list = Components::new();\n\n let index = 0usize;\n\n\n\n assert_eq!(*comp_list.add_component(index, FooComponent(0u32)), FooComponent(0u32));\n\n assert_eq!(*comp_list.add_component(index, FooComponent(1u32)), FooComponent(1u32));\n\n\n\n assert_eq!(*comp_list.get_component::<FooComponent>(index).unwrap(), FooComponent(1u32));\n\n assert_eq!(comp_list.remove_component::<FooComponent>(index).unwrap(), FooComponent(1u32));\n\n assert_eq!(comp_list.get_component::<FooComponent>(index).is_none(), true);\n\n assert_eq!(comp_list.remove_component::<FooComponent>(index).is_none(), true);\n\n }\n\n}\n", "file_path": "src/ecs/src/component.rs", "rank": 25, "score": 23319.64496391107 }, { "content": " /// Constructs a new instance of `Entities`. The internal vectors are initialized with the\n\n /// specified capacity.\n\n /// # Examples\n\n /// ```\n\n /// use luck_ecs::entity::Entities;\n\n /// let mut entities: Entities = Entities::with_capacity(10);\n\n /// //The resulting Entities object will only allocate after the 11th entity is created.\n\n /// ```\n\n #[allow(unknown_lints)]\n\n #[allow(inline_always)]\n\n #[inline(always)]\n\n pub fn with_capacity(capacity: usize) -> Self {\n\n Entities {\n\n free_entity_ids: Vec::with_capacity(capacity),\n\n entities: Vec::with_capacity(capacity),\n\n }\n\n }\n\n\n\n /// Creates a new entity and return it's identification.\n\n /// # Examples\n", "file_path": "src/ecs/src/entity.rs", "rank": 26, "score": 22830.314742075363 }, { "content": " pub fn is_valid(&self, entity: Entity) -> bool {\n\n if let Some(key) = self.entities.get(entity.id as usize) {\n\n *key == entity.key\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\nimpl iter::IntoIterator for Entities {\n\n type Item = Entity;\n\n type IntoIter = EntitiesIntoIterator;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n EntitiesIntoIterator {\n\n entities: self,\n\n index: 0,\n\n }\n\n }\n\n}\n", "file_path": "src/ecs/src/entity.rs", "rank": 27, "score": 22827.655297511956 }, { "content": " }\n\n }\n\n\n\n /// Constructs a new instance of `Entities`. 
The internal vectors are empty and will only\n\n /// allocate when an entity is created.\n\n /// # Examples\n\n /// ```\n\n /// use luck_ecs::entity::Entities;\n\n /// let mut entities = Entities::new();\n\n /// ```\n\n #[allow(unknown_lints)]\n\n #[allow(inline_always)]\n\n #[inline(always)]\n\n pub fn new() -> Self {\n\n Entities {\n\n free_entity_ids: Vec::new(),\n\n entities: Vec::new(),\n\n }\n\n }\n\n\n", "file_path": "src/ecs/src/entity.rs", "rank": 28, "score": 22827.411795675558 }, { "content": "\n\nimpl<'a> iter::IntoIterator for &'a Entities {\n\n type Item = Entity;\n\n type IntoIter = EntitiesIterator<'a>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n EntitiesIterator {\n\n entities: self,\n\n index: 0,\n\n }\n\n }\n\n}\n\n\n\n/// An iterator that moves out the Entities object. Returns only entities that are valid.\n\npub struct EntitiesIntoIterator {\n\n entities: Entities,\n\n index: usize,\n\n}\n\n\n\nimpl iter::Iterator for EntitiesIntoIterator {\n", "file_path": "src/ecs/src/entity.rs", "rank": 29, "score": 22826.18626413114 }, { "content": "/// to the destroyed entity are considered dead.\n\npub struct Entities {\n\n free_entity_ids: Vec<EntityId>,\n\n entities: Vec<EntityKey>,\n\n}\n\n\n\nimpl Entities {\n\n // Generates a new entity id and key either by reusing old ones or creating new ones.\n\n fn generate_entity_id(&mut self) -> (EntityId, EntityKey) {\n\n let free_id = self.free_entity_ids.pop();\n\n\n\n match free_id {\n\n None => {\n\n self.entities.push(1);\n\n (self.entities.len() as EntityId - 1, 1)\n\n }\n\n Some(free_id) => {\n\n let key = unsafe { self.entities.get_unchecked(free_id as usize) };\n\n (free_id, *key)\n\n }\n", "file_path": "src/ecs/src/entity.rs", "rank": 30, "score": 22826.089139651092 }, { "content": " /// entities.destroy_entity(entity);\n\n /// assert!(!entities.is_valid(entity));\n\n /// ```\n\n pub fn destroy_entity(&mut self, entity: Entity) {\n\n if self.is_valid(entity) {\n\n self.free_entity_ids.push(entity.id);\n\n self.entities[entity.id as usize] = self.entities[entity.id as usize] + 1;\n\n }\n\n }\n\n\n\n /// Returns the state of an entity. Entities created through an `Entities` object will return\n\n /// true. If they are destroyed or the `Entity` is invalid it will return false.\n\n /// # Examples\n\n /// ```\n\n /// use luck_ecs::entity::Entities;\n\n /// let mut entities: Entities = Entities::with_capacity(1);\n\n /// let entity = entities.create_entity();\n\n /// entities.destroy_entity(entity);\n\n /// assert!(!entities.is_valid(entity));\n\n /// ```\n", "file_path": "src/ecs/src/entity.rs", "rank": 31, "score": 22825.87116449901 }, { "content": " /// ```\n\n /// use luck_ecs::entity::Entities;\n\n /// let mut entities: Entities = Entities::with_capacity(1);\n\n /// let entity = entities.create_entity();\n\n /// ```\n\n #[allow(unknown_lints)]\n\n #[allow(inline_always)]\n\n #[inline(always)]\n\n pub fn create_entity(&mut self) -> Entity {\n\n let (id, key) = self.generate_entity_id();\n\n Entity { id: id, key: key }\n\n }\n\n\n\n /// Marks an entity as dead. The entity object is still in a valid state but call to\n\n /// `Entity::is_valid` will return false. 
Dead entities are ignored by the function.\n\n /// # Examples\n\n /// ```\n\n /// use luck_ecs::entity::Entities;\n\n /// let mut entities: Entities = Entities::with_capacity(1);\n\n /// let entity = entities.create_entity();\n", "file_path": "src/ecs/src/entity.rs", "rank": 32, "score": 22825.504087841255 }, { "content": "/// An iterator that does not move the Entities object. Returns only entities that are valid.\n\npub struct EntitiesIterator<'a> {\n\n entities: &'a Entities,\n\n index: usize,\n\n}\n\n\n\nimpl<'a> iter::Iterator for EntitiesIterator<'a> {\n\n type Item = Entity;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n if let Some(key) = self.entities.entities.get(self.index) {\n\n self.index = self.index + 1;\n\n if !self.entities.free_entity_ids.contains(&((self.index - 1) as EntityId)) {\n\n return Some(Entity {\n\n id: (self.index - 1) as EntityId,\n\n key: *key,\n\n });\n\n } else {\n\n continue;\n\n }\n", "file_path": "src/ecs/src/entity.rs", "rank": 33, "score": 22825.45845153182 }, { "content": " // fn c() {\n\n // let mut entities: Entities = Entities::with_capacity(1_000_000usize);\n\n // for _ in 0..1_000_000 {\n\n // let _: Entity = entities.create_entity();\n\n // }\n\n // }\n\n // b.iter(c);\n\n // }\n\n\n\n // Tests the creation of 500.000 entities and the generation of their id's\n\n #[test]\n\n fn creation() {\n\n let mut entities: Entities = Entities::with_capacity(500_000usize);\n\n\n\n let base = EntityId::max_value() - 500_000;\n\n for i in base..EntityId::max_value() {\n\n let e: Entity = entities.create_entity();\n\n assert_eq!(((e.id + base), e.key), (i, 1));\n\n }\n\n }\n", "file_path": "src/ecs/src/entity.rs", "rank": 34, "score": 22825.238226510588 }, { "content": "\n\n // Tests the destruction and recreation of the entities and the reuse of deleted entity id's\n\n #[test]\n\n fn destruction_recreation() {\n\n let sample = 500_000usize;\n\n let mut entities: Entities = Entities::with_capacity(sample);\n\n\n\n let mut entity_list = Vec::with_capacity(sample);\n\n for i in 0..sample {\n\n let e: Entity = entities.create_entity();\n\n assert_eq!((e.id, e.key), (i as EntityId, 1));\n\n entity_list.push(e);\n\n }\n\n\n\n thread_rng().shuffle(&mut entity_list[..]);\n\n\n\n for e in entity_list {\n\n entities.destroy_entity(e);\n\n assert_eq!(entities.is_valid(e), false);\n\n let ne: Entity = entities.create_entity();\n", "file_path": "src/ecs/src/entity.rs", "rank": 35, "score": 22824.02778226625 }, { "content": "//! A module for two types, `Entity`and `Entities`. `Entity` represents an entity inside an\n\n//! `Entities` object which is responsible for managing an entity lifetime. The `Entities` type\n\n//! should be used through the `World` and not directly.\n\n\n\nuse std::iter;\n\n\n\n/// EntityId is a type that changes according to the pointer size of the target machines.\n\n/// It is supported `u64` for x64 machines and `u32` for x86 machines. 
Machines with\n\n/// different sizes might not work.\n\npub type EntityId = u64;\n", "file_path": "src/ecs/src/entity.rs", "rank": 36, "score": 22823.59788769363 }, { "content": " } else {\n\n return None;\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n // extern crate test;\n\n extern crate rand;\n\n use self::rand::{Rng, thread_rng};\n\n // use self::test::Bencher;\n\n use super::{Entity, Entities, EntityId};\n\n\n\n // Benchmark to test time time it takes to create a million entities, with level 3\n\n // optimizations the average time is 2,545,401 ns\n\n // #[bench]\n\n // fn creation_time(b: &mut Bencher) {\n", "file_path": "src/ecs/src/entity.rs", "rank": 37, "score": 22823.41861739785 }, { "content": " let mut iter = (&entities).into_iter();\n\n assert_eq!(iter.next(), Some(ent_list[0]));\n\n assert_eq!(iter.next(), Some(ent_list[1]));\n\n assert_eq!(iter.next(), Some(ent_list[3]));\n\n assert_eq!(iter.next(), None);\n\n\n\n }\n\n\n\n // Test to check if EntityId is smaller or equal to usize, since vectors use usize as key and\n\n // EntityId is used as the key of the vector.\n\n #[test]\n\n #[allow(unknown_lints)]\n\n #[allow(unused_comparisons)]\n\n #[allow(absurd_extreme_comparisons)]\n\n fn type_size() {\n\n let max_id = EntityId::max_value() as u64;\n\n let max_usize = usize::max_value() as u64;\n\n\n\n if max_id > max_usize || EntityId::min_value() < 0 {\n\n panic!(\"Type must be contained by usize\")\n\n }\n\n }\n\n}\n", "file_path": "src/ecs/src/entity.rs", "rank": 38, "score": 22822.93957560089 }, { "content": " type Item = Entity;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n if let Some(key) = self.entities.entities.get(self.index) {\n\n self.index = self.index + 1;\n\n if !self.entities.free_entity_ids.contains(&((self.index - 1) as EntityId)) {\n\n return Some(Entity {\n\n id: (self.index - 1) as EntityId,\n\n key: *key,\n\n });\n\n } else {\n\n continue;\n\n }\n\n } else {\n\n return None;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ecs/src/entity.rs", "rank": 39, "score": 22821.807857558302 }, { "content": " assert_eq!((ne.id, ne.key), (e.id, e.key + 1));\n\n }\n\n }\n\n\n\n #[test]\n\n // Tests the iterator logic\n\n fn iteration() {\n\n let mut entities: Entities = Entities::new();\n\n\n\n let ent_list = [entities.create_entity(),\n\n entities.create_entity(),\n\n entities.create_entity(),\n\n entities.create_entity()];\n\n\n\n for (i, e) in (&entities).into_iter().enumerate() {\n\n assert_eq!(e, ent_list[i]);\n\n }\n\n\n\n entities.destroy_entity(ent_list[2]);\n\n\n", "file_path": "src/ecs/src/entity.rs", "rank": 40, "score": 22821.27377592302 }, { "content": "fn half_plane_test(p: Vector3<f32>, normal: Vector3<f32>, offset: f32) -> i32 {\n\n let dist = dot(p, normal) + offset;\n\n if dist > 0.02 {\n\n 1\n\n } else if dist < -0.02 {\n\n 0\n\n } else {\n\n 2\n\n }\n\n}\n\n\n", "file_path": "src/math/src/extensions.rs", "rank": 59, "score": 20125.03447205598 }, { "content": "#![allow(unused_features)]\n\n#![warn(missing_docs)]\n\n\n\n//#![feature(test)]\n\n//#![feature(fnbox)]\n\n\n\n//! 
TODO: Fill the documentation\n\n\n\n#[macro_use]\n\nextern crate mopa;\n\nextern crate rayon;\n\n\n\npub mod entity;\n\nmod component;\n\n#[macro_use]\n\npub mod system;\n\nmod world;\n\n\n\npub use entity::Entity;\n\npub use component::Components;\n\npub use system::{System, Signature};\n\npub use world::{World, WorldBuilder};\n", "file_path": "src/ecs/src/lib.rs", "rank": 60, "score": 17.872438188000544 }, { "content": "use std::ops::{Add, Mul};\n\nuse num::traits::{One, Zero};\n\nuse super::{Matrix4, Vector3, atan2, cross, cos, sin};\n\n\n\n/// A [quaternion](https://en.wikipedia.org/wiki/Quaternion) type.\n\n#[derive(Default, PartialEq, Debug, Copy, Clone)]\n\npub struct Quaternion {\n\n ///\n\n pub x: f32,\n\n ///\n\n pub y: f32,\n\n ///\n\n pub z: f32,\n\n ///\n\n pub w: f32,\n\n}\n\n\n\nimpl Quaternion {\n\n /// Returns a new instance of a quaternion with the specified values.\n\n pub fn new(x: f32, y: f32, z: f32, w: f32) -> Self {\n", "file_path": "src/math/src/quaternion.rs", "rank": 61, "score": 11.328614089368376 }, { "content": " }\n\n\n\n fn yaw(&self) -> f32 {\n\n let q = *self;\n\n (-2.0 * (q.x * q.z - q.w * q.y)).asin()\n\n }\n\n}\n\n\n\nimpl Zero for Quaternion {\n\n fn zero() -> Self {\n\n Quaternion::new(0.0, 0.0, 0.0, 1.0)\n\n }\n\n fn is_zero(&self) -> bool {\n\n Quaternion::zero() == *self\n\n }\n\n}\n\n\n\nimpl One for Quaternion {\n\n fn one() -> Self {\n\n Quaternion::new(1.0, 1.0, 1.0, 1.0)\n", "file_path": "src/math/src/quaternion.rs", "rank": 62, "score": 10.65688219135194 }, { "content": " pub min: Vector3<f32>,\n\n /// The maximum value.\n\n pub max: Vector3<f32>,\n\n}\n\n\n\nimpl Default for Aabb {\n\n /// Returns a null AABB.\n\n fn default() -> Aabb {\n\n let mut r = Aabb {\n\n min: Vector3::zero() + 1.0,\n\n max: Vector3::zero() - 1.0,\n\n };\n\n r.set_null();\n\n r\n\n }\n\n}\n\n\n\nimpl Aabb {\n\n /// Returns a new instance of Aabb with the specified values.\n\n pub fn new(p1: Vector3<f32>, p2: Vector3<f32>) -> Self {\n", "file_path": "src/math/src/aabb.rs", "rank": 63, "score": 9.727759718227901 }, { "content": " }\n\n\n\n /// Returns a vertex list for this Aabb. Useful for debug rendering or operations that\n\n /// require every point and not just the min and max.\n\n pub fn vertices(&self) -> [Vector3<f32>; 8] {\n\n [self.min,\n\n Vector3::new(self.max.x, self.min.y, self.min.z),\n\n Vector3::new(self.min.x, self.max.y, self.min.z),\n\n Vector3::new(self.min.x, self.min.y, self.max.z),\n\n Vector3::new(self.min.x, self.max.y, self.max.z),\n\n Vector3::new(self.max.x, self.min.y, self.max.z),\n\n Vector3::new(self.max.x, self.max.y, self.min.z),\n\n self.max]\n\n }\n\n\n\n /// This function considers the Aabb as a box, rotates it and then calculates a new Aabb for\n\n /// the rotated box. 
Rotating the same Aabb over and over will only make it grow.\n\n pub fn rotate(&mut self, orientation: super::Quaternion) {\n\n let mut v = self.vertices();\n\n\n", "file_path": "src/math/src/aabb.rs", "rank": 64, "score": 8.479642376192107 }, { "content": " let mut r = Aabb::default();\n\n r.extend_by_vec(p1);\n\n r.extend_by_vec(p2);\n\n r\n\n }\n\n\n\n /// Returns a new instance of Aabb considering a center and a radius.\n\n pub fn with_center(center: Vector3<f32>, radius: f32) -> Self {\n\n let mut r = Aabb::default();\n\n r.extend_by_radius(center, radius);\n\n r\n\n }\n\n\n\n /// Sets the Aabb to an invalid state.\n\n pub fn set_null(&mut self) {\n\n *self = Aabb {\n\n min: Vector3::new(1.0, 1.0, 1.0),\n\n max: Vector3::new(-1.0, -1.0, -1.0),\n\n };\n\n }\n", "file_path": "src/math/src/aabb.rs", "rank": 65, "score": 8.38620105674871 }, { "content": " pub fn extend_by_aabb(&mut self, aabb: Aabb) {\n\n if !aabb.is_null() {\n\n self.extend_by_vec(aabb.min);\n\n self.extend_by_vec(aabb.max);\n\n }\n\n }\n\n\n\n /// TODO\n\n pub fn extend_disk(&mut self, c: Vector3<f32>, n: Vector3<f32>, r: f32) {\n\n if super::length(n) < 1e-12 {\n\n self.extend_by_vec(c);\n\n } else {\n\n let norm = super::normalize(n);\n\n let x = (1.0 - norm.x).sqrt() * r;\n\n let y = (1.0 - norm.y).sqrt() * r;\n\n let z = (1.0 - norm.y).sqrt() * r;\n\n self.extend_by_vec(c + Vector3::new(x, y, z));\n\n self.extend_by_vec(c - Vector3::new(x, y, z));\n\n }\n\n }\n", "file_path": "src/math/src/aabb.rs", "rank": 66, "score": 8.295783890503309 }, { "content": " }\n\n\n\n /// Combines two Aabb's and the result in as Aabb that encompasses both parameters.\n\n pub fn combine(&mut self, aabb1: Aabb, aabb2: Aabb) {\n\n self.min = Vector3::new(super::min(aabb1.min.x, aabb2.min.x),\n\n super::min(aabb1.min.y, aabb2.min.y),\n\n super::min(aabb1.min.z, aabb2.min.z));\n\n self.max = Vector3::new(super::max(aabb1.max.x, aabb2.max.x),\n\n super::max(aabb1.max.y, aabb2.max.y),\n\n super::max(aabb1.max.z, aabb2.max.z));\n\n }\n\n\n\n /// Returns true if this Aabb contains the aabb parameter.\n\n pub fn contains(&self, aabb: Aabb) -> bool {\n\n let mut result = true;\n\n result = result && self.min.x <= aabb.min.x;\n\n result = result && self.min.y <= aabb.min.y;\n\n result = result && aabb.max.x <= self.max.x;\n\n result = result && aabb.max.y <= self.max.y;\n\n result\n", "file_path": "src/math/src/aabb.rs", "rank": 67, "score": 8.255984608840372 }, { "content": " ret\n\n }\n\n}\n\n\n\nimpl Mul<Vector3<f32>> for Quaternion {\n\n type Output = Vector3<f32>;\n\n fn mul(self, rhs: Vector3<f32>) -> Vector3<f32> {\n\n let quat_vector = Vector3::new(self.x, self.y, self.z);\n\n let uv = cross(quat_vector, rhs);\n\n let uuv = cross(quat_vector, uv);\n\n\n\n rhs + ((uv * self.w) + uuv) * 2f32\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::Quaternion;\n\n use super::super::Vector3;\n\n use num::traits::{One, Zero};\n", "file_path": "src/math/src/quaternion.rs", "rank": 68, "score": 8.09708777563528 }, { "content": "\n\n /// Checks the Aabb state.\n\n pub fn is_null(&self) -> bool {\n\n self.min.x > self.max.x || self.min.y > self.max.y || self.min.z > self.max.z\n\n }\n\n\n\n /// Extends the Aabb size by a value. This value is subtracted from the min vector and\n\n /// added to the max vector.\n\n pub fn extend_by_value(&mut self, val: f32) {\n\n if !self.is_null() {\n\n self.min = self.min - Vector3::new(val, val, val);\n\n self.max = self.max + Vector3::new(val, val, val);\n\n }\n\n }\n\n\n\n /// Extends the Aabb by a Vector3. 
The min vector becomes a combination of itself and the\n\n /// parameter p, having the smallest value of both. The same goes for the max vector.\n\n pub fn extend_by_vec(&mut self, p: Vector3<f32>) {\n\n if !self.is_null() {\n\n self.min = super::min(p, self.min);\n", "file_path": "src/math/src/aabb.rs", "rank": 69, "score": 8.004078743492284 }, { "content": " let mut mat_model = super::Matrix4::one();\n\n mat_model = mat_model * orientation.to_mat4();\n\n\n\n for vertex in &mut v {\n\n let temp = mat_model * super::Vector4::new(vertex.x, vertex.y, vertex.z, 1.0);\n\n *vertex = super::Vector3::new(temp.x, temp.y, temp.z);\n\n }\n\n\n\n self.min = v[0];\n\n self.max = v[0];\n\n\n\n for vertex in &v {\n\n self.min = super::min(self.min, *vertex);\n\n self.max = super::max(self.max, *vertex);\n\n }\n\n }\n\n}\n", "file_path": "src/math/src/aabb.rs", "rank": 70, "score": 7.388182561280377 }, { "content": " self.max = super::max(p, self.max);\n\n } else {\n\n self.min = p;\n\n self.max = p;\n\n }\n\n }\n\n\n\n /// Extends an aabb by the specified radius.\n\n pub fn extend_by_radius(&mut self, p: Vector3<f32>, radius: f32) {\n\n let r = Vector3::<f32>::new(radius, radius, radius);\n\n if !self.is_null() {\n\n self.min = super::min(p - r, self.min);\n\n self.max = super::max(p - r, self.max);\n\n } else {\n\n self.min = p - r;\n\n self.max = p + r;\n\n }\n\n }\n\n\n\n /// Extends this aabb so the parameter aabb can fit within it.\n", "file_path": "src/math/src/aabb.rs", "rank": 71, "score": 7.203916199777408 }, { "content": "//! A module for the AABB type. It also exposes an enum type for intersection tests.\n\n\n\nuse super::Vector3;\n\nuse num::traits::{Zero, One};\n\n\n\n/// The return type of `Aabb::intersect`.\n\n#[derive(Copy, Clone, Eq, PartialEq)]\n\npub enum IntersectionType {\n\n ///\n\n INSIDE,\n\n ///\n\n INTERSECT,\n\n ///\n\n OUTSIDE,\n\n}\n\n\n\n/// An AABB represented by two `Vector3`.\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Aabb {\n\n /// The minimum value.\n", "file_path": "src/math/src/aabb.rs", "rank": 72, "score": 6.230538231147824 }, { "content": "#![warn(missing_docs)]\n\n#![warn(unused)]\n\n\n\n//! TODO: Fill the documentation\n\n\n\nextern crate glm;\n\nextern crate num;\n\n\n\npub mod aabb;\n\nmod quaternion;\n\nmod extensions;\n\n\n\npub use glm::*;\n\npub use aabb::Aabb;\n\npub use quaternion::*;\n\npub use extensions::*;\n", "file_path": "src/math/src/lib.rs", "rank": 73, "score": 6.118785857989355 }, { "content": "use super::{Vector3, Vector4, Matrix4, normalize, cross, dot};\n\nuse num::traits::{Zero, One};\n\n\n\n/// Returns a look at matrix from the supplied parameters. 
Eye is the camera position, center is\n\n/// the location you want the camera to point, up is the up direction in whichever abstraction\n\n/// you are working with (usually `Vector3::new(0, 1, 0)`).\n", "file_path": "src/math/src/extensions.rs", "rank": 74, "score": 6.067908863181045 }, { "content": "extern crate luck_ecs;\n\nextern crate luck_math;\n\nextern crate luck_core;\n\n\n\npub use luck_ecs as ecs;\n\npub use luck_math as math;\n\npub use luck_core as core;\n", "file_path": "src/lib.rs", "rank": 75, "score": 5.7868992219825515 }, { "content": " /// Returns true if both Aabb's a sufficiently similar.\n\n pub fn similar_to(&self, b: Aabb, diff: f32) -> bool {\n\n if self.is_null() || b.is_null() {\n\n return false;\n\n }\n\n\n\n let acceptable_diff = ((self.diagonal() + b.diagonal()) / 2.0) * diff;\n\n let mut min_diff = self.min - b.min;\n\n min_diff = Vector3::new(min_diff.x.abs(), min_diff.y.abs(), min_diff.z.abs());\n\n if min_diff.x > acceptable_diff.x {\n\n return false;\n\n }\n\n if min_diff.y > acceptable_diff.y {\n\n return false;\n\n }\n\n if min_diff.z > acceptable_diff.z {\n\n return false;\n\n }\n\n\n\n let mut max_diff = self.max - b.max;\n", "file_path": "src/math/src/aabb.rs", "rank": 76, "score": 5.665930984227547 }, { "content": " Quaternion {\n\n x: x,\n\n y: y,\n\n z: z,\n\n w: w,\n\n }\n\n }\n\n\n\n /// Converts a rotation vector3 into a quaternion. The vector paramater should be supplied\n\n /// in radians.\n\n pub fn from_euler(v: Vector3<f32>) -> Self {\n\n // TODO: Should this receive parameters in degrees or radians?\n\n let c = cos(v * 0.5);\n\n let s = sin(v * 0.5);\n\n\n\n let mut ret = Quaternion::zero();\n\n\n\n ret.w = c.x * c.y * c.z + s.x * s.y * s.z;\n\n ret.x = s.x * c.y * c.z - c.x * s.y * s.z;\n\n ret.y = c.x * s.y * c.z + s.x * c.y * s.z;\n", "file_path": "src/math/src/quaternion.rs", "rank": 77, "score": 5.418220532430256 }, { "content": " }\n\n}\n\n\n\nimpl Add for Quaternion {\n\n type Output = Quaternion;\n\n fn add(self, rhs: Quaternion) -> Quaternion {\n\n Quaternion::new(self.x + rhs.x, self.y + rhs.y, self.z + rhs.z, self.w + rhs.w)\n\n }\n\n}\n\n\n\nimpl Mul for Quaternion {\n\n type Output = Quaternion;\n\n fn mul(self, rhs: Quaternion) -> Quaternion {\n\n let mut ret = Quaternion::one();\n\n\n\n ret.w = (self.w * rhs.w) - (self.x * rhs.x) - (self.y * rhs.y) - (self.z * rhs.z);\n\n ret.x = (self.w * rhs.x) + (self.x * rhs.w) + (self.y * rhs.z) - (self.z * rhs.y);\n\n ret.y = (self.w * rhs.y) + (self.y * rhs.w) + (self.z * rhs.x) - (self.x * rhs.z);\n\n ret.z = (self.w * rhs.z) + (self.z * rhs.w) + (self.x * rhs.y) - (self.y * rhs.x);\n\n\n", "file_path": "src/math/src/quaternion.rs", "rank": 78, "score": 4.884210096003048 }, { "content": " pub fn scale(&mut self, s: Vector3<f32>, o: Vector3<f32>) {\n\n if !self.is_null() {\n\n self.min = self.min - o;\n\n self.max = self.max - o;\n\n\n\n self.min = self.min * s;\n\n self.max = self.max * s;\n\n\n\n self.min = self.min + o;\n\n self.max = self.max + o;\n\n }\n\n }\n\n\n\n /// Returns true if both Aabb's overlap.\n\n pub fn overlaps(&self, bb: Aabb) -> bool {\n\n !((self.is_null() || bb.is_null()) || (bb.min.x > self.max.x || bb.max.x < self.min.x) ||\n\n (bb.min.y > self.max.y || bb.max.y < self.min.y) ||\n\n (bb.min.z > self.max.z || bb.max.z < self.min.z))\n\n }\n\n\n", "file_path": "src/math/src/aabb.rs", "rank": 79, "score": 4.750786871122639 }, { "content": "\n\n /// Returns the center of the aabb. 
If the Aabb is null, Vector3::zero is returned.\n\n pub fn center(&self) -> Vector3<f32> {\n\n if !self.is_null() {\n\n let d = self.diagonal();\n\n self.min + (d * 0.5)\n\n } else {\n\n Vector3::zero()\n\n }\n\n }\n\n\n\n /// Translates the Aabb by a vector.\n\n pub fn translate(&mut self, v: Vector3<f32>) {\n\n if !self.is_null() {\n\n self.min = self.min + v;\n\n self.max = self.max + v;\n\n }\n\n }\n\n\n\n /// Scales the Aabb by s considering o as the origin.\n", "file_path": "src/math/src/aabb.rs", "rank": 80, "score": 3.810857153083732 }, { "content": "\n\n #[test]\n\n #[allow(approx_constant)] //1.5708 being near pi/2 is not relevant\n\n fn conversion_operations() {\n\n let q = Quaternion::new(1.0, 0.0, 0.0, 1.0);\n\n\n\n // We need some back and forth conversions since the same euler angle can be represented\n\n // in different ways.\n\n let mut q = Quaternion::from_euler(q.to_euler()).to_euler();\n\n //Due to precision loss we need some rounding\n\n q.x = (q.x * 10000.0).round() / 10000.0;\n\n assert_eq!(q, Vector3::new(1.5708, -0.0, 0.0));\n\n\n\n // TODO: Test Quaternion::to_mat4\n\n }\n\n\n\n #[test]\n\n fn num_operations() {\n\n assert_eq!(Quaternion::zero(), Quaternion::new(0.0, 0.0, 0.0, 1.0));\n\n assert_eq!(Quaternion::one(), Quaternion::new(1.0, 1.0, 1.0, 1.0));\n", "file_path": "src/math/src/quaternion.rs", "rank": 81, "score": 3.754885954649541 }, { "content": "<img src=\"https://travis-ci.org/lukebitts/Luck.svg\" />\n\n\n\n<img align=\"left\" width=\"148px\" src=\"documentation/images/logo.png\" />\n\n\n\n# Luck\n\n\n\nLuck is a personal game engine project I'm working on to help me learn Rust. I\n\ndo not actively support it besides working on whatever interests me, but feel\n\nfree to use this code if it is in any way useful to you. It is not by any means\n\ncompleted and/or stable.\n\n\n\n\n\n## Sub-projects\n\n\n\nThis project is divided into several sub-projects:\n\n\n\n### ECS\n\n\n\nLuck ECS is an Entity-Component system, it has no dependecy on the rest of the\n\nengine and can be used by itself.\n\n\n\n### Math\n\n\n\nLuck Math depends on glm-rs and reexports the library adding a few functions and\n\ntypes that glm-rs did not port from glm. It provides a Quaternion and AABB\n\ntypes.\n\n\n\n### Core\n\n\n\n Luck Core is the implementation of the engine as a framework, there you can\n\n find rendering code, physics, cameras, etc. It has a number of dependecies\n\n which can be found in the cargo file.\n\n\n\n# Building\n\n\n\nTo build Luck, simply run `cargo build` in the root folder. It requires\n\n`rustc 1.8.0-nightly (57c357d89 2016-02-16)`. Due to some of the dependecies\n\nit can't be compiled on stable yet.\n\n\n\n# Travis\n\n\n\nThe project doesn't build on Travis because [Clippy][clippy] requires a very\n\nspecific `rustc` version. 
I'll fix it eventually.\n\n\n\n[clippy]: https://github.com/Manishearth/rust-clippy\n", "file_path": "README.md", "rank": 82, "score": 3.746898056374145 }, { "content": "\n\n let q = Quaternion::one();\n\n assert_eq!(q + q, Quaternion::new(2.0, 2.0, 2.0, 2.0));\n\n\n\n let q = Quaternion::zero();\n\n assert_eq!(q * q, q);\n\n\n\n let q = Quaternion::new(1.0, 0.0, 0.0, 1.0);\n\n assert_eq!(q * q, Quaternion::new(2.0, 0.0, 0.0, 0.0));\n\n\n\n let q = Quaternion::new(0.0, 1.0, 0.0, 1.0);\n\n assert_eq!(q * q, Quaternion::new(0.0, 2.0, 0.0, 0.0));\n\n\n\n let q = Quaternion::new(0.0, 0.0, 1.0, 1.0);\n\n assert_eq!(q * q, Quaternion::new(0.0, 0.0, 2.0, 0.0));\n\n\n\n let q = Quaternion::new(1.0, 0.0, 0.0, 1.0);\n\n let v = Vector3::new(1.0, 1.0, 1.0);\n\n assert_eq!(q * v, Vector3::new(1.0, -3.0, 1.0));\n\n }\n\n\n\n}\n", "file_path": "src/math/src/quaternion.rs", "rank": 83, "score": 3.4145506180563525 }, { "content": " ret.c2.z = 1.0 - 2.0 * (q.x).powi(2) - 2.0 * (q.y).powi(2);\n\n\n\n ret\n\n }\n\n\n\n /// Returns the quaternion rotation in euler angles. The resulting angle is represented\n\n /// in radians.\n\n pub fn to_euler(&self) -> Vector3<f32> {\n\n // TODO: Should this return in degrees or radians?\n\n Vector3::new(self.pitch(), self.yaw(), self.roll())\n\n }\n\n\n\n fn roll(&self) -> f32 {\n\n let q = *self;\n\n atan2(2.0 * (q.x * q.y + q.w * q.z), q.w * q.w + q.x * q.x - q.y * q.y - q.z * q.z)\n\n }\n\n\n\n fn pitch(&self) -> f32 {\n\n let q = *self;\n\n atan2(2.0 * (q.y * q.z + q.w * q.x), q.w * q.w - q.x * q.x - q.y * q.y + q.z * q.z)\n", "file_path": "src/math/src/quaternion.rs", "rank": 84, "score": 3.333072692141014 }, { "content": " ret.z = c.x * c.y * s.z - s.x * s.y * c.z;\n\n\n\n ret\n\n }\n\n\n\n /// Creates a rotation matrix from the quaternion.\n\n pub fn to_mat4(&self) -> Matrix4<f32> {\n\n let mut ret = Matrix4::<f32>::one();\n\n let q = self;\n\n\n\n ret.c0.x = 1.0 - 2.0 * (q.y).powi(2) - 2.0 * (q.z).powi(2);\n\n ret.c0.y = 2.0 * q.x * q.y + 2.0 * q.z * q.w;\n\n ret.c0.z = 2.0 * q.x * q.z - 2.0 * q.y * q.w;\n\n\n\n ret.c1.x = 2.0 * q.x * q.y - 2.0 * q.z * q.w;\n\n ret.c1.y = 1.0 - 2.0 * (q.x).powi(2) - 2.0 * (q.z).powi(2);\n\n ret.c1.z = 2.0 * q.y * q.z + 2.0 * q.x * q.w;\n\n\n\n ret.c2.x = 2.0 * q.x * q.z + 2.0 * q.y * q.w;\n\n ret.c2.y = 2.0 * q.y * q.z - 2.0 * q.x * q.w;\n", "file_path": "src/math/src/quaternion.rs", "rank": 85, "score": 3.108507769858699 }, { "content": " max_diff = Vector3::new(max_diff.x.abs(), max_diff.y.abs(), max_diff.z.abs());\n\n if max_diff.x > acceptable_diff.x {\n\n return false;\n\n }\n\n if max_diff.y > acceptable_diff.y {\n\n return false;\n\n }\n\n if max_diff.z > acceptable_diff.z {\n\n return false;\n\n }\n\n\n\n true\n\n }\n\n\n\n /// Returns the perimeter of the Aabb.\n\n pub fn perimeter(&self) -> f32 {\n\n let wx = self.max.x - self.min.x;\n\n let wy = self.max.y - self.min.y;\n\n\n\n 2.0 * (wx + wy)\n", "file_path": "src/math/src/aabb.rs", "rank": 86, "score": 2.8936102494301985 }, { "content": "# Contributing to Luck\n\n\n\nFeel free to send a pull request or open an issue. I can't guarantee that I will\n\nanswer, but I'm not actively ignoring this project.\n\n\n\n## Documentation and wording mistakes\n\n\n\nSince english is not my first language, the comments and documentation may be difficult to understand and/or have spelling mistakes. 
Feel free to open an issue or send a pull request to fix.\n", "file_path": "CONTRIBUTING.md", "rank": 87, "score": 2.5496921078886103 }, { "content": "\n\n /// Returns the diagonal of the AABB which is defined as Aabb::max - Aabb::min. If the Aabb\n\n /// is null, Vector3::zero() is returned.\n\n pub fn diagonal(&self) -> Vector3<f32> {\n\n if !self.is_null() {\n\n self.max - self.min\n\n } else {\n\n Vector3::zero()\n\n }\n\n }\n\n\n\n /// Returns the longest edge.\n\n pub fn longest_edge(&self) -> f32 {\n\n self.diagonal().x.max(self.diagonal().y).max(self.diagonal().z)\n\n }\n\n\n\n /// Returns the shortest edge.\n\n pub fn shortest_edge(&self) -> f32 {\n\n self.diagonal().x.min(self.diagonal().y).min(self.diagonal().z)\n\n }\n", "file_path": "src/math/src/aabb.rs", "rank": 88, "score": 2.4169862063500824 }, { "content": "# Change Log\n\n\n\nAll notable per-release changes will be documented in this file. This project\n\nadheres to [Semantic Versioning][sv].\n\n\n\n[sv]: http://semver.org/\n\n\n\n## Unreleased\n\n### Added\n\n* Added support to rust stable.\n\n\n\n### Removed\n\n* Removed dependency on Clippy and added allow unknown lints to the Clippy\n\n allow lints. Clippy should still be run on the project but through Cargo.\n\n* Removed benchmark tests since they are unstable.\n\n* Removed dependency on FnBox since it is still unstable (and there is no\n\n advantage over FnMut?)\n\n\n\n## 0.2.0 (2016-02-24) 🍀\n\n### Added\n\n* Added a language disclaimer to the CONTRIBUTING file.\n\n* Added a rustfmt.toml to the project.\n\n* Added a .travis.yml file to support integration with Travis.\n\n* Added code to the math module.\n\n* Added a \"Building\" session to the README file.\n\n\n\n### Changed\n\n* Changed the README file to look prettier in github.\n\n* Bumped the version of the ECS module from 0.1.0 to 0.1.1\n\n* Created some tests for the Quaternion code and fixed a bug with quaternion\n\n multiplication.\n\n\n\n## 0.1.0 (2016-02-22)\n\n### Added\n\n* Completed the ECS module. 
Fully documented.\n\n\n\n## 0.0.1 (2016-02-20)\n\n* Initial release\n", "file_path": "CHANGELOG.md", "rank": 89, "score": 2.3204857198845388 }, { "content": "\n\n/// The result of a call to `is_box_in_frustum`\n\n#[derive(Eq, PartialEq)]\n\npub enum FrustumTestResult {\n\n ///\n\n OUTSIDE = 0,\n\n ///\n\n INSIDE = 1,\n\n ///\n\n INTERSECT = 3,\n\n}\n\n\n", "file_path": "src/math/src/extensions.rs", "rank": 90, "score": 2.1603048517417305 }, { "content": " ret = 0;\n\n break;\n\n }\n\n\n\n idx = vector_to_index(-plane_normal);\n\n test_point = origin + half_dim * corner_offsets[idx as usize];\n\n\n\n if half_plane_test(test_point, plane_normal, plane.w) == 0 {\n\n ret |= 2;\n\n }\n\n }\n\n\n\n match ret {\n\n 0 => FrustumTestResult::OUTSIDE,\n\n 1 => FrustumTestResult::INSIDE,\n\n 3 => FrustumTestResult::INTERSECT,\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/math/src/extensions.rs", "rank": 91, "score": 1.648352955037661 }, { "content": " /// Calculates the intersection type between two Aabb's.\n\n pub fn intersect(&self, b: Aabb) -> IntersectionType {\n\n if self.is_null() || b.is_null() {\n\n return IntersectionType::OUTSIDE;\n\n }\n\n\n\n if (self.max.x < b.min.x) || (self.min.x > b.max.x) || (self.max.y < b.min.y) ||\n\n (self.min.y > b.max.y) || (self.max.z < b.min.z) || (self.min.z > b.max.z) {\n\n return IntersectionType::OUTSIDE;\n\n }\n\n\n\n if (self.min.x <= b.min.x) && (self.max.x >= b.max.x) && (self.min.y <= b.min.y) &&\n\n (self.max.y >= b.max.y) && (self.min.z <= b.min.z) &&\n\n (self.max.z >= b.max.z) {\n\n return IntersectionType::INSIDE;\n\n }\n\n\n\n IntersectionType::INTERSECT\n\n }\n\n\n", "file_path": "src/math/src/aabb.rs", "rank": 92, "score": 1.3696065971002849 } ]
Rust
src/configuration/archive.rs
dandyvica/clf
0774f971a973d89688a72f7283e251c7a429e946
use std::{
    fmt::Debug,
    path::{Path, PathBuf},
};

use serde::Deserialize;

#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub struct LogArchive {
    pub dir: Option<PathBuf>,
    pub extension: Option<String>,
    pub pattern: Option<String>,
}

impl LogArchive {
    pub fn default_path<P: AsRef<Path> + Clone>(path: P) -> PathBuf {
        let default_path = format!("{}.1", path.as_ref().to_string_lossy());
        PathBuf::from(default_path)
    }

    pub fn archived_path<P: AsRef<Path> + std::fmt::Debug>(&self, path: P) -> PathBuf {
        let dir = match &self.dir {
            None => {
                let dir = path.as_ref().parent();
                debug_assert!(dir.is_some());
                dir.unwrap()
            }
            Some(dir) => &dir,
        };
        debug_assert!(dir.is_dir());
        println!("dir={:?}", dir);

        debug_assert!(path.as_ref().file_name().is_some());
        let file_name = path.as_ref().file_name().unwrap().to_string_lossy();

        #[cfg(target_family = "windows")]
        let default_path = if self.extension.is_none() {
            format!("{}\\{}.1", dir.to_string_lossy(), file_name)
        } else {
            format!(
                "{}\\{}.{}",
                dir.to_string_lossy(),
                file_name,
                self.extension.as_ref().unwrap()
            )
        };

        #[cfg(target_family = "unix")]
        let default_path = if self.extension.is_none() {
            format!("{}/{}.1", dir.to_string_lossy(), file_name)
        } else {
            format!(
                "{}/{}.{}",
                dir.to_string_lossy(),
                file_name,
                self.extension.as_ref().unwrap()
            )
        };

        println!(
            "self={:?}, dir={}, file={}, rotated={}",
            path,
            dir.display(),
            file_name,
            default_path
        );

        PathBuf::from(default_path)
    }
}

#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use super::*;

    #[test]
    #[cfg(target_family = "unix")]
    fn default_path() {
        let yaml = r#"
dir: /var/log
extension: xz
"#;
        let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML");
        println!("{:#?}", archive);

        let mut p = PathBuf::from("/var/log/kern.log");
        assert_eq!(
            LogArchive::default_path(p),
            PathBuf::from("/var/log/kern.log.1")
        );

        p = PathBuf::from("/var/log/syslog");
        assert_eq!(
            LogArchive::default_path(p),
            PathBuf::from("/var/log/syslog.1")
        );
    }

    #[test]
    #[cfg(target_family = "unix")]
    fn archived_path() {
        let p = PathBuf::from("/var/log/kern.log");

        let archive = LogArchive {
            dir: None,
            extension: None,
            pattern: None,
        };
        assert_eq!(
            archive.archived_path(&p),
            PathBuf::from("/var/log/kern.log.1")
        );

        let yaml = r#"
dir: /tmp
"#;
        let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML");
        assert_eq!(archive.archived_path(&p), PathBuf::from("/tmp/kern.log.1"));

        let yaml = r#"
extension: gz
"#;
        let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML");
        assert_eq!(
            archive.archived_path(&p),
            PathBuf::from("/var/log/kern.log.gz")
        );

        let yaml = r#"
dir: /tmp
extension: gz
"#;
        let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML");
        assert_eq!(archive.archived_path(&p), PathBuf::from("/tmp/kern.log.gz"));
    }

    #[test]
    #[cfg(target_family = "windows")]
    fn archived_path() {
        let p = PathBuf::from(r"C:\Windows\WindowsUpdate.log");

        let archive = LogArchive {
            dir: None,
            extension: None,
            pattern: None,
        };
        assert_eq!(
            archive.archived_path(&p),
            PathBuf::from(r"C:\Windows\WindowsUpdate.log.1")
        );

        let archive = LogArchive {
            dir: Some(PathBuf::from(r"c:\Windows\Temp")),
            extension: None,
            pattern: None,
        };
        assert_eq!(
            archive.archived_path(&p),
            PathBuf::from(r"C:\Windows\Temp\WindowsUpdate.log.1")
        );

        let archive = LogArchive {
            dir: None,
            extension: Some("gz".to_string()),
            pattern: None,
        };
        assert_eq!(
            archive.archived_path(&p),
            PathBuf::from(r"C:\Windows\WindowsUpdate.log.gz")
        );

        let archive = LogArchive {
            dir: Some(PathBuf::from(r"c:\Windows\Temp")),
            extension: Some("gz".to_string()),
            pattern: None,
        };
        assert_eq!(
            archive.archived_path(&p),
            PathBuf::from(r"c:\Windows\Temp\WindowsUpdate.log.gz")
        );
    }
}
use std::{
    fmt::Debug,
    path::{Path, PathBuf},
};

use serde::Deserialize;

#[derive(Debug, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub struct LogArchive {
    pub dir: Option<PathBuf>,
    pub extension: Option<String>,
    pub pattern: Option<String>,
}

impl LogArchive {
    pub fn default_path<P: AsRef<Path> + Clone>(path: P) -> PathBuf {
        let default_path = format!("{}.1", path.as_ref().to_string_lossy());
        PathBuf::from(default_path)
    }

    pub fn archived_path<P: AsRef<Path> + std::fmt::Debug>(&self, path: P) -> PathBuf {
        let dir = match &self.dir {
            None => {
                let dir = path.as_ref().parent();
                debug_assert!(dir.is_some());
                dir.unwrap()
            }
            Some(dir) => &dir,
        };
        debug_assert!(dir.is_dir());
        println!("dir={:?}", dir);

        debug_assert!(path.as_ref().file_name().is_some());
        let file_name = path.as_ref().file_name().unwrap().to_string_lossy();

        #[cfg(target_family = "windows")]
        let default_path = if self.extension.is_none() {
            format!("{}\\{}.1", dir.to_string_lossy(), file_name)
        } else {
            format!(
                "{}\\{}.{}",
                dir.to_string_lossy(),
                file_name,
                self.extension.as_ref().unwrap()
            )
        };

        #[cfg(target_family = "unix")]
        let default_path = if self.extension.is_none() {
            format!("{}/{}.1", dir.to_string_lossy(), file_name)
        } else {
            format!(
                "{}/{}.{}",
                dir.to_string_lossy(),
                file_name,
                self.extension.as_ref().unwrap()
            )
        };

        println!(
            "self={:?}, dir={}, file={}, rotated={}",
            path,
            dir.display(),
            file_name,
            default_path
        );

        PathBuf::from(default_path)
    }
}

#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use super::*;

    #[test]
    #[cfg(target_family = "unix")]
    fn default_path() {
        let yaml = r#"
dir: /var/log
extension: xz
"#;
        let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML");
        println!("{:#?}", archive);

        let mut p = PathBuf::from("/var/log/kern.log");
        assert_eq!(
            LogArchive::default_path(p),
            PathBuf::from("/var/log/kern.log.1")
        );

        p = PathBuf::from("/var/log/syslog");
        assert_eq!(
            LogArchive::default_path(p),
            PathBuf::from("/var/log/syslog.1")
        );
    }

    #[test]
    #[cfg(target_family = "unix")]
    fn archived_path() {
        let p = PathBuf::from("/var/log/kern.log");

        let archive = LogArchive {
            dir: None,
            extension: None,
            pattern: None,
        };
        assert_eq!(
            archive.archived_path(&p),
            PathBuf::from("/var/log/kern.log.1")
        );

        let yaml = r#"
dir: /tmp
"#;
        let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML");
        assert_eq!(archive.archived_path(&p), PathBuf::from("/tmp/kern.log.1"));

        let yaml = r#"
extension: gz
"#;
        let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML");
        assert_eq!(
            archive.archived_path(&p),
            PathBuf::from("/var/log/kern.log.gz")
        );

        let yaml = r#"
dir: /tmp
extension: gz
"#;
        let archive: LogArchive = serde_yaml::from_str(yaml).expect("unable to read YAML");
        assert_eq!(archive.archived_path(&p), PathBuf::from("/tmp/kern.log.gz"));
    }

    #[test]
    #[cfg(target_family = "windows")]
}
fn archived_path() { let p = PathBuf::from(r"C:\Windows\WindowsUpdate.log"); let archive = LogArchive { dir: None, extension: None, pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\WindowsUpdate.log.1") ); let archive = LogArchive { dir: Some(PathBuf::from(r"c:\Windows\Temp")), extension: None, pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\Temp\WindowsUpdate.log.1") ); let archive = LogArchive { dir: None, extension: Some("gz".to_string()), pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"C:\Windows\WindowsUpdate.log.gz") ); let archive = LogArchive { dir: Some(PathBuf::from(r"c:\Windows\Temp")), extension: Some("gz".to_string()), pattern: None, }; assert_eq!( archive.archived_path(&p), PathBuf::from(r"c:\Windows\Temp\WindowsUpdate.log.gz") ); }
function_block-full_function
[ { "content": "fn read_file<R: BufRead>(mut reader: R) {\n\n // our read buffer\n\n let mut buffer = Vec::with_capacity(1024);\n\n\n\n loop {\n\n let ret = reader.read_until(b'\\n', &mut buffer);\n\n if let Ok(bytes_read) = ret {\n\n if bytes_read == 0 {\n\n break;\n\n }\n\n }\n\n\n\n let line = String::from_utf8_lossy(&buffer);\n\n print!(\"{}\", line);\n\n\n\n buffer.clear();\n\n }\n\n}\n", "file_path": "src/uncompress.rs", "rank": 0, "score": 216530.36825795466 }, { "content": "/// Saves snapshot file into provided path\n\npub fn save_snapshot(snapshot: &mut Snapshot, snapfile: &PathBuf, retention: u64) {\n\n debug!(\"saving snapshot file {}\", &snapfile.display());\n\n if let Err(e) = snapshot.save(&snapfile, retention) {\n\n Nagios::exit_critical(&format!(\n\n \"unable to save snapshot file: {:?}, error: {}\",\n\n &snapfile, e\n\n ));\n\n }\n\n}\n\n\n", "file_path": "src/init.rs", "rank": 1, "score": 157628.6644247934 }, { "content": "// This method is common to all compression ad-hoc seek method.\n\nfn _set_offset<R>(mut reader: R, offset: u64) -> AppResult<u64>\n\nwhere\n\n R: Read,\n\n{\n\n // if 0, nothing to do\n\n if offset == 0 {\n\n return Ok(0);\n\n }\n\n\n\n let pos = match reader.by_ref().bytes().nth((offset - 1) as usize) {\n\n None => {\n\n return Err(AppError::new_custom(\n\n AppCustomErrorKind::SeekPosBeyondEof,\n\n &format!(\"tried to set offset beyond EOF, at offset: {}\", offset),\n\n ))\n\n }\n\n Some(x) => x,\n\n };\n\n Ok(pos.unwrap() as u64)\n\n}\n", "file_path": "src/logfile/seeker.rs", "rank": 2, "score": 133447.9321199191 }, { "content": "/// Spawn postscript\n\npub fn spawn_postscript(postscript: &mut Script, pids: &[u32]) {\n\n // add all pids to the end of arguments\n\n for pid in pids {\n\n postscript.command.push(pid.to_string());\n\n }\n\n\n\n // run script\n\n trace!(\"postscript: {:?}\", &postscript.command);\n\n let result = postscript.spawn(None);\n\n\n\n // check rc\n\n if let Err(e) = &result {\n\n error!(\"error: {} spawning command: {:?}\", e, postscript.command);\n\n } else {\n\n info!(\n\n \"postcript command successfully executed, pid={}\",\n\n result.unwrap()\n\n )\n\n }\n\n}\n", "file_path": "src/init.rs", "rank": 3, "score": 129837.97700621444 }, { "content": "fn main() -> std::io::Result<()> {\n\n let args: Vec<String> = env::args().collect();\n\n let mut file: Option<File> = None;\n\n\n\n // one argument is mandatory: address and port\n\n let addr = &args[1];\n\n\n\n if args.len() == 3 {\n\n file = Some(File::create(&args[2]).unwrap());\n\n }\n\n\n\n let listener = std::net::TcpListener::bind(addr).unwrap();\n\n println!(\"waiting on address: {}\", addr);\n\n match listener.accept() {\n\n Ok((mut socket, _addr)) => {\n\n // set short timeout\n\n // socket\n\n // .set_read_timeout(Some(std::time::Duration::new(3, 0)))\n\n // .expect(\"Couldn't set read timeout\");\n\n\n", "file_path": "tests/integration/echotcp.rs", "rank": 4, "score": 128676.05067053183 }, { "content": "fn main() -> std::io::Result<()> {\n\n #[cfg(target_family = \"unix\")]\n\n {\n\n let args: Vec<String> = env::args().collect();\n\n let mut file: Option<File> = None;\n\n\n\n // one argument is mandatory: address and port\n\n let addr = &args[1];\n\n let _ = std::fs::remove_file(&addr);\n\n\n\n if args.len() == 3 {\n\n file = Some(File::create(&args[2]).unwrap());\n\n }\n\n\n\n let listener = std::os::unix::net::UnixListener::bind(addr).unwrap();\n\n println!(\"waiting on address: {}\", addr);\n\n match listener.accept() {\n\n Ok((mut socket, _addr)) => {\n\n // set 
short timeout\n\n // socket\n", "file_path": "tests/integration/echodomain.rs", "rank": 5, "score": 128676.05067053183 }, { "content": "// manage error counters depending on options\n\nfn counters_calculation(counters: &mut PatternCounters, options: &SearchOptions) {\n\n // do we need to save our thresholds ?\n\n if options.savethresholds {\n\n // critical errors\n\n if options.criticalthreshold != 0 {\n\n if counters.critical_count < options.criticalthreshold {\n\n // nothing to do\n\n } else {\n\n // or just the delta\n\n counters.critical_count -= options.criticalthreshold;\n\n }\n\n }\n\n // warning errors\n\n if options.warningthreshold != 0 {\n\n // warning errors\n\n if counters.warning_count < options.warningthreshold {\n\n // nothing to do\n\n } else {\n\n // or just the delta\n\n counters.warning_count -= options.warningthreshold;\n", "file_path": "src/logfile/lookup.rs", "rank": 6, "score": 109956.86906764825 }, { "content": "/// Load the snapshot file: if option \"-p\" is present, use it, or use the config tag or build a new name from config file\n\npub fn load_snapshot(\n\n options: &CliOptions,\n\n config_snapshot_file: &Option<PathBuf>,\n\n) -> (Snapshot, PathBuf) {\n\n // if option \"-p\" is present, use it, or use the config tag or build a new name from config file\n\n let snapfile = if options.snapshot_file.is_some() {\n\n options.snapshot_file.as_ref().unwrap().clone()\n\n // it's given as a command line argument as '--snapshot'\n\n } else if config_snapshot_file.is_some() {\n\n // or it's using what's defined in the configuration file\n\n let conf_file_or_dir = config_snapshot_file.as_ref().unwrap();\n\n\n\n // if what is specified is a directory, use this to build the final snapshot file\n\n if conf_file_or_dir.is_dir() {\n\n Snapshot::build_name(&options.config_file, Some(conf_file_or_dir))\n\n } else {\n\n conf_file_or_dir.clone()\n\n }\n\n } else {\n\n // otherwise, the snapshot file is build from the config file, adding .json extension\n", "file_path": "src/init.rs", "rank": 7, "score": 108911.799354429 }, { "content": " def self.create(path, append=false)\n\n # open for writing or appending \n\n file = append ? 
File.open(path, \"a\") : File.open(path, \"w\")\n\n\n\n # write n lines\n\n n = 0\n\n 101.times do\n\n n += 1\n\n line = \"%03d\" % n\n\n\n\n if n == 51 then\n\n file.puts \"1970-01-01 00:00:00: ############# this is a fake ok pattern generated for tests, line number = #{line}\"\n\n next\n\n end\n\n\n\n random = rand(10000..99999)\n\n file.puts \"1970-01-01 00:00:00: ---- this is an error generated for tests, line number = #{line}, error id = #{random}\"\n\n\n\n n += 1\n\n line = \"%03d\" % n\n", "file_path": "tests/integration/ruby/testcase.rb", "rank": 8, "score": 104606.48170282005 }, { "content": "fn main() {\n\n // manage cli arguments\n\n let matches = App::new(\"Log files reader\")\n\n .version(\"0.1\")\n\n .author(\"Alain Viguier dandyvica@gmail.com\")\n\n .about(r#\"Run intergation tests with clf\"#)\n\n .arg(\n\n Arg::new(\"mode\")\n\n .short('m')\n\n .long(\"mode\")\n\n .required(false)\n\n .long_about(\"Debug or release\")\n\n .possible_values(&[\"debug\", \"release\"])\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::new(\"verbose\")\n\n .short('v')\n\n .long(\"verbose\")\n\n .required(false)\n", "file_path": "tests/integration/integration_test.rs", "rank": 9, "score": 96399.28434745384 }, { "content": "/// Create new logger and optionally delete logfile is bigger than cli value\n\npub fn init_log(options: &CliOptions) {\n\n // builds the logger from cli or the default one from platform specifics\n\n let logger = &options.clf_logger;\n\n\n\n // options depend on wheter we need to reset the log\n\n let writable = if options.reset_log {\n\n OpenOptions::new()\n\n .write(true)\n\n .truncate(true)\n\n .create(true)\n\n .open(logger)\n\n } else {\n\n OpenOptions::new().append(true).create(true).open(logger)\n\n };\n\n\n\n // check for opening or creation error\n\n if let Err(ref e) = writable {\n\n Nagios::exit_critical(&format!(\n\n \"unable to open or create log file {:?}, error {}\",\n\n logger, e\n", "file_path": "src/init.rs", "rank": 10, "score": 95712.32989997664 }, { "content": "pub fn from_epoch_secs() -> AppResult<u64> {\n\n let from_epoch = from_epoch()?;\n\n Ok(from_epoch.as_secs())\n\n}\n", "file_path": "src/misc/util.rs", "rank": 11, "score": 93558.50707564664 }, { "content": "/// Create a new config struct\n\npub fn init_config(options: &CliOptions) -> Config {\n\n #[cfg(feature = \"tera\")]\n\n let _config = Config::from_path(\n\n &options.config_file,\n\n options.tera_context.as_deref(),\n\n options.show_rendered,\n\n );\n\n\n\n #[cfg(not(feature = \"tera\"))]\n\n let _config = Config::from_path(&options.config_file);\n\n\n\n // check for loading errors\n\n if let Err(ref e) = _config {\n\n Nagios::exit_critical(&format!(\n\n \"error loading config file: {:?}, error: {}\",\n\n &options.config_file, e\n\n ));\n\n }\n\n\n\n let mut config = _config.unwrap();\n", "file_path": "src/init.rs", "rank": 12, "score": 91274.39236822045 }, { "content": " def initialize(yaml_file)\n\n @yaml = YAML.load_file(yaml_file)\n\n pp @yaml\n\n end\n\n\n\n\n\nend\n\n\n", "file_path": "tests/integration/ruby/testcase.rb", "rank": 13, "score": 90401.92400326033 }, { "content": "fn main() {\n\n let vars: Vec<(String, String)> = std::env::vars()\n\n .filter(|x| x.0.as_str().starts_with(\"CLF_\"))\n\n .collect();\n\n\n\n let args: Vec<String> = env::args().collect();\n\n\n\n let mut file = OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .append(true)\n\n .open(&args[1])\n\n .unwrap();\n\n\n\n let _ = write!(file, \"{}-{:?}\\n\", std::process::id(), vars);\n\n}\n", "file_path": 
"tests/integration/echovars.rs", "rank": 14, "score": 89747.12654893272 }, { "content": "/// All `PathBuf` utility functions.\n\npub trait ReadFs {\n\n fn is_match(self, re: &Regex) -> bool;\n\n fn is_usable(&self) -> AppResult<()>;\n\n fn list_files(&self, regex: &str) -> AppResult<Vec<PathBuf>>;\n\n fn signature(&self, hash_buffer_size: usize) -> AppResult<Signature>;\n\n}\n\n\n\nimpl ReadFs for PathBuf {\n\n /// `true` if the path matches the regex\n\n fn is_match(self, re: &Regex) -> bool {\n\n // converts file name to a string\n\n let s = self.into_os_string();\n\n re.is_match(&s.to_string_lossy())\n\n }\n\n\n\n /// Tells whether a `PathBuf` is accessible i.e. it combines `has_root()`, `exists()` and `is_file()`. \n\n fn is_usable(&self) -> AppResult<()> {\n\n // first canonicalize path\n\n let canon = self\n\n .canonicalize()\n", "file_path": "src/misc/extension.rs", "rank": 15, "score": 85976.02134243587 }, { "content": "/// Returns the list of files from a spawned command.\n\npub trait ListFiles {\n\n fn get_file_list(&self) -> AppResult<Vec<PathBuf>>;\n\n}\n\n\n\nimpl ListFiles for String {\n\n // in this case, the command is started with either bash or cmd.exe\n\n fn get_file_list(&self) -> AppResult<Vec<PathBuf>> {\n\n // build the corresponding command\n\n #[cfg(target_family = \"unix\")]\n\n let output = Command::new(\"bash\")\n\n .args(&[\"-c\", self])\n\n .output()\n\n .map_err(|e| {\n\n context!(\n\n e,\n\n \"unable to read output from command: bash -c '{:?}'\",\n\n self,\n\n )\n\n })\n\n .unwrap();\n", "file_path": "src/misc/extension.rs", "rank": 16, "score": 85965.16348329233 }, { "content": "/// Spawn a prescript and returns its pid\n\npub fn spawn_prescript(prescript: &Script, vars: Option<&GlobalVars>) -> u32 {\n\n let result = prescript.spawn(vars);\n\n\n\n // check rc\n\n if let Err(e) = &result {\n\n error!(\"error: {} spawning prescript: {:?}\", e, prescript.command);\n\n Nagios::exit_critical(&format!(\n\n \"error: {} spawning prescript: {:?}\",\n\n e, prescript.command\n\n ));\n\n }\n\n\n\n // now it's safe to unwrap to get pid\n\n debug_assert!(result.is_ok());\n\n result.unwrap()\n\n}\n\n\n", "file_path": "src/init.rs", "rank": 17, "score": 80225.05392821823 }, { "content": "/// Converts the timestamp to a human readable string in the snapshot.\n\npub fn timestamp_to_string<S>(value: &f64, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n // exract integer part = number of seconds\n\n // frational part = number of nanoseconds\n\n let secs = value.trunc();\n\n let nanos = value.fract();\n\n let utc_tms = Utc.timestamp(secs as i64, (nanos * 1_000_000_000f64) as u32);\n\n format!(\"{}\", utc_tms.format(\"%Y-%m-%d %H:%M:%S.%f\")).serialize(serializer)\n\n}\n\n\n", "file_path": "src/logfile/rundata.rs", "rank": 18, "score": 69212.65157218747 }, { "content": "/// Converts the error to string.\n\npub fn error_to_string<S>(value: &Option<AppError>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n if value.is_none() {\n\n \"None\".to_string().serialize(serializer)\n\n } else {\n\n format!(\"{}\", value.as_ref().unwrap()).serialize(serializer)\n\n }\n\n}\n\n\n\nimpl RunData {\n\n /// increment or decrement counters\n\n pub fn increment_counters(&mut self, pattern_type: &PatternType) {\n\n match pattern_type {\n\n PatternType::critical => self.counters.critical_count += 1,\n\n PatternType::warning => self.counters.warning_count += 1,\n\n PatternType::ok => self.counters.ok_count += 1,\n\n }\n\n }\n", "file_path": 
"src/logfile/rundata.rs", "rank": 19, "score": 65728.5235511631 }, { "content": "#!/usr/bin/perl\n\n\n\n# just fetch the relevant environment variable from arguments\n\n# and create a logfile\n\nuse strict;\n\nuse warnings;\n\n \n\n # create the file from fetch env variable\n\nmy $filename = '/tmp/concatenated.log.sh';\n\nopen(my $fh, '>', $filename) or die \"Could not open file '$filename' $!\";\n\nprint $ENV{'my_awesomescript'};\n\nprint $fh $ENV{'my_awesomescript'};\n\nclose $fh;\n\n\n\n# now just run file\n\nchmod 0744, $filename;\n\n\n\n# run it\n\nmy $exit_code = system($filename);\n\nexit($exit_code);\n\n\n\n\n\n\n", "file_path": "tests/integration/callbacks/create_file.pl", "rank": 20, "score": 64969.90573076634 }, { "content": "/// A custom deserializer for the `exclude` field.\n\nfn to_regex<'de, D>(deserializer: D) -> Result<Option<Regex>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let v: Value = Deserialize::deserialize(deserializer)?;\n\n //println!(\"v= {:?}\", v);\n\n let re = Regex::new(v.as_str().unwrap()).map_err(de::Error::custom)?;\n\n Ok(Some(re))\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(target_family = \"unix\")]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[cfg(target_os = \"linux\")]\n\n fn logfiledef() {\n\n let mut yaml = r#\"\n\npath: /var/log/syslog\n", "file_path": "src/configuration/logfiledef.rs", "rank": 21, "score": 61957.76111578095 }, { "content": "/// Replace the `logsource` YAML tag with the result of the script command\n\nfn fill_logdef<'de, D>(deserializer: D) -> Result<Vec<Search>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n // get the YAML `Value` from serde. See https://docs.serde.rs/serde_yaml/enum.Value.html\n\n let yaml_value: Value = Deserialize::deserialize(deserializer)?;\n\n\n\n // transform this value into our struct\n\n let vec_yaml: Result<Vec<Search>, _> =\n\n serde_yaml::from_value(yaml_value).map_err(de::Error::custom);\n\n if vec_yaml.is_err() {\n\n return vec_yaml;\n\n }\n\n let mut vec_search = vec_yaml.unwrap();\n\n\n\n // this vector wil hold new logfiles from the list returned from the script execution\n\n let mut vec_loglist: Vec<Search> = Vec::new();\n\n\n\n for search in &vec_search {\n\n match &search.logfile.path {\n", "file_path": "src/configuration/config.rs", "rank": 22, "score": 60427.505851450696 }, { "content": "fn main() {\n\n #[cfg(target_family = \"windows\")]\n\n {\n\n let path = std::env::var(\"PATH\").expect(\"unable to fetch %PATH%\");\n\n let new_path = format!(r\"{};.\\src\\windows\", path);\n\n std::env::set_var(\"PATH\", new_path);\n\n }\n\n}\n", "file_path": "build.rs", "rank": 23, "score": 59981.78237939674 }, { "content": "#[repr(C)]\n\n#[derive(Default)]\n\nstruct WinSign {\n\n pub inode: u64,\n\n pub dev: u64,\n\n}\n\n\n\nimpl Signature {\n\n fn hash<P: AsRef<Path> + Debug>(path: P, hash_buffer_size: usize) -> AppResult<u64> {\n\n use crc::crc64;\n\n debug_assert!(hash_buffer_size != 0);\n\n trace!(\"hash_buffer_size = {}\", hash_buffer_size);\n\n\n\n // open file\n\n let mut file = File::open(path.as_ref())\n\n .map_err(|e| context!(e, \"unable to open file for calculating hash {:?}\", path))?;\n\n\n\n //let mut reader = BufReader::new(&file);\n\n let mut buffer = vec![0; hash_buffer_size];\n\n\n\n file.read_exact(&mut buffer)\n\n .map_err(|e| context!(e, \"path={:?}, read_exact()\", path))?;\n", "file_path": "src/misc/extension.rs", "rank": 24, "score": 59201.15217698994 }, { "content": "fn main() {\n\n let matches = App::new(\"Uncompress gzip, bzip2, xz files\")\n\n 
.version(\"0.1\")\n\n .author(\"Alain Viguier dandyvica@gmail.com\")\n\n .about(r#\"An executable to read compressed files using compression methods defined in the crate\"#)\n\n .arg(\n\n Arg::new(\"file\")\n\n .long_about(\"Name of the file to read.\")\n\n .short('f')\n\n .long(\"file\")\n\n .required(true)\n\n .takes_value(true),\n\n ) .get_matches();\n\n\n\n // get file name\n\n let path = PathBuf::from(matches.value_of(\"file\").unwrap());\n\n let file = File::open(&path).expect(&format!(\"unable to open file {:?}\", &path));\n\n\n\n // get file extension\n\n let extension = path.extension().map(|x| x.to_string_lossy().to_string());\n", "file_path": "src/uncompress.rs", "rank": 25, "score": 58191.608009631906 }, { "content": "/// The main entry point.\n\nfn main() {\n\n //---------------------------------------------------------------------------------------------------\n\n // set up variables\n\n //---------------------------------------------------------------------------------------------------\n\n\n\n // tick time\n\n let now = Instant::now();\n\n\n\n // create a vector of thread handles for keeping track of what we've created and\n\n // wait for them to finish\n\n let mut children_list: Vec<ChildData> = Vec::new();\n\n\n\n // manage arguments from command line\n\n let options = CliOptions::options();\n\n\n\n // store all logfile access errors\n\n let mut access_errors = LogFileAccessErrorList::default();\n\n\n\n //---------------------------------------------------------------------------------------------------\n\n // initialize logger\n", "file_path": "src/clf.rs", "rank": 26, "score": 58191.608009631906 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "src/main.rs", "rank": 27, "score": 58191.608009631906 }, { "content": "/// Used to defined functions to set a precise offset in a file, either being compressed or not.\n\npub trait Seeker {\n\n /// Simulates the `seek`method for all used `BufReader<R>`.\n\n fn set_offset(&mut self, offset: u64) -> AppResult<u64>;\n\n}\n\n\n\nimpl Seeker for BufReader<File> {\n\n #[inline(always)]\n\n fn set_offset(&mut self, offset: u64) -> AppResult<u64> {\n\n let pos = self\n\n .seek(SeekFrom::Start(offset))\n\n .map_err(|e| context!(e, \"error seeking file {:?} for offset {}\", self, offset))?;\n\n Ok(pos)\n\n }\n\n}\n\n\n\n/// Implementing for `R: Read` helps testing wuth `Cursor` type.\n\nimpl<R> Seeker for BufReader<GzDecoder<R>>\n\nwhere\n\n R: Read,\n\n{\n", "file_path": "src/logfile/seeker.rs", "rank": 28, "score": 55502.330402066116 }, { "content": " def initialize(tag, root, clf_path)\n\n @tag = tag\n\n @clf = clf_path\n\n\n\n @snap_file = File.join(root, \"tmp\", \"#{tag}.json\")\n\n @config_file = File.join(root, \"tmp\", \"#{tag}.yml\")\n\n @logfile = File.join(root, \"tmp\", \"#{tag}.log\")\n\n @logfile_gzip = File.join(root, \"tmp\", \"#{tag}.log.gz\")\n\n @tmpfile = File.join(root, \"tmp\", \"#{tag}.txt\")\n\n\n\n # cleanup previous files\n\n File.delete(@logfile) if File.exist?(@logfile)\n\n File.delete(@logfile_gzip) if File.exist?(@logfile_gzip)\n\n File.delete(@snap_file) if File.exist?(@snap_file)\n\n File.delete(@tmpfile) if File.exist?(@tmpfile)\n\n\n\n puts \"executing test case: #{@tag}\"\n\n end\n\n\n", "file_path": "tests/integration/ruby/testcase.rb", "rank": 29, "score": 55403.240916503186 }, { "content": "pub trait Lookup<T> {\n\n fn reader<R: BufRead + Seeker>(\n\n &mut self,\n\n reader: R,\n\n tag: &Tag,\n\n global_options: &GlobalOptions,\n\n ) -> 
AppResult<Vec<ChildData>>;\n\n}\n\n\n\n/// A unit struct to represent a reader which is not calling any script but just scans the logfile and outputs matched lines.\n\npub struct BypassReader;\n\n\n\n/// A unit struct to represent a reader which reads each line, tests for a match and called a callback.\n\npub struct FullReader;\n\n\n\n// this will call the relevant reader\n\n#[derive(Debug, PartialEq)]\n\npub enum ReaderCallType {\n\n BypassReaderCall,\n\n FullReaderCall,\n", "file_path": "src/logfile/lookup.rs", "rank": 30, "score": 52554.04789902897 }, { "content": "// helper functions to exit in case of error\n\npub trait Expect<T> {\n\n fn expect_critical(self, text: &str) -> T;\n\n}\n\n\n\nimpl<T, E: Debug> Expect<T> for std::result::Result<T, E> {\n\n fn expect_critical(self, msg: &str) -> T {\n\n match self {\n\n Ok(inner) => inner,\n\n Err(e) => unwrap_failed(msg, &e),\n\n }\n\n }\n\n}\n", "file_path": "src/misc/extension.rs", "rank": 31, "score": 52554.04789902897 }, { "content": "// utility functions to get the number of seconds from 1/1/1970\n\nfn from_epoch() -> AppResult<Duration> {\n\n SystemTime::now()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .map_err(|e| context!(e, \"duration_since() error\",))\n\n}\n\n\n", "file_path": "src/misc/util.rs", "rank": 32, "score": 49354.273618951505 }, { "content": "/// Manage end of all started processes from clf.\n\nfn wait_children(children_list: Vec<ChildData>) {\n\n // just wait a little for all commands to finish. Otherwise, the last process will not be considered to be finished.\n\n if !children_list.is_empty() {\n\n let wait_timeout = std::time::Duration::from_millis(1000);\n\n thread::sleep(wait_timeout);\n\n }\n\n\n\n // as child can be None in case of Tcp or Domain socket, need to get rid of these\n\n for (i, started_child) in children_list\n\n .iter()\n\n .filter(|x| x.child.is_some())\n\n .enumerate()\n\n {\n\n // get a mutable reference\n\n let mut child = started_child.child.as_ref().unwrap().borrow_mut();\n\n\n\n // save pid & path\n\n let pid = child.id();\n\n let path = &started_child.path;\n\n\n", "file_path": "src/clf.rs", "rank": 33, "score": 45874.0691048235 }, { "content": "// send data through Tcp or Unix stream\n\nfn send_json_data<T: Write, U: Debug>(\n\n args: &Option<Vec<String>>,\n\n mut stream: T,\n\n global_vars: &GlobalVars,\n\n runtime_vars: &RuntimeVars,\n\n first_time: bool,\n\n addr: U,\n\n) -> AppResult<Option<ChildData>> {\n\n // create a dedicated JSON structure\n\n let mut json = match args {\n\n Some(args) => {\n\n if first_time {\n\n json!({\n\n \"args\": &args,\n\n \"global\": global_vars,\n\n \"vars\": runtime_vars\n\n })\n\n } else {\n\n json!({\n\n //\"args\": &args,\n", "file_path": "src/configuration/callback.rs", "rank": 34, "score": 43787.875302356144 }, { "content": "fn unwrap_failed(msg: &str, error: &dyn Debug) -> ! 
{\n\n Nagios::exit_critical(&format!(\"{}, error: {:?}\", msg, error))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::path::PathBuf;\n\n\n\n #[test]\n\n #[cfg(target_family = \"unix\")]\n\n fn is_usable() {\n\n assert!(PathBuf::from(\"foo.txt\").is_usable().is_err());\n\n assert!(PathBuf::from(\"/var/log/foo.txt\").is_usable().is_err());\n\n assert!(PathBuf::from(\"/var/log\").is_usable().is_err());\n\n assert!(PathBuf::from(\"/etc/resolv.conf\").is_usable().is_ok());\n\n }\n\n\n\n #[test]\n\n #[cfg(target_family = \"windows\")]\n", "file_path": "src/misc/extension.rs", "rank": 35, "score": 42817.71964602293 }, { "content": " .set_tag(\"options\", \"runcallback\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .replace_tag(\n\n \"address\",\n\n \"domain\",\n\n \"./tests/integration/tmp/generated.sock\n\n \",\n\n )\n\n .save_as(&tc.config_file);\n\n\n\n // create UDS server\n\n let addr = \"./tests/integration/tmp/generated.sock\";\n\n let _ = std::fs::remove_file(&addr);\n\n\n\n let child = std::thread::spawn(move || {\n\n // create a listener\n\n let listener = std::os::unix::net::UnixListener::bind(addr).unwrap();\n\n match listener.accept() {\n\n Ok((mut socket, _addr)) => {\n\n // set short timeout\n", "file_path": "tests/integration/integration_test.rs", "rank": 36, "score": 42546.51072198936 }, { "content": " .expect(&format!(\"unable to open file {}\", &tc.tmpfile));\n\n assert!(data.contains(&\"tests/integration/tmp/echotcp.log\"));\n\n }\n\n #[cfg(target_family = \"windows\")]\n\n if testcases.is_empty() || testcases.contains(&\"echotcp_win\") {\n\n let mut tc = TestCase::new(\"echotcp_win\", &mut nb_testcases);\n\n Config::from_file(r\".\\tests\\integration\\config\\echotcp_win.yml\")\n\n .set_tag(\"options\", \"runcallback\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let _ = tc.run(&opts, &[\"-d\"]);\n\n\n\n // check resulting file created from running script\n\n let data: String = std::fs::read_to_string(&tc.tmpfile)\n\n .expect(&format!(\"unable to open file {}\", &tc.tmpfile));\n\n assert!(data.contains(&r\"tests\\\\integration\\\\tmp\\\\echotcp_win.log\"));\n\n }\n\n\n\n println!(\"Number of test cases executed: {}\", nb_testcases - 1);\n\n}\n", "file_path": "tests/integration/integration_test.rs", "rank": 37, "score": 42544.80349395162 }, { "content": " .save_as(&tc.config_file);\n\n\n\n let _ = tc.exec(&opts, &[\"--config\", &tc.config_file]);\n\n assert!(PathBuf::from(\"./tests/integration/tmp/snapshot_creation.json\").is_file());\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // list files Unix & Windows\n\n //------------------------------------------------------------------------------------------------\n\n if testcases.is_empty() || testcases.contains(&\"list_files\") {\n\n let mut tc = TestCase::new(\"list_files\", &mut nb_testcases);\n\n tc.multiple_logs();\n\n\n\n #[cfg(target_family = \"unix\")]\n\n Config::from_file(\"./tests/integration/config/list_files.yml\")\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\n\n \"list\",\n\n r#\"[\"find\", \"./tests/integration/tmp\", \"-type\", \"f\", \"-name\", \"list_files.log.*\"]\"#,\n\n )\n", "file_path": "tests/integration/integration_test.rs", "rank": 38, "score": 42544.691190056205 }, { "content": "\n\n // check resulting file created from running script\n\n let data: String = std::fs::read_to_string(&tc.tmpfile)\n\n .expect(&format!(\"unable to open file {}\", &tc.tmpfile));\n\n 
assert!(data.contains(&\"tests/integration/tmp/echodomain.log\"));\n\n }\n\n\n\n // call presecript echo domain and send JSON data\n\n #[cfg(target_family = \"unix\")]\n\n if testcases.is_empty() || testcases.contains(&\"echotcp\") {\n\n let mut tc = TestCase::new(\"echotcp\", &mut nb_testcases);\n\n #[cfg(target_family = \"unix\")]\n\n Config::from_file(\"./tests/integration/config/echotcp.yml\")\n\n .set_tag(\"options\", \"runcallback\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let _ = tc.run(&opts, &[\"-d\"]);\n\n\n\n // check resulting file created from running script\n\n let data: String = std::fs::read_to_string(&tc.tmpfile)\n", "file_path": "tests/integration/integration_test.rs", "rank": 39, "score": 42543.94293994181 }, { "content": " .save_as(&tc.config_file);\n\n #[cfg(target_os = \"windows\")]\n\n Config::from_file(\"./tests/integration/config/list_files.yml\")\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\n\n \"list\",\n\n r#\"['cmd.exe', '/c', 'dir /B /S .\\tests\\integration\\tmp\\list_files.log.*']\"#,\n\n )\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\", \"-r\"]);\n\n\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"list_files.log\");\n\n }\n\n\n\n if testcases.is_empty() || testcases.contains(&\"list_cmd\") {\n\n let mut tc = TestCase::new(\"list_cmd\", &mut nb_testcases);\n\n tc.multiple_logs();\n\n\n\n #[cfg(target_family = \"unix\")]\n", "file_path": "tests/integration/integration_test.rs", "rank": 40, "score": 42542.09270720344 }, { "content": " let mut tc = TestCase::new(\"callback_tcp\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"runcallback\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n\n\n // create UDS server\n\n let addr = \"127.0.0.1:8999\";\n\n\n\n let child = std::thread::spawn(move || {\n\n // create a listener\n\n let listener = std::net::TcpListener::bind(addr).unwrap();\n\n match listener.accept() {\n\n Ok((mut socket, _addr)) => {\n\n // set short timeout\n\n socket\n\n .set_read_timeout(Some(std::time::Duration::new(3, 0)))\n\n .expect(\"Couldn't set read timeout\");\n\n\n\n let mut nb_received = 0;\n", "file_path": "tests/integration/integration_test.rs", "rank": 41, "score": 42541.68940780334 }, { "content": " #[cfg(target_family = \"unix\")]\n\n Config::from_file(\"./tests/integration/config/ok_pattern.yml\")\n\n .set_tag(\"options\", \"runcallback,runifok\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .replace_tag(\"address\", \"script\", \"./target/debug/echovars\")\n\n .set_tag(\"args\", \"['./tests/integration/tmp/runifok.txt', 'arg2']\")\n\n .save_as(&tc.config_file);\n\n #[cfg(target_family = \"windows\")]\n\n Config::from_file(\"./tests/integration/config/ok_pattern.yml\")\n\n .set_tag(\"options\", \"runcallback,runifok\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .replace_tag(\"address\", \"script\", r\".\\target\\debug\\echovars\")\n\n .set_tag(\"args\", r\"['.\\tests\\integration\\tmp\\runifok.txt', 'arg2']\")\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"74\");\n\n jassert!(tc, \"warning_count\", \"73\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 42, "score": 42541.306328879626 }, { "content": " // check retention deletion\n\n if testcases.is_empty() || testcases.contains(&\"retention\") {\n\n let mut tc = TestCase::new(\"retention\", &mut nb_testcases);\n\n\n\n // 
run once\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let _ = tc.run(&opts, &[\"-d\"]);\n\n\n\n // wait a little before calling a second time to test retention\n\n let timeout = std::time::Duration::from_millis(2000);\n\n std::thread::sleep(timeout);\n\n\n\n // now change logfile: reuse previous one\n\n let new_file = \"./tests/integration/tmp/tera.log\";\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &new_file)\n", "file_path": "tests/integration/integration_test.rs", "rank": 43, "score": 42540.18864669163 }, { "content": " //------------------------------------------------------------------------------------------------\n\n TestScenario::prepare();\n\n\n\n // update environnement to include DLL if Windows\n\n #[cfg(target_family = \"windows\")]\n\n {\n\n let path = std::env::var(\"PATH\").expect(\"unable to fetch %PATH%\");\n\n let new_path = format!(r\"{};.\\src\\windows\", path);\n\n std::env::set_var(\"PATH\", new_path);\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // command line flags\n\n //------------------------------------------------------------------------------------------------\n\n\n\n // call help\n\n if testcases.is_empty() || testcases.contains(&\"help\") {\n\n let tc = TestCase::new(\"help\", &mut nb_testcases);\n\n let rc = tc.exec(&opts, &[\"--help\"]);\n\n\n", "file_path": "tests/integration/integration_test.rs", "rank": 44, "score": 42538.98419832227 }, { "content": " .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n\n\n // create UDS server\n\n let addr = \"127.0.0.1:8999\";\n\n\n\n let child = std::thread::spawn(move || {\n\n // create a listener and stop to simulate a listener\n\n let listener = std::net::TcpListener::bind(addr).unwrap();\n\n match listener.accept() {\n\n Ok((mut socket, _addr)) => {\n\n // set short timeout\n\n socket\n\n .set_read_timeout(Some(std::time::Duration::new(3, 0)))\n\n .expect(\"Couldn't set read timeout\");\n\n\n\n // loop to receive data\n\n loop {\n\n let json = JSONStream::get_json_from_stream(&mut socket);\n\n if json.is_err() {\n", "file_path": "tests/integration/integration_test.rs", "rank": 45, "score": 42538.443103192614 }, { "content": " if testcases.is_empty() || testcases.contains(&\"snapshot_creation\") {\n\n let mut tc = TestCase::new(\"snapshot_creation\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n\n\n // run with command line argument\n\n let _ = tc.run(&opts, &[\"-d\"]);\n\n assert!(PathBuf::from(&tc.snap_file).is_file());\n\n\n\n // run without specifying a snapshot on the command line\n\n let _ = tc.exec(&opts, &[\"--config\", &tc.config_file]);\n\n assert!(PathBuf::from(\"./tests/integration/tmp/snapshot_foo.json\").is_file());\n\n\n\n // run without specifying a snapshot on the command line but tag snapshot_file is a directory\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .set_tag(\"snapshot_file\", \"./tests/integration/tmp\")\n", "file_path": "tests/integration/integration_test.rs", "rank": 46, "score": 42538.23747687722 }, { "content": " jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n }\n\n\n\n // fastforward gzipped\n\n if testcases.is_empty() || testcases.contains(&\"fastforward_gzipped\") {\n\n let mut tc = 
TestCase::new(\"fastforward_gzipped\", &mut nb_testcases);\n\n // gzip log\n\n tc.gzip();\n\n\n\n Config::default()\n\n .set_tag(\"options\", \"fastforward\")\n\n .set_tag(\"path\", \"./tests/integration/tmp/fastforward_gzipped.log.gz\")\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"compression\", \"gzip\");\n\n jassert!(tc, \"extension\", \"gz\");\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 47, "score": 42537.949542291135 }, { "content": " Config::from_file(\"./tests/integration/config/list_cmd.yml\")\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\n\n \"cmd\",\n\n \"find ./tests/integration/tmp -type f | grep list_files.log.*\",\n\n )\n\n .save_as(&tc.config_file);\n\n\n\n #[cfg(target_os = \"windows\")]\n\n Config::from_file(\"./tests/integration/config/list_cmd.yml\")\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"cmd\", r\"dir /B /S .\\tests\\integration\\tmp\\list_files.log.*\")\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\", \"-r\"]);\n\n\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"list_files.log\");\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n", "file_path": "tests/integration/integration_test.rs", "rank": 48, "score": 42537.5426637141 }, { "content": " // prescript\n\n #[cfg(target_family = \"unix\")]\n\n if testcases.is_empty() || testcases.contains(&\"prescript\") {\n\n let mut tc = TestCase::new(\"prescript\", &mut nb_testcases);\n\n\n\n // first run\n\n Config::from_file(\"./tests/integration/config/prescript.yml\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n assert_eq!(rc.0, 3);\n\n jassert!(rc, \"UNKNOWN\");\n\n }\n\n\n\n // callback call\n\n #[cfg(target_family = \"unix\")]\n\n if testcases.is_empty() || testcases.contains(&\"callback_domain\") {\n\n let mut tc = TestCase::new(\"callback_domain\", &mut nb_testcases);\n\n Config::default()\n", "file_path": "tests/integration/integration_test.rs", "rank": 49, "score": 42537.357920331604 }, { "content": " if testcases.is_empty() || testcases.contains(&\"ok_pattern\") {\n\n let mut tc = TestCase::new(\"ok_pattern\", &mut nb_testcases);\n\n Config::from_file(\"./tests/integration/config/ok_pattern.yml\")\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"74\");\n\n jassert!(tc, \"warning_count\", \"73\");\n\n jassert!(tc, \"ok_count\", \"1\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n }\n\n\n\n // ok pattern but runifok = true\n\n if testcases.is_empty() || testcases.contains(&\"runifok\") {\n\n let mut tc = TestCase::new(\"runifok\", &mut nb_testcases);\n", "file_path": "tests/integration/integration_test.rs", "rank": 50, "score": 42537.26279875243 }, { "content": " assert!(rc\n\n .1\n\n .contains(\"CRITICAL: (errors:99, warnings:98, unknowns:0)\"));\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // extra variables\n\n //------------------------------------------------------------------------------------------------\n\n //#[cfg(target_family = \"unix\")]\n\n if testcases.is_empty() || 
testcases.contains(&\"extra_vars\") {\n\n let mut tc = TestCase::new(\"extra_vars\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"runcallback,stopat=5\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .replace_tag(\"address\", \"script\", \"./target/debug/echovars\")\n\n .set_tag(\"args\", \"['./tests/integration/tmp/extra_vars.txt', 'arg2']\")\n\n .save_as(&tc.config_file);\n\n let _ = tc.run(\n\n &opts,\n\n &[\n", "file_path": "tests/integration/integration_test.rs", "rank": 51, "score": 42537.252225751654 }, { "content": "use clap::{App, Arg};\n\nuse std::path::PathBuf;\n\n\n\nmod testcase;\n\nuse testcase::*;\n\n\n", "file_path": "tests/integration/integration_test.rs", "rank": 52, "score": 42536.871862789965 }, { "content": " .save_as(&tc.config_file);\n\n let _ = tc.run(&opts, &[]);\n\n\n\n // read whole snapshot file\n\n let snap =\n\n std::fs::read_to_string(&tc.snap_file).expect(\"unable to read JSON for retention\");\n\n\n\n // should contain a single logfile\n\n assert!(snap.contains(\"./tests/integration/tmp/tera.log\"));\n\n assert!(!snap.contains(\"./tests/integration/tmp/retention.log\"));\n\n\n\n // jassert!(tc, \"last_offset\", \"20100\");\n\n // jassert!(tc, \"last_line\", \"201\");\n\n // jassert!(tc, \"critical_count\", \"99\");\n\n // jassert!(tc, \"warning_count\", \"98\");\n\n // jassert!(tc, \"ok_count\", \"0\");\n\n // jassert!(tc, \"exec_count\", \"0\");\n\n // assert_eq!(rc.0, 2);\n\n }\n\n\n", "file_path": "tests/integration/integration_test.rs", "rank": 53, "score": 42536.85262877999 }, { "content": " // exit_msg\n\n //------------------------------------------------------------------------------------------------\n\n if testcases.is_empty() || testcases.contains(&\"exit_msg\") {\n\n let mut tc = TestCase::new(\"exit_msg\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let _ = tc.run(&opts, &[\"-d\"]);\n\n\n\n // run a second time by changing the logfile\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", \"./tests/integration/tmp/exit_msg_2.log\")\n\n .set_tag(\"snapshot_retention\", \"3600\")\n\n .save_as(&tc.config_file);\n\n tc.create_log(Some(\"./tests/integration/tmp/exit_msg_2.log\"), false);\n\n let rc = tc.run(&opts, &[]);\n\n\n\n assert_eq!(rc.0, 2);\n", "file_path": "tests/integration/integration_test.rs", "rank": 54, "score": 42536.8148956996 }, { "content": "\n\n // tera\n\n if testcases.is_empty() || testcases.contains(&\"tera\") {\n\n let mut tc = TestCase::new(\"tera\", &mut nb_testcases);\n\n Config::from_file(\"./tests/integration/config/tera.yml\")\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let context = r#\"{\"path\":\"./tests/integration/tmp/generated.log\", \"format\":\"plain\"}\"#;\n\n let rc = tc.run(&opts, &[\"-d\", \"-x\", context]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"99\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n }\n\n\n", "file_path": "tests/integration/integration_test.rs", "rank": 55, "score": 42536.11232269067 }, { "content": " )\n\n .set_tag(\"path\", &tc.logfile)\n\n .replace_tag(\"address\", \"script\", \"./target/debug/echovars\")\n\n .set_tag(\n\n \"args\",\n\n 
\"['./tests/integration/tmp/script_threshold.txt', 'arg2']\",\n\n )\n\n .save_as(&tc.config_file);\n\n #[cfg(target_family = \"windows\")]\n\n Config::default()\n\n .set_tag(\n\n \"options\",\n\n \"runcallback,criticalthreshold=50,warningthreshold=60\",\n\n )\n\n .set_tag(\"path\", &tc.logfile)\n\n .replace_tag(\"address\", \"script\", \"python.exe\")\n\n .set_tag(\n\n \"args\",\n\n r\"['.\\target\\debug\\echovars', '.\\tests\\integration\\tmp\\script_threshold.txt']\",\n\n )\n", "file_path": "tests/integration/integration_test.rs", "rank": 56, "score": 42535.93852643499 }, { "content": " .set_tag(\"options\", \"runcallback\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .replace_tag(\"address\", \"script\", \"./target/debug/echovars\")\n\n .set_tag(\n\n \"args\",\n\n \"['./tests/integration/tmp/start_script.txt', 'arg2']\",\n\n )\n\n .save_as(&tc.config_file);\n\n #[cfg(target_family = \"windows\")]\n\n Config::default()\n\n .set_tag(\"options\", \"runcallback\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .replace_tag(\"address\", \"script\", r\".\\target\\debug\\echovars\")\n\n .set_tag(\"args\", r\"['.\\tests\\integration\\tmp\\start_script.txt']\")\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"99\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 57, "score": 42535.41143692116 }, { "content": " }\n\n\n\n // successive runs rotation with no save threshold\n\n if testcases.is_empty() || testcases.contains(&\"rotate_nosave_thresholds\") {\n\n let mut tc = TestCase::new(\"rotate_nosave_thresholds\", &mut nb_testcases);\n\n\n\n // first run\n\n Config::default()\n\n .set_tag(\"options\", \"stopat=70\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"6900\");\n\n jassert!(tc, \"last_line\", \"69\");\n\n jassert!(tc, \"critical_count\", \"34\");\n\n jassert!(tc, \"warning_count\", \"34\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n", "file_path": "tests/integration/integration_test.rs", "rank": 58, "score": 42534.63616965924 }, { "content": " jassert!(tc, \"last_line\", \"7\");\n\n jassert!(tc, \"critical_count\", \"4\");\n\n jassert!(tc, \"warning_count\", \"3\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"7\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n\n\n let _res = child.join();\n\n }\n\n\n\n // call presecript echo domain and send JSON data\n\n #[cfg(target_family = \"unix\")]\n\n if testcases.is_empty() || testcases.contains(&\"echodomain\") {\n\n let mut tc = TestCase::new(\"echodomain\", &mut nb_testcases);\n\n Config::from_file(\"./tests/integration/config/echodomain.yml\")\n\n .set_tag(\"options\", \"runcallback\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let _ = tc.run(&opts, &[\"-d\"]);\n", "file_path": "tests/integration/integration_test.rs", "rank": 59, "score": 42534.530325531 }, { "content": "\n\n // successive runs rotation with save threshold\n\n if testcases.is_empty() || testcases.contains(&\"rotate_save_thresholds\") {\n\n let mut tc = TestCase::new(\"rotate_save_thresholds\", &mut nb_testcases);\n\n\n\n // first run\n\n Config::default()\n\n .set_tag(\"options\", \"stopat=70\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, 
&[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"6900\");\n\n jassert!(tc, \"last_line\", \"69\");\n\n jassert!(tc, \"critical_count\", \"34\");\n\n jassert!(tc, \"warning_count\", \"34\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 60, "score": 42534.41947110085 }, { "content": " jassert!(tc, \"critical_count\", \"0\");\n\n jassert!(tc, \"warning_count\", \"0\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 0);\n\n jassert!(rc, \"OK\");\n\n }\n\n\n\n // logfile missing\n\n if testcases.is_empty() || testcases.contains(&\"logfilemissing\") {\n\n let mut tc = TestCase::new(\"logfilemissing\", &mut nb_testcases);\n\n\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", \"./tmp/my_foo_file\")\n\n .set_tag(\"logfilemissing\", \"critical\")\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 61, "score": 42534.22254068145 }, { "content": " .get_matches();\n\n\n\n let mut opts = Options::default();\n\n\n\n opts.mode = matches.value_of_t(\"mode\").unwrap_or(Target::debug);\n\n opts.verbose = matches.is_present(\"verbose\");\n\n opts.clf = matches\n\n .value_of_t(\"clf\")\n\n .unwrap_or(opts.mode.path().to_string());\n\n\n\n let mut testcases: Vec<&str> = Vec::new();\n\n if matches.is_present(\"testcase\") {\n\n testcases = matches.values_of(\"testcase\").unwrap().collect();\n\n }\n\n\n\n //println!(\"options={:?}\", opts);\n\n let mut nb_testcases: u16 = 1;\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // prepare run by creating necessary directories if necessary\n", "file_path": "tests/integration/integration_test.rs", "rank": 62, "score": 42534.09011753394 }, { "content": " jassert!(tc, \"ok_count\", \"1\");\n\n jassert!(tc, \"exec_count\", \"198\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n\n\n // check reuslting file created from running script\n\n let data: String = std::fs::read_to_string(&tc.tmpfile)\n\n .expect(&format!(\"unable to open file {}\", &tc.tmpfile));\n\n assert_eq!(data.chars().filter(|c| *c == '\\n').count(), 198);\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // thresholds\n\n //------------------------------------------------------------------------------------------------\n\n // criticalthreshold\n\n if testcases.is_empty() || testcases.contains(&\"thresholds\") {\n\n let mut tc = TestCase::new(\"thresholds\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"criticalthreshold=50,warningthreshold=60\")\n\n .set_tag(\"path\", &tc.logfile)\n", "file_path": "tests/integration/integration_test.rs", "rank": 63, "score": 42533.18104313618 }, { "content": " jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"197\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n\n\n // check reuslting file created from running script\n\n let data: String = std::fs::read_to_string(&tc.tmpfile)\n\n .expect(&format!(\"unable to open file {}\", &tc.tmpfile));\n\n assert_eq!(data.chars().filter(|c| *c == '\\n').count(), 197);\n\n }\n\n\n\n // run a script with a 
threshold\n\n if testcases.is_empty() || testcases.contains(&\"script_threshold\") {\n\n let mut tc = TestCase::new(\"script_threshold\", &mut nb_testcases);\n\n #[cfg(target_family = \"unix\")]\n\n Config::default()\n\n .set_tag(\n\n \"options\",\n\n \"runcallback,criticalthreshold=50,warningthreshold=60\",\n", "file_path": "tests/integration/integration_test.rs", "rank": 64, "score": 42532.83187999753 }, { "content": " // exclude\n\n if testcases.is_empty() || testcases.contains(&\"exclude\") {\n\n let mut tc = TestCase::new(\"exclude\", &mut nb_testcases);\n\n\n\n Config::from_file(\"./tests/integration/config/exclude.yml\")\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"0\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 1);\n\n }\n\n\n\n // truncate\n", "file_path": "tests/integration/integration_test.rs", "rank": 65, "score": 42532.517925260654 }, { "content": "\n\n // utf8\n\n if testcases.is_empty() || testcases.contains(&\"utf8\") {\n\n let mut tc = TestCase::new(\"utf8\", &mut nb_testcases);\n\n tc.create_log_utf8();\n\n\n\n Config::from_file(\"./tests/integration/config/utf8.yml\")\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"26128\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"100\");\n\n jassert!(tc, \"warning_count\", \"0\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n }\n", "file_path": "tests/integration/integration_test.rs", "rank": 66, "score": 42532.338406153605 }, { "content": " if testcases.is_empty() || testcases.contains(&\"truncate\") {\n\n let mut tc = TestCase::new(\"truncate\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"truncate=10\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"0\");\n\n jassert!(tc, \"warning_count\", \"0\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 0);\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // test snapshot file creation options\n\n //------------------------------------------------------------------------------------------------\n", "file_path": "tests/integration/integration_test.rs", "rank": 67, "score": 42531.667805111676 }, { "content": " .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"49\");\n\n jassert!(tc, \"warning_count\", \"38\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"87\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n }\n\n\n\n // stop at, no savethresholds\n\n if testcases.is_empty() || testcases.contains(&\"stopat\") {\n\n let mut tc = TestCase::new(\"stopat\", &mut nb_testcases);\n\n Config::default()\n\n 
.set_tag(\"options\", \"stopat=70\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n", "file_path": "tests/integration/integration_test.rs", "rank": 68, "score": 42531.41169056817 }, { "content": " .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"49\");\n\n jassert!(tc, \"warning_count\", \"38\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n }\n\n\n\n // warningthreshold\n\n if testcases.is_empty() || testcases.contains(&\"huge_thresholds\") {\n\n let mut tc = TestCase::new(\"huge_thresholds\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"criticalthreshold=1500,warningthreshold=1500\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n", "file_path": "tests/integration/integration_test.rs", "rank": 69, "score": 42531.2922342577 }, { "content": "\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", \"./tmp/my_foo_file\")\n\n .set_tag(\"logfilemissing\", \"warning\")\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n assert_eq!(rc.0, 1);\n\n jassert!(rc, \"warning\");\n\n\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", \"./tmp/my_foo_file\")\n\n .set_tag(\"logfilemissing\", \"unknown\")\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n assert_eq!(rc.0, 3);\n\n jassert!(rc, \"UNKNOWN\");\n\n }\n\n\n", "file_path": "tests/integration/integration_test.rs", "rank": 70, "score": 42531.17940446663 }, { "content": " //------------------------------------------------------------------------------------------------\n\n // dummy search\n\n //------------------------------------------------------------------------------------------------\n\n // ascii\n\n if testcases.is_empty() || testcases.contains(&\"dummy\") {\n\n let mut tc = TestCase::new(\"dummy\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"99\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n }\n", "file_path": "tests/integration/integration_test.rs", "rank": 71, "score": 42530.81473611877 }, { "content": " jassert!(tc, \"extension\", \"log\");\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"99\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n }\n\n\n\n // fastforward\n\n if testcases.is_empty() || testcases.contains(&\"fastforward\") {\n\n let mut tc = TestCase::new(\"fastforward\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"fastforward\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"compression\", \"uncompressed\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 72, "score": 42530.43547329935 }, { "content": " socket\n\n 
.set_read_timeout(Some(std::time::Duration::new(3, 0)))\n\n .expect(\"Couldn't set read timeout\");\n\n\n\n let mut nb_received = 0;\n\n\n\n // loop to receive data\n\n loop {\n\n let json = JSONStream::get_json_from_stream(&mut socket);\n\n if json.is_err() {\n\n break;\n\n }\n\n\n\n nb_received += 1;\n\n\n\n let j = json.unwrap();\n\n\n\n // all asserts here\n\n if j.args.is_some() {\n\n assert_eq!(j.args.unwrap(), &[\"arg1\", \"arg2\", \"arg3\"]);\n", "file_path": "tests/integration/integration_test.rs", "rank": 73, "score": 42530.35889914888 }, { "content": " jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n }\n\n\n\n // successive runs simulation with save threshold\n\n if testcases.is_empty() || testcases.contains(&\"successive_runs_save_thresholds\") {\n\n let mut tc = TestCase::new(\"successive_runs_save_thresholds\", &mut nb_testcases);\n\n\n\n // first run\n\n Config::default()\n\n .set_tag(\"options\", \"savethresholds\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 74, "score": 42530.255164296315 }, { "content": " jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"65\");\n\n jassert!(tc, \"warning_count\", \"64\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n }\n\n\n\n // successive runs simulation with no save threshold\n\n if testcases.is_empty() || testcases.contains(&\"successive_runs_nosave_thresholds\") {\n\n let mut tc = TestCase::new(\"successive_runs_nosave_thresholds\", &mut nb_testcases);\n\n\n\n // first run\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\"]);\n", "file_path": "tests/integration/integration_test.rs", "rank": 75, "score": 42530.0237352376 }, { "content": " assert_eq!(rc, 0);\n\n }\n\n\n\n // missing argument\n\n if testcases.is_empty() || testcases.contains(&\"missing_argument\") {\n\n let tc = TestCase::new(\"missing_argument\", &mut nb_testcases);\n\n let rc = tc.exec(&opts, &[\"--syntax-check\"]);\n\n\n\n assert_eq!(rc, 2);\n\n }\n\n\n\n // good YAML syntax\n\n if testcases.is_empty() || testcases.contains(&\"good_syntax\") {\n\n let tc = TestCase::new(\"good_syntax\", &mut nb_testcases);\n\n let rc = tc.exec(\n\n &opts,\n\n &[\n\n \"--config\",\n\n \"./tests/integration/config/generated.yml\",\n\n \"--syntax-check\",\n", "file_path": "tests/integration/integration_test.rs", "rank": 76, "score": 42529.996199195026 }, { "content": " ],\n\n );\n\n\n\n assert_eq!(rc, 0);\n\n }\n\n\n\n // bad YAML syntax\n\n if testcases.is_empty() || testcases.contains(&\"bad_syntax\") {\n\n let tc = TestCase::new(\"bad_syntax\", &mut nb_testcases);\n\n let rc = tc.exec(\n\n &opts,\n\n &[\n\n \"--config\",\n\n \"./tests/intergration/config/bad_syntax.yml\",\n\n \"--syntax-check\",\n\n ],\n\n );\n\n\n\n assert_eq!(rc, 2);\n\n }\n", "file_path": "tests/integration/integration_test.rs", "rank": 77, "score": 42529.84314441398 }, { "content": " }\n\n });\n\n\n\n // wait a little before calling\n\n let ten_millis = 
std::time::Duration::from_millis(10);\n\n std::thread::sleep(ten_millis);\n\n\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n\n\n let _res = child.join();\n\n }\n\n\n\n // error during callback exec\n\n #[cfg(target_os = \"linux\")]\n\n if testcases.is_empty() || testcases.contains(&\"callback_error\") {\n\n let mut tc = TestCase::new(\"callback_error\", &mut nb_testcases);\n\n Config::default()\n\n .set_tag(\"options\", \"runcallback\")\n", "file_path": "tests/integration/integration_test.rs", "rank": 78, "score": 42529.606278106 }, { "content": "\n\n // show options\n\n if testcases.is_empty() || testcases.contains(&\"show_options\") {\n\n let tc = TestCase::new(\"show_options\", &mut nb_testcases);\n\n let rc = tc.exec(\n\n &opts,\n\n &[\n\n \"--config\",\n\n \"./tests/integration/config/generated.yml\",\n\n \"--syntax-check\",\n\n \"--show-options\",\n\n ],\n\n );\n\n\n\n assert_eq!(rc, 0);\n\n }\n\n\n\n // create or open log error\n\n #[cfg(target_family = \"unix\")]\n\n if testcases.is_empty() || testcases.contains(&\"show_options\") {\n", "file_path": "tests/integration/integration_test.rs", "rank": 79, "score": 42529.15009542096 }, { "content": " \"-d\",\n\n \"--var\",\n\n \"CLF_EXTRA_VAR1:value1\",\n\n \"CLF_EXTRA_VAR2:value2\",\n\n ],\n\n );\n\n\n\n // check reuslting file created from running script\n\n let data: String = std::fs::read_to_string(&tc.tmpfile)\n\n .expect(&format!(\"unable to open file {}\", &tc.tmpfile));\n\n assert!(data.contains(&\"CLF_EXTRA_VAR1\"));\n\n assert!(data.contains(&\"CLF_EXTRA_VAR2\"));\n\n assert!(data.contains(&\"value1\"));\n\n assert!(data.contains(&\"value2\"));\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // ok pattern\n\n //------------------------------------------------------------------------------------------------\n\n // ok pattern but runifok = false\n", "file_path": "tests/integration/integration_test.rs", "rank": 80, "score": 42528.883730284026 }, { "content": "\n\n // simulate a logfile rotation\n\n tc.rotate();\n\n\n\n // second run\n\n Config::default()\n\n .set_tag(\"options\", \"savethresholds,protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[]);\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"198\");\n\n jassert!(tc, \"warning_count\", \"196\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n }\n\n\n", "file_path": "tests/integration/integration_test.rs", "rank": 81, "score": 42528.65498443128 }, { "content": " jassert!(rc, \"CRITICAL\");\n\n\n\n // simulate a logfile rotation\n\n tc.rotate();\n\n\n\n // second run\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[]);\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"99\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n }\n", "file_path": "tests/integration/integration_test.rs", "rank": 82, "score": 42528.55008831817 }, { "content": " let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", 
\"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"0\");\n\n jassert!(tc, \"warning_count\", \"0\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 0);\n\n jassert!(rc, \"OK\");\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // run scripts\n\n //------------------------------------------------------------------------------------------------\n\n // run a script\n\n if testcases.is_empty() || testcases.contains(&\"start_script\") {\n\n let mut tc = TestCase::new(\"start_script\", &mut nb_testcases);\n\n #[cfg(target_family = \"unix\")]\n\n Config::default()\n", "file_path": "tests/integration/integration_test.rs", "rank": 83, "score": 42528.48016648271 }, { "content": " let tc = TestCase::new(\"log_error\", &mut nb_testcases);\n\n let rc = tc.exec(\n\n &opts,\n\n &[\n\n \"--config\",\n\n \"./tests/integration/config/generated.yml\",\n\n \"--log\",\n\n \"/bin/foo.log\",\n\n ],\n\n );\n\n\n\n assert_eq!(rc, 2);\n\n }\n\n\n\n //------------------------------------------------------------------------------------------------\n\n // genuine search with fastforward\n\n //------------------------------------------------------------------------------------------------\n\n // rewind\n\n if testcases.is_empty() || testcases.contains(&\"rewind\") {\n\n let mut tc = TestCase::new(\"rewind\", &mut nb_testcases);\n", "file_path": "tests/integration/integration_test.rs", "rank": 84, "score": 42528.04451384616 }, { "content": " //println!(\"json={:#?}\", j);\n\n }\n\n }\n\n Err(e) => panic!(\"couldn't get client: {:?}\", e),\n\n }\n\n });\n\n\n\n // wait a little before calling\n\n let ten_millis = std::time::Duration::from_millis(10);\n\n std::thread::sleep(ten_millis);\n\n\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n\n\n let _res = child.join();\n\n }\n\n\n\n // callback call\n\n if testcases.is_empty() || testcases.contains(&\"callback_tcp\") {\n", "file_path": "tests/integration/integration_test.rs", "rank": 85, "score": 42526.53356894253 }, { "content": " .vars\n\n .get(\"CLF_LINE\")\n\n .unwrap()\n\n .as_str()\n\n .contains(\"generated for tests\"));\n\n\n\n let line_number: u64 = j.vars.get(\"CLF_LINE_NUMBER\").unwrap().as_u64();\n\n assert!(line_number <= 201);\n\n\n\n let cg1: usize = j.vars.get(\"CLF_CG_1\").unwrap().as_str().parse().unwrap();\n\n assert!(cg1 <= 201);\n\n\n\n let cg2: usize = j.vars.get(\"CLF_CG_2\").unwrap().as_str().parse().unwrap();\n\n assert!(cg2 <= 99999);\n\n assert!(cg2 >= 10000);\n\n\n\n //println!(\"json={:#?}\", j);\n\n }\n\n }\n\n Err(e) => panic!(\"couldn't get client: {:?}\", e),\n", "file_path": "tests/integration/integration_test.rs", "rank": 86, "score": 42525.14118891682 }, { "content": " .long_about(\"If set, show clf standard output when running test cases\")\n\n .takes_value(false),\n\n )\n\n .arg(\n\n Arg::new(\"clf\")\n\n .short('c')\n\n .long(\"clf\")\n\n .required(false)\n\n .long_about(\"Path of the clf executable. Defaults to ./target/debug/clf or ./target/release/clf\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::new(\"testcase\")\n\n .short('t')\n\n .long(\"testcase\")\n\n .required(false)\n\n .long_about(\"A list of testcases to execute. 
If not specified, all testcases are run\")\n\n .multiple(true)\n\n .takes_value(true),\n\n )\n", "file_path": "tests/integration/integration_test.rs", "rank": 87, "score": 42524.72047347088 }, { "content": " let rc = tc.run(&opts, &[\"-d\"]);\n\n\n\n jassert!(tc, \"last_offset\", \"6900\");\n\n jassert!(tc, \"last_line\", \"69\");\n\n jassert!(tc, \"critical_count\", \"34\");\n\n jassert!(tc, \"warning_count\", \"34\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n\n\n // as if we started again\n\n Config::default()\n\n .set_tag(\"options\", \"protocol\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[]);\n\n\n\n jassert!(tc, \"start_offset\", \"6900\");\n\n jassert!(tc, \"start_line\", \"69\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 88, "score": 42524.48392603818 }, { "content": " Config::default()\n\n .set_tag(\"options\", \"rewind\")\n\n .set_tag(\"path\", &tc.logfile)\n\n .save_as(&tc.config_file);\n\n let rc = tc.run(&opts, &[\"-d\", \"-r\"]);\n\n\n\n jassert!(tc, \"compression\", \"uncompressed\");\n\n jassert!(tc, \"extension\", \"log\");\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"99\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n\n\n // run another time\n\n let rc = tc.run(&opts, &[]);\n\n\n\n jassert!(tc, \"compression\", \"uncompressed\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 89, "score": 42524.38543688857 }, { "content": " break;\n\n }\n\n let j = json.unwrap();\n\n\n\n let line_number: u64 = j.vars.get(\"CLF_LINE_NUMBER\").unwrap().as_u64();\n\n if line_number == 7 {\n\n break;\n\n };\n\n }\n\n }\n\n Err(e) => panic!(\"couldn't get client: {:?}\", e),\n\n }\n\n });\n\n\n\n // wait a little before calling\n\n let timeout = std::time::Duration::from_millis(100);\n\n std::thread::sleep(timeout);\n\n\n\n let rc = tc.run(&opts, &[\"-d\"]);\n\n jassert!(tc, \"last_offset\", \"700\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 90, "score": 42523.29749844561 }, { "content": "\n\n // loop to receive data\n\n loop {\n\n let json = JSONStream::get_json_from_stream(&mut socket);\n\n if json.is_err() {\n\n break;\n\n }\n\n\n\n nb_received += 1;\n\n\n\n let j = json.unwrap();\n\n\n\n // all asserts here\n\n // all asserts here\n\n if j.args.is_some() {\n\n assert_eq!(j.args.unwrap(), &[\"arg1\", \"arg2\", \"arg3\"]);\n\n }\n\n\n\n // globals are only sent once\n\n if nb_received == 1 {\n", "file_path": "tests/integration/integration_test.rs", "rank": 91, "score": 42522.45083743881 }, { "content": " }\n\n\n\n assert_eq!(j.vars.get(\"CLF_NB_CG\").unwrap().as_u64(), 3);\n\n assert!(j\n\n .vars\n\n .get(\"CLF_LINE\")\n\n .unwrap()\n\n .as_str()\n\n .contains(\"generated for tests\"));\n\n\n\n let line_number: u64 = j.vars.get(\"CLF_LINE_NUMBER\").unwrap().as_u64();\n\n assert!(line_number <= 201);\n\n\n\n let cg1: usize = j.vars.get(\"CLF_CG_1\").unwrap().as_str().parse().unwrap();\n\n assert!(cg1 <= 201);\n\n\n\n let cg2: usize = j.vars.get(\"CLF_CG_2\").unwrap().as_str().parse().unwrap();\n\n assert!(cg2 <= 99999);\n\n assert!(cg2 >= 10000);\n\n\n", "file_path": "tests/integration/integration_test.rs", "rank": 92, "score": 42522.02600625238 }, { "content": " jassert!(tc, 
\"extension\", \"log\");\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"0\");\n\n jassert!(tc, \"warning_count\", \"0\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 0);\n\n\n\n // run another time\n\n tc.grow();\n\n let rc = tc.run(&opts, &[]);\n\n\n\n jassert!(tc, \"compression\", \"uncompressed\");\n\n jassert!(tc, \"extension\", \"log\");\n\n jassert!(tc, \"last_offset\", \"40200\");\n\n jassert!(tc, \"last_line\", \"402\");\n\n jassert!(tc, \"critical_count\", \"99\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 93, "score": 42519.145695401814 }, { "content": " assert_eq!(\n\n j.global.as_ref().unwrap().get(\"CLF_firstname\").unwrap(),\n\n \"Al\"\n\n );\n\n assert_eq!(\n\n j.global.as_ref().unwrap().get(\"CLF_lastname\").unwrap(),\n\n \"Pacino\"\n\n );\n\n assert_eq!(\n\n j.global.as_ref().unwrap().get(\"CLF_profession\").unwrap(),\n\n \"actor\"\n\n );\n\n assert_eq!(\n\n j.global.as_ref().unwrap().get(\"CLF_city\").unwrap(),\n\n \"Los Angeles\"\n\n );\n\n }\n\n\n\n assert_eq!(j.vars.get(\"CLF_NB_CG\").unwrap().as_u64(), 3);\n\n assert!(j\n", "file_path": "tests/integration/integration_test.rs", "rank": 94, "score": 42519.145695401814 }, { "content": "\n\n jassert!(tc, \"last_offset\", \"20100\");\n\n jassert!(tc, \"last_line\", \"201\");\n\n jassert!(tc, \"critical_count\", \"99\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n\n\n // simulate a logfile growth\n\n tc.grow();\n\n\n\n // second run\n\n let rc = tc.run(&opts, &[]);\n\n // jassert!(tc, \"start_offset\", \"20100\");\n\n // jassert!(tc, \"start_line\", \"201\");\n\n jassert!(tc, \"last_offset\", \"40200\");\n\n jassert!(tc, \"last_line\", \"402\");\n\n jassert!(tc, \"critical_count\", \"99\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 95, "score": 42519.145695401814 }, { "content": " }\n\n\n\n // globals are only sent once\n\n if nb_received == 1 {\n\n assert_eq!(\n\n j.global.as_ref().unwrap().get(\"CLF_firstname\").unwrap(),\n\n \"Al\"\n\n );\n\n assert_eq!(\n\n j.global.as_ref().unwrap().get(\"CLF_lastname\").unwrap(),\n\n \"Pacino\"\n\n );\n\n assert_eq!(\n\n j.global.as_ref().unwrap().get(\"CLF_profession\").unwrap(),\n\n \"actor\"\n\n );\n\n assert_eq!(\n\n j.global.as_ref().unwrap().get(\"CLF_city\").unwrap(),\n\n \"Los Angeles\"\n\n );\n", "file_path": "tests/integration/integration_test.rs", "rank": 96, "score": 42519.145695401814 }, { "content": " jassert!(tc, \"critical_count\", \"99\");\n\n jassert!(tc, \"warning_count\", \"98\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n\n\n\n // simulate a logfile growth\n\n tc.grow();\n\n\n\n // second run\n\n let rc = tc.run(&opts, &[]);\n\n jassert!(tc, \"last_offset\", \"40200\");\n\n jassert!(tc, \"last_line\", \"402\");\n\n jassert!(tc, \"critical_count\", \"198\");\n\n jassert!(tc, \"warning_count\", \"196\");\n\n jassert!(tc, \"ok_count\", \"0\");\n\n jassert!(tc, \"exec_count\", \"0\");\n\n assert_eq!(rc.0, 2);\n\n jassert!(rc, \"CRITICAL\");\n", "file_path": "tests/integration/integration_test.rs", "rank": 97, "score": 42519.145695401814 }, { "content": "# a class for one 
testcase\n\nclass TestCase\n\n # input is a hash defining the testcase\n\n # hask key is the tc tag\n\n attr_reader :config_file\n\n\n", "file_path": "tests/integration/ruby/testcase.rb", "rank": 98, "score": 40194.45647501101 }, { "content": "//! The main module containing all necessary structures and traits for reading and searching\n\n//! a logfile for patterns.\n\n#[macro_use]\n\npub mod callback;\n\npub mod archive;\n\npub mod config;\n\npub mod global;\n\npub mod logfiledef;\n\npub mod logsource;\n\npub mod options;\n\npub mod pattern;\n\npub mod script;\n\npub mod search;\n\npub mod tag;\n\npub mod vars;\n", "file_path": "src/configuration/mod.rs", "rank": 99, "score": 36389.16250619563 } ]
Rust
src/output.rs
tjbell/tcount
535a66463229d97845cfed29031ea2528a4c2f72
use crate::count::Counts; use crate::language::Language; use crate::query::{Query, QueryKind}; use prettytable::{format, Cell, Row, Table}; use regex::Regex; use std::fmt::Display; use std::format; use std::str::FromStr; #[derive(Debug)] pub enum Format { Table, CSV, } impl FromStr for Format { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "table" => Ok(Format::Table), "csv" => Ok(Format::CSV), _ => Err(format!("\"{}\" is not supported. Use one of table|csv", s)), } } } pub fn format_builder() -> format::FormatBuilder { format::FormatBuilder::new() .separators( &[format::LinePosition::Top], format::LineSeparator::new('─', '─', '─', '─'), ) .separators( &[format::LinePosition::Title], format::LineSeparator::new('─', '─', '│', '│'), ) .separators( &[format::LinePosition::Bottom], format::LineSeparator::new('─', '─', '─', '─'), ) .padding(1, 1) } #[inline] fn title_cell(content: &str) -> Cell { Cell::new(content).style_spec("b") } #[inline] fn label_cell(label: &str) -> Cell { Cell::new(label).style_spec("li") } #[inline] fn count_cell(count: u64) -> Cell { Cell::new(&count.to_string()).style_spec("r") } #[inline] fn generic_cell(s: impl Display) -> Cell { Cell::new(&s.to_string()).style_spec("l") } pub fn print( format: &Format, counts: Vec<(String, Counts)>, totals: Option<Counts>, kinds: &Vec<String>, kind_patterns: &Vec<Regex>, queries: &Vec<Query>, ) { let mut table = Table::new(); table.set_format(format_builder().build()); let mut titles = Vec::with_capacity(3 + kinds.len() + kind_patterns.len() + queries.len()); titles.push(title_cell("Group")); titles.push(title_cell("Files")); titles.push(title_cell("Tokens")); kinds .iter() .for_each(|kind| titles.push(title_cell(&format!("Kind({})", kind)))); kind_patterns.iter().for_each(|kind_pat| { titles.push(title_cell(&format!("Pattern({})", kind_pat.to_string()))) }); queries.iter().for_each(|query| match &query.kind { QueryKind::Match => titles.push(title_cell(&format!("Query({})", query.name))), QueryKind::Captures(names) => names.iter().for_each(|name| { titles.push(title_cell(&format!("Query({}@{})", query.name, name))); }), }); table.set_titles(Row::new(titles)); counts .iter() .chain( { if let Some(totals) = totals { vec![(String::from("TOTALS"), totals)] } else { vec![] } } .iter(), ) .map(|(label, count)| { let mut cols = Vec::with_capacity(3 + kinds.len() + kind_patterns.len() + queries.len()); cols.push(label_cell(&label.to_string())); cols.push(count_cell(count.nfiles)); cols.push(count_cell(count.ntokens)); count.nkinds.iter().for_each(|n| cols.push(count_cell(*n))); count .nkind_patterns .iter() .for_each(|n| cols.push(count_cell(*n))); count .nqueries .iter() .for_each(|n| cols.push(count_cell(*n))); cols }) .for_each(|row| { table.add_row(Row::new(row)); }); match format { Format::Table => { table.printstd(); } Format::CSV => match table.to_csv(std::io::stdout()) { Ok(_) => {} Err(err) => eprintln!("{}", err), }, } } pub fn print_languages(langs: Vec<(&Language, Vec<String>, &Vec<String>)>) { let mut table = Table::new(); table.set_format(format_builder().build()); let mut titles = Vec::with_capacity(3); titles.push(title_cell("Language")); titles.push(title_cell("Extensions")); titles.push(title_cell("Query Dir Name")); table.set_titles(Row::new(titles)); langs.into_iter().for_each(|(lang, exts, dirs)| { let mut cols = Vec::new(); cols.push(label_cell(&lang.to_string())); cols.push(generic_cell(exts.join(","))); cols.push(generic_cell(dirs.join(","))); table.add_row(Row::new(cols)); }); 
table.printstd(); }
use crate::count::Counts; use crate::language::Language; use crate::query::{Query, QueryKind}; use prettytable::{format, Cell, Row, Table}; use regex::Regex; use std::fmt::Display; use std::format; use std::str::FromStr; #[derive(Debug)] pub enum Format { Table, CSV, } impl FromStr for Format { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "table" => Ok(Format::Table), "csv" => Ok(Format::CSV), _ => Err(format!("\"{}\" is not supported. Use one of table|csv", s)), } } }
#[inline] fn title_cell(content: &str) -> Cell { Cell::new(content).style_spec("b") } #[inline] fn label_cell(label: &str) -> Cell { Cell::new(label).style_spec("li") } #[inline] fn count_cell(count: u64) -> Cell { Cell::new(&count.to_string()).style_spec("r") } #[inline] fn generic_cell(s: impl Display) -> Cell { Cell::new(&s.to_string()).style_spec("l") } pub fn print( format: &Format, counts: Vec<(String, Counts)>, totals: Option<Counts>, kinds: &Vec<String>, kind_patterns: &Vec<Regex>, queries: &Vec<Query>, ) { let mut table = Table::new(); table.set_format(format_builder().build()); let mut titles = Vec::with_capacity(3 + kinds.len() + kind_patterns.len() + queries.len()); titles.push(title_cell("Group")); titles.push(title_cell("Files")); titles.push(title_cell("Tokens")); kinds .iter() .for_each(|kind| titles.push(title_cell(&format!("Kind({})", kind)))); kind_patterns.iter().for_each(|kind_pat| { titles.push(title_cell(&format!("Pattern({})", kind_pat.to_string()))) }); queries.iter().for_each(|query| match &query.kind { QueryKind::Match => titles.push(title_cell(&format!("Query({})", query.name))), QueryKind::Captures(names) => names.iter().for_each(|name| { titles.push(title_cell(&format!("Query({}@{})", query.name, name))); }), }); table.set_titles(Row::new(titles)); counts .iter() .chain( { if let Some(totals) = totals { vec![(String::from("TOTALS"), totals)] } else { vec![] } } .iter(), ) .map(|(label, count)| { let mut cols = Vec::with_capacity(3 + kinds.len() + kind_patterns.len() + queries.len()); cols.push(label_cell(&label.to_string())); cols.push(count_cell(count.nfiles)); cols.push(count_cell(count.ntokens)); count.nkinds.iter().for_each(|n| cols.push(count_cell(*n))); count .nkind_patterns .iter() .for_each(|n| cols.push(count_cell(*n))); count .nqueries .iter() .for_each(|n| cols.push(count_cell(*n))); cols }) .for_each(|row| { table.add_row(Row::new(row)); }); match format { Format::Table => { table.printstd(); } Format::CSV => match table.to_csv(std::io::stdout()) { Ok(_) => {} Err(err) => eprintln!("{}", err), }, } } pub fn print_languages(langs: Vec<(&Language, Vec<String>, &Vec<String>)>) { let mut table = Table::new(); table.set_format(format_builder().build()); let mut titles = Vec::with_capacity(3); titles.push(title_cell("Language")); titles.push(title_cell("Extensions")); titles.push(title_cell("Query Dir Name")); table.set_titles(Row::new(titles)); langs.into_iter().for_each(|(lang, exts, dirs)| { let mut cols = Vec::new(); cols.push(label_cell(&lang.to_string())); cols.push(generic_cell(exts.join(","))); cols.push(generic_cell(dirs.join(","))); table.add_row(Row::new(cols)); }); table.printstd(); }
pub fn format_builder() -> format::FormatBuilder { format::FormatBuilder::new() .separators( &[format::LinePosition::Top], format::LineSeparator::new('─', '─', '─', '─'), ) .separators( &[format::LinePosition::Title], format::LineSeparator::new('─', '─', '│', '│'), ) .separators( &[format::LinePosition::Bottom], format::LineSeparator::new('─', '─', '─', '─'), ) .padding(1, 1) }
function_block-full_function
[]
Rust
src/input/touch_controls.rs
khang06/doukutsu-rs
8e0fb80c8cdf59883c9e6ec1c6823b4f958949ed
use ggez::{Context, GameResult}; use winit::event::TouchPhase; use crate::common::Rect; use crate::engine_constants::EngineConstants; use crate::texture_set::TextureSet; #[derive(Copy, Clone, PartialEq, Eq)] pub enum TouchControlType { None, Dialog, Controls, } #[derive(Copy, Clone)] pub struct TouchPoint { id: u64, touch_id: u64, position: (f64, f64), last_position: (f64, f64), } pub struct TouchControls { pub control_type: TouchControlType, pub points: Vec<TouchPoint>, pub interact_icon: bool, touch_id_counter: u64, clicks: Vec<TouchPoint>, } impl TouchControls { pub fn new() -> TouchControls { TouchControls { control_type: TouchControlType::None, touch_id_counter: 0, interact_icon: false, points: Vec::with_capacity(8), clicks: Vec::with_capacity(8), } } pub fn process_winit_event(&mut self, scale: f32, touch: winit::event::Touch) { match touch.phase { TouchPhase::Started | TouchPhase::Moved => { if let Some(point) = self.points.iter_mut().find(|p| p.id == touch.id) { point.last_position = point.position; point.position = (touch.location.x / scale as f64, touch.location.y / scale as f64); } else { self.touch_id_counter = self.touch_id_counter.wrapping_add(1); let point = TouchPoint { id: touch.id, touch_id: self.touch_id_counter, position: (touch.location.x / scale as f64, touch.location.y / scale as f64), last_position: (0.0, 0.0), }; self.points.push(point); if touch.phase == TouchPhase::Started { self.clicks.push(point); } } } TouchPhase::Ended | TouchPhase::Cancelled => { self.points.retain(|p| p.id != touch.id); self.clicks.retain(|p| p.id != touch.id); } } } pub fn point_in(&self, bounds: Rect) -> Option<u64> { for point in self.points.iter() { if (point.position.0 as isize) > bounds.left && (point.position.0 as isize) < bounds.right && (point.position.1 as isize) > bounds.top && (point.position.1 as isize) < bounds.bottom { return Some(point.touch_id); } } None } pub fn consume_click_in(&mut self, bounds: Rect) -> bool { self.clicks.retain(|p| p.touch_id != 0); for point in self.clicks.iter_mut() { if (point.position.0 as isize) > bounds.left && (point.position.0 as isize) < bounds.right && (point.position.1 as isize) > bounds.top && (point.position.1 as isize) < bounds.bottom { point.touch_id = 0; return true; } } false } pub fn draw(&self, canvas_size: (f32, f32), constants: &EngineConstants, texture_set: &mut TextureSet, ctx: &mut Context) -> GameResult { let batch = texture_set.get_or_load_batch(ctx, constants, "Caret")?; let rect = Rect::new_size(104, 120, 24, 24); for point in self.points.iter() { batch.add_rect(point.position.0 as f32 - 12.0, point.position.1 as f32 - 12.0, &rect); } batch.draw(ctx)?; if self.control_type == TouchControlType::Controls { let batch = texture_set.get_or_load_batch(ctx, constants, "builtin/touch")?; let color = (255, 255, 255, 160); for x in 0..3 { for y in 0..3 { let mut icon_x = x; let icon_y = y; if self.interact_icon && x == 1 && y == 2 { icon_x = 3; } batch.add_rect_tinted(4.0 + 48.0 * x as f32 + 8.0, (canvas_size.1 - 4.0 - 48.0 * 3.0) + 48.0 * y as f32 + 8.0, color, &Rect::new_size(icon_x * 32, icon_y * 32, 32, 32)); } } batch.add_rect_tinted(canvas_size.0 - (4.0 + 48.0) + 8.0, canvas_size.1 - (4.0 + 48.0) + 8.0, color, &Rect::new_size(3 * 32, 32, 32, 32)); batch.add_rect_tinted(canvas_size.0 - (4.0 + 48.0) + 8.0, canvas_size.1 - (4.0 + 48.0) * 2.0 + 8.0, color, &Rect::new_size(3 * 32, 0, 32, 32)); batch.draw(ctx)?; } Ok(()) } }
use ggez::{Context, GameResult}; use winit::event::TouchPhase; use crate::common::Rect; use crate::engine_constants::EngineConstants; use crate::texture_set::TextureSet; #[derive(Copy, Clone, PartialEq, Eq)] pub enum TouchControlType { None, Dialog, Controls, } #[derive(Copy, Clone)] pub struct TouchPoint { id: u64, touch_id: u64, position: (f64, f64), last_position: (f64, f64), } pub struct TouchControls { pub control_type: TouchControlType, pub points: Vec<TouchPoint>, pub interact_icon: bool, touch_id_counter: u64, clicks: Vec<TouchPoint>, } impl TouchControls { pub fn new() -> TouchControls { TouchControls { control_type: TouchControlType::None, touch_id_counter: 0, interact_icon: false, points: Vec::with_capacity(8), clicks: Vec::with_capacity(8), } } pub fn process_winit_event(&mut self, scale: f32, touch: winit::event::Touch) { match touch.phase { TouchPhase::Started | TouchPhase::Moved => {
} TouchPhase::Ended | TouchPhase::Cancelled => { self.points.retain(|p| p.id != touch.id); self.clicks.retain(|p| p.id != touch.id); } } } pub fn point_in(&self, bounds: Rect) -> Option<u64> { for point in self.points.iter() { if (point.position.0 as isize) > bounds.left && (point.position.0 as isize) < bounds.right && (point.position.1 as isize) > bounds.top && (point.position.1 as isize) < bounds.bottom { return Some(point.touch_id); } } None } pub fn consume_click_in(&mut self, bounds: Rect) -> bool { self.clicks.retain(|p| p.touch_id != 0); for point in self.clicks.iter_mut() { if (point.position.0 as isize) > bounds.left && (point.position.0 as isize) < bounds.right && (point.position.1 as isize) > bounds.top && (point.position.1 as isize) < bounds.bottom { point.touch_id = 0; return true; } } false } pub fn draw(&self, canvas_size: (f32, f32), constants: &EngineConstants, texture_set: &mut TextureSet, ctx: &mut Context) -> GameResult { let batch = texture_set.get_or_load_batch(ctx, constants, "Caret")?; let rect = Rect::new_size(104, 120, 24, 24); for point in self.points.iter() { batch.add_rect(point.position.0 as f32 - 12.0, point.position.1 as f32 - 12.0, &rect); } batch.draw(ctx)?; if self.control_type == TouchControlType::Controls { let batch = texture_set.get_or_load_batch(ctx, constants, "builtin/touch")?; let color = (255, 255, 255, 160); for x in 0..3 { for y in 0..3 { let mut icon_x = x; let icon_y = y; if self.interact_icon && x == 1 && y == 2 { icon_x = 3; } batch.add_rect_tinted(4.0 + 48.0 * x as f32 + 8.0, (canvas_size.1 - 4.0 - 48.0 * 3.0) + 48.0 * y as f32 + 8.0, color, &Rect::new_size(icon_x * 32, icon_y * 32, 32, 32)); } } batch.add_rect_tinted(canvas_size.0 - (4.0 + 48.0) + 8.0, canvas_size.1 - (4.0 + 48.0) + 8.0, color, &Rect::new_size(3 * 32, 32, 32, 32)); batch.add_rect_tinted(canvas_size.0 - (4.0 + 48.0) + 8.0, canvas_size.1 - (4.0 + 48.0) * 2.0 + 8.0, color, &Rect::new_size(3 * 32, 0, 32, 32)); batch.draw(ctx)?; } Ok(()) } }
if let Some(point) = self.points.iter_mut().find(|p| p.id == touch.id) { point.last_position = point.position; point.position = (touch.location.x / scale as f64, touch.location.y / scale as f64); } else { self.touch_id_counter = self.touch_id_counter.wrapping_add(1); let point = TouchPoint { id: touch.id, touch_id: self.touch_id_counter, position: (touch.location.x / scale as f64, touch.location.y / scale as f64), last_position: (0.0, 0.0), }; self.points.push(point); if touch.phase == TouchPhase::Started { self.clicks.push(point); } }
if_condition
[ { "content": "#[inline(always)]\n\npub fn fix9_scale(val: i32, scale: f32) -> f32 {\n\n (val as f64 * scale as f64 / 512.0).floor() as f32 / scale\n\n}\n\n\n", "file_path": "src/common.rs", "rank": 0, "score": 267354.8191958265 }, { "content": "pub fn interpolate_fix9_scale(old_val: i32, val: i32, frame_delta: f64) -> f32 {\n\n if (frame_delta - 1.0).abs() < 0.001 {\n\n return (val / 0x200) as f32;\n\n }\n\n\n\n (lerp_f64(old_val as f64, val as f64, frame_delta) / 512.0) as f32\n\n}\n", "file_path": "src/common.rs", "rank": 1, "score": 231891.1791443359 }, { "content": "pub fn centibel_to_scale(a: i32) -> f32 {\n\n f32::powf(10.0, a as f32 / 2000.0)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct RenderBuffer {\n\n pub position: f64,\n\n pub frequency: u32,\n\n pub volume: i32,\n\n pub pan: i32,\n\n pub sample: WavSample,\n\n pub playing: bool,\n\n pub looping: bool,\n\n pub base_pos: usize,\n\n pub len: usize,\n\n // -1 = infinite\n\n pub nloops: i32,\n\n}\n\n\n\nimpl RenderBuffer {\n", "file_path": "src/sound/playback.rs", "rank": 2, "score": 229031.71539297642 }, { "content": "// s1: sample 1\n\n// s2: sample 2\n\n// sp: previous sample (before s1)\n\n// sn: next sample (after s2)\n\n// mu: position to interpolate for\n\npub fn cubic_interp(s1: f32, s2: f32, sp: f32, sn: f32, mu: f32) -> f32 {\n\n let mu2 = mu * mu;\n\n let a0 = sn - s2 - sp + s1;\n\n let a1 = sp - s1 - a0;\n\n let a2 = s2 - sp;\n\n let a3 = s1;\n\n\n\n a0 * mu * mu2 + a1 * mu2 + a2 * mu + a3\n\n}\n", "file_path": "src/sound/stuff.rs", "rank": 3, "score": 191406.06892948074 }, { "content": "pub fn init() -> GameResult {\n\n pretty_env_logger::env_logger::from_env(Env::default().default_filter_or(\"info\"))\n\n .filter(Some(\"gfx_device_gl::factory\"), LevelFilter::Warn)\n\n .init();\n\n\n\n let resource_dir = if let Ok(data_dir) = env::var(\"CAVESTORY_DATA_DIR\") {\n\n path::PathBuf::from(data_dir)\n\n } else if let Ok(manifest_dir) = env::var(\"CARGO_MANIFEST_DIR\") {\n\n let mut path = path::PathBuf::from(manifest_dir);\n\n path.push(\"data\");\n\n path\n\n } else {\n\n path::PathBuf::from(\"data\")\n\n };\n\n\n\n info!(\"Resource directory: {:?}\", resource_dir);\n\n info!(\"Initializing engine...\");\n\n\n\n let event_loop = winit::event_loop::EventLoop::new();\n\n let mut context: Option<Context>;\n", "file_path": "src/lib.rs", "rank": 4, "score": 169582.21401447253 }, { "content": "pub trait PlayerControllerClone {\n\n fn clone_box(&self) -> Box<dyn PlayerController>;\n\n}\n\n\n\nimpl<T: 'static + PlayerController + Clone> PlayerControllerClone for T {\n\n fn clone_box(&self) -> Box<dyn PlayerController> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Clone for Box<dyn PlayerController> {\n\n fn clone(&self) -> Box<dyn PlayerController> {\n\n self.clone_box()\n\n }\n\n}\n", "file_path": "src/input/player_controller.rs", "rank": 5, "score": 164732.66976807336 }, { "content": "#[test]\n\npub fn test_npc_list() -> GameResult {\n\n impl NPC {\n\n fn test_tick(&mut self, _map: &NPCList) -> GameResult {\n\n self.action_counter += 1;\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n let mut npc = NPC::empty();\n\n npc.cond.set_alive(true);\n\n\n\n {\n\n let map = Box::new(NPCList::new());\n\n let mut ctr = 20;\n\n\n\n map.spawn(0, npc.clone())?;\n\n map.spawn(2, npc.clone())?;\n\n map.spawn(256, npc.clone())?;\n\n\n", "file_path": "src/npc/list.rs", "rank": 6, "score": 161332.1927224 }, { "content": "pub trait PlayerController: PlayerControllerClone {\n\n fn update(&mut self, state: &mut SharedGameState, ctx: &mut Context) -> 
GameResult;\n\n\n\n fn update_trigger(&mut self);\n\n\n\n /// True if \"move up\" button is down.\n\n fn move_up(&self) -> bool;\n\n\n\n /// True if \"move left\" button is down.\n\n fn move_left(&self) -> bool;\n\n\n\n /// True if \"move down\" button is down.\n\n fn move_down(&self) -> bool;\n\n\n\n /// True if \"move right\" button is down.\n\n fn move_right(&self) -> bool;\n\n\n\n /// True if \"prev weapon\" button is down.\n\n fn prev_weapon(&self) -> bool;\n\n\n", "file_path": "src/input/player_controller.rs", "rank": 7, "score": 160895.91339993244 }, { "content": "#[inline(always)]\n\nfn lerp_f64(v1: f64, v2: f64, t: f64) -> f64 {\n\n v1 * (1.0 - t.fract()) + v2 * t.fract()\n\n}\n\n\n", "file_path": "src/common.rs", "rank": 8, "score": 158935.0632544454 }, { "content": "#[cfg(target_os = \"android\")]\n\n#[cfg_attr(target_os = \"android\", ndk_glue::main(backtrace = \"on\"))]\n\npub fn android_main() {\n\n println!(\"main invoked.\");\n\n\n\n request_perms().expect(\"Failed to attach to the JVM and request storage permissions.\");\n\n\n\n env::set_var(\"CAVESTORY_DATA_DIR\", \"/storage/emulated/0/doukutsu\");\n\n init().unwrap();\n\n}\n\n\n\n#[cfg(target_os = \"android\")]\n\nstatic BACKENDS: [Backend; 2] = [\n\n Backend::OpenGLES { major: 3, minor: 0 },\n\n Backend::OpenGLES { major: 2, minor: 0 }\n\n];\n\n\n\n#[cfg(not(target_os = \"android\"))]\n\nstatic BACKENDS: [Backend; 4] = [\n\n Backend::OpenGL { major: 3, minor: 2 },\n\n Backend::OpenGLES { major: 3, minor: 2 },\n\n Backend::OpenGLES { major: 3, minor: 0 },\n\n Backend::OpenGLES { major: 2, minor: 0 }\n\n];\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 149004.09235994238 }, { "content": "pub fn draw_number(x: f32, y: f32, val: usize, align: Alignment, state: &mut SharedGameState, ctx: &mut Context) -> GameResult {\n\n let batch = state.texture_set.get_or_load_batch(ctx, &state.constants, \"TextBox\")?;\n\n\n\n let n = val.to_string();\n\n let align_offset = if align == Alignment::Right { n.len() as f32 * 8.0 } else { 0.0 };\n\n\n\n for (offset, chr) in n.chars().enumerate() {\n\n let idx = chr as u16 - '0' as u16;\n\n batch.add_rect(x - align_offset + offset as f32 * 8.0, y, &Rect::new_size(idx * 8, 56, 8, 8));\n\n }\n\n\n\n batch.draw(ctx)?;\n\n Ok(())\n\n}\n", "file_path": "src/components/draw_common.rs", "rank": 10, "score": 148032.71807484503 }, { "content": "pub fn org_vol_to_vol(vol: u8) -> i32 {\n\n (vol as i32 - 255) * 8\n\n}\n\n\n", "file_path": "src/sound/stuff.rs", "rank": 11, "score": 119824.09649941145 }, { "content": "pub fn org_pan_to_pan(pan: u8) -> i32 {\n\n (PAN_TBL[pan as usize] as i32 - 256) * 10\n\n}\n\n\n", "file_path": "src/sound/stuff.rs", "rank": 12, "score": 119824.09649941145 }, { "content": "pub fn org_key_to_drum_freq(key: u8) -> i32 {\n\n key as i32 * 800 + 100\n\n}\n\n\n", "file_path": "src/sound/stuff.rs", "rank": 13, "score": 117569.83744749178 }, { "content": "pub fn org_key_to_oct_pitch(key: u8) -> (u8, u8) {\n\n (key / 12, key % 12)\n\n}\n\n\n", "file_path": "src/sound/stuff.rs", "rank": 14, "score": 111897.46147036317 }, { "content": "pub fn org_key_to_freq(key: u8, a: i16) -> i32 {\n\n let (oct, pitch) = org_key_to_oct_pitch(key);\n\n\n\n let freq = FRQ_TBL[pitch as usize] as f32;\n\n let oct = OCT_TBL[oct as usize] as f32;\n\n\n\n (freq * oct) as i32 + (a as i32 - 1000)\n\n}\n\n\n", "file_path": "src/sound/stuff.rs", "rank": 15, "score": 110970.55086247626 }, { "content": "#[cfg(target_os = \"android\")]\n\nfn request_perms() -> GameResult {\n\n use jni::objects::JValue;\n\n 
use jni::objects::JObject;\n\n\n\n let native_activity = ndk_glue::native_activity();\n\n let vm_ptr = native_activity.vm();\n\n let vm = unsafe { jni::JavaVM::from_raw(vm_ptr) }?;\n\n let vm_env = vm.attach_current_thread()?;\n\n\n\n fn perm_name<'a, 'b, 'c>(vm_env: &'b jni::AttachGuard<'a>, name: &'c str) -> GameResult<jni::objects::JValue<'a>> {\n\n let class = vm_env.find_class(\"android/Manifest$permission\")?;\n\n Ok(vm_env.get_static_field(class, name.to_owned(), \"Ljava/lang/String;\")?)\n\n }\n\n\n\n fn has_permission(vm_env: &jni::AttachGuard, activity: &jni::sys::jobject, name: &str) -> GameResult<bool> {\n\n let perm_granted = {\n\n let class = vm_env.find_class(\"android/content/pm/PackageManager\")?;\n\n vm_env.get_static_field(class, \"PERMISSION_GRANTED\", \"I\")?.i()?\n\n };\n\n\n", "file_path": "src/lib.rs", "rank": 16, "score": 103498.61876060354 }, { "content": "fn default_n016_save_point() -> [Rect<u16>; 8] {\n\n [\n\n Rect { left: 96, top: 16, right: 112, bottom: 32 },\n\n Rect { left: 112, top: 16, right: 128, bottom: 32 },\n\n Rect { left: 128, top: 16, right: 144, bottom: 32 },\n\n Rect { left: 144, top: 16, right: 160, bottom: 32 },\n\n Rect { left: 160, top: 16, right: 176, bottom: 32 },\n\n Rect { left: 176, top: 16, right: 192, bottom: 32 },\n\n Rect { left: 192, top: 16, right: 208, bottom: 32 },\n\n Rect { left: 208, top: 16, right: 224, bottom: 32 },\n\n ]\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 17, "score": 97001.18350098241 }, { "content": "fn default_n315_ma_pignon_clone() -> [Rect<u16>; 8] {\n\n [\n\n Rect { left: 128, top: 0, right: 144, bottom: 16 },\n\n Rect { left: 160, top: 0, right: 176, bottom: 16 },\n\n Rect { left: 176, top: 0, right: 192, bottom: 16 },\n\n Rect { left: 192, top: 0, right: 208, bottom: 16 },\n\n Rect { left: 128, top: 16, right: 144, bottom: 32 },\n\n Rect { left: 160, top: 16, right: 176, bottom: 32 },\n\n Rect { left: 176, top: 16, right: 192, bottom: 32 },\n\n Rect { left: 192, top: 16, right: 208, bottom: 32 },\n\n ]\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 18, "score": 94739.3268424054 }, { "content": "// TODO: Create a MixingBuffer or something...\n\npub fn mix(dst: &mut [u16], dst_fmt: WavFormat, srcs: &mut [RenderBuffer]) {\n\n let freq = dst_fmt.sample_rate as f64;\n\n\n\n for buf in srcs {\n\n if buf.playing {\n\n // index into sound samples\n\n let advance = buf.frequency as f64 / freq;\n\n\n\n let vol = centibel_to_scale(buf.volume);\n\n\n\n let (pan_l, pan_r) =\n\n match buf.pan.signum() {\n\n 0 => (1.0, 1.0),\n\n 1 => (centibel_to_scale(-buf.pan), 1.0),\n\n -1 => (1.0, centibel_to_scale(buf.pan)),\n\n _ => unsafe { std::hint::unreachable_unchecked() }\n\n };\n\n\n\n fn clamp<T: Ord>(v: T, limit: T) -> T {\n\n if v > limit {\n", "file_path": "src/sound/playback.rs", "rank": 19, "score": 94501.79136020553 }, { "content": "fn init_ctx<P: Into<path::PathBuf> + Clone>(event_loop: &winit::event_loop::EventLoopWindowTarget<()>, resource_dir: P) -> GameResult<Context> {\n\n for backend in BACKENDS.iter() {\n\n let ctx = ContextBuilder::new(\"doukutsu-rs\")\n\n .window_setup(WindowSetup::default().title(\"Cave Story ~ Doukutsu Monogatari (doukutsu-rs)\"))\n\n .window_mode(WindowMode::default()\n\n .resizable(true)\n\n .min_dimensions(320.0, 240.0)\n\n .dimensions(854.0, 480.0))\n\n .add_resource_path(resource_dir.clone())\n\n .backend(*backend)\n\n .build(event_loop);\n\n\n\n match ctx {\n\n Ok(mut ctx) => {\n\n mount_vfs(&mut ctx, Box::new(BuiltinFS::new()));\n\n\n\n 
return Ok(ctx);\n\n }\n\n Err(err) => {\n\n log::warn!(\"Failed to create backend using config {:?}: {}\", backend, err);\n\n }\n\n }\n\n }\n\n\n\n Err(GameError::EventLoopError(\"Failed to initialize OpenGL backend. Perhaps the driver is outdated?\".to_string()))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 20, "score": 91822.36344824225 }, { "content": " trigger: KeyState(0),\n\n prev_touch_len: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl PlayerController for TouchPlayerController {\n\n fn update(&mut self, state: &mut SharedGameState, _ctx: &mut Context) -> GameResult {\n\n match state.touch_controls.control_type {\n\n TouchControlType::None => {}\n\n TouchControlType::Dialog => {\n\n self.state.set_jump(state.touch_controls.point_in(Rect::new_size(0, 0, state.canvas_size.0 as isize, state.canvas_size.1 as isize)).is_some());\n\n\n\n if state.touch_controls.points.len() > 1 && self.prev_touch_len != state.touch_controls.points.len() {\n\n self.prev_touch_len = state.touch_controls.points.len();\n\n self.old_state.set_jump(false);\n\n }\n\n }\n\n TouchControlType::Controls => {\n\n self.state.0 = 0;\n", "file_path": "src/input/touch_player_controller.rs", "rank": 28, "score": 88152.28801663306 }, { "content": "use ggez::{Context, GameResult};\n\n\n\nuse crate::bitfield;\n\nuse crate::common::Rect;\n\nuse crate::input::player_controller::PlayerController;\n\nuse crate::input::touch_controls::TouchControlType;\n\nuse crate::shared_game_state::SharedGameState;\n\n\n\n/// A no-op implementation of player controller.\n\n#[derive(Clone)]\n\npub struct TouchPlayerController {\n\n state: KeyState,\n\n old_state: KeyState,\n\n trigger: KeyState,\n\n prev_touch_len: usize,\n\n}\n\n\n\nbitfield! {\n\n #[derive(Clone, Copy)]\n\n pub struct KeyState(u16);\n", "file_path": "src/input/touch_player_controller.rs", "rank": 29, "score": 88146.58371304767 }, { "content": " impl Debug;\n\n\n\n pub left, set_left: 0;\n\n pub right, set_right: 1;\n\n pub up, set_up: 2;\n\n pub down, set_down: 3;\n\n pub map, set_map: 4;\n\n pub inventory, set_inventory: 5;\n\n pub jump, set_jump: 6;\n\n pub shoot, set_shoot: 7;\n\n pub next_weapon, set_next_weapon: 8;\n\n pub prev_weapon, set_prev_weapon: 9;\n\n pub pause, set_pause: 10;\n\n}\n\n\n\nimpl TouchPlayerController {\n\n pub fn new() -> TouchPlayerController {\n\n TouchPlayerController {\n\n state: KeyState(0),\n\n old_state: KeyState(0),\n", "file_path": "src/input/touch_player_controller.rs", "rank": 30, "score": 88144.51071569147 }, { "content": " // left\n\n self.state.set_left(self.state.left() || state.touch_controls.point_in(Rect::new_size(4, state.canvas_size.1 as isize - 4 - 48 * 2, 48, 48)).is_some());\n\n\n\n // up\n\n self.state.set_up(self.state.up() || state.touch_controls.point_in(Rect::new_size(48 + 4, state.canvas_size.1 as isize - 4 - 48 * 3, 48, 48)).is_some());\n\n\n\n // right\n\n self.state.set_right(self.state.right() || state.touch_controls.point_in(Rect::new_size(4 + 48 * 2, state.canvas_size.1 as isize - 4 - 48 * 2, 48, 48)).is_some());\n\n\n\n // down\n\n self.state.set_down(self.state.down() || state.touch_controls.point_in(Rect::new_size(48 + 4, state.canvas_size.1 as isize - 4 - 48, 48, 48)).is_some());\n\n\n\n // left+up\n\n self.state.set_left(self.state.left() || state.touch_controls.point_in(Rect::new_size(4, state.canvas_size.1 as isize - 4 - 48 * 3, 48, 48)).is_some());\n\n self.state.set_up(self.state.up() || state.touch_controls.point_in(Rect::new_size(4, state.canvas_size.1 as isize - 4 - 48 * 3, 48, 48)).is_some());\n\n\n\n // 
right+up\n\n self.state.set_right(self.state.right() || state.touch_controls.point_in(Rect::new_size(4 + 48 * 2, state.canvas_size.1 as isize - 4 - 48 * 3, 48, 48)).is_some());\n\n self.state.set_up(self.state.up() || state.touch_controls.point_in(Rect::new_size(4 + 48 * 2, state.canvas_size.1 as isize - 4 - 48 * 3, 48, 48)).is_some());\n\n\n", "file_path": "src/input/touch_player_controller.rs", "rank": 31, "score": 88143.31086694392 }, { "content": " // left+down\n\n self.state.set_left(self.state.left() || state.touch_controls.point_in(Rect::new_size(4, state.canvas_size.1 as isize - 48 - 4, 48, 48)).is_some());\n\n self.state.set_down(self.state.down() || state.touch_controls.point_in(Rect::new_size(4, state.canvas_size.1 as isize - 48 - 4, 48, 48)).is_some());\n\n\n\n // right+down\n\n self.state.set_right(self.state.right() || state.touch_controls.point_in(Rect::new_size(4 + 48 * 2, state.canvas_size.1 as isize - 48 - 4, 48, 48)).is_some());\n\n self.state.set_down(self.state.down() || state.touch_controls.point_in(Rect::new_size(4 + 48 * 2, state.canvas_size.1 as isize - 48 - 4, 48, 48)).is_some());\n\n\n\n self.state.set_jump(self.state.jump() || state.touch_controls.point_in(Rect::new_size(state.canvas_size.0 as isize - 48 - 4, state.canvas_size.1 as isize - 48 - 4, 48, 48)).is_some());\n\n self.state.set_shoot(self.state.shoot() || state.touch_controls.point_in(Rect::new_size(state.canvas_size.0 as isize - 48 - 4, state.canvas_size.1 as isize - (48 - 4) * 2, 48, 48)).is_some());\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn update_trigger(&mut self) {\n\n let mut trigger = self.state.0 ^ self.old_state.0;\n\n trigger &= self.state.0;\n\n self.old_state = self.state;\n", "file_path": "src/input/touch_player_controller.rs", "rank": 32, "score": 88141.90911897954 }, { "content": "\n\n fn look_right(&self) -> bool {\n\n self.state.right()\n\n }\n\n\n\n fn move_analog_x(&self) -> f64 {\n\n if self.state.left() && self.state.right() {\n\n 0.0\n\n } else if self.state.left() {\n\n -1.0\n\n } else if self.state.right() {\n\n 1.0\n\n } else {\n\n 0.0\n\n }\n\n }\n\n\n\n fn move_analog_y(&self) -> f64 {\n\n if self.state.up() && self.state.down() {\n\n 0.0\n", "file_path": "src/input/touch_player_controller.rs", "rank": 33, "score": 88130.24477804247 }, { "content": " self.trigger = KeyState(trigger);\n\n }\n\n\n\n fn move_up(&self) -> bool {\n\n self.state.up()\n\n }\n\n\n\n fn move_left(&self) -> bool {\n\n self.state.left()\n\n }\n\n\n\n fn move_down(&self) -> bool {\n\n self.state.down()\n\n }\n\n\n\n fn move_right(&self) -> bool {\n\n self.state.right()\n\n }\n\n\n\n fn prev_weapon(&self) -> bool {\n", "file_path": "src/input/touch_player_controller.rs", "rank": 34, "score": 88126.64024828444 }, { "content": " fn trigger_up(&self) -> bool {\n\n self.trigger.up()\n\n }\n\n\n\n fn trigger_left(&self) -> bool {\n\n self.trigger.left()\n\n }\n\n\n\n fn trigger_down(&self) -> bool {\n\n self.trigger.down()\n\n }\n\n\n\n fn trigger_right(&self) -> bool {\n\n self.trigger.right()\n\n }\n\n\n\n fn trigger_prev_weapon(&self) -> bool {\n\n self.trigger.prev_weapon()\n\n }\n\n\n", "file_path": "src/input/touch_player_controller.rs", "rank": 35, "score": 88126.64024828444 }, { "content": "\n\n fn trigger_menu_back(&self) -> bool {\n\n self.trigger.shoot()\n\n }\n\n\n\n fn trigger_menu_pause(&self) -> bool {\n\n self.trigger.pause()\n\n }\n\n\n\n fn look_up(&self) -> bool {\n\n self.state.up()\n\n }\n\n\n\n fn look_left(&self) -> bool {\n\n self.state.left()\n\n }\n\n\n\n fn look_down(&self) -> bool 
{\n\n self.state.down()\n\n }\n", "file_path": "src/input/touch_player_controller.rs", "rank": 36, "score": 88126.64024828444 }, { "content": " fn trigger_next_weapon(&self) -> bool {\n\n self.trigger.next_weapon()\n\n }\n\n\n\n fn trigger_jump(&self) -> bool {\n\n self.trigger.jump()\n\n }\n\n\n\n fn trigger_shoot(&self) -> bool {\n\n self.trigger.shoot()\n\n }\n\n\n\n fn trigger_skip(&self) -> bool {\n\n // TODO\n\n false\n\n }\n\n\n\n fn trigger_menu_ok(&self) -> bool {\n\n self.trigger.jump()\n\n }\n", "file_path": "src/input/touch_player_controller.rs", "rank": 37, "score": 88126.56358923647 }, { "content": " self.state.prev_weapon()\n\n }\n\n\n\n fn next_weapon(&self) -> bool {\n\n self.state.next_weapon()\n\n }\n\n\n\n fn jump(&self) -> bool {\n\n self.state.jump()\n\n }\n\n\n\n fn shoot(&self) -> bool {\n\n self.state.shoot()\n\n }\n\n\n\n fn skip(&self) -> bool {\n\n // TODO\n\n false\n\n }\n\n\n", "file_path": "src/input/touch_player_controller.rs", "rank": 38, "score": 88126.51156333016 }, { "content": " } else if self.state.up() {\n\n -1.0\n\n } else if self.state.down() {\n\n 1.0\n\n } else {\n\n 0.0\n\n }\n\n }\n\n}\n", "file_path": "src/input/touch_player_controller.rs", "rank": 39, "score": 88119.9846097748 }, { "content": "/// Decodes UTF-8 character in a less strict way.\n\n/// http://simonsapin.github.io/wtf-8/#decoding-wtf-8\n\npub fn read_cur_wtf8<T: AsRef<[u8]>>(cursor: &mut Cursor<T>, max_bytes: u32) -> (u32, char) {\n\n let result: u32;\n\n let consumed: u32;\n\n\n\n if max_bytes == 0 {\n\n return (0, '\\u{fffd}');\n\n }\n\n\n\n match cursor.read_u8() {\n\n Ok(byte @ 0x00..=0x7f) => {\n\n consumed = 1;\n\n result = byte as u32;\n\n }\n\n Ok(byte @ 0xc2..=0xdf) if max_bytes >= 2 => {\n\n let byte2 = { if let Ok(n) = cursor.read_u8() { n } else { return (1, '\\u{fffd}'); } };\n\n\n\n consumed = 2;\n\n result = (byte as u32 & 0x1f) << 6 | (byte2 as u32 & 0x3f);\n\n }\n\n Ok(byte @ 0xe0..=0xef) if max_bytes >= 3 => {\n", "file_path": "src/encoding.rs", "rank": 40, "score": 86479.39283941482 }, { "content": "struct Game {\n\n scene: Option<Box<dyn Scene>>,\n\n state: UnsafeCell<SharedGameState>,\n\n ui: UI,\n\n def_matrix: ColumnMatrix4<f32>,\n\n start_time: Instant,\n\n last_tick: u128,\n\n next_tick: u128,\n\n loops: u64,\n\n}\n\n\n\nimpl Game {\n\n fn new(ctx: &mut Context) -> GameResult<Game> {\n\n let s = Game {\n\n scene: None,\n\n ui: UI::new(ctx)?,\n\n def_matrix: DrawParam::new().to_matrix(),\n\n state: UnsafeCell::new(SharedGameState::new(ctx)?),\n\n start_time: Instant::now(),\n\n last_tick: 0,\n", "file_path": "src/lib.rs", "rank": 41, "score": 85058.72092291112 }, { "content": "/// Shift-JIS -> Unicode converter.\n\npub fn read_cur_shift_jis<T: AsRef<[u8]>>(cursor: &mut Cursor<T>, max_bytes: u32) -> (u32, char) {\n\n let result: u32;\n\n let consumed: u32;\n\n\n\n if max_bytes == 0 {\n\n return (0, '\\u{fffd}');\n\n }\n\n\n\n match cursor.read_u8() {\n\n Ok(byte @ 0x81..=0x9f)\n\n | Ok(byte @ 0xe0..=0xef)\n\n | Ok(byte @ 0xfa..=0xfc) if max_bytes >= 2 => {\n\n let byte2 = { if let Ok(n) = cursor.read_u8() { n } else { return (1, '\\u{fffd}'); } };\n\n consumed = 2;\n\n\n\n let sjis = byte2 as u16 | ((byte as u16) << 8);\n\n result = match sjis {\n\n 0x8140..=0x8142 => { sjis as u32 - 0x8140 + 0x3000 }\n\n 0x8143 => { 0xff0c }\n\n 0x8144 => { 0xff0e }\n", "file_path": "src/encoding.rs", "rank": 42, "score": 85035.34370964955 }, { "content": "struct DifficultyModifier {\n\n\n\n}\n", "file_path": "src/difficulty_modifier.rs", "rank": 43, "score": 
81468.06055138729 }, { "content": "struct BuiltinMetadata {\n\n is_dir: bool,\n\n size: u64,\n\n}\n\n\n\nimpl VMetadata for BuiltinMetadata {\n\n fn is_dir(&self) -> bool {\n\n self.is_dir\n\n }\n\n\n\n fn is_file(&self) -> bool {\n\n !self.is_dir\n\n }\n\n\n\n fn len(&self) -> u64 {\n\n self.size\n\n }\n\n}\n\n\n", "file_path": "src/builtin_fs.rs", "rank": 44, "score": 81468.06055138729 }, { "content": "#[derive(PartialEq, Eq)]\n\nenum PlaybackState {\n\n Stopped,\n\n Playing,\n\n}\n\n\n", "file_path": "src/sound/mod.rs", "rank": 45, "score": 81451.00711776933 }, { "content": "#[derive(Clone)]\n\nenum FSNode {\n\n File(&'static str, &'static [u8]),\n\n Directory(&'static str, Vec<FSNode>),\n\n}\n\n\n\nimpl FSNode {\n\n fn get_name(&self) -> &'static str {\n\n match self {\n\n FSNode::File(name, _) => { name }\n\n FSNode::Directory(name, _) => { name }\n\n }\n\n }\n\n\n\n fn to_file(&self) -> GameResult<Box<dyn VFile>> {\n\n match self {\n\n FSNode::File(_, buf) => { Ok(BuiltinFile::from(buf)) }\n\n FSNode::Directory(name, _) => { Err(FilesystemError(format!(\"{} is a directory.\", name))) }\n\n }\n\n }\n\n\n", "file_path": "src/builtin_fs.rs", "rank": 46, "score": 81450.24862285156 }, { "content": "enum PlaybackMessage {\n\n Stop,\n\n PlaySong(Box<Song>),\n\n PlaySample(u8),\n\n SetSpeed(f32),\n\n SaveState,\n\n RestoreState,\n\n}\n\n\n", "file_path": "src/sound/mod.rs", "rank": 47, "score": 81444.58308628848 }, { "content": "#[derive(PartialEq, Eq, Copy, Clone)]\n\n#[repr(u8)]\n\nenum CurrentMenu {\n\n MainMenu,\n\n OptionMenu,\n\n SaveSelectMenu,\n\n ChallengesMenu,\n\n StartGame,\n\n LoadGame,\n\n}\n\n\n\npub struct TitleScene {\n\n tick: usize,\n\n controller: CombinedMenuController,\n\n current_menu: CurrentMenu,\n\n main_menu: Menu,\n\n option_menu: Menu,\n\n save_select_menu: Menu,\n\n}\n\n\n\nimpl TitleScene {\n\n pub fn new() -> Self {\n", "file_path": "src/scene/title_scene.rs", "rank": 48, "score": 79856.23069765355 }, { "content": "fn main() {\n\n doukutsu_rs::init().unwrap();\n\n}\n", "file_path": "src/main.rs", "rank": 49, "score": 78958.68838777997 }, { "content": "/// A trait to get or set a single bit.\n\n///\n\n/// This trait is implemented for all type that implement `BitRange<u8>`.\n\npub trait Bit {\n\n /// Get a single bit.\n\n fn bit(&self, bit: usize) -> bool;\n\n\n\n /// Check if only specific bit is set.\n\n fn bit_only(&self, bit: usize) -> bool;\n\n\n\n /// Set a single bit.\n\n fn set_bit(&mut self, bit: usize, value: bool);\n\n}\n\n\n\nimpl<T: BitRange<u8>> Bit for T {\n\n fn bit(&self, bit: usize) -> bool {\n\n self.bit_range(bit, bit) != 0\n\n }\n\n\n\n fn bit_only(&self, bit: usize) -> bool {\n\n self.get() == (1 << bit) as u8\n\n }\n\n\n", "file_path": "src/macros.rs", "rank": 50, "score": 78844.88695427214 }, { "content": "pub trait RNG {\n\n fn next(&self) -> i32;\n\n\n\n fn range(&self, range: Range<i32>) -> i32 {\n\n range.start.wrapping_add((self.next() >> 2) % (range.end.wrapping_sub(range.start).wrapping_add(1)))\n\n }\n\n}\n\n\n\n/// Deterministic XorShift-based random number generator\n\npub struct XorShift(Cell<(u64, u64, u64, u64)>);\n\n\n\nimpl XorShift {\n\n pub fn new(seed: i32) -> Self {\n\n Self(Cell::new((\n\n seed as u64,\n\n (seed as u64).wrapping_add(0x9e3779b97f4a7c15),\n\n (seed as u64).wrapping_add(0xbdd3944475a73cf0),\n\n 0\n\n )))\n\n }\n", "file_path": "src/rng.rs", "rank": 51, "score": 78844.88695427214 }, { "content": "#[test]\n\nfn inventory_test() {\n\n let mut inventory = Inventory::new();\n\n\n\n inventory.add_item(3);\n\n 
assert_eq!(inventory.has_item(2), false);\n\n assert_eq!(inventory.has_item(3), true);\n\n\n\n assert_eq!(inventory.has_item_amount(3, Ordering::Equal, 1), true);\n\n assert_eq!(inventory.has_item_amount(3, Ordering::Less, 2), true);\n\n inventory.consume_item(3);\n\n\n\n assert_eq!(inventory.has_item_amount(3, Ordering::Equal, 0), true);\n\n assert_eq!(inventory.has_item_amount(3, Ordering::Less, 2), true);\n\n\n\n inventory.add_item(2);\n\n assert_eq!(inventory.has_item(2), true);\n\n assert_eq!(inventory.has_item_amount(2, Ordering::Equal, 1), true);\n\n assert_eq!(inventory.has_item_amount(2, Ordering::Less, 1), false);\n\n\n\n inventory.add_item(4);\n\n inventory.add_item(4);\n\n inventory.add_item(4);\n\n inventory.add_item(4);\n\n\n\n assert_eq!(inventory.has_item(4), true);\n\n assert_eq!(inventory.has_item_amount(4, Ordering::Greater, 3), true);\n\n assert_eq!(inventory.has_item_amount(4, Ordering::Equal, 4), true);\n\n assert_eq!(inventory.has_item_amount(4, Ordering::Less, 2), false);\n\n}\n", "file_path": "src/inventory.rs", "rank": 52, "score": 77133.33157557035 }, { "content": "pub trait PhysicalEntity {\n\n fn x(&self) -> i32;\n\n fn y(&self) -> i32;\n\n fn vel_x(&self) -> i32;\n\n fn vel_y(&self) -> i32;\n\n\n\n fn hit_rect_size(&self) -> usize;\n\n fn offset_x(&self) -> i32 { 0 }\n\n fn offset_y(&self) -> i32 { 0 }\n\n\n\n fn hit_bounds(&self) -> &Rect<usize>;\n\n\n\n fn set_x(&mut self, x: i32);\n\n fn set_y(&mut self, y: i32);\n\n fn set_vel_x(&mut self, x: i32);\n\n fn set_vel_y(&mut self, y: i32);\n\n\n\n fn cond(&mut self) -> &mut Condition;\n\n fn flags(&mut self) -> &mut Flag;\n\n\n", "file_path": "src/physics.rs", "rank": 53, "score": 77121.68939899284 }, { "content": "/// Implement this trait on any object that represents an interactive game screen.\n\npub trait Scene {\n\n /// Called when the scene is shown.\n\n fn init(&mut self, _state: &mut SharedGameState, _ctx: &mut Context) -> GameResult { Ok(()) }\n\n\n\n /// Called at game tick. 
Perform any game state updates there.\n\n fn tick(&mut self, _state: &mut SharedGameState, _ctx: &mut Context) -> GameResult { Ok(()) }\n\n\n\n /// Called before draws between two ticks to update previous positions used for interpolation.\n\n /// DO NOT perform updates of the game state there.\n\n fn draw_tick(&mut self, _state: &mut SharedGameState) -> GameResult { Ok(()) }\n\n\n\n /// Called during frame rendering operation.\n\n fn draw(&self, _state: &mut SharedGameState, _ctx: &mut Context) -> GameResult { Ok(()) }\n\n\n\n /// Independent draw meant for debug overlay, that lets you mutate the game state.\n\n fn debug_overlay_draw(&mut self, _game_ui: &mut Components, _state: &mut SharedGameState, _ctx: &mut Context, _frame: &mut imgui::Ui) -> GameResult { Ok(()) }\n\n}\n", "file_path": "src/scene/mod.rs", "rank": 54, "score": 77121.68939899284 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(u8)]\n\nenum BossLifeTarget {\n\n None,\n\n NPC(u16),\n\n Boss,\n\n}\n\n\n\npub struct BossLifeBar {\n\n target: BossLifeTarget,\n\n life: u16,\n\n max_life: u16,\n\n prev_life: u16,\n\n counter: u16,\n\n}\n\n\n\nimpl BossLifeBar {\n\n pub fn new() -> BossLifeBar {\n\n BossLifeBar {\n\n target: BossLifeTarget::None,\n\n life: 0,\n\n max_life: 0,\n", "file_path": "src/components/boss_life_bar.rs", "rank": 55, "score": 76980.89455776098 }, { "content": "fn test_waveforms() {\n\n let reference = include_bytes!(\"pixtone_ref.dat\");\n\n\n\n for n in 1..(WAVEFORMS.len()) {\n\n for (i, &val) in WAVEFORMS[n].iter().enumerate() {\n\n assert_eq!((val as u8, i, n), (reference[n as usize * 256 + i], i, n));\n\n }\n\n }\n\n}*/\n\n\n\npub struct Waveform {\n\n pub waveform_type: u8,\n\n pub pitch: f32,\n\n pub level: i32,\n\n pub offset: i32,\n\n}\n\n\n\nimpl Waveform {\n\n pub fn get_waveform(&self) -> &[i8; 0x100] {\n\n &WAVEFORMS[self.waveform_type as usize % WAVEFORMS.len()]\n", "file_path": "src/sound/pixtone.rs", "rank": 56, "score": 75445.32025513554 }, { "content": "#[test]\n\nfn test_varint() {\n\n for n in -4000..=4000 {\n\n let mut out = Vec::new();\n\n TextScript::put_varint(n, &mut out);\n\n\n\n let result = TextScript::read_varint(&mut out.iter().copied()).unwrap();\n\n assert_eq!(result, n);\n\n let mut cur = Cursor::new(&out);\n\n let result = read_cur_varint(&mut cur).unwrap();\n\n assert_eq!(result, n);\n\n }\n\n}\n", "file_path": "src/text_script.rs", "rank": 57, "score": 75445.32025513554 }, { "content": "fn test_builtin_fs() {\n\n let fs = BuiltinFS {\n\n root: vec![\n\n FSNode::File(\"test.txt\", &[]),\n\n FSNode::Directory(\"memes\", vec![\n\n FSNode::File(\"nothing.txt\", &[]),\n\n FSNode::Directory(\"secret stuff\", vec![\n\n FSNode::File(\"passwords.txt\", &[b'1', b'2', b'3', b'4', b'5', b'6',]),\n\n ]),\n\n ]),\n\n FSNode::File(\"test2.txt\", &[]),\n\n ],\n\n };\n\n\n\n println!(\"{:?}\", fs.get_node(Path::new(\"/\")).unwrap());\n\n println!(\"{:?}\", fs.get_node(Path::new(\"/test.txt\")).unwrap());\n\n println!(\"{:?}\", fs.get_node(Path::new(\"/memes\")).unwrap());\n\n println!(\"{:?}\", fs.get_node(Path::new(\"/memes/nothing.txt\")).unwrap());\n\n println!(\"{:?}\", fs.get_node(Path::new(\"/memes/secret stuff/passwords.txt\")).unwrap());\n\n}*/\n", "file_path": "src/builtin_fs.rs", "rank": 58, "score": 73879.71502067514 }, { "content": "fn check_status(status: ThreadStatus, state: &mut State) -> GameResult {\n\n match status {\n\n ThreadStatus::Ok | ThreadStatus::Yield => { return Ok(()); }\n\n _ => {}\n\n }\n\n\n\n let error = 
state.to_str(-1).unwrap_or(\"???\");\n\n match status {\n\n ThreadStatus::RuntimeError => Err(GameError::EventLoopError(format!(\"Lua Runtime Error: {}\", error))),\n\n ThreadStatus::SyntaxError => Err(GameError::EventLoopError(format!(\"Lua Syntax Error: {}\", error))),\n\n ThreadStatus::MemoryError => Err(GameError::EventLoopError(format!(\"Lua Memory Error: {}\", error))),\n\n ThreadStatus::MsgHandlerError => Err(GameError::EventLoopError(format!(\"Lua Message Handler Error: {}\", error))),\n\n ThreadStatus::FileError => Err(GameError::EventLoopError(format!(\"Lua File Error: {}\", error))),\n\n _ => Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/scripting/mod.rs", "rank": 59, "score": 73061.06480618981 }, { "content": "/// A trait to get or set ranges of bits.\n\npub trait BitRange<T> {\n\n fn get(&self) -> T;\n\n /// Get a range of bits.\n\n fn bit_range(&self, msb: usize, lsb: usize) -> T;\n\n /// Set a range of bits.\n\n fn set_bit_range(&mut self, msb: usize, lsb: usize, value: T);\n\n}\n\n\n", "file_path": "src/macros.rs", "rank": 60, "score": 72237.64841029953 }, { "content": "pub trait GameEntity<C> {\n\n fn tick(&mut self, state: &mut SharedGameState, custom: C) -> GameResult;\n\n\n\n fn draw(&self, state: &mut SharedGameState, ctx: &mut Context, frame: &Frame) -> GameResult;\n\n}\n", "file_path": "src/entity.rs", "rank": 61, "score": 72237.64841029953 }, { "content": "fn read_cur_varint(cursor: &mut Cursor<&Vec<u8>>) -> GameResult<i32> {\n\n let mut result = 0u32;\n\n\n\n for o in 0..5 {\n\n let n = cursor.read_u8()?;\n\n result |= (n as u32 & 0x7f) << (o * 7);\n\n\n\n if n & 0x80 == 0 {\n\n break;\n\n }\n\n }\n\n\n\n Ok(((result << 31) ^ (result >> 1)) as i32)\n\n}\n\n\n\nimpl TextScriptVM {\n\n pub fn new() -> Self {\n\n Self {\n\n scripts: Scripts {\n\n global_script: TextScript::new(),\n", "file_path": "src/text_script.rs", "rank": 62, "score": 69480.06126199583 }, { "content": "fn p2_default_keymap() -> PlayerKeyMap {\n\n PlayerKeyMap {\n\n left: VirtualKeyCode::Comma,\n\n up: VirtualKeyCode::L,\n\n right: VirtualKeyCode::Slash,\n\n down: VirtualKeyCode::Period,\n\n prev_weapon: VirtualKeyCode::G,\n\n next_weapon: VirtualKeyCode::H,\n\n jump: VirtualKeyCode::B,\n\n shoot: VirtualKeyCode::N,\n\n skip: VirtualKeyCode::RControl,\n\n inventory: VirtualKeyCode::T,\n\n map: VirtualKeyCode::Y,\n\n }\n\n}\n", "file_path": "src/settings.rs", "rank": 63, "score": 67503.14873524688 }, { "content": "fn p1_default_keymap() -> PlayerKeyMap {\n\n PlayerKeyMap {\n\n left: VirtualKeyCode::Left,\n\n up: VirtualKeyCode::Up,\n\n right: VirtualKeyCode::Right,\n\n down: VirtualKeyCode::Down,\n\n prev_weapon: VirtualKeyCode::A,\n\n next_weapon: VirtualKeyCode::S,\n\n jump: VirtualKeyCode::Z,\n\n shoot: VirtualKeyCode::X,\n\n skip: VirtualKeyCode::LControl,\n\n inventory: VirtualKeyCode::Q,\n\n map: VirtualKeyCode::W,\n\n }\n\n}\n\n\n", "file_path": "src/settings.rs", "rank": 64, "score": 67503.14873524688 }, { "content": "fn zero_index(s: &[u8]) -> usize {\n\n s.iter().position(|&c| c == b'\\0').unwrap_or(s.len())\n\n}\n\n\n", "file_path": "src/stage.rs", "rank": 65, "score": 65124.56107704931 }, { "content": "fn from_shift_jis(s: &[u8]) -> String {\n\n let mut cursor = Cursor::new(s);\n\n let mut chars = Vec::new();\n\n let mut bytes = s.len() as u32;\n\n\n\n while bytes > 0 {\n\n let (consumed, chr) = read_cur_shift_jis(&mut cursor, bytes);\n\n chars.push(chr);\n\n bytes -= consumed;\n\n }\n\n\n\n chars.iter().collect()\n\n}\n\n\n\nimpl StageData {\n\n // todo: refactor to make it less 
repetitive.\n\n pub fn load_stage_table(ctx: &mut Context, root: &str) -> GameResult<Vec<Self>> {\n\n let stage_tbl_path = [root, \"stage.tbl\"].join(\"\");\n\n let stage_sect_path = [root, \"stage.sect\"].join(\"\");\n\n let mrmap_bin_path = [root, \"mrmap.bin\"].join(\"\");\n", "file_path": "src/stage.rs", "rank": 66, "score": 65124.56107704931 }, { "content": "fn default_n349_statue() -> Rect<u16> {\n\n Rect { left: 0, top: 0, right: 16, bottom: 16 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 67, "score": 63380.04695889869 }, { "content": "fn default_n195_grate() -> Rect<u16> {\n\n Rect { left: 112, top: 64, right: 128, bottom: 80 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 68, "score": 63380.04695889869 }, { "content": "fn default_n090_background() -> Rect<u16> {\n\n Rect { left: 280, top: 80, right: 296, bottom: 104 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 69, "score": 63380.04695889869 }, { "content": "fn default_n227_bucket() -> Rect<u16> {\n\n Rect { left: 208, top: 32, right: 224, bottom: 48 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 70, "score": 63380.04695889869 }, { "content": "fn default_n168_boulder() -> Rect<u16> {\n\n Rect { left: 264, top: 56, right: 320, bottom: 96 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 71, "score": 63380.04695889869 }, { "content": "fn default_n193_broken_scooter() -> Rect<u16> {\n\n Rect { left: 256, top: 96, right: 320, bottom: 112 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 72, "score": 62191.58847354246 }, { "content": "fn default_n258_mimiga_sleeping() -> Rect<u16> {\n\n Rect { left: 48, top: 32, right: 64, bottom: 48 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 73, "score": 62191.58847354246 }, { "content": "fn default_n185_small_shutter() -> Rect<u16> {\n\n Rect { left: 96, top: 64, right: 112, bottom: 96 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 74, "score": 62191.58847354246 }, { "content": "fn default_n091_mimiga_cage() -> Rect<u16> {\n\n Rect { left: 96, top: 88, right: 128, bottom: 112 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 75, "score": 62191.58847354246 }, { "content": "fn default_n360_credits_thank_you() -> Rect<u16> {\n\n Rect { left: 0, top: 176, right: 48, bottom: 184 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 76, "score": 62191.58847354246 }, { "content": "fn default_n343_ballos_2_cutscene() -> Rect<u16> {\n\n Rect { left: 0, top: 0, right: 120, bottom: 120 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 77, "score": 62191.58847354246 }, { "content": "fn default_n116_red_petals() -> Rect<u16> {\n\n Rect { left: 272, top: 184, right: 320, bottom: 200 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 78, "score": 62191.58847354246 }, { "content": "fn default_n222_prison_bars() -> Rect<u16> {\n\n Rect { left: 96, top: 168, right: 112, bottom: 200 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 79, "score": 62191.58847354246 }, { "content": "fn default_n357_puppy_ghost() -> Rect<u16> {\n\n Rect { left: 224, top: 136, right: 240, bottom: 152 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 80, "score": 62191.58847354246 }, { "content": "fn default_n237_gunfish_projectile() -> Rect<u16> {\n\n Rect { left: 312, top: 32, right: 320, bottom: 40 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 81, 
"score": 62191.58847354246 }, { "content": "fn default_n021_chest_open() -> Rect<u16> {\n\n Rect { left: 224, top: 40, right: 240, bottom: 48 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 82, "score": 62191.58847354246 }, { "content": "fn default_n244_lava_drop() -> Rect<u16> {\n\n Rect { left: 96, top: 0, right: 104, bottom: 16 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 83, "score": 62191.58847354246 }, { "content": "fn default_n216_debug_cat() -> Rect<u16> {\n\n Rect { left: 256, top: 192, right: 272, bottom: 216 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 84, "score": 62191.58847354246 }, { "content": "fn default_n119_table_chair() -> Rect<u16> {\n\n Rect { left: 248, top: 184, right: 272, bottom: 200 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 85, "score": 62191.58847354246 }, { "content": "fn default_n027_death_trap() -> Rect<u16> {\n\n Rect { left: 96, top: 64, right: 128, bottom: 88 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 86, "score": 62191.58847354246 }, { "content": "fn default_n041_busted_door() -> Rect<u16> {\n\n Rect { left: 0, top: 80, right: 48, bottom: 112 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 87, "score": 62191.58847354246 }, { "content": "fn default_n159_monster_x_defeated() -> Rect<u16> {\n\n Rect { left: 144, top: 128, right: 192, bottom: 200 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 88, "score": 62191.58847354246 }, { "content": "fn default_n346_ballos_orbiting_platform() -> Rect<u16> {\n\n Rect { left: 240, top: 0, right: 272, bottom: 16 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 89, "score": 61076.08516152775 }, { "content": "fn default_n149_horizontal_moving_block() -> Rect<u16> {\n\n Rect { left: 16, top: 0, right: 48, bottom: 32 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 90, "score": 61076.08516152775 }, { "content": "fn default_n297_sue_dragon_mouth() -> Rect<u16> {\n\n Rect { left: 112, top: 48, right: 128, bottom: 64 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 91, "score": 61076.08516152775 }, { "content": "fn default_n328_human_transform_machine() -> Rect<u16> {\n\n Rect { left: 96, top: 0, right: 128, bottom: 48 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 92, "score": 61076.08516152775 }, { "content": "fn default_n194_broken_blue_robot() -> Rect<u16> {\n\n Rect { left: 192, top: 120, right: 224, bottom: 128 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 93, "score": 61076.08516152775 }, { "content": "fn default_n300_intro_demon_crown() -> Rect<u16> {\n\n Rect { left: 192, top: 80, right: 208, bottom: 96 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 94, "score": 61076.08516152775 }, { "content": "fn default_n052_sitting_blue_robot() -> Rect<u16> {\n\n Rect { left: 240, top: 96, right: 256, bottom: 112 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 95, "score": 61076.08516152775 }, { "content": "fn default_n157_vertical_moving_block() -> Rect<u16> {\n\n Rect { left: 16, top: 0, right: 48, bottom: 32 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 96, "score": 61076.08516152775 }, { "content": "fn default_n137_large_door_frame() -> Rect<u16> {\n\n Rect { left: 96, top: 136, right: 128, bottom: 188 }\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 97, "score": 61076.08516152775 }, { 
"content": "fn default_n002_behemoth() -> [Rect<u16>; 14] {\n\n [\n\n Rect { left: 32, top: 0, right: 64, bottom: 24 },\n\n Rect { left: 0, top: 0, right: 32, bottom: 24 },\n\n Rect { left: 32, top: 0, right: 64, bottom: 24 },\n\n Rect { left: 64, top: 0, right: 96, bottom: 24 },\n\n Rect { left: 96, top: 0, right: 128, bottom: 24 },\n\n Rect { left: 128, top: 0, right: 160, bottom: 24 },\n\n Rect { left: 160, top: 0, right: 192, bottom: 24 },\n\n Rect { left: 32, top: 24, right: 64, bottom: 48 },\n\n Rect { left: 0, top: 24, right: 32, bottom: 48 },\n\n Rect { left: 32, top: 24, right: 64, bottom: 48 },\n\n Rect { left: 64, top: 24, right: 96, bottom: 48 },\n\n Rect { left: 96, top: 24, right: 128, bottom: 48 },\n\n Rect { left: 128, top: 24, right: 160, bottom: 48 },\n\n Rect { left: 160, top: 24, right: 192, bottom: 48 },\n\n ]\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 98, "score": 61042.09945061742 }, { "content": "fn default_n001_experience() -> [Rect<u16>; 6] {\n\n [\n\n Rect { left: 0, top: 16, right: 16, bottom: 32 },\n\n Rect { left: 16, top: 16, right: 32, bottom: 32 },\n\n Rect { left: 32, top: 16, right: 48, bottom: 32 },\n\n Rect { left: 48, top: 16, right: 64, bottom: 32 },\n\n Rect { left: 64, top: 16, right: 80, bottom: 32 },\n\n Rect { left: 80, top: 16, right: 96, bottom: 32 },\n\n ]\n\n}\n\n\n", "file_path": "src/engine_constants/npcs.rs", "rank": 99, "score": 61042.09945061742 } ]
Rust
src/util/encrypt.rs
MarcoPolo/rust-sssmc39
a3e9d53d295b249c0212d6f7ab9dc434c8210df2
use crate::error::Error; #[cfg(feature = "ring_pbkdf2")] use ring::pbkdf2; #[cfg(feature = "ring_pbkdf2")] use std::num::NonZeroU32; #[cfg(feature = "rust_crypto_pbkdf2")] use pbkdf2::pbkdf2; #[cfg(feature = "rust_crypto_pbkdf2")] use sha2::Sha256; #[cfg(feature = "rust_crypto_pbkdf2")] use hmac::Hmac; #[derive(Debug, Clone, PartialEq, Eq)] pub struct MasterSecretEncConfig { pub min_iteration_count: u32, pub round_count: u8, pub customization_string: Vec<u8>, } impl Default for MasterSecretEncConfig { fn default() -> Self { let min_iteration_count = 10000; let round_count = 4; let customization_string = b"shamir".to_vec(); MasterSecretEncConfig { min_iteration_count, round_count, customization_string, } } } impl MasterSecretEncConfig { pub fn new() -> Self { MasterSecretEncConfig { ..Default::default() } } } pub struct MasterSecretEnc { pub config: MasterSecretEncConfig, } impl Default for MasterSecretEnc { fn default() -> Self { MasterSecretEnc { config: MasterSecretEncConfig::new(), } } } impl MasterSecretEnc { pub fn new() -> Result<MasterSecretEnc, Error> { Ok(MasterSecretEnc { config: MasterSecretEncConfig::new(), }) } pub fn encrypt( &self, master_secret: &[u8], passphrase: &str, iteration_exponent: u8, identifier: u16, ) -> Vec<u8> { let mut l = master_secret.to_owned(); let mut r = l.split_off(l.len() / 2); let salt = self.get_salt(identifier); for i in 0..self.config.round_count { let tmp_r = r.clone(); r = self.xor( &l, &self.round_function(i, passphrase, iteration_exponent, &salt, &r), ); l = tmp_r; } r.append(&mut l); r } pub fn decrypt( &self, enc_master_secret: &[u8], passphrase: &str, iteration_exponent: u8, identifier: u16, ) -> Vec<u8> { let mut l = enc_master_secret.to_owned(); let mut r = l.split_off(l.len() / 2); let salt = self.get_salt(identifier); for i in (0..self.config.round_count).rev() { let tmp_r = r.clone(); r = self.xor( &l, &self.round_function(i, passphrase, iteration_exponent, &salt, &r), ); l = tmp_r; } r.append(&mut l); r } fn get_salt(&self, identifier: u16) -> Vec<u8> { let mut retval = self.config.customization_string.clone(); retval.append(&mut identifier.to_be_bytes().to_vec()); retval } fn round_function( &self, i: u8, passphrase: &str, e: u8, salt: &[u8], r: &[u8], ) -> Vec<u8> { let iterations = (self.config.min_iteration_count / u32::from(self.config.round_count)) << u32::from(e); let out_length = r.len(); let mut salt = salt.to_owned(); let mut r = r.to_owned(); salt.append(&mut r); let mut password = vec![i]; password.append(&mut passphrase.as_bytes().to_vec()); self.pbkdf2_derive(iterations, &salt, &password, out_length) } #[cfg(feature = "rust_crypto_pbkdf2")] fn pbkdf2_derive(&self, iterations: u32, salt: &[u8], password: &[u8], out_length: usize) -> Vec<u8> { let mut out = vec![0; out_length]; pbkdf2::<Hmac<Sha256>>( password, salt, iterations as usize, &mut out, ); out } #[cfg(feature = "ring_pbkdf2")] fn pbkdf2_derive(&self, iterations: u32, salt: &[u8], password: &[u8], out_length: usize) -> Vec<u8> { let mut out = vec![0; out_length]; pbkdf2::derive( ring::pbkdf2::PBKDF2_HMAC_SHA256, NonZeroU32::new(iterations as u32).unwrap(), salt, password, &mut out, ); out } fn xor(&self, a: &[u8], b: &[u8]) -> Vec<u8> { let mut retval = vec![0; b.len()]; for i in 0..b.len() { retval[i] = a[i] ^ b[i]; } retval } } #[cfg(test)] mod tests { use super::*; use rand::{thread_rng, Rng}; fn roundtrip_test(secret: Vec<u8>, passphrase: &str, identifier: u16, iteration_exponent: u8) { let enc = MasterSecretEnc::default(); println!("master_secret: {:?}", 
secret); let encrypted_secret = enc.encrypt(&secret, passphrase, iteration_exponent, identifier); println!("encrypted_secret: {:?}", encrypted_secret); let decrypted_secret = enc.decrypt(&encrypted_secret, passphrase, iteration_exponent, identifier); println!("decrypted_secret: {:?}", decrypted_secret); assert_eq!(secret, decrypted_secret); } #[test] fn roundtrip_test_vector() { for e in vec![0, 6] { let secret = b"\x0c\x94\x90\xbcn\xd6\xbc\xbf\xac>\xbe}\xeeV\xf2P".to_vec(); roundtrip_test(secret, "", 7470, e); } } #[test] #[ignore] fn roundtrip_test_vector_slow() { let secret = b"\x0c\x94\x90\xbcn\xd6\xbc\xbf\xac>\xbe}\xeeV\xf2P".to_vec(); roundtrip_test(secret, "", 7470, 12); } #[test] fn roundtrip_16_bytes() { for _ in 0..20 { let s: [u8; 16] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_32_bytes() { for _ in 0..20 { let s: [u8; 32] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_12_bytes() { for _ in 0..10 { let s: [u8; 12] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_32_bytes_password() { for _ in 0..10 { let s: [u8; 12] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "pebkac", id, 0); } } }
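The file above implements the master-secret encryption as a four-round Feistel network: each round replaces one half of the secret with the XOR of the other half and a PBKDF2-derived round value, and `decrypt` is the same loop run with the round indices reversed. The sketch below demonstrates that structure with a toy mixer standing in for the real PBKDF2-based `round_function` (the mixer is an assumption made purely for illustration); the round-trip property holds for any deterministic round function.

// Toy stand-in for `round_function`: any deterministic keyed mixing works here.
fn toy_round(i: u8, half: &[u8]) -> Vec<u8> {
    half.iter().map(|b| b.wrapping_mul(31).wrapping_add(i)).collect()
}

fn xor(a: &[u8], b: &[u8]) -> Vec<u8> {
    a.iter().zip(b).map(|(x, y)| x ^ y).collect()
}

// Same split / swap / recombine structure as `encrypt` and `decrypt` above,
// parameterized over the order in which the round indices are visited.
fn feistel(data: &[u8], rounds: impl Iterator<Item = u8>) -> Vec<u8> {
    let mut l = data.to_vec();
    let mut r = l.split_off(l.len() / 2);
    for i in rounds {
        let tmp_r = r.clone();
        r = xor(&l, &toy_round(i, &r));
        l = tmp_r;
    }
    r.append(&mut l);
    r
}

fn main() {
    let secret = b"0123456789abcdef".to_vec();
    let enc = feistel(&secret, 0u8..4);        // forward rounds, as in `encrypt`
    let dec = feistel(&enc, (0u8..4).rev());   // reversed rounds, as in `decrypt`
    assert_eq!(secret, dec);
    println!("feistel round-trip ok");
}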
use crate::error::Error; #[cfg(feature = "ring_pbkdf2")] use ring::pbkdf2; #[cfg(feature = "ring_pbkdf2")] use std::num::NonZeroU32; #[cfg(feature = "rust_crypto_pbkdf2")] use pbkdf2::pbkdf2; #[cfg(feature = "rust_crypto_pbkdf2")] use sha2::Sha256; #[cfg(feature = "rust_crypto_pbkdf2")] use hmac::Hmac; #[derive(Debug, Clone, PartialEq, Eq)] pub struct MasterSecretEncConfig { pub min_iteration_count: u32, pub round_count: u8, pub customization_string: Vec<u8>, } impl Default for MasterSecretEncConfig { fn default() -> Self { let min_iteration_count = 10000; let round_count = 4; let customization_string = b"shamir".to_vec(); MasterSecretEncConfig { min_iteration_count, round_count, customization_string, } } } impl MasterSecretEncConfig { pub fn new() -> Self { MasterSecretEncConfig { ..Default::default() } } } pub struct MasterSecretEnc { pub config: MasterSecretEncConfig, } impl Default for MasterSecretEnc { fn default() -> Self { MasterSecretEnc { config: MasterSecretEncConfig::new(), } } } impl MasterSecretEnc { pub fn new() -> Result<MasterSecretEnc, Error> { Ok(MasterSecretEnc { config: MasterSecretEncConfig::new(), }) } pub fn encrypt( &self, master_secret: &[u8], passphrase: &str, iteration_exponent: u8, identifier: u16, ) -> Vec<u8> { let mut l = master_secret.to_owned(); let mut r = l.split_off(l.len() / 2); let salt = self.get_salt(identifier); for i in 0..self.config.round_count { let tmp_r = r.clone(); r = self.xor( &l, &self.round_function(i, passphrase, iteration_exponent, &salt, &r), ); l = tmp_r; } r.append(&mut l); r } pub fn decrypt( &self, enc_master_secret: &[u8], passphrase: &str, iteration_exponent: u8, identifier: u16, ) -> Vec<u8> { let mut l = enc_master_secret.to_owned(); let mut r = l.split_off(l.len() / 2); let salt = self.get_salt(identifier); for i in (0..self.config.round_count).rev() { let tmp_r = r.clone(); r = self.xor( &l, &self.round_function(i, passphrase, iteration_exponent, &salt, &r), ); l = tmp_r; } r.append(&mut l); r } fn get_salt(&self, identifier: u16) -> Vec<u8> { let mut retval = self.config.customization_string.clone(); retval.append(&mut identifier.to_be_bytes().to_vec()); retval } fn round_function( &self, i: u8, passphrase: &str, e: u8, salt: &[u8], r: &[u8], ) -> Vec<u8> { let iterations = (self.config.min_iteration_count / u32::from(self.config.round_count)) << u32::from(e); let out_length = r.len(); let mut salt = salt.to_owned(); let mut r = r.to_owned(); salt.append(&mut r); let mut password = vec![i]; password.append(&mut passphrase.as_bytes().to_vec()); self.pbkdf2_derive(iterations, &salt, &password, out_length) } #[cfg(feature = "rust_crypto_pbkdf2")] fn pbkdf2_derive(&self, iterations: u32, salt: &[u8], password: &[u8], out_length: usize) -> Vec<u8> { let mut out = vec![0; out_length]; pbkdf2::<Hmac<Sha256>>( password, salt, iterations as usize, &mut out, ); out } #[cfg(feature = "ring_pbkdf2")] fn pbkdf2_derive(&self, iterations: u32, salt: &[u8], password: &[u8], out_length: usize) -> Vec<u8> { let mut out = vec![0; out_length]; pbkdf2::derive( ring::pbk
fn xor(&self, a: &[u8], b: &[u8]) -> Vec<u8> { let mut retval = vec![0; b.len()]; for i in 0..b.len() { retval[i] = a[i] ^ b[i]; } retval } } #[cfg(test)] mod tests { use super::*; use rand::{thread_rng, Rng}; fn roundtrip_test(secret: Vec<u8>, passphrase: &str, identifier: u16, iteration_exponent: u8) { let enc = MasterSecretEnc::default(); println!("master_secret: {:?}", secret); let encrypted_secret = enc.encrypt(&secret, passphrase, iteration_exponent, identifier); println!("encrypted_secret: {:?}", encrypted_secret); let decrypted_secret = enc.decrypt(&encrypted_secret, passphrase, iteration_exponent, identifier); println!("decrypted_secret: {:?}", decrypted_secret); assert_eq!(secret, decrypted_secret); } #[test] fn roundtrip_test_vector() { for e in vec![0, 6] { let secret = b"\x0c\x94\x90\xbcn\xd6\xbc\xbf\xac>\xbe}\xeeV\xf2P".to_vec(); roundtrip_test(secret, "", 7470, e); } } #[test] #[ignore] fn roundtrip_test_vector_slow() { let secret = b"\x0c\x94\x90\xbcn\xd6\xbc\xbf\xac>\xbe}\xeeV\xf2P".to_vec(); roundtrip_test(secret, "", 7470, 12); } #[test] fn roundtrip_16_bytes() { for _ in 0..20 { let s: [u8; 16] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_32_bytes() { for _ in 0..20 { let s: [u8; 32] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_12_bytes() { for _ in 0..10 { let s: [u8; 12] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "", id, 0); } } #[test] fn roundtrip_32_bytes_password() { for _ in 0..10 { let s: [u8; 12] = thread_rng().gen(); let id: u16 = thread_rng().gen(); roundtrip_test(s.to_vec(), "pebkac", id, 0); } } }
df2::PBKDF2_HMAC_SHA256, NonZeroU32::new(iterations as u32).unwrap(), salt, password, &mut out, ); out }
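One detail worth noting from the code above: `get_salt` builds the PBKDF2 salt as the customization string b"shamir" followed by the two big-endian bytes of the share identifier. A minimal sketch of that layout, using the identifier 7470 from the file's own test vector (the helper name `salt_for` is chosen here for illustration):

fn salt_for(identifier: u16) -> Vec<u8> {
    // customization string, then the identifier in big-endian byte order
    let mut salt = b"shamir".to_vec();
    salt.extend_from_slice(&identifier.to_be_bytes());
    salt
}

fn main() {
    // 7470 == 0x1D2E, so the salt ends in the bytes 0x1D, 0x2E
    assert_eq!(salt_for(7470), b"shamir\x1d\x2e".to_vec());
    println!("salt layout ok");
}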
function_block-function_prefixed
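Before the retrieved context snippets, a quick arithmetic check of the work factor implied by `round_function` above: each Feistel round runs `(min_iteration_count / round_count) << e` PBKDF2 iterations, i.e. 2500 per round (10000 across the four rounds) at exponent 0, doubling with each increment of `e`. A sketch using the defaults from `MasterSecretEncConfig`:

fn iterations_per_round(min_iteration_count: u32, round_count: u8, e: u8) -> u32 {
    (min_iteration_count / u32::from(round_count)) << u32::from(e)
}

fn main() {
    assert_eq!(iterations_per_round(10000, 4, 0), 2_500);   // 10000 total over 4 rounds
    assert_eq!(iterations_per_round(10000, 4, 2), 10_000);  // 4x the work
    assert_eq!(iterations_per_round(10000, 4, 6), 160_000); // 64x the work
    println!("iteration schedule ok");
}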
[ { "content": "pub fn verify_checksum(custom_string: &[u8], data: &[u32]) -> Result<(), Error> {\n\n\tlet mut values: Vec<u32> = custom_string.iter().map(|d| u32::from(*d)).collect();\n\n\tfor e in data {\n\n\t\tvalues.push(e.to_owned());\n\n\t}\n\n\tif polymod(&values) != 1 {\n\n\t\treturn Err(ErrorKind::Config(format!(\n\n\t\t\t\"Error verifying checksum: {:?}\",\n\n\t\t\tdata,\n\n\t\t)))?;\n\n\t}\n\n\tOk(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\tuse crate::error::Error;\n\n\n", "file_path": "src/util/rs1024.rs", "rank": 0, "score": 150698.8173359619 }, { "content": "/// Combines mnemonic shares to obtain the master secret which was previously split using\n\n/// Shamir's secret sharing scheme.\n\n/// mnemonics: List of mnemonics.\n\n/// passphrase: The passphrase used to encrypt the master secret.\n\n/// return: The master secret.\n\npub fn combine_mnemonics(mnemonics: &[Vec<String>], passphrase: &str) -> Result<Vec<u8>, Error> {\n\n\tlet group_shares = decode_mnemonics(mnemonics)?;\n\n\tlet mut shares = vec![];\n\n\tfor mut gs in group_shares {\n\n\t\tshares.push(gs.decode_shares()?);\n\n\t}\n\n\tlet sp = Splitter::new(None);\n\n\t// restore proper member index for groups\n\n\tlet shares = shares\n\n\t\t.into_iter()\n\n\t\t.map(|mut s| {\n\n\t\t\ts.member_index = s.group_index;\n\n\t\t\ts\n\n\t\t})\n\n\t\t.collect::<Vec<_>>();\n\n\tlet ems = sp.recover_secret(&shares, shares[0].group_threshold)?;\n\n\tlet encoder = util::encrypt::MasterSecretEnc::new()?;\n\n\tlet dms = encoder.decrypt(\n\n\t\t&ems.share_value,\n\n\t\tpassphrase,\n\n\t\tems.iteration_exponent,\n\n\t\tems.identifier,\n\n\t);\n\n\tOk(dms)\n\n}\n\n\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 1, "score": 132781.69712041251 }, { "content": "/// Evaluates an interpolated polynomial at `Gf256::zero()` where\n\n/// the polynomial is determined using barycentric Lagrange\n\n/// interpolation based on the given `points` in\n\n/// the G(2^8) Galois field.\n\npub fn _interpolate_at(k: u8, points: &[(u8, u8)]) -> u8 {\n\n\t_barycentric_interpolate_at(k as usize, points)\n\n}\n\n\n\n/// Barycentric Lagrange interpolation algorithm from \"Polynomial\n\n/// Interpolation: Langrange vs Newton\" by Wilhelm Werner. 
Evaluates\n\n/// the polynomial at `Gf256::zero()`.\n", "file_path": "src/field/lagrange.rs", "rank": 2, "score": 123508.4919895726 }, { "content": "// fill a u8 vec with n bytes of random data\n\npub fn fill_vec_rand(n: usize) -> Vec<u8> {\n\n\tlet mut v = vec![];\n\n\tfor _ in 0..n {\n\n\t\tv.push(thread_rng().gen());\n\n\t}\n\n\tv\n\n}\n\n\n\n\n", "file_path": "src/util/mod.rs", "rank": 3, "score": 121668.73942020857 }, { "content": "// fill a u8 vec with n bytes of random data\n\npub fn fill_vec_rand(n: usize) -> Vec<u8> {\n\n\tlet mut v = vec![];\n\n\tfor _ in 0..n {\n\n\t\tv.push(thread_rng().gen());\n\n\t}\n\n\tv\n\n}\n\n\n\n// test vector entry, for deser from reference json\n", "file_path": "tests/ref_vectors.rs", "rank": 4, "score": 121668.73942020856 }, { "content": "/// Decode a hex string into bytes.\n\npub fn from_hex(hex_str: String) -> Result<Vec<u8>, num::ParseIntError> {\n\n\tif hex_str.len() % 2 == 1 {\n\n\t\t// TODO: other way to instantiate a ParseIntError?\n\n\t\tlet err = (\"QQQ\").parse::<u64>();\n\n\t\tif let Err(e) = err {\n\n\t\t\treturn Err(e);\n\n\t\t}\n\n\t}\n\n\tlet hex_trim = if &hex_str[..2] == \"0x\" {\n\n\t\thex_str[2..].to_owned()\n\n\t} else {\n\n\t\thex_str.clone()\n\n\t};\n\n\tsplit_n(&hex_trim.trim()[..], 2)\n\n\t\t.iter()\n\n\t\t.map(|b| u8::from_str_radix(b, 16))\n\n\t\t.collect::<Result<Vec<u8>, _>>()\n\n}\n\n\n", "file_path": "src/util/hex.rs", "rank": 5, "score": 119436.14226180557 }, { "content": "#[inline]\n\nfn _barycentric_interpolate_at(k: usize, points: &[(u8, u8)]) -> u8 {\n\n\t// Compute the barycentric weights `w`.\n\n\tlet mut w = vec![Gf256::zero(); k];\n\n\tw[0] = Gf256::one();\n\n\n\n\tlet mut x = Vec::with_capacity(k);\n\n\tx.push(Gf256::from_byte(points[0].0));\n\n\n\n\tfor i in 1..k {\n\n\t\tx.push(Gf256::from_byte(points[i].0));\n\n\t\tfor j in 0..i {\n\n\t\t\tlet delta = x[j] - x[i];\n\n\t\t\tassert_ne!(delta.poly, 0, \"Duplicate shares\");\n\n\t\t\tw[j] /= delta;\n\n\t\t\tlet n = w[j];\n\n\t\t\tw[i] -= n;\n\n\t\t}\n\n\t}\n\n\n\n\t// Evaluate the second or \"true\" form of the barycentric\n", "file_path": "src/field/lagrange.rs", "rank": 6, "score": 109251.61037156153 }, { "content": "fn split_n(s: &str, n: usize) -> Vec<&str> {\n\n\t(0..=(s.len() - n + 1) / 2)\n\n\t\t.map(|i| &s[2 * i..2 * i + n])\n\n\t\t.collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\tuse super::*;\n\n\n\n\t#[test]\n\n\tfn test_to_hex() {\n\n\t\tassert_eq!(to_hex(vec![0, 0, 0, 0]), \"00000000\");\n\n\t\tassert_eq!(to_hex(vec![10, 11, 12, 13]), \"0a0b0c0d\");\n\n\t\tassert_eq!(to_hex(vec![0, 0, 0, 255]), \"000000ff\");\n\n\t}\n\n\n\n\t#[test]\n\n\tfn test_from_hex() {\n\n\t\tassert_eq!(from_hex(\"00000000\".to_string()).unwrap(), vec![0, 0, 0, 0]);\n", "file_path": "src/util/hex.rs", "rank": 7, "score": 106868.05906603952 }, { "content": "fn test_json_vectors(input: &str) -> Result<(), Error> {\n\n\tlet tv_list: Vec<TVEntry> = serde_json::from_str(input).unwrap();\n\n\tfor tv in tv_list {\n\n\t\tlet ref_ms = tv.master_secret_to_u8_vec();\n\n\t\tprintln!(\"TESTVECS: {:?}\", tv.mnemonics_to_vecs());\n\n\t\tprintln!(\"MASTER SECRET: {:?}\", ref_ms);\n\n\t\tlet result = combine_mnemonics(&tv.mnemonics_to_vecs(), \"TREZOR\");\n\n\t\tif !ref_ms.is_empty() {\n\n\t\t\tif let Ok(returned_ms) = result {\n\n\t\t\t\tassert_eq!(ref_ms, returned_ms);\n\n\t\t\t\tprintln!(\"OK - passed\");\n\n\t\t\t}\n\n\t\t} else if result.is_ok() {\n\n\t\t\tprintln!(\"Result SHOULD HAVE FAILED - {:?}\", result);\n\n\t\t\tpanic!();\n\n\t\t} else {\n\n\t\t\tprintln!(\"{}\", 
result.unwrap_err());\n\n\t\t\tprintln!(\"OK - should fail\");\n\n\t\t}\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": "tests/ref_vectors.rs", "rank": 8, "score": 104751.23138053366 }, { "content": "/// Encode the provided bytes into a hex string\n\npub fn to_hex(bytes: Vec<u8>) -> String {\n\n\tlet mut s = String::new();\n\n\tfor byte in bytes {\n\n\t\twrite!(&mut s, \"{:02x}\", byte).expect(\"Unable to write\");\n\n\t}\n\n\ts\n\n}\n\n\n", "file_path": "src/util/hex.rs", "rank": 9, "score": 102642.05234994121 }, { "content": "/// values intepreted as a list of 10 bit integers\n\nfn polymod(values: &[u32]) -> u32 {\n\n\tlet mut chk = 1;\n\n\tlet mut b: u32;\n\n\tfor v in values {\n\n\t\tb = chk >> 20;\n\n\t\tchk = (chk & 0xfffff) << 10 ^ v;\n\n\t\tfor (i, item) in GEN.iter().enumerate() {\n\n\t\t\tif (b >> i) & 1 == 1 {\n\n\t\t\t\tchk ^= *item;\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tchk\n\n}\n\n\n", "file_path": "src/util/rs1024.rs", "rank": 10, "score": 92109.49358932872 }, { "content": "//TODO: Proper docs\n\n/// Generates shares from the provided master secret (e.g. BIP39 entropy)\n\npub fn generate_mnemonics(\n\n\tgroup_threshold: u8,\n\n\tgroups: &[(u8, u8)],\n\n\tmaster_secret: &[u8],\n\n\tpassphrase: &str,\n\n\titeration_exponent: u8,\n\n) -> Result<Vec<GroupShare>, Error> {\n\n\tshamir::generate_mnemonics(\n\n\t\tgroup_threshold,\n\n\t\tgroups,\n\n\t\tmaster_secret,\n\n\t\tpassphrase,\n\n\t\titeration_exponent,\n\n\t)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 11, "score": 85753.71232580216 }, { "content": "// TODO: Proper docs\n\n// should allow for different input formats\n\n/// Combines shares into a master secret (e.g. BIP39 entropy)\n\npub fn combine_mnemonics(\n\n\tmnemonics: &[Vec<String>],\n\n\tpassphrase: &str,\n\n) -> Result<Vec<u8>, Error> {\n\n\tshamir::combine_mnemonics(mnemonics, passphrase)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 12, "score": 85753.71232580216 }, { "content": "pub fn create_checksum(\n\n\tcustom_string: &[u8],\n\n\tdata: &[u32],\n\n\tchecksum_length_words: u8,\n\n) -> Vec<u32> {\n\n\tlet mut values: Vec<u32> = custom_string.iter().map(|d| u32::from(*d)).collect();\n\n\tfor e in data {\n\n\t\tvalues.push(e.to_owned());\n\n\t}\n\n\tfor _ in 0..checksum_length_words {\n\n\t\tvalues.push(0);\n\n\t}\n\n\tlet polymod = polymod(&values) ^ 1;\n\n\tlet mut retval = vec![];\n\n\tfor i in 0..checksum_length_words as usize {\n\n\t\tretval.push(polymod >> (10 * (2 - i)) & 1023);\n\n\t}\n\n\tretval\n\n}\n\n\n", "file_path": "src/util/rs1024.rs", "rank": 13, "score": 83495.76066825513 }, { "content": "// TODO: Proper docs\n\n/// Generate a random master secret (e.g. 
BIP39 entropy) and returns the shares from it\n\npub fn generate_mnemonics_random(\n\n\tgroup_threshold: u8,\n\n\tgroups: &[(u8, u8)],\n\n\tstrength_bits: u16,\n\n\tpassphrase: &str,\n\n\titeration_exponent: u8,\n\n) -> Result<Vec<GroupShare>, Error> {\n\n\tshamir::generate_mnemonics_random(\n\n\t\tgroup_threshold,\n\n\t\tgroups,\n\n\t\tstrength_bits,\n\n\t\tpassphrase,\n\n\t\titeration_exponent,\n\n\t)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 14, "score": 83495.76066825513 }, { "content": "/// Split a master secret into mnemonic shares\n\n/// group_threshold: The number of groups required to reconstruct the master secret\n\n/// groups: A list of (member_threshold, member_count) pairs for each group, where member_count\n\n/// is the number of shares to generate for the group and member_threshold is the number of\n\n/// members required to reconstruct the group secret.\n\n/// master_secret: The master secret to split.\n\n/// passphrase: The passphrase used to encrypt the master secret.\n\n/// iteration_exponent: The iteration exponent.\n\n/// return: List of mnemonics.\n\npub fn generate_mnemonics(\n\n\tgroup_threshold: u8,\n\n\tgroups: &[(u8, u8)],\n\n\tmaster_secret: &[u8],\n\n\tpassphrase: &str,\n\n\titeration_exponent: u8,\n\n) -> Result<Vec<GroupShare>, Error> {\n\n\t// Generate a 'proto share' so to speak, with identifer generated and group data filled\n\n\tlet mut proto_share = Share::new()?;\n\n\tproto_share.group_threshold = group_threshold;\n\n\tproto_share.group_count = groups.len() as u8;\n\n\n\n\tif master_secret.len() * 8 < proto_share.config.min_strength_bits as usize {\n\n\t\treturn Err(ErrorKind::Value(format!(\n\n\t\t\t\"The length of the master secret ({} bytes) must be at least {} bytes.\",\n\n\t\t\tmaster_secret.len(),\n\n\t\t\t(f64::from(proto_share.config.min_strength_bits) / 8f64).ceil(),\n\n\t\t)))?;\n\n\t}\n\n\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 15, "score": 81433.97250181022 }, { "content": "pub fn generate_mnemonics_random(\n\n\tgroup_threshold: u8,\n\n\tgroups: &[(u8, u8)],\n\n\tstrength_bits: u16,\n\n\tpassphrase: &str,\n\n\titeration_exponent: u8,\n\n) -> Result<Vec<GroupShare>, Error> {\n\n\tlet proto_share = Share::new()?;\n\n\tif strength_bits < proto_share.config.min_strength_bits {\n\n\t\treturn Err(ErrorKind::Value(format!(\n\n\t\t\t\"The requested strength of the master secret({} bits) must be at least {} bits.\",\n\n\t\t\tstrength_bits, proto_share.config.min_strength_bits,\n\n\t\t)))?;\n\n\t}\n\n\tif strength_bits % 16 != 0 {\n\n\t\treturn Err(ErrorKind::Value(format!(\n\n\t\t\t\"The requested strength of the master secret({} bits) must be a multiple of 16 bits.\",\n\n\t\t\tstrength_bits,\n\n\t\t)))?;\n\n\t}\n\n\tgenerate_mnemonics(\n\n\t\tgroup_threshold,\n\n\t\tgroups,\n\n\t\t&util::fill_vec_rand(strength_bits as usize / 8),\n\n\t\tpassphrase,\n\n\t\titeration_exponent,\n\n\t)\n\n}\n\n\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 16, "score": 79523.64379615411 }, { "content": "#[test]\n\nfn test_vectors() -> Result<(), Error> {\n\n\tlet src = include_str!(\"fixtures/vectors/vectors.json\");\n\n\ttest_json_vectors(src)\n\n}\n\n\n", "file_path": "tests/ref_vectors.rs", "rank": 17, "score": 79191.28117373036 }, { "content": "#[test]\n\nfn create_test_vectors() -> Result<(), Error> {\n\n\tlet mut output = vec![];\n\n\tfor n in [16, 32].to_vec() {\n\n\t\tlet description = format!(\"Valid mnemomic without sharing ({} bits)\", 8*n);\n\n\t\tlet secret = fill_vec_rand(n);\n\n\t\tlet groups = generate_mnemonics(1, &[(1, 
1)].to_vec(), &secret, \"TREZOR\", 0)?;\n\n\t\toutput.push(TVEntry {\n\n\t\t\tmeta: description,\n\n\t\t\tmnemonics: groups[0].mnemonic_list_flat()?,\n\n\t\t\tmaster_secret: to_hex(secret.clone()),\n\n\t\t});\n\n\n\n\t\t/*let description = format!(\"Mnemonic with invalid checksum ({} bits)\", 8*n);\n\n\t\tlet indices = groups[0].member_shares[0].to_u8_vec()?;\n\n\t\tlet share = Share::from_u8_vec(&indices)?;\n\n\t\t\n\n\t\toutput.push(TVEntry {\n\n\t\t\tmeta: description,\n\n\t\t\tmnemonics: share.mnemonic_list_flat()?,\n\n\t\t\tmaster_secret: \"\".to_owned(),\n\n\t\t});*/\n\n\t}\n\n\tlet output = serde_json::to_string_pretty(&output).unwrap();\n\n\tprintln!(\"{}\", output);\n\n\t// and test them\n\n\ttest_json_vectors(&output)\n\n}\n", "file_path": "tests/ref_vectors.rs", "rank": 18, "score": 77286.93830631024 }, { "content": "/// Computeds the coefficient of the Lagrange polynomial interpolated\n\n/// from the given `points`, in the G(2^8) Galois field.\n\npub fn interpolate(points: &[(Gf256, Gf256)]) -> Poly {\n\n\tlet len = points.len();\n\n\n\n\tlet mut poly = vec![Gf256::zero(); len];\n\n\n\n\tfor &(x, y) in points {\n\n\t\tlet mut coeffs = vec![Gf256::zero(); len];\n\n\t\tcoeffs[0] = y;\n\n\n\n\t\tlet mut prod = Gf256::one();\n\n\t\tfor &(x1, _) in points {\n\n\t\t\tif x != x1 {\n\n\t\t\t\tprod *= x - x1;\n\n\n\n\t\t\t\tlet mut prec = Gf256::zero();\n\n\t\t\t\tcoeffs = coeffs\n\n\t\t\t\t\t.into_iter()\n\n\t\t\t\t\t.map(|coeff| {\n\n\t\t\t\t\t\tlet new_coeff = coeff * (-x1) + prec;\n\n\t\t\t\t\t\tprec = coeff;\n", "file_path": "src/field/lagrange.rs", "rank": 19, "score": 67437.56454801153 }, { "content": "/// Decodes all Mnemonics to a list of shares and performs error checking\n\nfn decode_mnemonics(mnemonics: &[Vec<String>]) -> Result<Vec<GroupShare>, Error> {\n\n\tlet mut shares = vec![];\n\n\tif mnemonics.is_empty() {\n\n\t\treturn Err(ErrorKind::Mnemonic(\n\n\t\t\t\"List of mnemonics is empty.\".to_string(),\n\n\t\t))?;\n\n\t}\n\n\tlet check_len = mnemonics[0].len();\n\n\tfor m in mnemonics {\n\n\t\tif m.len() != check_len {\n\n\t\t\treturn Err(ErrorKind::Mnemonic(\n\n\t\t\t\t\"Invalid set of mnemonics. 
All mnemonics must have the same length.\".to_string(),\n\n\t\t\t))?;\n\n\t\t}\n\n\t\tshares.push(Share::from_mnemonic(&m)?);\n\n\t}\n\n\n\n\tlet check_share = shares[0].clone();\n\n\tfor s in shares.iter() {\n\n\t\tif s.identifier != check_share.identifier\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 20, "score": 57988.82997492116 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone)]\n\nstruct TVEntry {\n\n\t// Metadata (test description)\n\n\tpub meta: String,\n\n\t// List of mnemonics\n\n\tpub mnemonics: Vec<String>,\n\n\t// Resulting master secret (decoding should fail if empty)\n\n\tpub master_secret: String,\n\n}\n\n\n\nimpl TVEntry {\n\n\t// get mnemonics as Vec<Vec<String>>\n\n\tpub fn mnemonics_to_vecs(&self) -> Vec<Vec<String>> {\n\n\t\tlet mut retvec = vec![];\n\n\t\tfor mn in self.mnemonics.iter() {\n\n\t\t\tretvec.push(mn.split_whitespace().map(|s| s.into()).collect());\n\n\t\t}\n\n\t\tretvec\n\n\t}\n\n\t// master secret to u8\n\n\tpub fn master_secret_to_u8_vec(&self) -> Vec<u8> {\n\n\t\tif self.master_secret.is_empty() {\n\n\t\t\tvec![]\n\n\t\t} else {\n\n\t\t\tfrom_hex(self.master_secret.clone()).unwrap()\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "tests/ref_vectors.rs", "rank": 21, "score": 48513.26896497169 }, { "content": "fn get_tables() -> &'static Tables {\n\n\t&TABLES\n\n}\n\n\n\n/// Type for elements of a finite field with 256 elements\n\n#[derive(Copy, Debug, Clone, PartialEq, Eq)]\n\npub struct Gf256 {\n\n\tpub poly: u8,\n\n}\n\n\n\nimpl Gf256 {\n\n\t/// returns the additive neutral element of the field\n\n\t#[inline]\n\n\tpub fn zero() -> Gf256 {\n\n\t\tGf256 { poly: 0 }\n\n\t}\n\n\t/// returns the multiplicative neutral element of the field\n\n\t#[inline]\n\n\tpub fn one() -> Gf256 {\n\n\t\tGf256 { poly: 1 }\n", "file_path": "src/field/gf256.rs", "rank": 22, "score": 38207.04103479588 }, { "content": "/// Error definition\n\n#[derive(Debug, Fail)]\n\npub struct Error {\n\n\tinner: Context<ErrorKind>,\n\n}\n\n\n\n/// Wallet errors, mostly wrappers around underlying crypto or I/O errors.\n\n#[derive(Clone, Eq, PartialEq, Debug, Fail)]\n\npub enum ErrorKind {\n\n\t/// Configuration error, with details\n\n\t#[fail(display = \"Configuration Error: {}\", _0)]\n\n\tConfig(String),\n\n\n\n\t/// Inconsistency between different arguments\n\n\t#[fail(display = \"Argument Error: {}\", _0)]\n\n\tArgument(String),\n\n\n\n\t/// Problems with a mnemonic or inconsistent mnemonics\n\n\t#[fail(display = \"Mnemonic Error: {}\", _0)]\n\n\tMnemonic(String),\n", "file_path": "src/error.rs", "rank": 23, "score": 30202.04031911673 }, { "content": "\t\t\toutput.push_str(&backtrace_output);\n\n\t\t}\n\n\t\tDisplay::fmt(&output, f)\n\n\t}\n\n}\n\n\n\nimpl Error {\n\n\t/// get kind\n\n\tpub fn kind(&self) -> ErrorKind {\n\n\t\tself.inner.get_context().clone()\n\n\t}\n\n\t/// get cause string\n\n\tpub fn cause_string(&self) -> String {\n\n\t\tmatch self.cause() {\n\n\t\t\tSome(k) => format!(\"{}\", k),\n\n\t\t\tNone => \"Unknown\".to_string(),\n\n\t\t}\n\n\t}\n\n\t/// get cause\n\n\tpub fn cause(&self) -> Option<&dyn Fail> {\n", "file_path": "src/error.rs", "rank": 24, "score": 30201.933988883036 }, { "content": "\t\tself.inner.cause()\n\n\t}\n\n\t/// get backtrace\n\n\tpub fn backtrace(&self) -> Option<&Backtrace> {\n\n\t\tself.inner.backtrace()\n\n\t}\n\n}\n\n\n\nimpl From<ErrorKind> for Error {\n\n\tfn from(kind: ErrorKind) -> Error {\n\n\t\tError {\n\n\t\t\tinner: Context::new(kind),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl From<Context<ErrorKind>> for Error {\n\n\tfn 
from(inner: Context<ErrorKind>) -> Error {\n\n\t\tError { inner }\n\n\t}\n\n}\n", "file_path": "src/error.rs", "rank": 25, "score": 30200.476796549647 }, { "content": "\n\n\t/// (unused currently)\n\n\t#[fail(display = \"Generic error: {}\", _0)]\n\n\tGenericError(String),\n\n}\n\n\n\nimpl Display for Error {\n\n\tfn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n\t\tlet show_bt = match env::var(\"RUST_BACKTRACE\") {\n\n\t\t\tOk(r) => { r == \"1\" }\n\n\t\t\tErr(_) => false,\n\n\t\t};\n\n\t\tlet backtrace = match self.backtrace() {\n\n\t\t\tSome(b) => format!(\"{}\", b),\n\n\t\t\tNone => String::from(\"Unknown\"),\n\n\t\t};\n\n\t\tlet inner_output = format!(\"{}\", self.inner,);\n\n\t\tlet backtrace_output = format!(\"\\n Backtrace: {}\", backtrace);\n\n\t\tlet mut output = inner_output.clone();\n\n\t\tif show_bt {\n", "file_path": "src/error.rs", "rank": 26, "score": 30198.65786843725 }, { "content": "// Copyright 2019 The Grin Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Error types for ssmc39 crate\n\n\n\nuse failure::{Backtrace, Context, Fail};\n\nuse std::env;\n\nuse std::fmt::{self, Display};\n\n\n", "file_path": "src/error.rs", "rank": 27, "score": 30193.20244298417 }, { "content": "\n\n\t/// Assembling the full master secret resulted in an incorrect checksum\n\n\t#[fail(display = \"Digest Error: {}\", _0)]\n\n\tDigest(String),\n\n\n\n\t/// Invalid usage of BitPacker.add_uX (num_bits longer than the size of uX)\n\n\t#[fail(display = \"BitVec Error: {}\", _0)]\n\n\tBitVec(String),\n\n\n\n\t/// (unused currently)\n\n\t#[fail(display = \"Checksum Validation Error: {}\", _0)]\n\n\tChecksum(String),\n\n\n\n\t/// Invalid value of one of the arguments\n\n\t#[fail(display = \"Value Error: {}\", _0)]\n\n\tValue(String),\n\n\n\n\t/// Invalid usage of BitPacker.remove_padding (num_bits contained set bits)\n\n\t#[fail(display = \"Padding Error: All padding bits must be 0\")]\n\n\tPadding,\n", "file_path": "src/error.rs", "rank": 28, "score": 30191.414022806108 }, { "content": "\t/// Convert from a u8 vec\n\n\tpub fn from_u8_vec(input: &[u8]) -> Result<Self, Error> {\n\n\t\tlet mut s = Share::new()?;\n\n\t\tlet mut bp = BitPacker::new();\n\n\t\tbp.append_vec_u8(input)?;\n\n\t\tbp.normalize(s.config.radix_bits as usize);\n\n\t\ts.parse_bp(&mut bp)?;\n\n\t\tOk(s)\n\n\t}\n\n\n\n\t// create the packed bit array\n\n\tfn pack_bits(&self) -> Result<BitPacker, Error> {\n\n\t\tlet mut padding_bit_count = self.config.radix_bits\n\n\t\t\t- (self.share_value.len() * 8 % self.config.radix_bits as usize) as u8;\n\n\t\tif padding_bit_count == 10 {\n\n\t\t\tpadding_bit_count = 0;\n\n\t\t}\n\n\t\tlet mut bp = BitPacker::new();\n\n\n\n\t\tbp.append_u16(self.identifier, self.config.id_length_bits)?;\n", "file_path": "src/shamir/share.rs", "rank": 42, "score": 26.087795609204978 }, { "content": "use crate::error::{Error, ErrorKind};\n\n\n\n/// Simple struct that wraps a bitvec and defines packing operations on it\n\n#[derive(Debug, Clone, 
PartialEq, Eq)]\n\npub struct BitPacker {\n\n\tbv: BitVec,\n\n}\n\n\n\n//TODO:\n\n// * Works, but:\n\n// * Faster\n\n// * Generics\n\n// * Iterator for reading values\n\n//\n\n\n\nimpl BitPacker {\n\n\t/// Create a new bitpacker\n\n\tpub fn new() -> Self {\n\n\t\tBitPacker { bv: BitVec::new() }\n\n\t}\n", "file_path": "src/util/bitpacker.rs", "rank": 43, "score": 24.76635452600037 }, { "content": "\tpub fn to_mnemonic(&self) -> Result<Vec<String>, Error> {\n\n\t\tlet bp = self.pack_bits()?;\n\n\n\n\t\t// Read bits as u32 vec\n\n\t\tlet mut ret_vec: Vec<u32> = vec![];\n\n\t\tfor i in (0..bp.len()).step_by(self.config.radix_bits as usize) {\n\n\t\t\tret_vec.push(bp.get_u32(i, self.config.radix_bits as usize)?);\n\n\t\t}\n\n\n\n\t\tOk(ret_vec\n\n\t\t\t.iter()\n\n\t\t\t.map(|d| WORDLIST[*d as usize].to_owned())\n\n\t\t\t.collect())\n\n\t}\n\n\n\n\t/// Convert share data to a share mnemonic (flattened string)\n\n\t/*pub fn to_mnemonic_flat(&self) -> Result<String, Error> {\n\n\t\tself.to_mnemonic()?.iter().fold(String::new(), |mut acc, s| {\n\n\t\t\tacc.push_str(s);\n\n\t\t\tacc.push_str(\" \");\n", "file_path": "src/shamir/share.rs", "rank": 44, "score": 24.6771617550028 }, { "content": "\tpub fn new() -> Result<Share, Error> {\n\n\t\tlet mut s = Share::default();\n\n\t\ts.identifier = s.generate_random_identifier();\n\n\t\tif WORDLIST.len() != s.config.radix as usize {\n\n\t\t\treturn Err(ErrorKind::Config(format!(\n\n\t\t\t\t\"The wordlist should contain {} words, but it contains {} words.\",\n\n\t\t\t\ts.config.radix,\n\n\t\t\t\tWORDLIST.len()\n\n\t\t\t)))?;\n\n\t\t}\n\n\t\tOk(s)\n\n\t}\n\n\n\n\t/// convenience to create new from Mnemonic\n\n\tpub fn from_mnemonic(mn: &[String]) -> Result<Self, Error> {\n\n\t\tlet mut s = Share::new()?;\n\n\t\ts.fill_with_mnemonic(mn)?;\n\n\t\tOk(s)\n\n\t}\n\n\n", "file_path": "src/shamir/share.rs", "rank": 45, "score": 23.90749814047219 }, { "content": "\t}\n\n}\n\n\n\n/// Main definition of a share and its mnemonic serialization\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Share {\n\n\t/// Random 15 bit value which is the same for all shares and is used to verify\n\n\t/// that the shares belong together; it is also used as salt in the encryption\n\n\t/// of the master secret. (15 bits)\n\n\tpub identifier: u16,\n\n\t/// Indicates the total number of iterations to be used in PBKDF2. The number of\n\n\t/// iterations is calculated as 10000x2^e. (5 bits)\n\n\tpub iteration_exponent: u8,\n\n\t/// The x value of the group share (4 bits)\n\n\tpub group_index: u8,\n\n\t/// indicates how many group shares are needed to reconstruct the master secret.\n\n\t/// The actual value is endoded as Gt = GT - 1, so a value of 0 indicates that a\n\n\t/// single group share is needed (GT = 1), a value of 1 indicates that two group shares\n\n\t/// are needed (GT = 2) etc. 
(4 bits)\n\n\tpub group_threshold: u8,\n", "file_path": "src/shamir/share.rs", "rank": 46, "score": 23.65797325517998 }, { "content": "\n\nuse std::collections::BTreeMap;\n\nuse std::fmt;\n\n\n\nuse crate::util;\n\n\n\n/// Struct for returned shares\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct GroupShare {\n\n\t/// Group id\n\n\tpub group_id: u16,\n\n\t/// iteration exponent\n\n\tpub iteration_exponent: u8,\n\n\t/// group index\n\n\tpub group_index: u8,\n\n\t/// group threshold\n\n\tpub group_threshold: u8,\n\n\t/// number of group shares\n\n\tpub group_count: u8,\n\n\t/// member threshold:\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 47, "score": 23.088770617612997 }, { "content": "\n\nuse rand::{thread_rng, Rng};\n\nuse std::collections::HashMap;\n\n\n\nlazy_static! {\n\n\t/// List of ssmc words\n\n\tpub static ref WORDLIST: Vec<String> = { include_str!(\"wordlists/en.txt\").split_whitespace().map(|s| s.into()).collect() };\n\n\tpub static ref WORD_INDEX_MAP: HashMap<String, usize> = {\n\n\t\tlet mut retval = HashMap::new();\n\n\t\tfor (i, item) in WORDLIST.iter().enumerate() {\n\n\t\t\tretval.insert(item.to_owned(), i);\n\n\t\t\t// Also insert the unique 4 character prefix\n\n\t\t\tretval.insert(item[0..4].to_owned(), i);\n\n\t\t}\n\n\t\tretval\n\n\t};\n\n}\n\n\n\n/// Share-specific configuration values\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n", "file_path": "src/shamir/share.rs", "rank": 48, "score": 23.032315984744006 }, { "content": "\t\t\tsum_data.push(bp.get_u32(i, self.config.radix_bits as usize)?);\n\n\t\t}\n\n\n\n\t\tif (self.config.radix_bits as usize\n\n\t\t\t* (sum_data.len() - self.config.metadata_length_words as usize))\n\n\t\t\t% 16 > 8\n\n\t\t{\n\n\t\t\treturn Err(ErrorKind::Mnemonic(\"Invalid mnemonic length.\".to_string()))?;\n\n\t\t}\n\n\n\n\t\trs1024::verify_checksum(&self.config.customization_string, &sum_data)?;\n\n\n\n\t\t//TODO: iterator on bitpacker\n\n\t\tself.identifier = bp.get_u16(0, self.config.id_length_bits as usize)?;\n\n\t\tself.iteration_exponent = bp.get_u8(\n\n\t\t\tself.config.id_length_bits as usize,\n\n\t\t\tself.config.iteration_exp_length_bits as usize,\n\n\t\t)?;\n\n\t\tself.group_index = bp.get_u8(\n\n\t\t\t(self.config.id_length_bits + self.config.iteration_exp_length_bits) as usize,\n", "file_path": "src/shamir/share.rs", "rank": 49, "score": 21.062824677258757 }, { "content": "\n\nimpl Default for Share {\n\n\tfn default() -> Self {\n\n\t\tShare {\n\n\t\t\tidentifier: 0,\n\n\t\t\titeration_exponent: 0,\n\n\t\t\tgroup_index: 0,\n\n\t\t\tgroup_threshold: 0,\n\n\t\t\tgroup_count: 0,\n\n\t\t\tmember_index: 0,\n\n\t\t\tmember_threshold: 0,\n\n\t\t\tshare_value: vec![],\n\n\t\t\tchecksum: 0,\n\n\t\t\tconfig: ShareConfig::new(),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Share {\n\n\t/// Create a new share with defaults\n", "file_path": "src/shamir/share.rs", "rank": 50, "score": 20.654199525658797 }, { "content": "\t\t\tid_length_bits,\n\n\t\t\tradix,\n\n\t\t\tradix_bits,\n\n\t\t\tid_exp_length_words,\n\n\t\t\tcustomization_string,\n\n\t\t\tchecksum_length_words,\n\n\t\t\tmetadata_length_words,\n\n\t\t\tmin_mnemonic_length_words,\n\n\t\t\titeration_exp_length_bits,\n\n\t\t\tmin_strength_bits,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl ShareConfig {\n\n\t/// Just use defaults for now\n\n\tpub fn new() -> Self {\n\n\t\tShareConfig {\n\n\t\t\t..Default::default()\n\n\t\t}\n", "file_path": "src/shamir/share.rs", "rank": 51, "score": 19.772855811897507 }, { "content": "\t\t\t..Default::default()\n\n\t\t}\n\n\t}\n\n}\n\n\n\n/// Main Struct\n\npub 
struct Splitter {\n\n\t/// Configuration values\n\n\tconfig: SplitterConfig,\n\n}\n\n\n\nimpl Splitter {\n\n\t/// Create new\n\n\tpub fn new(config: Option<&SplitterConfig>) -> Splitter {\n\n\t\tSplitter {\n\n\t\t\tconfig: match config {\n\n\t\t\t\tSome(c) => c.to_owned(),\n\n\t\t\t\tNone => SplitterConfig::new(),\n\n\t\t\t},\n\n\t\t}\n", "file_path": "src/shamir/splitter.rs", "rank": 52, "score": 19.52518436034531 }, { "content": "\t}\n\n\n\n\t/// Append first num_bits of a u8 to the bitvec, num_bits must be <= 8\n\n\tpub fn append_u8(&mut self, val: u8, num_bits: u8) -> Result<(), Error> {\n\n\t\tif num_bits > 8 {\n\n\t\t\treturn Err(ErrorKind::BitVec(\"number of bits to pack must be <= 8\".to_string()))?;\n\n\t\t}\n\n\t\tfor i in (0u8..num_bits).rev() {\n\n\t\t\tif val & 2u8.pow(u32::from(i)) == 0 {\n\n\t\t\t\tself.bv.push(false);\n\n\t\t\t} else {\n\n\t\t\t\tself.bv.push(true);\n\n\t\t\t}\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n\n\n\n\t/// Retrieve num_bits from the given index as a u8\n\n\tpub fn get_u8(&self, index: usize, num_bits: usize) -> Result<u8, Error> {\n\n\t\tlet mut retval: u8 = 0;\n", "file_path": "src/util/bitpacker.rs", "rank": 53, "score": 19.50689714471962 }, { "content": "\tpub min_strength_bits: u16,\n\n}\n\n\n\nimpl Default for ShareConfig {\n\n\tfn default() -> Self {\n\n\t\tlet radix_bits = 10;\n\n\t\tlet id_length_bits = 15;\n\n\t\tlet iteration_exp_length_bits = 5;\n\n\t\tlet checksum_length_words = 3;\n\n\t\tlet customization_string = b\"shamir\".to_vec();\n\n\t\tlet min_strength_bits = 128;\n\n\n\n\t\t// derived values\n\n\t\tlet radix = 2u16.pow(u32::from(radix_bits));\n\n\t\tlet id_exp_length_words = (id_length_bits + iteration_exp_length_bits) / radix_bits;\n\n\t\tlet metadata_length_words = id_exp_length_words + 2 + checksum_length_words;\n\n\t\tlet min_mnemonic_length_words =\n\n\t\t\tmetadata_length_words + (f64::from(min_strength_bits) / 10f64).ceil() as u8;\n\n\n\n\t\tShareConfig {\n", "file_path": "src/shamir/share.rs", "rank": 54, "score": 19.177650800647687 }, { "content": "\t\tOk(())\n\n\t}\n\n\n\n\t/// Return n u8s from bitvec\n\n\tpub fn get_vec_u8(&mut self, start_pos: usize, len: usize) -> Result<Vec<u8>, Error> {\n\n\t\tlet mut retvec = vec![];\n\n\t\tfor i in (start_pos..len * 8).step_by(8) {\n\n\t\t\tretvec.push(self.get_u8(i, 8)?);\n\n\t\t}\n\n\t\tOk(retvec)\n\n\t}\n\n\n\n\t/// Append first num_bits of a u32 to the bitvec. 
num_bits must be <= 32\n\n\tpub fn append_u32(&mut self, val: u32, num_bits: u8) -> Result<(), Error> {\n\n\t\tif num_bits > 32 {\n\n\t\t\treturn Err(ErrorKind::BitVec(\"number of bits to pack must be <= 32\".to_string()))?;\n\n\t\t}\n\n\t\tfor i in (0u8..num_bits).rev() {\n\n\t\t\tif val & 2u32.pow(u32::from(i)) == 0 {\n\n\t\t\t\tself.bv.push(false);\n", "file_path": "src/util/bitpacker.rs", "rank": 55, "score": 18.908884677170864 }, { "content": "pub struct ShareConfig {\n\n\t/// The length of the random Identifier in bits\n\n\tpub id_length_bits: u8,\n\n\t/// The number of words in the word list\n\n\tpub radix: u16,\n\n\t/// The length of the radix in bits\n\n\tpub radix_bits: u8,\n\n\t/// id exp length\n\n\tpub id_exp_length_words: u8,\n\n\t/// The customization string used in the RS1024 checksum and in the PBKDF2 salt\n\n\tpub customization_string: Vec<u8>,\n\n\t/// The length of the RS1024 checksum in words\n\n\tpub checksum_length_words: u8,\n\n\t/// The length of mnemonic is words without the share value\n\n\tpub metadata_length_words: u8,\n\n\t/// The minimum allowed length of the mnemonic in words\n\n\tpub min_mnemonic_length_words: u8,\n\n\t/// The length of the iteration exponent in bits\n\n\tpub iteration_exp_length_bits: u8,\n\n\t/// The minimum allowed entropy of the master secret\n", "file_path": "src/shamir/share.rs", "rank": 56, "score": 18.58702760678384 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\tuse crate::error::Error;\n\n\n\n\t#[test]\n\n\tfn bit_packer() -> Result<(), Error> {\n\n\t\tlet mut bp = BitPacker::new();\n\n\t\tlet val1: u16 = 32534;\n\n\t\tlet val2: u8 = 12;\n\n\t\tlet val3: u8 = 15;\n\n\t\tlet val4: u8 = 8;\n\n\t\tlet val5: u16 = 934;\n\n\t\tbp.append_u16(val1, 15)?;\n\n\t\tbp.append_u8(val2, 5)?;\n\n\t\tbp.append_u8(val3, 4)?;\n\n\t\tbp.append_u8(val4, 4)?;\n\n\t\tbp.append_u16(val5, 10)?;\n", "file_path": "src/util/bitpacker.rs", "rank": 57, "score": 18.398873479807676 }, { "content": "\t\t\t}\n\n\t\t}\n\n\t\tOk(retval)\n\n\t}\n\n\n\n\t/// Retrieve num_bits from the given index as a u32\n\n\tpub fn get_u32(&self, index: usize, num_bits: usize) -> Result<u32, Error> {\n\n\t\tlet mut retval: u32 = 0;\n\n\t\tfor i in index..index + num_bits {\n\n\t\t\tif i < self.bv.len() && self.bv[i] {\n\n\t\t\t\tretval += 1;\n\n\t\t\t}\n\n\t\t\tif i < index + num_bits - 1 {\n\n\t\t\t\tretval <<= 1;\n\n\t\t\t}\n\n\t\t}\n\n\t\tOk(retval)\n\n\t}\n\n\n\n\t/// Return length of internal bit vector\n", "file_path": "src/util/bitpacker.rs", "rank": 58, "score": 18.313196018313324 }, { "content": "\t\t\t\t\"Invalid mnemonic length. The length of each mnemonic must be at least {} words.\",\n\n\t\t\t\tself.config.min_mnemonic_length_words,\n\n\t\t\t)))?;\n\n\t\t}\n\n\t\tlet mut bp = BitPacker::new();\n\n\t\tfor s in mn {\n\n\t\t\tif !WORD_INDEX_MAP.contains_key(s) {\n\n\t\t\t\treturn Err(ErrorKind::Mnemonic(format!(\n\n\t\t\t\t\t\"Invalid mnemonic. 
'{}' is not an SSSMC39 word.\",\n\n\t\t\t\t\ts,\n\n\t\t\t\t)))?;\n\n\t\t\t}\n\n\t\t\tbp.append_u16(WORD_INDEX_MAP[s] as u16, self.config.radix_bits)?;\n\n\t\t}\n\n\t\tself.parse_bp(&mut bp)\n\n\t}\n\n\n\n\tfn parse_bp(&mut self, bp: &mut BitPacker) -> Result<(), Error> {\n\n\t\tlet mut sum_data: Vec<u32> = vec![];\n\n\t\tfor i in (0..bp.len()).step_by(self.config.radix_bits as usize) {\n", "file_path": "src/shamir/share.rs", "rank": 59, "score": 18.193038736618583 }, { "content": "\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn generate_random_identifier(&self) -> u16 {\n\n\t\tlet retval: u16 = thread_rng().gen();\n\n\t\tretval & ((1 << self.config.id_length_bits) - 1)\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\n\n\tuse crate::error::Error;\n\n\n\n\t#[test]\n\n\tfn share_to_mnemonic() -> Result<(), Error> {\n\n\t\t// Test vectors taken from python reference implementation\n\n\t\tlet expected_res: Vec<String> = vec![\n", "file_path": "src/shamir/share.rs", "rank": 60, "score": 18.039217898620773 }, { "content": "\t\tfor i in index..index + num_bits {\n\n\t\t\tif i < self.bv.len() && self.bv[i] {\n\n\t\t\t\tretval += 1;\n\n\t\t\t}\n\n\t\t\tif i < index + num_bits - 1 {\n\n\t\t\t\tretval <<= 1;\n\n\t\t\t}\n\n\t\t}\n\n\t\tOk(retval)\n\n\t}\n\n\n\n\t/// Retrieve num_bits from the given index as a u16\n\n\tpub fn get_u16(&self, index: usize, num_bits: usize) -> Result<u16, Error> {\n\n\t\tlet mut retval: u16 = 0;\n\n\t\tfor i in index..index + num_bits {\n\n\t\t\tif i < self.bv.len() && self.bv[i] {\n\n\t\t\t\tretval += 1;\n\n\t\t\t}\n\n\t\t\tif i < index + num_bits - 1 {\n\n\t\t\t\tretval <<= 1;\n", "file_path": "src/util/bitpacker.rs", "rank": 61, "score": 17.90746570941207 }, { "content": "\t\tlet mut ret_vec = vec![];\n\n\t\tfor s in &self.member_shares {\n\n\t\t\tret_vec.push(s.to_mnemonic()?.iter().fold(String::new(), |mut acc, s| {\n\n\t\t\t\tacc.push_str(s);\n\n\t\t\t\tacc.push_str(\" \");\n\n\t\t\t\tacc\n\n\t\t\t}))\n\n\t\t}\n\n\t\tOk(ret_vec)\n\n\t}\n\n\n\n\t/// decode member shares to single share\n\n\tpub fn decode_shares(&mut self) -> Result<Share, Error> {\n\n\t\tlet sp = Splitter::new(None);\n\n\t\tsp.recover_secret(&self.member_shares, self.member_threshold)\n\n\t}\n\n}\n\n\n\n/// Split a master secret into mnemonic shares\n\n/// group_threshold: The number of groups required to reconstruct the master secret\n\n/// groups: A list of (member_threshold, member_count) pairs for each group, where member_count\n\n/// is the number of shares to generate for the group and member_threshold is the number of\n\n/// members required to reconstruct the group secret.\n\n/// master_secret: The master secret to split.\n\n/// passphrase: The passphrase used to encrypt the master secret.\n\n/// iteration_exponent: The iteration exponent.\n\n/// return: List of mnemonics.\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 62, "score": 16.777135760008136 }, { "content": "\tfn create_digest(&self, random_data: &[u8], shared_secret: &[u8]) -> Vec<u8> {\n\n\t\tlet mut mac = HmacSha256::new_varkey(random_data).expect(\"HMAC error\");\n\n\t\tmac.input(shared_secret);\n\n\t\tlet mut result = [0u8; 32];\n\n\t\tresult.copy_from_slice(mac.result().code().as_slice());\n\n\t\tlet mut ret_vec = result.to_vec();\n\n\t\tret_vec.split_off(4);\n\n\t\tret_vec\n\n\t}\n\n\n\n\tfn check_digest(\n\n\t\t&self,\n\n\t\tshares: &[Share],\n\n\t\tshared_secret: &Share,\n\n\t\tproto_share: &Share,\n\n\t) -> Result<(), Error> {\n\n\t\tlet digest_share = self.interpolate(shares, self.config.digest_index, 
proto_share)?;\n\n\t\tlet mut digest = digest_share.share_value.clone();\n\n\t\tlet random_part = digest.split_off(self.config.digest_length_bytes as usize);\n\n\t\tif digest != self.create_digest(&random_part, &shared_secret.share_value) {\n", "file_path": "src/shamir/splitter.rs", "rank": 63, "score": 16.381695355726677 }, { "content": "\tpub member_threshold: u8,\n\n\t/// Member shares for the group\n\n\tpub member_shares: Vec<Share>,\n\n}\n\n\n\nimpl Default for GroupShare {\n\n\tfn default() -> Self {\n\n\t\tGroupShare {\n\n\t\t\tgroup_id: 0,\n\n\t\t\titeration_exponent: 0,\n\n\t\t\tgroup_index: 0,\n\n\t\t\tgroup_threshold: 0,\n\n\t\t\tgroup_count: 0,\n\n\t\t\tmember_threshold: 0,\n\n\t\t\tmember_shares: vec![],\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl fmt::Display for Share {\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 64, "score": 16.299135370208845 }, { "content": "\t\t\t} else {\n\n\t\t\t\tself.bv.push(true);\n\n\t\t\t}\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n\n\n\n\t/// Append first num_bits of a u16 to the bitvec. num_bits must be <= 16\n\n\tpub fn append_u16(&mut self, val: u16, num_bits: u8) -> Result<(), Error> {\n\n\t\tif num_bits > 16 {\n\n\t\t\treturn Err(ErrorKind::BitVec(\"number of bits to pack must be <= 16\".to_string()))?;\n\n\t\t}\n\n\t\tfor i in (0u8..num_bits).rev() {\n\n\t\t\tif val & 2u16.pow(u32::from(i)) == 0 {\n\n\t\t\t\tself.bv.push(false);\n\n\t\t\t} else {\n\n\t\t\t\tself.bv.push(true);\n\n\t\t\t}\n\n\t\t}\n\n\t\tOk(())\n", "file_path": "src/util/bitpacker.rs", "rank": 65, "score": 16.243214414035133 }, { "content": "\tpub fn len(&self) -> usize {\n\n\t\tself.bv.len()\n\n\t}\n\n\n\n\t/// Return bitvec between m and n\n\n\tpub fn split_out(&mut self, m: usize, n: usize) {\n\n\t\tself.bv.split_off(n);\n\n\t\tself.bv = self.bv.split_off(m);\n\n\t}\n\n\n\n\t/// Return bitvec between m and n\n\n\tpub fn remove_padding(&mut self, num_bits: usize) -> Result<(), Error> {\n\n\t\tlet mut removed = self.bv.clone();\n\n\t\tself.bv = removed.split_off(num_bits);\n\n\t\tif removed.count_ones() > 0 {\n\n\t\t\treturn Err(ErrorKind::Padding)?;\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n\n}\n", "file_path": "src/util/bitpacker.rs", "rank": 66, "score": 16.07341742106886 }, { "content": "\t\t\tacc\n\n\t\t})\n\n\t}*/\n\n\n\n\t/// Convert share data to u8 vec\n\n\tpub fn to_u8_vec(&self) -> Result<Vec<u8>, Error> {\n\n\t\tlet bp = self.pack_bits()?;\n\n\n\n\t\t// Read bits as u8 vec\n\n\t\tlet mut ret_vec: Vec<u8> = vec![];\n\n\t\tfor i in (0..bp.len()).step_by(8) {\n\n\t\t\tret_vec.push(bp.get_u8(i, 8)?);\n\n\t\t}\n\n\t\tOk(ret_vec)\n\n\t}\n\n\n\n\t/// convert mnemonic back to share\n\n\tfn fill_with_mnemonic(&mut self, mn: &[String]) -> Result<(), Error> {\n\n\t\tif mn.len() < self.config.min_mnemonic_length_words as usize {\n\n\t\t\treturn Err(ErrorKind::Mnemonic(format!(\n", "file_path": "src/shamir/share.rs", "rank": 67, "score": 15.84194522183547 }, { "content": "\t\tlet id_length_bits = 15;\n\n\t\tlet max_share_count = 16;\n\n\t\tlet digest_length_bytes = 4;\n\n\t\tlet secret_index = 255;\n\n\t\tlet digest_index = 254;\n\n\n\n\t\tSplitterConfig {\n\n\t\t\tid_length_bits,\n\n\t\t\tmax_share_count,\n\n\t\t\tdigest_length_bytes,\n\n\t\t\tsecret_index,\n\n\t\t\tdigest_index,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl SplitterConfig {\n\n\t/// Just use defaults for now\n\n\tpub fn new() -> Self {\n\n\t\tSplitterConfig {\n", "file_path": "src/shamir/splitter.rs", "rank": 68, "score": 15.737145269544975 }, { "content": "\t\tproto_share.identifier,\n\n\t);\n\n\n\n\tlet sp = 
Splitter::new(None);\n\n\n\n\tlet group_shares = sp.split_secret(\n\n\t\t&proto_share,\n\n\t\tgroup_threshold,\n\n\t\tgroups.len() as u8,\n\n\t\t&encrypted_master_secret,\n\n\t)?;\n\n\n\n\tlet mut retval: Vec<GroupShare> = vec![];\n\n\n\n\tlet gs_len = group_shares.len();\n\n\tfor (i, elem) in group_shares.into_iter().enumerate() {\n\n\t\tproto_share.group_index = i as u8;\n\n\t\tproto_share.group_threshold = group_threshold;\n\n\t\tproto_share.group_count = gs_len as u8;\n\n\t\tlet (member_threshold, member_count) = groups[i];\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 69, "score": 15.665284617179871 }, { "content": "\n\n\t/// recover a secret\n\n\tpub fn recover_secret(&self, shares: &[Share], threshold: u8) -> Result<Share, Error> {\n\n\t\tif shares.is_empty() {\n\n\t\t\treturn Err(ErrorKind::Value(\"Share set must not be empty.\".to_string()))?;\n\n\t\t}\n\n\t\tlet mut proto_share = shares[0].clone();\n\n\t\tproto_share.share_value = vec![];\n\n\n\n\t\tlet shared_secret = self.interpolate(&shares, self.config.secret_index, &proto_share)?;\n\n\n\n\t\tif threshold != 1 {\n\n\t\t\tself.check_digest(&shares, &shared_secret, &proto_share)?;\n\n\t\t}\n\n\n\n\t\tOk(shared_secret)\n\n\t}\n\n\n\n\tfn interpolate(&self, shares: &[Share], x: u8, proto_share: &Share) -> Result<Share, Error> {\n\n\t\tlet x_coords: Vec<u8> = shares.iter().map(|s| s.member_index).collect();\n", "file_path": "src/shamir/splitter.rs", "rank": 70, "score": 15.584794692332393 }, { "content": "\n\n\t/// Remove bits from end to meet boundary (for reading in u8 arrays)\n\n\tpub fn normalize(&mut self, radix: usize) {\n\n\t\twhile self.bv.len() % radix != 0 {\n\n\t\t\tself.bv.pop();\n\n\t\t}\n\n\t}\n\n\n\n\t/// Append num_bits of zero padding to the internal bitvec\n\n\tpub fn append_padding(&mut self, num_bits: u8) {\n\n\t\tfor _ in 0..num_bits {\n\n\t\t\tself.bv.push(false);\n\n\t\t}\n\n\t}\n\n\n\n\t/// Append each element of a u8 vec to the bitvec\n\n\tpub fn append_vec_u8(&mut self, data: &[u8]) -> Result<(), Error> {\n\n\t\tfor b in data {\n\n\t\t\tself.append_u8(*b, 8)?;\n\n\t\t}\n", "file_path": "src/util/bitpacker.rs", "rank": 71, "score": 14.827066913956319 }, { "content": "\t\t\treturn Err(ErrorKind::Digest(\"Invalid digest of the shared secret\".to_string()))?;\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\tuse rand::{thread_rng, Rng};\n\n\n\n\t// run split and recover given shares and thresholds, then check random combinations of threshold\n\n\t// shares reconstruct the secret\n\n\tfn split_recover_impl(\n\n\t\tsecret_length_bytes: usize,\n\n\t\tthreshold: u8,\n\n\t\ttotal_shares: u8,\n\n\t) -> Result<(), Error> {\n\n\t\tlet sp = Splitter::new(None);\n\n\t\tlet secret = util::fill_vec_rand(secret_length_bytes);\n", "file_path": "src/shamir/splitter.rs", "rank": 72, "score": 14.672250541422756 }, { "content": "\tfn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n\t\tlet _ = write!(f, \"(\");\n\n\t\tfor i in 0..self.coeffs.len() {\n\n\t\t\tlet _ = write!(f, \"{:?}, \", self.coeffs[i]);\n\n\t\t}\n\n\t\twriteln!(f, \")\")\n\n\t}\n\n}\n\n\n\nimpl Poly {\n\n\tpub fn new(coeffs: Vec<Gf256>) -> Self {\n\n\t\tSelf { coeffs }\n\n\t}\n\n\n\n\tpub fn _evaluate_at_zero(&self) -> Gf256 {\n\n\t\tself.coeffs[0]\n\n\t}\n\n\n\n\tpub fn evaluate_at(&self, x: Gf256) -> Gf256 {\n\n\t\tassert!(self.coeffs.len() < MAX_COEFFS);\n", "file_path": "src/field/poly.rs", "rank": 73, "score": 14.484010747843977 }, { "content": "\t\tassert_eq!(bp.len(), 
38);\n\n\t\tassert_eq!(val1, bp.get_u16(0, 15)?);\n\n\t\tassert_eq!(val2, bp.get_u8(15, 5)?);\n\n\t\tassert_eq!(val3, bp.get_u8(20, 4)?);\n\n\t\tassert_eq!(val4, bp.get_u8(24, 4)?);\n\n\t\tassert_eq!(val5, bp.get_u16(28, 10)?);\n\n\t\tassert_eq!(u32::from(val5), bp.get_u32(28, 10)?);\n\n\t\tOk(())\n\n\t}\n\n}\n", "file_path": "src/util/bitpacker.rs", "rank": 74, "score": 14.284479455095648 }, { "content": "\tfn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n\t\tlet m = self.to_mnemonic().unwrap();\n\n\t\tlet mut table = prettytable::Table::new();\n\n\n\n\t\ttable.add_row(row![\"\", \"1\", \"2\", \"3\", \"4\"]);\n\n\t\tlet iter = m.chunks(4).enumerate();\n\n\t\tfor (idx, row) in iter {\n\n\t\t\tlet idx = idx as u64;\n\n\t\t\tlet n1 = row.get(0).map(|s| s.as_str()).unwrap_or(\"\");\n\n\t\t\tlet n2 = row.get(1).map(|s| s.as_str()).unwrap_or(\"\");\n\n\t\t\tlet n3 = row.get(2).map(|s| s.as_str()).unwrap_or(\"\");\n\n\t\t\tlet n4 = row.get(3).map(|s| s.as_str()).unwrap_or(\"\");\n\n\t\t\ttable.add_row(row![format!(\"{}\", idx), n1, n2, n3, n4]);\n\n\t\t}\n\n\n\n\t\tlet mut out: Vec<u8> = Vec::new();\n\n\t\ttable.print(&mut out).unwrap();\n\n\t\tlet string = String::from_utf8(out).unwrap();\n\n\t\twrite!(f, \"{}\", string)?;\n\n\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 75, "score": 14.041315811583948 }, { "content": "\t\tbp.append_u8(\n\n\t\t\tself.iteration_exponent,\n\n\t\t\tself.config.iteration_exp_length_bits,\n\n\t\t)?;\n\n\t\tbp.append_u8(self.group_index, 4)?;\n\n\t\tbp.append_u8(self.group_threshold - 1, 4)?;\n\n\t\tbp.append_u8(self.group_count - 1, 4)?;\n\n\t\tbp.append_u8(self.member_index, 4)?;\n\n\t\tbp.append_u8(self.member_threshold - 1, 4)?;\n\n\t\tbp.append_padding(padding_bit_count);\n\n\t\tbp.append_vec_u8(&self.share_value)?;\n\n\n\n\t\tif bp.len() % self.config.radix_bits as usize != 0 {\n\n\t\t\treturn Err(ErrorKind::Mnemonic(format!(\n\n\t\t\t\t\"Incorrect share bit length. Must be a multiple of {}, actual length: {}\",\n\n\t\t\t\tself.config.radix_bits,\n\n\t\t\t\tbp.len(),\n\n\t\t\t)))?;\n\n\t\t}\n\n\n", "file_path": "src/shamir/share.rs", "rank": 76, "score": 13.506726080403636 }, { "content": "\n\n\t\tlet mut tmp: u16 = 1;\n\n\t\tfor power in 0..255usize {\n\n\t\t\ttabs.exp[power] = tmp as u8;\n\n\t\t\ttabs.log[tmp as usize] = power as u8;\n\n\t\t\t// Multiply poly by the polynomial x + 1\n\n\t\t\ttmp = (tmp << 1) ^ tmp;\n\n\t\t\t// Reduce poly by x^8 + x^4 + x^3 +x + 1\n\n\t\t\tif (tmp & 0x100) > 0 {\n\n\t\t\t\ttmp ^= 0x11B;\n\n\t\t\t}\n\n\t\t}\n\n\t\ttabs\n\n\t}\n\n}\n\n\n\nlazy_static! 
{\n\n\t/// Static reference to Generated tables\n\n\tpub static ref TABLES: Tables = { Tables::generate() };\n\n}\n\n\n", "file_path": "src/field/gf256.rs", "rank": 77, "score": 13.257168139350831 }, { "content": "\tif master_secret.len() % 2 != 0 {\n\n\t\treturn Err(ErrorKind::Value(\n\n\t\t\t\"The length of the master secret in bytes must be an even number\".to_string(),\n\n\t\t))?;\n\n\t}\n\n\n\n\tif group_threshold as usize > groups.len() {\n\n\t\treturn Err(ErrorKind::Value(format!(\n\n\t\t\t\"The requested group threshold ({}) must not exceed the number of groups ({}).\",\n\n\t\t\tgroup_threshold,\n\n\t\t\tgroups.len()\n\n\t\t)))?;\n\n\t}\n\n\n\n\tlet encoder = util::encrypt::MasterSecretEnc::new()?;\n\n\n\n\tlet encrypted_master_secret = encoder.encrypt(\n\n\t\tmaster_secret,\n\n\t\tpassphrase,\n\n\t\titeration_exponent,\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 78, "score": 12.996974074703006 }, { "content": "\t}\n\n\tpub fn pow(mut self, mut exp: u8) -> Gf256 {\n\n\t\tlet mut acc = Self::one();\n\n\n\n\t\twhile exp > 1 {\n\n\t\t\tif (exp & 1) == 1 {\n\n\t\t\t\tacc *= self;\n\n\t\t\t}\n\n\t\t\texp /= 2;\n\n\t\t\tself *= self;\n\n\t\t}\n\n\n\n\t\tif exp == 1 {\n\n\t\t\tacc *= self;\n\n\t\t}\n\n\n\n\t\tacc\n\n\t}\n\n}\n\n\n", "file_path": "src/field/gf256.rs", "rank": 79, "score": 12.73187598477635 }, { "content": "\t\t// Create checksum\n\n\t\tlet mut sum_data: Vec<u32> = vec![];\n\n\t\tfor i in (0..bp.len()).step_by(self.config.radix_bits as usize) {\n\n\t\t\tsum_data.push(bp.get_u32(i, self.config.radix_bits as usize)?);\n\n\t\t}\n\n\n\n\t\tlet checksum = rs1024::create_checksum(\n\n\t\t\t&self.config.customization_string,\n\n\t\t\t&sum_data,\n\n\t\t\tself.config.checksum_length_words,\n\n\t\t);\n\n\n\n\t\tfor c in checksum {\n\n\t\t\tbp.append_u32(c, self.config.radix_bits)?;\n\n\t\t}\n\n\n\n\t\tOk(bp)\n\n\t}\n\n\n\n\t/// Convert share data to a share mnemonic\n", "file_path": "src/shamir/share.rs", "rank": 80, "score": 12.565510633650577 }, { "content": "#![deny(non_snake_case)]\n\n#![deny(unused_mut)]\n\n#![warn(missing_docs)]\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\nmod error;\n\nmod field;\n\nmod shamir;\n\nmod util;\n\n\n\npub use error::{Error, ErrorKind};\n\npub use shamir::{GroupShare, Share};\n\n// TODO: only exposed for tests\n\npub use util::hex::{to_hex, from_hex};\n\n\n\n//TODO: Proper docs\n\n/// Generates shares from the provided master secret (e.g. BIP39 entropy)\n", "file_path": "src/lib.rs", "rank": 81, "score": 12.476374135888973 }, { "content": "\t\t\t\t.to_string(),\n\n\t\t))?;\n\n\t}\n\n\n\n\t// TODO: Should probably return info making problem mnemonics easier to identify\n\n\tfor g in groups.iter() {\n\n\t\tif g.member_shares.len() < g.member_threshold as usize {\n\n\t\t\treturn Err(ErrorKind::Mnemonic(format!(\n\n\t\t\t\t\"Insufficient number of mnemonics (Group {}). 
At least {} mnemonics \\\n\n\t\t\t\t are required.\",\n\n\t\t\t\tg.group_index, g.member_threshold,\n\n\t\t\t)))?;\n\n\t\t}\n\n\t\tlet test_share = g.member_shares[0].clone();\n\n\t\tfor ms in g.member_shares.iter() {\n\n\t\t\tif test_share.member_threshold != ms.member_threshold {\n\n\t\t\t\treturn Err(ErrorKind::Mnemonic(\n\n\t\t\t\t\t\"Mismatching member thresholds\".to_string(),\n\n\t\t\t\t))?;\n\n\t\t\t}\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 82, "score": 12.19598778446499 }, { "content": "\t}\n\n\n\n\t/// split secret\n\n\t/// member_threshold, share_count, shared_secret at least 128 bits and a multiple of 16\n\n\t/// returns shares\n\n\tpub fn split_secret(\n\n\t\t&self,\n\n\t\tproto_share: &Share,\n\n\t\tthreshold: u8,\n\n\t\tshare_count: u8,\n\n\t\tshared_secret: &[u8],\n\n\t) -> Result<Vec<Share>, Error> {\n\n\t\tif threshold == 0 || threshold > self.config.max_share_count {\n\n\t\t\treturn Err(ErrorKind::Argument(format!(\n\n\t\t\t\t\"Threshold must be between 1 and {}\",\n\n\t\t\t\tself.config.max_share_count\n\n\t\t\t)))?;\n\n\t\t}\n\n\t\tif share_count < threshold || share_count > self.config.max_share_count {\n\n\t\t\treturn Err(ErrorKind::Argument(format!(\n", "file_path": "src/shamir/splitter.rs", "rank": 83, "score": 11.76565548137804 }, { "content": "\t\t}\n\n\t\tOk(())\n\n\t}\n\n\n\n\t#[test]\n\n\tfn split_recover() -> Result<(), Error> {\n\n\t\t// test invalid inputs\n\n\t\tassert!(split_recover_impl(14, 3, 5).is_err());\n\n\t\tassert!(split_recover_impl(2047, 10, 12).is_err());\n\n\t\tassert!(split_recover_impl(16, 0, 5).is_err());\n\n\t\tassert!(split_recover_impl(16, 5, 3).is_err());\n\n\t\tassert!(split_recover_impl(16, 5, 0).is_err());\n\n\t\t// test a range of thresholds\n\n\t\tlet config = SplitterConfig::new();\n\n\t\tfor sc in 1..=config.max_share_count {\n\n\t\t\tfor t in 1..=sc {\n\n\t\t\t\tsplit_recover_impl(16, t, sc)?;\n\n\t\t\t}\n\n\t\t}\n\n\t\t// test a range of lengths\n", "file_path": "src/shamir/splitter.rs", "rank": 84, "score": 11.72474632517414 }, { "content": "\t\t\t\t\treturn TestResult::failed();\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tTestResult::passed()\n\n\t\t}\n\n\n\n\t\tfn interpolate_evaluate_at_0_eq_evaluate_at(ys: Vec<u8>) -> TestResult {\n\n\t\t\tif ys.is_empty() || ys.len() > std::u8::MAX as usize {\n\n\t\t\t\treturn TestResult::discard();\n\n\t\t\t}\n\n\n\n\t\t\tlet points = ys.into_iter()\n\n\t\t\t\t\t\t .zip(1..std::u8::MAX)\n\n\t\t\t\t\t\t .map(|(y, x)| (x, y))\n\n\t\t\t\t\t\t .collect::<Vec<_>>();\n\n\n\n\t\t\tlet elems = points\n\n\t\t\t\t.iter()\n\n\t\t\t\t.map(|&(x, y)| (gf256!(x), gf256!(y)))\n", "file_path": "src/field/lagrange.rs", "rank": 85, "score": 11.45210641985264 }, { "content": "\n\n\t\tlet random_share_count = threshold - 2;\n\n\n\n\t\tfor i in 0..random_share_count {\n\n\t\t\tlet mut s = proto_share.clone();\n\n\t\t\ts.member_index = i;\n\n\t\t\ts.member_threshold = threshold;\n\n\t\t\ts.share_value = util::fill_vec_rand(shared_secret.len());\n\n\t\t\tshares.push(s);\n\n\t\t}\n\n\n\n\t\tlet random_part =\n\n\t\t\tutil::fill_vec_rand(shared_secret.len() - self.config.digest_length_bytes as usize);\n\n\t\tlet mut digest = self.create_digest(&random_part.to_vec(), &shared_secret);\n\n\t\tdigest.append(&mut random_part.to_vec());\n\n\n\n\t\tlet mut base_shares = shares.clone();\n\n\t\tlet mut s = proto_share.clone();\n\n\t\ts.member_index = self.config.digest_index;\n\n\t\ts.member_threshold = threshold;\n", "file_path": "src/shamir/splitter.rs", "rank": 86, "score": 11.368441750125143 }, { "content": 
"\t\tlet member_shares = sp.split_secret(\n\n\t\t\t&proto_share,\n\n\t\t\tmember_threshold,\n\n\t\t\tmember_count,\n\n\t\t\t&elem.share_value,\n\n\t\t)?;\n\n\t\tretval.push(GroupShare {\n\n\t\t\tgroup_id: proto_share.identifier,\n\n\t\t\titeration_exponent,\n\n\t\t\tgroup_index: i as u8,\n\n\t\t\tgroup_threshold,\n\n\t\t\tgroup_count: gs_len as u8,\n\n\t\t\tmember_threshold,\n\n\t\t\tmember_shares,\n\n\t\t});\n\n\t}\n\n\n\n\tOk(retval)\n\n}\n\n\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 87, "score": 11.262270199133612 }, { "content": "\t\t}\n\n\t}\n\n}\n\n\n\nimpl MulAssign<Gf256> for Gf256 {\n\n\tfn mul_assign(&mut self, rhs: Gf256) {\n\n\t\t*self = *self * rhs;\n\n\t}\n\n}\n\n\n\nimpl Div<Gf256> for Gf256 {\n\n\ttype Output = Gf256;\n\n\tfn div(self, rhs: Gf256) -> Gf256 {\n\n\t\tlet l2 = rhs.log().expect(\"division by zero\");\n\n\t\tif let Some(l1) = self.log() {\n\n\t\t\tlet tmp = (u16::from(l1) + 255 - u16::from(l2)) % 255;\n\n\t\t\tGf256::exp(tmp as u8)\n\n\t\t} else {\n\n\t\t\tGf256 { poly: 0 }\n\n\t\t}\n", "file_path": "src/field/gf256.rs", "rank": 88, "score": 11.225418110666439 }, { "content": "\tuse super::*;\n\n\tuse crate::field::gf256::*;\n\n\tuse quickcheck::*;\n\n\tuse std;\n\n\n\n\tquickcheck! {\n\n\n\n\t\tfn interpolate_evaluate_at_works(ys: Vec<Gf256>) -> TestResult {\n\n\t\t\tif ys.is_empty() || ys.len() > std::u8::MAX as usize {\n\n\t\t\t\treturn TestResult::discard();\n\n\t\t\t}\n\n\n\n\t\t\tlet points = ys.into_iter()\n\n\t\t\t\t\t\t .zip(1..std::u8::MAX)\n\n\t\t\t\t\t\t .map(|(y, x)| (gf256!(x), y))\n\n\t\t\t\t\t\t .collect::<Vec<_>>();\n\n\t\t\tlet poly = interpolate(&points);\n\n\n\n\t\t\tfor (x, y) in points {\n\n\t\t\t\tif poly.evaluate_at(x) != y {\n", "file_path": "src/field/lagrange.rs", "rank": 89, "score": 11.196195979490875 }, { "content": "\t\t\t\t\"Share count with given member threshold must be between {} and {}\",\n\n\t\t\t\tthreshold, self.config.max_share_count\n\n\t\t\t)))?;\n\n\t\t}\n\n\t\tif shared_secret.len() < 16 || shared_secret.len() % 2 != 0 {\n\n\t\t\treturn Err(ErrorKind::Argument(\"Secret must be at least 16 bytes in length and a multiple of 2\".to_string()))?;\n\n\t\t}\n\n\n\n\t\tlet mut shares = vec![];\n\n\t\t// if the threshold is 1, then the digest of the shared secret is not used\n\n\t\tif threshold == 1 {\n\n\t\t\tfor i in 0..share_count {\n\n\t\t\t\tlet mut s = proto_share.clone();\n\n\t\t\t\ts.member_index = i;\n\n\t\t\t\ts.member_threshold = threshold;\n\n\t\t\t\ts.share_value = shared_secret.to_owned();\n\n\t\t\t\tshares.push(s);\n\n\t\t\t}\n\n\t\t\treturn Ok(shares);\n\n\t\t}\n", "file_path": "src/shamir/splitter.rs", "rank": 90, "score": 11.194532405759718 }, { "content": "\t\t\t4,\n\n\t\t)?;\n\n\t\tself.group_threshold = bp.get_u8(24, 4)? + 1;\n\n\t\tself.group_count = bp.get_u8(28, 4)? + 1;\n\n\t\tself.member_index = bp.get_u8(32, 4)?;\n\n\t\tself.member_threshold = bp.get_u8(36, 4)? + 1;\n\n\n\n\t\tif self.group_count < self.group_threshold {\n\n\t\t\treturn Err(ErrorKind::Mnemonic(\"Invalid mnemonic. 
Group threshold cannot be greater than group count.\".to_string()))?;\n\n\t\t}\n\n\n\n\t\t// remove padding and recover data\n\n\t\tbp.split_out(\n\n\t\t\t40,\n\n\t\t\tbp.len() - self.config.radix_bits as usize * self.config.checksum_length_words as usize,\n\n\t\t);\n\n\n\n\t\tbp.remove_padding(bp.len() % 16)?;\n\n\n\n\t\tself.share_value = bp.get_vec_u8(0, bp.len() / 8)?;\n", "file_path": "src/shamir/share.rs", "rank": 91, "score": 11.178315595074245 }, { "content": "//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\n//! This module provides the Gf256 type which is used to represent\n\n//! elements of a finite field with 256 elements.\n\n\n\nuse std::fmt;\n\nuse std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign};\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Tables {\n\n\tpub exp: [u8; 255],\n\n\tpub log: [u8; 256],\n", "file_path": "src/field/gf256.rs", "rank": 92, "score": 10.872722407655164 }, { "content": "\t}\n\n\n\n\tlet mut group_index_map = BTreeMap::new();\n\n\n\n\tfor s in shares {\n\n\t\tif !group_index_map.contains_key(&s.group_index) {\n\n\t\t\tlet mut group_share = GroupShare::default();\n\n\t\t\tgroup_share.group_id = s.identifier;\n\n\t\t\tgroup_share.group_index = s.group_index;\n\n\t\t\tgroup_share.group_threshold = s.group_threshold;\n\n\t\t\tgroup_share.iteration_exponent = s.iteration_exponent;\n\n\t\t\tgroup_share.group_count = s.group_count;\n\n\t\t\tgroup_share.member_shares = vec![s.clone()];\n\n\t\t\tgroup_share.member_threshold = s.member_threshold;\n\n\t\t\tgroup_index_map.insert(group_share.group_index, group_share);\n\n\t\t} else {\n\n\t\t\tlet e = group_index_map.get_mut(&s.group_index).unwrap();\n\n\t\t\te.member_shares.push(s);\n\n\t\t}\n\n\t}\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 93, "score": 10.840687180742634 }, { "content": "\t}\n\n\t#[inline]\n\n\tpub fn from_byte(b: u8) -> Gf256 {\n\n\t\tGf256 { poly: b }\n\n\t}\n\n\t#[inline]\n\n\tpub fn to_byte(self) -> u8 {\n\n\t\tself.poly\n\n\t}\n\n\tpub fn exp(power: u8) -> Gf256 {\n\n\t\tlet tabs = get_tables();\n\n\t\tGf256::from_byte(tabs.exp[power as usize])\n\n\t}\n\n\tpub fn log(self) -> Option<u8> {\n\n\t\tif self.poly == 0 {\n\n\t\t\tNone\n\n\t\t} else {\n\n\t\t\tlet tabs = get_tables();\n\n\t\t\tSome(tabs.log[self.poly as usize])\n\n\t\t}\n", "file_path": "src/field/gf256.rs", "rank": 94, "score": 10.78781223264914 }, { "content": "\t\t\t\tself.member_shares.len()\n\n\t\t\t)?;\n\n\t\t\twriteln!(f, \"{} \", s)?;\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n\n}\n\n\n\nimpl GroupShare {\n\n\t/// return list of mnemonics\n\n\tpub fn mnemonic_list(&self) -> Result<Vec<Vec<String>>, Error> {\n\n\t\tlet mut ret_vec = vec![];\n\n\t\tfor s in &self.member_shares {\n\n\t\t\tret_vec.push(s.to_mnemonic()?);\n\n\t\t}\n\n\t\tOk(ret_vec)\n\n\t}\n\n\n\n\t/// return list of mnemonics as space separated strings\n\n\tpub fn mnemonic_list_flat(&self) -> Result<Vec<String>, Error> {\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 95, "score": 10.754121695587123 }, { "content": "\tfn sub(self, rhs: Gf256) -> Gf256 {\n\n\t\tGf256::from_byte(self.poly ^ rhs.poly)\n\n\t}\n\n}\n\n\n\nimpl SubAssign<Gf256> for Gf256 
{\n\n\t#[inline]\n\n\tfn sub_assign(&mut self, rhs: Gf256) {\n\n\t\t*self = *self - rhs;\n\n\t}\n\n}\n\n\n\nimpl Mul<Gf256> for Gf256 {\n\n\ttype Output = Gf256;\n\n\tfn mul(self, rhs: Gf256) -> Gf256 {\n\n\t\tif let (Some(l1), Some(l2)) = (self.log(), rhs.log()) {\n\n\t\t\tlet tmp = (u16::from(l1) + u16::from(l2)) % 255;\n\n\t\t\tGf256::exp(tmp as u8)\n\n\t\t} else {\n\n\t\t\tGf256 { poly: 0 }\n", "file_path": "src/field/gf256.rs", "rank": 96, "score": 10.712170594828667 }, { "content": "\t\tfor i in 0..255 {\n\n\t\t\tlet _ = write!(f, \"{}, \", self.exp[i]);\n\n\t\t}\n\n\t\tlet _ = writeln!(f, \")\");\n\n\t\tlet _ = writeln!(f, \"log: (\");\n\n\t\tfor i in 0..256 {\n\n\t\t\tlet _ = write!(f, \"{}, \", self.log[i]);\n\n\t\t}\n\n\t\twriteln!(f, \")\")\n\n\t}\n\n}\n\n\n\nimpl Tables {\n\n\t/// Generates a table of discrete logarithms and exponents in Gf(256) using the polynomial\n\n\t/// x + 1 as the base\n\n\tpub fn generate() -> Tables {\n\n\t\tlet mut tabs = Tables {\n\n\t\t\texp: [0; 255],\n\n\t\t\tlog: [0; 256],\n\n\t\t};\n", "file_path": "src/field/gf256.rs", "rank": 97, "score": 10.685100522059663 }, { "content": "\t\t\t|| s.iteration_exponent != check_share.iteration_exponent\n\n\t\t{\n\n\t\t\treturn Err(ErrorKind::Mnemonic(format!(\n\n\t\t\t\t\"Invalid set of mnemonics. All mnemonics must begin with the same {} words. \\\n\n\t\t\t\t (Identifier and iteration exponent must be the same).\",\n\n\t\t\t\ts.config.id_exp_length_words,\n\n\t\t\t)))?;\n\n\t\t}\n\n\t\tif s.group_threshold != check_share.group_threshold {\n\n\t\t\treturn Err(ErrorKind::Mnemonic(\n\n\t\t\t\t\"Invalid set of mnemonics. All mnemonics must have the same group threshold\"\n\n\t\t\t\t\t.to_string(),\n\n\t\t\t))?;\n\n\t\t}\n\n\t\tif s.group_count != check_share.group_count {\n\n\t\t\treturn Err(ErrorKind::Mnemonic(\n\n\t\t\t\t\"Invalid set of mnemonics. All mnemonics must have the same group count\"\n\n\t\t\t\t\t.to_string(),\n\n\t\t\t))?;\n\n\t\t}\n", "file_path": "src/shamir/sssmc39_scheme.rs", "rank": 98, "score": 10.535422040904209 }, { "content": "\t/// indicates the total number of groups. The actual value is encoded as g = G - 1\n\n\t/// (4 bits)\n\n\tpub group_count: u8,\n\n\t/// Member index, or x value of the member share in the given group (4 bits)\n\n\tpub member_index: u8,\n\n\t/// indicates how many member shares are needed to reconstruct the group share. The actual value\n\n\t/// is encoded as t = T − 1. (4 bits)\n\n\tpub member_threshold: u8,\n\n\t/// corresponds to a list of the SSS part's fk(x) values 1 ≤ k ≤ n. Each fk(x) value is encoded\n\n\t/// as a string of eight bits in big-endian order. The concatenation of these bit strings is\n\n\t/// the share value. This value is left-padded with \"0\" bits so that the length of the padded\n\n\t/// share value in bits becomes the nearest multiple of 10. (padding + 8n bits)\n\n\tpub share_value: Vec<u8>,\n\n\t/// an RS1024 checksum of the data part of the share\n\n\t/// (that is id || e || GI || Gt || g || I || t || ps). The customization string (cs) of\n\n\t/// RS1024 is \"shamir\". (30 bits)\n\n\tpub checksum: u32,\n\n\t/// configuration values\n\n\tpub config: ShareConfig,\n\n}\n", "file_path": "src/shamir/share.rs", "rank": 99, "score": 10.504726390949427 } ]
Rust
runner-integration-tests/src/testkind/optimization.rs
comprakt/comprakt
2315e85972e63ea327c4d115ffe623253b520440
use crate::*; use optimization::{self, Optimization}; use serde_derive::Deserialize; use std::{ fs::File, io::{self, Write}, path::PathBuf, process::Stdio, }; #[derive(Debug, Deserialize, Clone)] #[serde(rename_all = "kebab-case")] pub enum AsmComparisonOutcome { Unchanged, Change, IdenticalTo(ExpectedData), } #[derive(Debug, Deserialize, Clone)] pub struct OptimizationTestData { pub compiler_optimized_stderr: Option<ExpectedData>, pub compiler_optimized_stdout: Option<ExpectedData>, pub compiler_optimized_exitcode: Option<ExpectedData>, pub compiler_reference_stderr: Option<ExpectedData>, pub compiler_reference_stdout: Option<ExpectedData>, pub compiler_reference_exitcode: Option<ExpectedData>, pub stderr: Option<ExpectedData>, pub stdout: Option<ExpectedData>, pub exitcode: Option<ExpectedData>, pub stdin: Option<ExpectedData>, pub optimizations: Vec<optimization::Kind>, pub expect: AsmComparisonOutcome, pub backend_asm: Option<Vec<Backend>>, pub backend: Option<Vec<Backend>>, } impl FromReferencesPath<OptimizationTestData> for OptimizationTestData { fn from_reference_path(_base: &PathBuf) -> Self { Self { compiler_optimized_stderr: None, compiler_optimized_stdout: None, compiler_optimized_exitcode: None, compiler_reference_stderr: None, compiler_reference_stdout: None, compiler_reference_exitcode: None, stderr: None, stdout: None, exitcode: None, stdin: None, optimizations: vec![], expect: AsmComparisonOutcome::Change, backend: Some(vec![Backend::Own, Backend::Libfirm]), backend_asm: Some(vec![Backend::Own, Backend::Libfirm]), } } } impl IntoReferenceData for OptimizationTestData { fn into_reference_data(self, base: &PathBuf) -> ReferenceData { self.into_optimizing_compiler_reference_data(base) } } impl OptimizationTestData { fn into_optimizing_compiler_reference_data(self, _base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .compiler_optimized_stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .compiler_optimized_stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .compiler_optimized_exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } fn into_reference_compiler_reference_data(self, _base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .compiler_reference_stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .compiler_reference_stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .compiler_reference_exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } fn into_binary_reference_data(self, _compiler_base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } } pub fn exec_optimization_test(input: PathBuf, backend: Backend) { let path_binary_optimized = input.with_extension(&format!("{}.optimized.out", backend.to_ascii_label())); let path_binary_reference = input.with_extension(&format!("{}.reference.out", backend.to_ascii_label())); let path_asm_optimized = input.with_extension(&format!("{}.optimized.S", backend.to_ascii_label())); let path_asm_reference = input.with_extension(&format!("{}.reference.S", backend.to_ascii_label())); let setup = TestSpec { references: input.clone(), input: input.clone(), generate_tentatives: true, }; let (input_without_yaml_path, test_data) = 
load_test_data::<OptimizationTestData>(&setup); if test_data.reference.optimizations.is_empty() { panic!("you MUST at least specify one optimization. none given."); } if !test_data .reference .backend .as_ref() .map(|v| v.contains(&backend)) .unwrap_or(true) { log::warn!( "ignoring {} test for backend {:?}", input.display(), backend ); return; } let callinfo_actual = CompilerCall::RawCompiler(CompilerPhase::Binary { backend, output: path_binary_optimized.clone(), assembly: Some(path_asm_optimized.clone()), optimizations: optimization::Level::Custom( test_data .reference .optimizations .clone() .iter() .map(|kind| Optimization { kind: *kind, flags: vec![], }) .collect(), ), }); let mut cmd_actual = compiler_call(callinfo_actual, &input_without_yaml_path); println!("Executing: {:?}", cmd_actual); let output_actual = cmd_actual .output() .expect("failed to call compiler under test for actual input"); assert_output( &output_actual, test_data .reference .clone() .into_optimizing_compiler_reference_data(&path_binary_optimized), &TestSpec { input: path_binary_optimized.clone(), references: path_binary_optimized.clone(), generate_tentatives: true, }, ); let reference_input = match test_data.reference.expect { AsmComparisonOutcome::Change | AsmComparisonOutcome::Unchanged | AsmComparisonOutcome::IdenticalTo(ExpectedData::Ignore) => { input_without_yaml_path.clone() } AsmComparisonOutcome::IdenticalTo(ExpectedData::Inline(ref mj_str)) => { let path = add_extension(&setup.input, "reference"); write(&Some(path.clone()), mj_str).expect( "Failed to write reference mini java \ file to disk (required for input to the compiler under test)", ); path } AsmComparisonOutcome::IdenticalTo(ExpectedData::InFile(ref mj_rel_path)) => { reference_to_absolute_path(&setup, mj_rel_path) } }; let callinfo_reference = CompilerCall::RawCompiler(CompilerPhase::Binary { backend, output: path_binary_reference.clone(), assembly: Some(path_asm_reference.clone()), optimizations: optimization::Level::None, }); let mut cmd_reference = compiler_call(callinfo_reference, &reference_input); println!("Executing: {:?}", cmd_reference); let output_reference = cmd_reference .output() .expect("failed to call compiler under test for reference input"); assert_output( &output_reference, test_data .reference .clone() .into_reference_compiler_reference_data(&path_binary_reference), &TestSpec { input: path_binary_reference.clone(), references: path_binary_reference.clone(), generate_tentatives: true, }, ); assert_binary( &path_binary_optimized, &test_data.reference.stdin, &setup, test_data .reference .clone() .into_binary_reference_data(&path_binary_optimized), ); assert_binary( &path_binary_reference, &test_data.reference.stdin, &setup, test_data .reference .clone() .into_binary_reference_data(&path_binary_reference), ); let asm_optimized = read(&Some(path_asm_optimized.clone())).unwrap(); let asm_reference = read(&Some(path_asm_reference.clone())).unwrap(); let normalized_optimized_asm = normalize_asm(&asm_optimized); let normalized_reference_asm = normalize_asm(&asm_reference); write( &Some(add_extension(&path_asm_optimized, "normalized")), &normalized_optimized_asm, ) .unwrap(); write( &Some(add_extension(&path_asm_reference, "normalized")), &normalized_reference_asm, ) .unwrap(); if !test_data .reference .backend_asm .as_ref() .map(|v| v.contains(&backend)) .unwrap_or(true) { log::warn!( "ignoring asm comparison in {} test for backend {:?}", input.display(), backend ); return; } match test_data.reference.expect { 
AsmComparisonOutcome::Change => { if normalized_reference_asm == normalized_optimized_asm { panic!( "asserted assembly to NOT be identical to the reference. \ But they are the same." ); } } AsmComparisonOutcome::IdenticalTo(ExpectedData::Ignore) => {} AsmComparisonOutcome::Unchanged | AsmComparisonOutcome::IdenticalTo(_) => assert_changeset( &TestSpec { input: path_asm_optimized, references: path_asm_reference, generate_tentatives: false, }, "asm", &normalized_reference_asm, &normalized_optimized_asm, ) .unwrap_or_else(|msg| match test_data.reference.expect { AsmComparisonOutcome::Unchanged => { panic!("{}. expected asm to be unchanged.", msg.to_string()) } _ => panic!( "{}. expected asm to be identical to reference.", msg.to_string() ), }), }; } fn strip_comments(s: &str) -> String { let regex = regex::Regex::new("/\\*.*?\\*/").unwrap(); regex.replace_all(s, "").to_string() } fn remove_labels(s: &str) -> String { let regex = regex::RegexBuilder::new(r"^\.L[0-9]+:\n") .multi_line(true) .build() .unwrap(); let s = regex.replace_all(s, "").to_string(); let regex = regex::RegexBuilder::new(r"^.*j(mp|lt|gt|e|ne|ge|le) \.L[0-9]+\n") .multi_line(true) .build() .unwrap(); regex.replace_all(&s, "").to_string() } fn sort_functions(s: &str) -> String { let mut blocks = s .split("# -- Begin ") .map(|block| block.trim()) .collect::<Vec<&str>>(); blocks.sort(); format!("# -- Begin {}", blocks.join("\n# -- Begin ")) } fn remove_trailing_whitespace(s: &str) -> String { let mut lines: Vec<&str> = vec![]; for line in s.lines() { let trimmed = line.trim_end(); if !trimmed.is_empty() { lines.push(trimmed); } } lines.join("\n") } fn normalize_asm(asm: &str) -> String { [ strip_comments, remove_trailing_whitespace, remove_labels, sort_functions, ] .iter() .fold(asm.to_owned(), |acc, transform| transform(&acc)) } fn assert_binary( binary_path: &PathBuf, stdin: &Option<ExpectedData>, setup: &TestSpec, references: ReferenceData, ) { let output = run_binary(binary_path, stdin, &setup); assert_output( &output, references, &TestSpec { input: binary_path.clone(), references: binary_path.clone(), generate_tentatives: true, }, ); } fn run_binary(binary_path: &PathBuf, stdin: &Option<ExpectedData>, setup: &TestSpec) -> Output { let mut cmd = std::process::Command::new(&binary_path); let mut child = cmd .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() .expect("failed to invoke generated binary"); if let Some(ref stdin_data) = stdin { match stdin_data { ExpectedData::Ignore => {} ExpectedData::Inline(stdin_str) => { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); stdin .write_all(stdin_str.as_bytes()) .expect("Failed to write to stdin of generated binary"); } ExpectedData::InFile(rel_path) => { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); let stdin_path = reference_to_absolute_path(&setup, &rel_path); let mut stdin_reader = File::open(&stdin_path).expect("failed to open stdin file"); io::copy(&mut stdin_reader, stdin) .expect("failed to write to stdin of generated binary"); } } } child .wait_with_output() .expect("failed to invoke generated binary") }
use crate::*; use optimization::{self, Optimization}; use serde_derive::Deserialize; use std::{ fs::File, io::{self, Write}, path::PathBuf, process::Stdio, }; #[derive(Debug, Deserialize, Clone)] #[serde(rename_all = "kebab-case")] pub enum AsmComparisonOutcome { Unchanged, Change, IdenticalTo(ExpectedData), } #[derive(Debug, Deserialize, Clone)] pub struct OptimizationTestData { pub compiler_optimized_stderr: Option<ExpectedData>, pub compiler_optimized_stdout: Option<ExpectedData>, pub compiler_optimized_exitcode: Option<ExpectedData>, pub compiler_reference_stderr: Option<ExpectedData>, pub compiler_reference_stdout: Option<ExpectedData>, pub compiler_reference_exitcode: Option<ExpectedData>, pub stderr: Option<ExpectedData>, pub stdout: Option<ExpectedData>, pub exitcode: Option<ExpectedData>, pub stdin: Option<ExpectedData>, pub optimizations: Vec<optimization::Kind>, pub expect: AsmComparisonOutcome, pub backend_asm: Option<Vec<Backend>>, pub backend: Option<Vec<Backend>>, } impl FromReferencesPath<OptimizationTestData> for OptimizationTestData { fn from_reference_path(_base: &PathBuf) -> Self { Self { compiler_optimized_stderr: None, compiler_optimized_stdout: None, compiler_optimized_exitcode: None, compiler_reference_stderr: None, compiler_reference_stdout: None, compiler_reference_exitcode: None, stderr: None, stdout: None, exitcode: None, stdin: None, optimizations: vec![], expect: AsmComparisonOutcome::Change, backend: Some(vec![Backend::Own, Backend::Libfirm]), backend_asm: Some(vec![Backend::Own, Backend::Libfirm]), } } } impl IntoReferenceData for OptimizationTestData { fn into_reference_data(self, base: &PathBuf) -> ReferenceData { self.into_optimizing_compiler_reference_data(base) } } impl OptimizationTestData { fn into_optimizing_compiler_reference_data(self, _base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .compiler_optimized_stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .compiler_optimized_stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .compiler_optimized_exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } fn into_reference_compiler_reference_data(self, _base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .compiler_reference_stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .compiler_reference_stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .compiler_reference_exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } fn into_binary_reference_data(self, _compiler_base: &PathBuf) -> ReferenceData { ReferenceData { stderr: self .stderr .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), stdout: self .stdout .unwrap_or_else(|| ExpectedData::Inline("".to_owned())), exitcode: self .exitcode .unwrap_or_else(|| ExpectedData::Inline("0".to_owned())), } } } pub fn exec_optimization_test(input: PathBuf, backend: Backend) { let path_binary_optimized = input.with_extension(&format!("{}.optimized.out", backend.to_ascii_label())); let path_binary_reference = input.with_extension(&format!("{}.reference.out", backend.to_ascii_label())); let path_asm_optimized = input.with_extension(&format!("{}.optimized.S", backend.to_ascii_label())); let path_asm_reference = input.with_extension(&format!("{}.reference.S", backend.to_ascii_label())); let setup = TestSpec { references: input.clone(), input: input.clone(), generate_tentatives: true, }; let (input_without_yaml_path, test_data) = 
load_test_data::<OptimizationTestData>(&setup); if test_data.reference.optimizations.is_empty() { panic!("you MUST at least specify one optimization. none given."); } if !test_data .reference .backend .as_ref() .map(|v| v.contains(&backend)) .unwrap_or(true) { log::warn!( "ignoring {} test for backend {:?}", input.display(), backend ); return; } let callinfo_actual = CompilerCall::RawCompiler(CompilerPhase::Binary { backend, output: path_binary_optimized.clone(), assembly: Some(path_asm_optimized.clone()), optimizations: optimization::Level::Custom( test_data .reference .optimizations .clone() .iter() .map(|kind| Optimization { kind: *kind, flags: vec![], }) .collect(), ), }); let mut cmd_actual = c
.into_optimizing_compiler_reference_data(&path_binary_optimized), &TestSpec { input: path_binary_optimized.clone(), references: path_binary_optimized.clone(), generate_tentatives: true, }, ); let reference_input = match test_data.reference.expect { AsmComparisonOutcome::Change | AsmComparisonOutcome::Unchanged | AsmComparisonOutcome::IdenticalTo(ExpectedData::Ignore) => { input_without_yaml_path.clone() } AsmComparisonOutcome::IdenticalTo(ExpectedData::Inline(ref mj_str)) => { let path = add_extension(&setup.input, "reference"); write(&Some(path.clone()), mj_str).expect( "Failed to write reference mini java \ file to disk (required for input to the compiler under test)", ); path } AsmComparisonOutcome::IdenticalTo(ExpectedData::InFile(ref mj_rel_path)) => { reference_to_absolute_path(&setup, mj_rel_path) } }; let callinfo_reference = CompilerCall::RawCompiler(CompilerPhase::Binary { backend, output: path_binary_reference.clone(), assembly: Some(path_asm_reference.clone()), optimizations: optimization::Level::None, }); let mut cmd_reference = compiler_call(callinfo_reference, &reference_input); println!("Executing: {:?}", cmd_reference); let output_reference = cmd_reference .output() .expect("failed to call compiler under test for reference input"); assert_output( &output_reference, test_data .reference .clone() .into_reference_compiler_reference_data(&path_binary_reference), &TestSpec { input: path_binary_reference.clone(), references: path_binary_reference.clone(), generate_tentatives: true, }, ); assert_binary( &path_binary_optimized, &test_data.reference.stdin, &setup, test_data .reference .clone() .into_binary_reference_data(&path_binary_optimized), ); assert_binary( &path_binary_reference, &test_data.reference.stdin, &setup, test_data .reference .clone() .into_binary_reference_data(&path_binary_reference), ); let asm_optimized = read(&Some(path_asm_optimized.clone())).unwrap(); let asm_reference = read(&Some(path_asm_reference.clone())).unwrap(); let normalized_optimized_asm = normalize_asm(&asm_optimized); let normalized_reference_asm = normalize_asm(&asm_reference); write( &Some(add_extension(&path_asm_optimized, "normalized")), &normalized_optimized_asm, ) .unwrap(); write( &Some(add_extension(&path_asm_reference, "normalized")), &normalized_reference_asm, ) .unwrap(); if !test_data .reference .backend_asm .as_ref() .map(|v| v.contains(&backend)) .unwrap_or(true) { log::warn!( "ignoring asm comparison in {} test for backend {:?}", input.display(), backend ); return; } match test_data.reference.expect { AsmComparisonOutcome::Change => { if normalized_reference_asm == normalized_optimized_asm { panic!( "asserted assembly to NOT be identical to the reference. \ But they are the same." ); } } AsmComparisonOutcome::IdenticalTo(ExpectedData::Ignore) => {} AsmComparisonOutcome::Unchanged | AsmComparisonOutcome::IdenticalTo(_) => assert_changeset( &TestSpec { input: path_asm_optimized, references: path_asm_reference, generate_tentatives: false, }, "asm", &normalized_reference_asm, &normalized_optimized_asm, ) .unwrap_or_else(|msg| match test_data.reference.expect { AsmComparisonOutcome::Unchanged => { panic!("{}. expected asm to be unchanged.", msg.to_string()) } _ => panic!( "{}. 
expected asm to be identical to reference.", msg.to_string() ), }), }; } fn strip_comments(s: &str) -> String { let regex = regex::Regex::new("/\\*.*?\\*/").unwrap(); regex.replace_all(s, "").to_string() } fn remove_labels(s: &str) -> String { let regex = regex::RegexBuilder::new(r"^\.L[0-9]+:\n") .multi_line(true) .build() .unwrap(); let s = regex.replace_all(s, "").to_string(); let regex = regex::RegexBuilder::new(r"^.*j(mp|lt|gt|e|ne|ge|le) \.L[0-9]+\n") .multi_line(true) .build() .unwrap(); regex.replace_all(&s, "").to_string() } fn sort_functions(s: &str) -> String { let mut blocks = s .split("# -- Begin ") .map(|block| block.trim()) .collect::<Vec<&str>>(); blocks.sort(); format!("# -- Begin {}", blocks.join("\n# -- Begin ")) } fn remove_trailing_whitespace(s: &str) -> String { let mut lines: Vec<&str> = vec![]; for line in s.lines() { let trimmed = line.trim_end(); if !trimmed.is_empty() { lines.push(trimmed); } } lines.join("\n") } fn normalize_asm(asm: &str) -> String { [ strip_comments, remove_trailing_whitespace, remove_labels, sort_functions, ] .iter() .fold(asm.to_owned(), |acc, transform| transform(&acc)) } fn assert_binary( binary_path: &PathBuf, stdin: &Option<ExpectedData>, setup: &TestSpec, references: ReferenceData, ) { let output = run_binary(binary_path, stdin, &setup); assert_output( &output, references, &TestSpec { input: binary_path.clone(), references: binary_path.clone(), generate_tentatives: true, }, ); } fn run_binary(binary_path: &PathBuf, stdin: &Option<ExpectedData>, setup: &TestSpec) -> Output { let mut cmd = std::process::Command::new(&binary_path); let mut child = cmd .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() .expect("failed to invoke generated binary"); if let Some(ref stdin_data) = stdin { match stdin_data { ExpectedData::Ignore => {} ExpectedData::Inline(stdin_str) => { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); stdin .write_all(stdin_str.as_bytes()) .expect("Failed to write to stdin of generated binary"); } ExpectedData::InFile(rel_path) => { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); let stdin_path = reference_to_absolute_path(&setup, &rel_path); let mut stdin_reader = File::open(&stdin_path).expect("failed to open stdin file"); io::copy(&mut stdin_reader, stdin) .expect("failed to write to stdin of generated binary"); } } } child .wait_with_output() .expect("failed to invoke generated binary") }
ompiler_call(callinfo_actual, &input_without_yaml_path); println!("Executing: {:?}", cmd_actual); let output_actual = cmd_actual .output() .expect("failed to call compiler under test for actual input"); assert_output( &output_actual, test_data .reference .clone()
random
[ { "content": "pub fn default_reference_exitcode(base: &PathBuf) -> ExpectedData {\n\n ExpectedData::InFile(add_extension(base, \"exitcode\"))\n\n}\n\n\n\nimpl OptionalReferenceData {\n\n pub fn all_from_own_file(base: &PathBuf) -> Self {\n\n Self {\n\n stderr: Some(default_reference_stderr(base)),\n\n stdout: Some(default_reference_stdout(base)),\n\n exitcode: Some(default_reference_exitcode(base)),\n\n }\n\n }\n\n}\n\n\n\nimpl ReferenceData {\n\n pub fn all_from_own_file(base: &PathBuf) -> Self {\n\n Self {\n\n stderr: default_reference_stderr(base),\n\n stdout: default_reference_stdout(base),\n\n exitcode: default_reference_exitcode(base),\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 2, "score": 375486.03899062163 }, { "content": "pub fn default_reference_stdout(base: &PathBuf) -> ExpectedData {\n\n ExpectedData::InFile(add_extension(base, \"stdout\"))\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 3, "score": 375486.0389906217 }, { "content": "pub fn default_reference_stderr(base: &PathBuf) -> ExpectedData {\n\n ExpectedData::InFile(add_extension(base, \"stderr\"))\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 4, "score": 375483.3523498681 }, { "content": "fn assert_output(actual: &Output, expected: ReferenceData, setup: &TestSpec) {\n\n let stdout_result = {\n\n let stdout = &String::from_utf8_lossy(&actual.stdout);\n\n load_reference(setup, &stdout, expected.stdout, \"stdout\").and_then(|expected| {\n\n if let Some(reference) = expected {\n\n assert_changeset(setup, \"stdout\", &reference, &stdout)\n\n } else {\n\n Ok(())\n\n }\n\n })\n\n };\n\n\n\n let stderr_result = {\n\n let stderr = normalize_stderr(&String::from_utf8_lossy(&actual.stderr));\n\n load_reference(setup, &stderr, expected.stderr, \"stderr\").and_then(|expected| {\n\n if let Some(reference) = expected {\n\n assert_changeset(setup, \"stderr\", &reference, &stderr)\n\n } else {\n\n Ok(())\n\n }\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 5, "score": 372546.43866043334 }, { "content": "pub fn exec_binary_test(input: PathBuf, optimizations: optimization::Level, backend: Backend) {\n\n let binary_path = input.with_extension(format!(\"{}.out\", backend.to_ascii_label()));\n\n let assembly_file = input.with_extension(format!(\"{}.out.S\", backend.to_ascii_label()));\n\n let reference_file_path = input.with_extension(\"out\");\n\n\n\n let setup = TestSpec {\n\n references: input.clone(),\n\n input: input.clone(),\n\n generate_tentatives: true,\n\n };\n\n\n\n // TODO: instead of panicing when there are no references\n\n // continue and generate references for the binary.\n\n let metadata = assert_compiler_phase::<BinaryTestData>(\n\n CompilerCall::RawCompiler(CompilerPhase::Binary {\n\n output: binary_path.clone(),\n\n backend,\n\n optimizations,\n\n assembly: Some(assembly_file),\n\n }),\n", "file_path": "runner-integration-tests/src/testkind/binary.rs", "rank": 6, "score": 367404.644356075 }, { "content": "pub fn exec_timeout_test(input: PathBuf, optimizations: optimization::Level, backend: Backend) {\n\n use wait_timeout::ChildExt;\n\n let binary_path = input.with_extension(format!(\"{}.out\", backend.to_ascii_label()));\n\n let assembly_file = input.with_extension(format!(\"{}.out.S\", backend.to_ascii_label()));\n\n\n\n let test_data = assert_compiler_phase::<Data>(\n\n CompilerCall::RawCompiler(CompilerPhase::Binary {\n\n output: binary_path.clone(),\n\n backend,\n\n optimizations,\n\n assembly: Some(assembly_file),\n\n 
}),\n\n &TestSpec {\n\n references: input.clone(),\n\n input,\n\n generate_tentatives: true,\n\n },\n\n );\n\n\n\n // reaching this line means the compiler assertions were correct\n", "file_path": "runner-integration-tests/src/testkind/timeout.rs", "rank": 7, "score": 367404.644356075 }, { "content": "pub fn exec_ast_inspector_test(input: PathBuf) {\n\n let spec = TestSpec {\n\n references: input.clone(),\n\n input,\n\n generate_tentatives: true,\n\n };\n\n\n\n let (input_without_yaml_path, data) = load_test_data::<Data>(&spec);\n\n\n\n let callinfo = CompilerCall::AstInspector {\n\n content: data.reference.content.clone(),\n\n kind: data.reference.kind.clone(),\n\n typeinfo: data.reference.typeinfo.clone(),\n\n };\n\n\n\n let mut call = compiler_call(callinfo, &input_without_yaml_path);\n\n println!(\"Executing: {:?}\", call);\n\n\n\n let output = call.output().expect(\"failed to call ast-inspector\");\n\n\n\n assert_output(\n\n &output,\n\n data.reference.clone().into_reference_data(&spec.references),\n\n &spec,\n\n );\n\n}\n", "file_path": "runner-integration-tests/src/testkind/spans.rs", "rank": 8, "score": 281458.86861349124 }, { "content": "fn reference_to_absolute_path(setup: &TestSpec, rel_path: &PathBuf) -> PathBuf {\n\n let mut path = setup.references.clone();\n\n path.pop(); // remove filename, so we get the base directory\n\n path.push(rel_path);\n\n path\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 9, "score": 269709.9437636802 }, { "content": "pub fn print(ast: &ast::AST<'_>, out: &mut dyn std::io::Write) -> Result<(), Error> {\n\n let mut printer = IndentPrinter::new(out);\n\n do_prettyprint(&NodeKind::from(ast), &mut printer);\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-lib/src/print/pretty.rs", "rank": 10, "score": 266144.87911293353 }, { "content": "pub fn print<'f>(ast: &ast::AST<'f>, out: &mut dyn std::io::Write) -> Result<(), Error> {\n\n let mut printer = Printer {\n\n indent: 0,\n\n writer: out,\n\n };\n\n let n = NodeKind::from(ast);\n\n do_structureprint(&n, &mut printer)\n\n}\n\n\n", "file_path": "compiler-lib/src/print/structure.rs", "rank": 11, "score": 262722.07875215786 }, { "content": "// dummy_writer returns a WriteColor meant for use in tests.\n\npub fn dummy_writer() -> impl termcolor::WriteColor {\n\n use termcolor::Buffer;\n\n // FIXME: actually have something that discards the output\n\n Buffer::no_color()\n\n}\n", "file_path": "compiler-shared/src/context.rs", "rank": 12, "score": 244165.0291788776 }, { "content": "fn generate_tentative_reference(setup: &TestSpec, actual: &str, label: &str) -> Option<PathBuf> {\n\n if !setup.generate_tentatives {\n\n return None;\n\n }\n\n\n\n let file_tentative = tentative_file_path(&add_extension(&setup.references, label));\n\n\n\n File::create(&file_tentative)\n\n .and_then(|mut file| file.write_all(actual.as_bytes()))\n\n .ok();\n\n\n\n Some(file_tentative)\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 13, "score": 240930.9989266354 }, { "content": "pub fn get_files<T: FromReferencesPath<T> + DeserializeOwned>(\n\n path_input: &PathBuf,\n\n path_references: &PathBuf,\n\n) -> Result<TestData<T>, Error> {\n\n let input: String = if let Ok(utf8_data) = read(&Some(path_input.clone())) {\n\n utf8_data\n\n } else {\n\n // we do not support yaml in front of non UTF-8 files.\n\n // Use default\n\n return Ok(TestData {\n\n input: InputData::NotLoaded(path_input.clone()),\n\n reference: T::from_reference_path(path_references),\n\n });\n\n };\n\n let input_file = 
yaml::FrontMatter::new(&input);\n\n\n\n // 1.) Try to find reference output in yaml front matter\n\n if let Some(yaml) = input_file.front_matter() {\n\n let reference: T = serde_yaml::from_str(yaml).context(DataError::InvalidFrontMatter {\n\n path: path_input.clone(),\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 14, "score": 239505.64749232488 }, { "content": "pub fn write(file: &Option<PathBuf>, contents: &str) -> Result<(), Error> {\n\n if let Some(path) = file {\n\n let mut file = File::create(path)?;\n\n file.write_all(contents.as_bytes())?;\n\n } else {\n\n stdout().write_all(contents.as_bytes())?;\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 15, "score": 235830.70101077933 }, { "content": "pub trait AsmOut: std::io::Write + std::os::unix::io::AsRawFd {}\n\n\n\nimpl AsmOut for std::fs::File {}\n\n\n", "file_path": "compiler-lib/src/backend.rs", "rank": 16, "score": 235588.08828870376 }, { "content": "/// Print error objects in a format intended for end users\n\npub fn print_error(writer: &mut dyn io::Write, err: &Error) -> Result<(), Error> {\n\n writeln!(writer, \"error: {}\", err.as_fail())?;\n\n for cause in err.iter_causes() {\n\n writeln!(writer, \"caused by: {}\", cause)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-cli/src/main.rs", "rank": 17, "score": 224964.86596605217 }, { "content": "fn write_token<O: io::Write>(out: &mut O, token: &TokenKind<'_>) -> Result<(), Error> {\n\n match token {\n\n TokenKind::Whitespace | TokenKind::Comment(_) => Ok(()),\n\n _ => {\n\n writeln!(out, \"{}\", lextest::Output::new(&token))?;\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "compiler-cli/src/main.rs", "rank": 18, "score": 215631.1958175485 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_assembly_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"assembly\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Assembly) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 19, "score": 214151.89358764005 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_optimization_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"optimization\", \"\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! {\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n exec_optimization_test(input.clone(), Backend::Own);\n\n exec_optimization_test(input.clone(), Backend::Libfirm);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 20, "score": 213173.236664908 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_ast_reference_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"ast\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! 
{ CompilerCall::RawCompiler(CompilerPhase::Ast) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 21, "score": 211064.60306716818 }, { "content": "#[proc_macro]\n\npub fn gen_syntax_tests(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let gen_args = GenArgs::must_from_token_stream(input);\n\n let cases = SyntaxTestCase::all().expect(\"could not load test cases\");\n\n // generate test cases\n\n let mut out = proc_macro2::TokenStream::new();\n\n cases\n\n .into_iter()\n\n .for_each(|tc| gen_args.gen_testcase::<SyntaxTestCase>(&tc, &mut out));\n\n out.into()\n\n}\n\n\n", "file_path": "mjtest-rs/mjtest_macros/src/lib.rs", "rank": 22, "score": 208889.74998613092 }, { "content": "#[proc_macro]\n\npub fn gen_semantic_tests(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let gen_args = GenArgs::must_from_token_stream(input);\n\n let cases = SemanticTestCase::all().expect(\"could not load test cases\");\n\n // generate test cases\n\n let mut out = proc_macro2::TokenStream::new();\n\n cases\n\n .into_iter()\n\n .for_each(|tc| gen_args.gen_testcase(&tc, &mut out));\n\n out.into()\n\n}\n", "file_path": "mjtest-rs/mjtest_macros/src/lib.rs", "rank": 23, "score": 208889.74998613092 }, { "content": "#[allow(clippy::similar_names)]\n\npub fn run_backend(\n\n firm_program: &FirmProgram<'_, '_>,\n\n out: &mut impl std::io::Write,\n\n no_peep: bool,\n\n) -> std::io::Result<()> {\n\n let mut lir = LIR::from(firm_program);\n\n debugging::breakpoint!(\"LIR stage 1\", lir, &|block: &lir::BasicBlock| {\n\n lir::debugging::default_lir_label(block)\n\n });\n\n\n\n writeln!(out, \"\\t.text\")?;\n\n\n\n // TODO predictable order\n\n for f in &mut lir.functions {\n\n basic_block_scheduling::basic_block_scheduling(f);\n\n let lva_result = live_variable_analysis::live_variable_analysis(\n\n &f.graph.blocks_scheduled.as_ref().unwrap(),\n\n &lir.allocator,\n\n );\n\n\n", "file_path": "lowering/src/lib.rs", "rank": 24, "score": 203817.8797325215 }, { "content": "pub trait IntoReferenceData {\n\n fn into_reference_data(self, base: &PathBuf) -> ReferenceData;\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 25, "score": 199793.91576007294 }, { "content": "pub fn label_for_late_placement(\n\n node: &Node,\n\n current_node: Node,\n\n earliest_allowed: nodes::Block,\n\n latest_allowed: Option<nodes::Block>,\n\n) -> Label {\n\n let mut label = dom_info_box(node);\n\n\n\n if let Node::Block(rendered_block) = node {\n\n // is within the chain of possibilities\n\n if let Some(latest_block) = latest_allowed {\n\n if earliest_allowed.dominates(*rendered_block) && rendered_block.dominates(latest_block)\n\n {\n\n label = label\n\n .add_style(Style::Filled)\n\n .fillcolor(X11Color::Pink)\n\n .fontcolor(X11Color::White);\n\n }\n\n }\n\n\n", "file_path": "optimization/src/code_placement.rs", "rank": 26, "score": 195655.40261027217 }, { "content": "fn normalize_stderr(stderr: &str) -> String {\n\n stderr.replace(&*ROOT_DIR, \"{ROOT}\")\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TestSpec {\n\n pub input: PathBuf,\n\n pub references: PathBuf,\n\n pub generate_tentatives: bool,\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 27, "score": 195018.34416772355 }, { "content": "/// append another extension to a filename\n\npub fn add_extension(path: &PathBuf, extension: &str) -> PathBuf {\n\n let mut filepath = path.clone();\n\n\n\n let original_extension = filepath\n\n .extension()\n\n 
.unwrap_or_else(|| OsStr::new(\"\"))\n\n .to_os_string();\n\n\n\n filepath.set_extension({\n\n let mut ext = original_extension.clone();\n\n ext.push(OsStr::new(\".\"));\n\n ext.push(OsStr::new(extension));\n\n ext\n\n });\n\n\n\n filepath\n\n}\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 28, "score": 193419.08377107774 }, { "content": "#[allow(dead_code)]\n\npub fn assert_compiler_phase<\n\n TestMetadata: IntoReferenceData + FromReferencesPath<TestMetadata> + DeserializeOwned + Clone,\n\n>(\n\n phase: CompilerCall,\n\n spec: &TestSpec,\n\n) -> TestData<TestMetadata> {\n\n let (input_without_yaml_path, test_data) = load_test_data::<TestMetadata>(spec);\n\n let mut call = compiler_call(phase, &input_without_yaml_path);\n\n println!(\"Executing: {:?}\", call);\n\n let output = call.output().expect(\"failed to call compiler under test\");\n\n\n\n assert_output(\n\n &output,\n\n test_data\n\n .reference\n\n .clone()\n\n .into_reference_data(&spec.references),\n\n &spec,\n\n );\n\n\n\n test_data\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 29, "score": 193193.87997899746 }, { "content": "pub fn basic_block_scheduling(func: &mut lir::Function) {\n\n // reverse postorder\n\n let ordered = func\n\n .graph\n\n .postorder_blocks()\n\n .into_iter()\n\n .rev()\n\n .collect::<Vec<_>>();\n\n func.graph.blocks_scheduled = Some(ordered);\n\n}\n", "file_path": "lowering/src/basic_block_scheduling.rs", "rank": 30, "score": 192940.51636268414 }, { "content": "fn write_eof_token<O: io::Write>(out: &mut O) -> Result<(), Error> {\n\n writeln!(out, \"EOF\")?;\n\n Ok(())\n\n}\n", "file_path": "compiler-cli/src/main.rs", "rank": 31, "score": 192569.64291883464 }, { "content": "fn sort_copy_prop(copies: &[Mov], instrs: &mut Vec<Instruction>) {\n\n use self::Instruction::Mov;\n\n\n\n // Resolve reg-to-reg copy cycles using RegGraph\n\n let mut mov_imm = vec![];\n\n let mut transfers: Vec<RegToRegTransfer<SortCopyPropEntity>> = vec![];\n\n for instr in copies.iter() {\n\n if let SrcOperand::Imm(tv) = instr.src {\n\n mov_imm.push(Mov(MovInstruction {\n\n src: SrcOperand::Imm(tv),\n\n dst: instr.dst,\n\n comment: \"copy prop imm move\".to_owned(),\n\n }))\n\n } else {\n\n transfers.push(RegToRegTransfer {\n\n src: SortCopyPropEntity(instr.src.try_into().unwrap()),\n\n dst: instr.dst.into(),\n\n });\n\n }\n\n }\n", "file_path": "lowering/src/codegen.rs", "rank": 32, "score": 192558.7572674256 }, { "content": "pub trait Lattice: Eq + Clone {\n\n fn is_progression_of(&self, other: &Self) -> bool;\n\n fn join(&self, other: &Self, context: &mut JoinContext) -> Self;\n\n\n\n fn join_default(&self, other: &Self) -> Self {\n\n self.join(other, &mut JoinContext::None)\n\n }\n\n\n\n /*fn join_many<'t, I>(vals: I) -> Option<Self>\n\n where\n\n I: IntoIterator<Item = &'t Self>,\n\n Self: 't,\n\n {\n\n let mut cur: Option<Self> = None;\n\n for val in vals {\n\n cur = Some(if let Some(cur) = cur {\n\n cur.join(val)\n\n } else {\n\n val.clone()\n\n })\n", "file_path": "optimization/src/lattices/mod.rs", "rank": 33, "score": 189754.7216538919 }, { "content": "fn parse_flag(s: &str) -> Result<optimization::Flag, ParseError> {\n\n match s.to_ascii_lowercase().as_str() {\n\n \"d\" | \"vcg\" => Ok(optimization::Flag::DumpVcg),\n\n \"g\" | \"gui\" => {\n\n if cfg!(feature = \"debugger_gui\") {\n\n Ok(optimization::Flag::Gui)\n\n } else {\n\n Err(ParseError::NoDebuggerSupport {\n\n flag: s.to_string(),\n\n })\n\n }\n\n }\n\n _ => Err(ParseError::UnknownFlag {\n\n name: s.to_string(),\n\n 
}),\n\n }\n\n}\n\n\n", "file_path": "compiler-cli/src/optimization_arg.rs", "rank": 34, "score": 188821.7549783308 }, { "content": "/// Deserializes a `Duration` via the humantime crate.\n\n///\n\n/// This function can be used with `serde_derive`'s `with` and\n\n/// `deserialize_with` annotations.\n\npub fn deserialize<'de, D>(d: D) -> Result<Duration, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct V;\n\n\n\n impl<'de2> Visitor<'de2> for V {\n\n type Value = Duration;\n\n\n\n fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n fmt.write_str(\"a duration\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Duration, E>\n\n where\n\n E: Error,\n\n {\n\n humantime::parse_duration(v).map_err(|_| E::invalid_value(Unexpected::Str(v), &self))\n\n }\n\n }\n\n\n\n d.deserialize_str(V)\n\n}\n", "file_path": "runner-integration-tests/src/serde_humantime.rs", "rank": 35, "score": 188588.8275583253 }, { "content": "fn tentative_file_path(reference: &PathBuf) -> PathBuf {\n\n let update_references = env::var(\"UPDATE_REFERENCES\");\n\n\n\n if update_references.is_ok() {\n\n reference.clone()\n\n } else {\n\n add_extension(reference, \"actual\")\n\n }\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 36, "score": 183781.21345873823 }, { "content": "pub fn compiler_call(compiler_call: CompilerCall, filepath: &PathBuf) -> Command {\n\n match compiler_call {\n\n CompilerCall::RawCompiler(phase) => {\n\n let mut cmd = env::var(\"COMPILER_BINARY\")\n\n .map(|path| {\n\n log::debug!(\"Test run using alternate compiler binary at {}\", path);\n\n Command::new(path)\n\n })\n\n .unwrap_or_else(|_| {\n\n let binary = project_binary(Some(\"compiler-cli\"));\n\n log::debug!(\"Test run using the default compiler binary at {:?}\", binary);\n\n Command::new(binary)\n\n });\n\n\n\n cmd.env(\"TERM\", \"dumb\"); // disable color output\n\n cmd.env(compile_time_assertions::ENV_VAR_NAME, \"enabled\");\n\n match phase {\n\n CompilerPhase::Parser | CompilerPhase::Linter => {\n\n cmd.env(\"CHOCOLATE\", \"1\");\n\n }\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 37, "score": 180933.77153261076 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_semantic_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"semantic\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Semantic) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 38, "score": 176937.01173786347 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_binary_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"binary\", \"\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! 
{\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n exec_binary_test(input.clone(), Level::None, Backend::Own);\n\n exec_binary_test(input.clone(), Level::None, Backend::Libfirm);\n\n exec_binary_test(input.clone(), Level::Aggressive, Backend::Own);\n\n exec_binary_test(input.clone(), Level::Aggressive, Backend::Libfirm);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 39, "score": 176937.01173786347 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_lexer_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"lexer\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Lexer) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 40, "score": 176937.01173786347 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_parser_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"parser\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Parser) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 41, "score": 176937.01173786347 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_lints_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"lints\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Linter) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 42, "score": 176937.01173786347 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_ast_inspector_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"spans\", \"\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! {\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n exec_ast_inspector_test(input);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 43, "score": 176937.01173786347 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_timeout_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"timeout\", \"\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! 
{\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n exec_timeout_test(input.clone(), Level::None, Backend::Own);\n\n exec_timeout_test(input.clone(), Level::None, Backend::Libfirm);\n\n exec_timeout_test(input.clone(), Level::Aggressive, Backend::Own);\n\n exec_timeout_test(input.clone(), Level::Aggressive, Backend::Libfirm);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 44, "score": 176937.01173786347 }, { "content": "pub fn read(file: &Option<PathBuf>) -> Result<String, io::Error> {\n\n // TODO: stream instead of reading everything into string\n\n let mut contents = String::new();\n\n\n\n if let Some(path) = file {\n\n File::open(&path)?.read_to_string(&mut contents)?;\n\n } else {\n\n stdin().read_to_string(&mut contents)?;\n\n };\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 45, "score": 175961.76464772254 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\nenum InfoKind {\n\n Allocator,\n\n Cache,\n\n}\n\n\n\nimpl InfoKind {\n\n pub fn is_allocator(self) -> bool {\n\n match self {\n\n Allocator => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\nuse self::InfoKind::*;\n\n\n\nimpl Heap {\n\n pub fn start() -> Self {\n\n Self {\n\n object_infos: HashMap::new(),\n", "file_path": "optimization/src/lattices/heap.rs", "rank": 46, "score": 175194.36879707908 }, { "content": "fn parse_custom_sequence(s: &str) -> Result<Vec<optimization::Optimization>, ParseError> {\n\n let mut list = Vec::new();\n\n for opt in s.split(',').filter(|s| !s.is_empty()) {\n\n let mut fields = opt.split('.');\n\n let kind = optimization::Kind::from_str(fields.next().unwrap()).map_err(|_| {\n\n ParseError::UnknownOptimization {\n\n name: opt.to_string(),\n\n }\n\n })?;\n\n let flags = fields\n\n .map(parse_flag)\n\n .collect::<Result<Vec<optimization::Flag>, _>>()?;\n\n\n\n list.push(optimization::Optimization { kind, flags });\n\n }\n\n\n\n Ok(list)\n\n}\n\n\n\n#[derive(Debug, Clone, Default)]\n", "file_path": "compiler-cli/src/optimization_arg.rs", "rank": 47, "score": 175055.03236214456 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_ast_idempotence_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"ast\", \"_idempotence\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! 
{\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n\n\n assert_compiler_phase::<OptionalReferenceData>(\n\n CompilerCall::RawCompiler(CompilerPhase::Ast),\n\n &TestSpec {\n\n input: add_extension(&input, \"stdout\"),\n\n references: input.clone(),\n\n generate_tentatives: false\n\n }\n\n );\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 48, "score": 174662.89523670514 }, { "content": "/// Print error objects in a format intended for end users\n\nfn print_error(writer: &mut dyn io::Write, err: &Error) -> Result<(), Error> {\n\n writeln!(writer, \"error: {}\", err.as_fail())?;\n\n for cause in err.iter_causes() {\n\n writeln!(writer, \"caused by: {}\", cause)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, Fail)]\n\npub enum Messages {\n\n #[fail(display = \"Matched kind '{}'\", kind)]\n\n Matched { kind: String },\n\n #[fail(display = \"Matched kind 'expression' of type `{}`\", typing)]\n\n MatchedExpr { typing: String },\n\n}\n\n\n\npub struct AstInspector<'f> {\n\n cfg: CliCommand,\n\n context: &'f context::Context<'f>,\n\n type_analysis: &'f TypeAnalysis<'f, 'f>,\n", "file_path": "inspect-ast/src/main.rs", "rank": 49, "score": 174175.9484678732 }, { "content": "// TODO: deduplicate after 172 is merged\n\npub fn escape_record_content(text: &str) -> String {\n\n text.replace(\"|\", \"\\\\|\")\n\n .replace(\"{\", \"\\\\{\")\n\n .replace(\"}\", \"\\\\}\")\n\n .replace(\"<\", \"\\\\<\")\n\n .replace(\">\", \"\\\\>\")\n\n}\n\n\n", "file_path": "optimization/src/code_placement.rs", "rank": 50, "score": 173950.1391732264 }, { "content": "pub fn dom_info_box(node: &Node) -> Label {\n\n let label = if let Node::Block(block) = node {\n\n let dom_depth = unsafe { bindings::get_Block_dom_depth(node.internal_ir_node()) };\n\n Label::from_text(format!(\n\n r#\"{{{body}|{{Dom Depth|{dom_depth}}}|{{Loop Depth|{loop_depth}}}}}\"#,\n\n dom_depth = dom_depth,\n\n loop_depth = block.loop_depth(),\n\n body = escape_record_content(&format!(\"{:?}\", block)),\n\n ))\n\n .shape(Shape::Record)\n\n .styles(vec![Style::Rounded, Style::Filled])\n\n } else {\n\n default_label(node)\n\n };\n\n\n\n if !is_movable(*node) {\n\n label.add_style(Style::Bold)\n\n } else {\n\n label.add_style(Style::Dashed)\n\n }\n\n}\n\n\n", "file_path": "optimization/src/code_placement.rs", "rank": 51, "score": 173950.1391732264 }, { "content": "pub fn external_val(ty: Ty) -> NodeValue {\n\n debug_assert!(!ty.mode().is_pointer() || PointerTy::from(ty).is_some());\n\n NodeValue::non_const_val(ty.mode(), MemoryArea::external())\n\n}\n\n\n", "file_path": "optimization/src/lattices/heap.rs", "rank": 52, "score": 173950.1391732264 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n c.bench_function_over_inputs(\n\n \"scope_walk\",\n\n |bencher, input| {\n\n let defs: Vec<String> = (0..*input).map(|i| format!(\"scope#{}\", i)).collect();\n\n let mut scoped = Scoped::new();\n\n for def in defs.iter() {\n\n scoped.define(def, ()).unwrap();\n\n scoped.enter_scope();\n\n }\n\n bencher.iter(|| scope_walk(&scoped, &defs));\n\n },\n\n vec![1, 10, 50, 100, 500],\n\n );\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "compiler-lib/benches/symtab_bench.rs", "rank": 53, "score": 173578.03837038932 }, { "content": "fn dominance_tree_in_dot_format(writer: &mut dyn Write, graph_name: &str, graph: Graph) {\n\n // TODO: onyl render if dominators are computed. 
Side effects in\n\n // debugging code is a bad idea\n\n graph.compute_doms();\n\n graph.assure_loopinfo();\n\n\n\n let mut list = Vec::new();\n\n graph.walk_dom_tree_postorder(|block| {\n\n list.push(*block);\n\n });\n\n\n\n writeln!(writer, \"digraph {} {{\", dot_string(graph_name)).unwrap();\n\n for block in list.iter() {\n\n let label = dom_info_box(&Node::Block(*block));\n\n label.write_dot_format(block.node_id(), writer);\n\n\n\n if let Some(idom) = block.immediate_dominator() {\n\n writeln!(\n\n writer,\n\n \"{:?} -> {:?} [color=blue];\",\n\n idom.node_id(),\n\n block.node_id()\n\n )\n\n .unwrap();\n\n }\n\n }\n\n writeln!(writer, \"}}\").unwrap();\n\n}\n\n\n", "file_path": "debugging/src/dot/implementations.rs", "rank": 54, "score": 172743.15128430928 }, { "content": "/// Arguments that should be given to the compiler under test\n\nfn compiler_args(phase: CompilerPhase) -> Vec<OsString> {\n\n let args: &[&str] = match phase {\n\n CompilerPhase::Lexer => &[\"--lextest\"],\n\n CompilerPhase::Parser => &[\"--parsetest\"],\n\n CompilerPhase::Ast => &[\"--print-ast\"],\n\n CompilerPhase::Semantic => &[\"--check\"],\n\n CompilerPhase::Linter => &[\"--check\", \"--lint\"],\n\n CompilerPhase::Binary {\n\n backend,\n\n output,\n\n assembly,\n\n optimizations,\n\n } => {\n\n let cmd_flag = match backend {\n\n Backend::Libfirm => \"--compile-firm\",\n\n Backend::Own => \"--compile\",\n\n };\n\n\n\n let mut flags = vec![\n\n OsString::from(cmd_flag),\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 55, "score": 170582.48146056395 }, { "content": "fn post_dominance_tree_in_dot_format(writer: &mut dyn Write, graph_name: &str, graph: Graph) {\n\n // TODO: onyl render if post dominators are computed. Side effects in\n\n // debugging code is a bad idea\n\n // TODO: deduplicate with dominance_tree_in_dot_format\n\n graph.compute_postdoms();\n\n graph.assure_loopinfo();\n\n\n\n let mut list = Vec::new();\n\n graph.walk_postdom_tree_postorder(|block| {\n\n list.push(*block);\n\n });\n\n\n\n writeln!(writer, \"digraph {} {{\", dot_string(graph_name)).unwrap();\n\n for block in list.iter() {\n\n let label = dom_info_box(&Node::Block(*block));\n\n label.write_dot_format(block.node_id(), writer);\n\n\n\n if let Some(idom) = block.immediate_post_dominator() {\n\n writeln!(\n\n writer,\n\n \"{:?} -> {:?} [color=red];\",\n\n idom.node_id(),\n\n block.node_id()\n\n )\n\n .unwrap();\n\n }\n\n }\n\n writeln!(writer, \"}}\").unwrap();\n\n}\n\n\n", "file_path": "debugging/src/dot/implementations.rs", "rank": 56, "score": 170579.25336014733 }, { "content": "#[allow(clippy::ptr_arg)]\n\nfn print_argument_list(args: &crate::ast::ArgumentList<'_>, printer: &mut IndentPrinter<'_>) {\n\n for (i, arg) in args.iter().enumerate() {\n\n // no parenthesizes for arguments in function calls\n\n do_prettyprint_expr(&arg.data, printer);\n\n\n\n if i != args.len() - 1 {\n\n printer.print_str(&\", \");\n\n }\n\n }\n\n}\n\n\n", "file_path": "compiler-lib/src/print/pretty.rs", "rank": 57, "score": 169681.848503806 }, { "content": "/// Returns `None` if the assertion should not be run, returns\n\n/// `Some(node)`, if the assertion should be run on `node`.\n\nfn get_args(call: nodes::Call) -> Vec<Node> {\n\n // skip `this` argument.\n\n call.args().skip(1).collect()\n\n}\n\n\n", "file_path": "optimization/src/compile_time_assertions.rs", "rank": 58, "score": 169310.66890984838 }, { "content": "fn load_reference(\n\n setup: &TestSpec,\n\n actual: &str,\n\n expected: ExpectedData,\n\n label: &str,\n\n) -> 
Result<Option<String>, TestFailure> {\n\n match expected {\n\n ExpectedData::Inline(data) => Ok(Some(data)),\n\n ExpectedData::Ignore => Ok(None),\n\n ExpectedData::InFile(rel_path) => {\n\n let path = reference_to_absolute_path(setup, &rel_path);\n\n\n\n if !path.is_file() {\n\n Err(match generate_tentative_reference(setup, actual, label) {\n\n None => TestFailure::NotFound { tried: path },\n\n Some(wrote) => TestFailure::NotFoundWroteReference { tried: path, wrote },\n\n })\n\n } else {\n\n Ok(Some(lookup::read(&Some(path.clone())).unwrap_or_else(\n\n |msg| {\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 59, "score": 169098.0592783391 }, { "content": "#[allow(clippy::cyclomatic_complexity)]\n\nfn do_prettyprint(n: &NodeKind<'_, '_>, printer: &mut IndentPrinter<'_>) {\n\n use crate::visitor::NodeKind::*;\n\n match n {\n\n AST(ast) => {\n\n use crate::ast::AST::*;\n\n match ast {\n\n Empty => (), // TODO newline?\n\n Program(p) => do_prettyprint(&NodeKind::Program(&p), printer),\n\n }\n\n }\n\n\n\n Program(program) => {\n\n let mut classes = program.classes.clone();\n\n classes.sort_by_key(|c| c.clone().data.name.data);\n\n classes\n\n .into_iter()\n\n .for_each(|class| do_prettyprint(&NodeKind::from(&class), printer));\n\n }\n\n\n\n ClassDeclaration(decl) => {\n", "file_path": "compiler-lib/src/print/pretty.rs", "rank": 61, "score": 162393.1720771782 }, { "content": "pub fn label_with_cse_info(node: &Node, highlight: &Node) -> Label {\n\n let mut label = default_label(node);\n\n\n\n if node == highlight {\n\n label = label\n\n .add_style(Style::Filled)\n\n .fillcolor(X11Color::Blue)\n\n .fontcolor(X11Color::White);\n\n }\n\n\n\n if !CommonSubExpr::node_qualifies_for_elim(*node) {\n\n label.add_style(Style::Bold)\n\n } else {\n\n label.add_style(Style::Dashed)\n\n }\n\n}\n", "file_path": "optimization/src/common_subexpr_elim.rs", "rank": 62, "score": 159967.52432552425 }, { "content": "pub trait FromReferencesPath<T> {\n\n fn from_reference_path(base: &PathBuf) -> T;\n\n}\n\n\n\nimpl FromReferencesPath<OptionalReferenceData> for OptionalReferenceData {\n\n fn from_reference_path(base: &PathBuf) -> Self {\n\n Self::all_from_own_file(base)\n\n }\n\n}\n\n\n\nimpl FromReferencesPath<ReferenceData> for ReferenceData {\n\n fn from_reference_path(base: &PathBuf) -> Self {\n\n Self::all_from_own_file(base)\n\n }\n\n}\n\n\n\nimpl IntoReferenceData for ReferenceData {\n\n fn into_reference_data(self, _base: &PathBuf) -> ReferenceData {\n\n self\n\n }\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 63, "score": 158907.87645751092 }, { "content": "pub fn init() {\n\n INIT.call_once(|| unsafe {\n\n bindings::ir_init_library();\n\n });\n\n}\n", "file_path": "libfirm-rs/src/lib.rs", "rank": 64, "score": 157628.71074485005 }, { "content": "pub fn print() {\n\n if std::env::var(\"MEASURE_STDERR\").is_ok() {\n\n eprintln!(\"Performance Analysis\");\n\n eprintln!(\"====================\\n\");\n\n\n\n if cfg!(feature = \"debugger_gui\") {\n\n eprintln!(\"Measurements not available with enabled breakpoints\");\n\n } else {\n\n eprintln!(\"{}\", TIMINGS.lock().unwrap());\n\n }\n\n }\n\n\n\n if let Ok(path) = std::env::var(\"MEASURE_JSON\") {\n\n let file = File::create(path).unwrap();\n\n serde_json::to_writer(\n\n file,\n\n &CompilerMeasurements::from(TIMINGS.lock().unwrap().clone()),\n\n )\n\n .unwrap();\n\n }\n", "file_path": "compiler-shared/src/timing.rs", "rank": 65, "score": 157628.71074485005 }, { "content": "fn do_structureprint(n: &NodeKind<'_, '_>, printer: &mut 
Printer<'_>) -> Result<(), Error> {\n\n gen_nodekind_match!(n, a => a.as_ast_node(printer))?;\n\n printer.add(2)?;\n\n n.for_each_child(&mut |child| {\n\n // TODO refactor\n\n // pub fn for_each_child(&self, cb: &mut dyn FnMut(NodeKind<'a, 't>))\n\n // into\n\n // pub fn for_each_child<R>(&self, cb: &mut dyn FnMut(NodeKind<'a, 't>) -> R)\n\n // -> R\n\n do_structureprint(&child, printer).unwrap(); // after refactor, unwrap can go\n\n });\n\n printer.add(-2)\n\n}\n", "file_path": "compiler-lib/src/print/structure.rs", "rank": 66, "score": 155723.77349540868 }, { "content": "pub fn label_with_dom_info(graph: Graph, node: &Node, highlight: &Node) -> Label {\n\n // TODO: no side effects in debug code\n\n graph.assure_loopinfo();\n\n\n\n let mut label = dom_info_box(node);\n\n\n\n let highlight_block = if let Node::Block(block) = highlight {\n\n *block\n\n } else {\n\n highlight.block()\n\n };\n\n\n\n if let Node::Block(node_block) = node {\n\n // NOTE: block also dominates itself!\n\n if highlight_block.dominates(*node_block) {\n\n label = label\n\n .add_style(Style::Filled)\n\n .fillcolor(X11Color::Pink)\n\n .fontcolor(X11Color::White);\n\n }\n", "file_path": "optimization/src/code_placement.rs", "rank": 67, "score": 152749.33049832095 }, { "content": "pub fn compare_class_member(\n\n a: &ast::ClassMember<'_>,\n\n b: &ast::ClassMember<'_>,\n\n) -> std::cmp::Ordering {\n\n use crate::ast::ClassMemberKind::*;\n\n match (&a.kind, &b.kind) {\n\n (Field(..), Field(..))\n\n | (Method(..), Method(..))\n\n | (MainMethod(..), MainMethod(..))\n\n | (Method(..), MainMethod(..))\n\n | (MainMethod(..), Method(..)) => a.name.cmp(&b.name),\n\n (Method(..), Field(..)) | (MainMethod(..), Field(..)) => std::cmp::Ordering::Less,\n\n (Field(..), Method(..)) | (Field(..), MainMethod(..)) => std::cmp::Ordering::Greater,\n\n }\n\n}\n\n\n\n// clippy::ptr-arg wants args to be a slice of Expr,\n\n// but that doesn't improve expressiveness here\n", "file_path": "compiler-lib/src/print/pretty.rs", "rank": 71, "score": 150796.7924504322 }, { "content": "/// `check` returns an `Err` iff at least one errors was emitted through\n\n/// `context`.\n\npub fn check<'a, 'f>(\n\n strtab: &mut strtab::StringTable<'f>,\n\n ast: &'a ast::AST<'f>,\n\n context: &Context<'f>,\n\n) -> Result<(TypeSystem<'f, 'a>, TypeAnalysis<'f, 'a>), ()> {\n\n let mut first_pass_visitor = ClassesAndMembersVisitor::new(context);\n\n first_pass_visitor.do_visit(&NodeKind::from(ast));\n\n\n\n // Check if a static method was found. 
If multiple static methods were found or\n\n // the static method is not called `main` the error is already emitted in\n\n // the visitor\n\n if first_pass_visitor.static_method_found == 0 {\n\n context\n\n .diagnostics\n\n .error(&MaybeSpanned::WithoutSpan(SemanticError::NoMainMethod));\n\n }\n\n\n\n if context.diagnostics.errored() {\n\n return Err(());\n\n }\n\n let res = super::check(strtab, &ast, &context);\n\n if context.diagnostics.errored() {\n\n return Err(());\n\n }\n\n Ok(res)\n\n}\n\n\n", "file_path": "type_checking/src/semantics.rs", "rank": 72, "score": 148751.46305453734 }, { "content": "pub fn check<'ast, 'src>(\n\n strtab: &'_ mut StringTable<'src>,\n\n ast: &'ast ast::AST<'src>,\n\n context: &Context<'src>,\n\n) -> (TypeSystem<'src, 'ast>, TypeAnalysis<'src, 'ast>) {\n\n let mut sem_context = SemanticContext::new(context);\n\n\n\n let mut type_system = TypeSystem::default();\n\n let mut type_analysis = TypeAnalysis::new();\n\n\n\n if let ast::AST::Program(program) = ast {\n\n let builtin_types = BuiltinTypes::add_to(&mut type_system, strtab, &mut sem_context);\n\n\n\n add_types_from_ast(\n\n strtab,\n\n &mut type_system,\n\n &mut type_analysis,\n\n &builtin_types,\n\n &sem_context,\n\n program,\n", "file_path": "type_checking/src/checker.rs", "rank": 75, "score": 144064.92004840687 }, { "content": "/// Print an error in a format intended for end users and terminate\n\n/// the program.\n\npub fn exit_with_error(err: &Error) -> ! {\n\n let mut stderr = io::stderr();\n\n print_error(&mut stderr, err).expect(\"unable to print error\");\n\n exit(1);\n\n}\n\n\n", "file_path": "compiler-cli/src/main.rs", "rank": 76, "score": 141903.6295873743 }, { "content": "pub fn checked_type_from_ty<'src, 'ast>(\n\n ty: &'ast ast::Type<'src>,\n\n context: &SemanticContext<'_, 'src>,\n\n type_system: &TypeSystem<'src, 'ast>,\n\n void_handling: VoidIs,\n\n) -> CheckedType<'src> {\n\n let void_handling = if ty.array_depth > 0 {\n\n VoidIs::Forbidden\n\n } else {\n\n void_handling\n\n };\n\n\n\n let mut checked_ty = checked_type_from_basic_ty(&ty.basic, context, type_system, void_handling);\n\n\n\n for _ in 0..ty.array_depth {\n\n checked_ty = CheckedType::Array(Box::new(checked_ty));\n\n }\n\n\n\n checked_ty\n\n}\n", "file_path": "type_checking/src/checker.rs", "rank": 77, "score": 139847.1071088979 }, { "content": "#[derive(Debug, Fail)]\n\nenum TestFailure {\n\n #[fail(\n\n display = \"not found! was expected at {:?}. wrote reference to {:?}\",\n\n tried, wrote\n\n )]\n\n NotFoundWroteReference { tried: PathBuf, wrote: PathBuf },\n\n #[fail(display = \"Not found! was expected at {:?}\", tried)]\n\n NotFound { tried: PathBuf },\n\n #[fail(display = \"Wrong! does not match reference output\")]\n\n DiffMismatch,\n\n #[fail(\n\n display = \"Wrong! does not match reference output. 
Wrote actual output to {:?}\",\n\n wrote\n\n )]\n\n DiffMismatchWroteReference { wrote: PathBuf },\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 78, "score": 138988.67249316684 }, { "content": "pub fn is_jit_operand(node: Node) -> bool {\n\n match node {\n\n Node::Const(_) | Node::Address(_) | Node::Member(_) | Node::Sel(_) | Node::Size(_) => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub enum Operand {\n\n Var(Var),\n\n\n\n /// NOTE: Tarval contains a raw pointer, thus Imm(t) is only valid for the\n\n /// lifetime of that pointer (the FIRM graph).\n\n Imm(Tarval),\n\n}\n\n\n\nimpl fmt::Debug for Operand {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use self::Operand::*;\n\n match self {\n", "file_path": "lowering/src/lir.rs", "rank": 79, "score": 138299.6789993479 }, { "content": "type NodeFactoryFn = fn(*mut bindings::ir_node) -> Node;\n\npub struct NodeFactory(HashMap<u32, NodeFactoryFn>);\n\n#[allow(clippy::new_without_default_derive)]\n\nimpl NodeFactory {\n\n pub fn new() -> Self {\n\n let mut map = HashMap::<u32, NodeFactoryFn>::new();\n\n unsafe {\n\n let op = bindings::get_op_Add();\n\n map.insert(bindings::get_op_code(op), Self::create_add);\n\n let op = bindings::get_op_Address();\n\n map.insert(bindings::get_op_code(op), Self::create_address);\n\n let op = bindings::get_op_Align();\n\n map.insert(bindings::get_op_code(op), Self::create_align);\n\n let op = bindings::get_op_Alloc();\n\n map.insert(bindings::get_op_code(op), Self::create_alloc);\n\n let op = bindings::get_op_Anchor();\n\n map.insert(bindings::get_op_code(op), Self::create_anchor);\n\n let op = bindings::get_op_And();\n\n map.insert(bindings::get_op_code(op), Self::create_and);\n\n let op = bindings::get_op_Bad();\n", "file_path": "libfirm-rs/src/nodes/nodes_gen.rs", "rank": 80, "score": 137955.00138199364 }, { "content": "/// `None` indicates that the given type is not convertible, which\n\n/// is not necessarily an error (e.g. 
`void`)\n\npub fn ty_from_checked_type<'src, 'ast>(\n\n ct: &CheckedType<'src>,\n\n type_system: &'_ TypeSystem<'src, 'ast>,\n\n program: &'_ FirmProgram<'src, 'ast>,\n\n) -> Option<Ty> {\n\n let ty = match ct {\n\n CheckedType::Int => PrimitiveTy::i32().into(),\n\n CheckedType::Void => return None,\n\n CheckedType::TypeRef(class_def_id) => {\n\n let def = type_system.class(*class_def_id);\n\n let class = program.class(def).unwrap();\n\n let ty = class.borrow().entity.ty();\n\n ty.pointer().into()\n\n // If, for some unforeseen reason, the line above does not work,\n\n // return this instead: `PrimitiveTy::ptr().into()`.\n\n // However, this looses the class type we are pointing at.\n\n // We need this information in optimizations.\n\n }\n\n CheckedType::Array(checked_type) => {\n\n ty_from_checked_type(checked_type, type_system, program)\n", "file_path": "firm-construction/src/type_translation.rs", "rank": 81, "score": 137898.537461185 }, { "content": "pub fn checked_type_from_basic_ty<'src, 'ast>(\n\n basic_ty: &'ast Spanned<'src, ast::BasicType<'src>>,\n\n context: &SemanticContext<'_, 'src>,\n\n type_system: &TypeSystem<'src, 'ast>,\n\n void_handling: VoidIs,\n\n) -> CheckedType<'src> {\n\n use self::ast::BasicType::*;\n\n match &basic_ty.data {\n\n Int => CheckedType::Int,\n\n Boolean => CheckedType::Boolean,\n\n Void => match void_handling {\n\n VoidIs::Allowed => CheckedType::Void,\n\n VoidIs::Forbidden => {\n\n context.report_error(&basic_ty.span, SemanticError::VoidNotAllowed);\n\n CheckedType::Void\n\n }\n\n },\n\n Custom(name) => match type_system.lookup_class(*name) {\n\n Some((_, class_id)) => CheckedType::TypeRef(class_id),\n\n None => {\n", "file_path": "type_checking/src/checker.rs", "rank": 82, "score": 137887.91498128965 }, { "content": "pub fn dot_string(string: &str) -> String {\n\n format!(\"\\\"{}\\\"\", string.replace(\"\\\"\", \"\\\\\\\"\").replace(\"\\n\", \"\\\\n\"))\n\n}\n", "file_path": "debugging/src/dot/mod.rs", "rank": 83, "score": 136243.15652087147 }, { "content": "pub fn u8_to_printable_representation(byte: u8) -> String {\n\n let bytes = escape_default(byte).collect::<Vec<u8>>();\n\n let rep = unsafe { std::str::from_utf8_unchecked(&bytes) };\n\n rep.to_owned()\n\n}\n\n\n", "file_path": "diagnostics/src/diagnostics.rs", "rank": 84, "score": 136243.15652087147 }, { "content": "pub fn default_label(node: &Node) -> Label {\n\n let mut label = Label::from_text(format!(\"{:?}\", node));\n\n if Node::is_proj(*node) {\n\n label = label.shape(Shape::Note);\n\n }\n\n if let Some(span) = Spans::lookup_span(*node) {\n\n label = label.append(format!(\" [src:{}]\", span));\n\n }\n\n label\n\n}\n\n\n\nimpl<S: BuildHasher> LabelMaker<Node> for HashMap<Node, Label, S> {\n\n fn label_for_node(&self, node: &Node) -> Label {\n\n self.get(&node)\n\n .cloned()\n\n .unwrap_or_else(|| Label::from_text(\"\".to_string()))\n\n }\n\n}\n\n\n", "file_path": "debugging/src/dot/implementations.rs", "rank": 85, "score": 136243.15652087147 }, { "content": "pub trait AsmBackend {\n\n fn emit_asm(&mut self, out: &mut dyn AsmOut) -> std::io::Result<()>;\n\n}\n\n\n\npub mod amd64 {\n\n\n\n use crate::firm_context::FirmContext;\n\n use lowering;\n\n\n\n pub struct Backend<'src, 'ast> {\n\n // member lir holds raw pointers to data stored in firm_ctx\n\n pub firm_ctx: FirmContext<'src, 'ast>,\n\n pub no_peep: bool,\n\n }\n\n\n\n use super::{AsmBackend, AsmOut};\n\n\n\n impl AsmBackend for Backend<'_, '_> {\n\n fn emit_asm(&mut self, out: &mut dyn AsmOut) -> std::io::Result<()> {\n\n 
compiler_shared::timed_scope!(\"backend\");\n\n let firm_program = self.firm_ctx.use_external_backend();\n\n lowering::run_backend(firm_program, &mut box out, self.no_peep)\n\n }\n\n }\n\n\n\n}\n", "file_path": "compiler-lib/src/backend.rs", "rank": 86, "score": 135081.0983029951 }, { "content": "pub fn dom_info_box(node: &Node) -> Label {\n\n if let Node::Block(block) = node {\n\n let dom_depth = unsafe { bindings::get_Block_dom_depth(node.internal_ir_node()) };\n\n Label::from_text(format!(\n\n r#\"{{{body}|{{Dom Depth|{dom_depth}}}|{{Loop Depth|{loop_depth}}}}}\"#,\n\n dom_depth = dom_depth,\n\n loop_depth = block.loop_depth(),\n\n body = escape_record_content(&format!(\"{:?}\", block)),\n\n ))\n\n .shape(Shape::Record)\n\n .styles(vec![Style::Rounded, Style::Filled])\n\n } else {\n\n default_label(node)\n\n }\n\n}\n\n\n", "file_path": "debugging/src/dot/implementations.rs", "rank": 87, "score": 134283.96439326322 }, { "content": "// TODO: deduplicate after 172 is merged\n\npub fn escape_record_content(text: &str) -> String {\n\n text.replace(\"|\", \"\\\\|\")\n\n .replace(\"{\", \"\\\\{\")\n\n .replace(\"}\", \"\\\\}\")\n\n .replace(\"<\", \"\\\\<\")\n\n .replace(\">\", \"\\\\>\")\n\n}\n", "file_path": "debugging/src/dot/implementations.rs", "rank": 88, "score": 134283.96439326322 }, { "content": "fn default_test_generator(\n\n phase: &proc_macro2::TokenStream,\n\n test_name: &str,\n\n mj_file: &PathBuf,\n\n) -> proc_macro2::TokenStream {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! {\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n\n\n assert_compiler_phase::<OptionalReferenceData>(#phase, &TestSpec {\n\n references: input.clone(),\n\n input,\n\n generate_tentatives: true\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 89, "score": 133654.068979486 }, { "content": "fn load_test_data<\n\n TestMetadata: IntoReferenceData + FromReferencesPath<TestMetadata> + DeserializeOwned + Clone,\n\n>(\n\n spec: &TestSpec,\n\n) -> (PathBuf, TestData<TestMetadata>) {\n\n let test_data = lookup::get_files::<TestMetadata>(&spec.input, &spec.references)\n\n .unwrap_or_else(|msg| {\n\n panic!(\n\n \"Failed to load test case {:?}, because {:?}\",\n\n spec.input, msg\n\n )\n\n });\n\n\n\n let input_without_yaml_path = match test_data.input {\n\n InputData::WasStripped(ref mj_str) => {\n\n let path = add_extension(&spec.input, \"stripped\");\n\n lookup::write(&Some(path.clone()), mj_str).expect(\n\n \"Failed to write yaml front matter stripped \\\n\n file to disk (required for input to the compiler under test)\",\n\n );\n\n path\n\n }\n\n InputData::NotStripped(ref mj_path, ref _mj_str) => mj_path.clone(),\n\n InputData::NotLoaded(ref mj_path) => mj_path.clone(),\n\n };\n\n\n\n (input_without_yaml_path, test_data)\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 90, "score": 133654.068979486 }, { "content": "struct Inline {\n\n graph: Graph,\n\n depth: usize,\n\n}\n\n\n", "file_path": "optimization/src/inlining.rs", "rank": 91, "score": 133591.15604829715 }, { "content": "pub fn default_lir_label(block: &BasicBlock) -> Label {\n\n let mut s = Vec::new();\n\n\n\n write!(&mut s, \"\\\\lCOPY IN\\\\l\").unwrap();\n\n for instr in block.code.copy_in.iter() {\n\n write!(&mut s, \"{:?}\\\\l\", &**instr).unwrap();\n\n }\n\n\n\n write!(&mut s, \"\\\\lBODY\\\\l\").unwrap();\n\n for instr in block.code.body.iter() {\n\n write!(&mut s, 
\"{:?}\\\\l\", &**instr).unwrap();\n\n }\n\n\n\n write!(&mut s, \"\\\\lCOPY OUT\\\\l\").unwrap();\n\n for instr in block.code.copy_out.iter() {\n\n write!(&mut s, \"{:?}\\\\l\", &**instr).unwrap();\n\n }\n\n\n\n write!(&mut s, \"\\\\lLEAVE\\\\l\").unwrap();\n\n for instr in block.code.leave.iter() {\n\n write!(&mut s, \"{:?}\\\\l\", &**instr).unwrap();\n\n }\n\n\n\n lir_box(block, &format!(\"{}\\\\l\", String::from_utf8(s).unwrap()))\n\n}\n\n\n", "file_path": "lowering/src/lir/debugging.rs", "rank": 92, "score": 132415.35272143714 }, { "content": "fn assert_node_equality(call: nodes::Call, phase: &Phase, expect_same: bool) {\n\n let nodes = get_args(call);\n\n debug_assert!(\n\n nodes.len() >= 2,\n\n build_assert_msg(\n\n format!(\n\n \"same node checks need at least 2 nodes, {} given.\",\n\n nodes.len()\n\n ),\n\n call\n\n )\n\n );\n\n\n\n for (idx, curr_node) in nodes.iter().enumerate() {\n\n for other in &nodes[(idx + 1)..] {\n\n debug_assert!(\n\n if expect_same {\n\n other == curr_node\n\n } else {\n\n other != curr_node\n", "file_path": "optimization/src/compile_time_assertions.rs", "rank": 93, "score": 132246.17393005718 }, { "content": "#[derive(PartialEq, Eq, Clone, Copy)]\n\nstruct Priority {\n\n topo_order: u32,\n\n priority: u32, // highest priority first\n\n}\n\n\n\nimpl std::cmp::Ord for Priority {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n (self.priority)\n\n .cmp(&other.priority)\n\n .then_with(|| self.topo_order.cmp(&other.topo_order).reverse())\n\n }\n\n}\n\n\n\nimpl PartialOrd for Priority {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\npub struct ConstantFolding {\n", "file_path": "optimization/src/constant_folding.rs", "rank": 94, "score": 130942.76624178478 }, { "content": "#[derive(Debug, Display)]\n\nstruct InlineError {}\n\n\n\nimpl From<std::option::NoneError> for InlineError {\n\n fn from(_err: std::option::NoneError) -> Self {\n\n Self {}\n\n }\n\n}\n\n\n", "file_path": "optimization/src/inlining.rs", "rank": 95, "score": 130937.41872400262 }, { "content": "struct MoveResult {\n\n child_of_split_node: bool,\n\n}\n\n\n\nimpl Inline {\n\n pub fn inline(call: Call) -> Result<(), InlineError> {\n\n let graph = call.graph();\n\n graph.assure_outs();\n\n\n\n let mut i = Self { graph, depth: 0 };\n\n i.inline_with_context(call)\n\n }\n\n\n\n #[allow(clippy::items_after_statements)]\n\n fn inline_with_context(&mut self, call: Call) -> Result<(), InlineError> {\n\n let graph = self.graph;\n\n let proj_m = call.out_proj_m().unwrap();\n\n let call_result = call.out_proj_t_result().and_then(|r| r.out_nodes().idx(0));\n\n\n\n log::debug!(\"Call to inline: {:?}, result: {:?}\", call, call_result);\n", "file_path": "optimization/src/inlining.rs", "rank": 96, "score": 130937.41872400262 }, { "content": "fn exactly<'f>(thing: impl Into<Exactly<'f>>) -> Exactly<'f> {\n\n thing.into()\n\n}\n\n\n\nimpl<'f> ExpectedToken<'f> for Exactly<'f> {\n\n type Yields = ();\n\n fn matching(&self, token: &TokenKind<'f>) -> Option<Self::Yields> {\n\n if &self.0 == token {\n\n Some(())\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<'f> ExpectedToken<'f> for BinaryOp {\n\n type Yields = (ast::BinaryOp, Precedence, Assoc);\n\n fn matching(&self, token: &TokenKind<'f>) -> Option<Self::Yields> {\n\n match token {\n\n TokenKind::Operator(op) => BINARY_OPERATORS\n", "file_path": "parser/src/parser.rs", "rank": 97, "score": 130782.3361586389 }, { "content": "pub fn pad_left(s: &str, pad: usize) -> String {\n\n 
pad_left_with_char(s, pad, ' ')\n\n}\n\n\n", "file_path": "diagnostics/src/diagnostics.rs", "rank": 98, "score": 130247.95366692939 }, { "content": "#[derive(Debug, Clone)]\n\nstruct EnumRewriter {\n\n rules: Vec<ConstifiedEnumRewrite>,\n\n rewrites: HashMap<String, String>, // bindgen_enum_name => trim_start_matches prefix\n\n}\n\n\n\nimpl EnumRewriter {\n\n fn from_rewrites(rules: Vec<ConstifiedEnumRewrite>) -> Self {\n\n let mut rewrites = HashMap::new();\n\n for rule in &rules {\n\n let res = rewrites.insert(rule.bindgen_enum_name(), rule.trim_start_matches_prefix());\n\n debug_assert!(\n\n res.is_none(),\n\n \"duplicate in constified_enum_rewrites: {:?}\",\n\n rule\n\n );\n\n }\n\n return EnumRewriter { rewrites, rules };\n\n }\n\n fn builder_with_rewrites(&self, builder: bindgen::Builder) -> bindgen::Builder {\n\n let mut builder = builder;\n", "file_path": "libfirm-rs-bindings/build.rs", "rank": 99, "score": 129590.83681666851 } ]
Rust
game_plugin/src/loading.rs
will-hart/cloud-surfer
4363e0bdf506c3049b77ee110cebc08fcd9e828c
mod paths;

use crate::loading::paths::PATHS;
use crate::GameState;
use bevy::asset::LoadState;
use bevy::prelude::*;
use bevy_kira_audio::AudioSource;

pub struct LoadingPlugin;

impl Plugin for LoadingPlugin {
    fn build(&self, app: &mut AppBuilder) {
        app.add_system_set(
            SystemSet::on_enter(GameState::Loading).with_system(start_loading.system()),
        )
        .add_system_set(SystemSet::on_update(GameState::Loading).with_system(check_state.system()));
    }
}

pub struct LoadingState {
    textures: Vec<HandleUntyped>,
    fonts: Vec<HandleUntyped>,
    audio: Vec<HandleUntyped>,
}

pub struct FontAssets {
    pub fira_sans: Handle<Font>,
}

pub struct AudioAssets {
    pub collect: Handle<AudioSource>,
    pub music: Handle<AudioSource>,
    pub tether_break: Handle<AudioSource>,
}

pub struct TextureAssets {
    pub cloud_001: Handle<Texture>,
    pub player_left: Handle<Texture>,
    pub player_right: Handle<Texture>,
    pub laser: Handle<Texture>,
    pub grass: Handle<Texture>,
}

pub struct LoadingItem;
pub struct LoadingText;

fn start_loading(
    mut commands: Commands,
    asset_server: Res<AssetServer>,
    mut materials: ResMut<Assets<ColorMaterial>>,
) {
    let mut fonts: Vec<HandleUntyped> = vec![];
    fonts.push(asset_server.load_untyped(PATHS.fira_sans));

    let mut audio: Vec<HandleUntyped> = vec![];
    audio.push(asset_server.load_untyped(PATHS.audio_collect));
    audio.push(asset_server.load_untyped(PATHS.audio_music));

    let mut textures: Vec<HandleUntyped> = vec![];
    textures.push(asset_server.load_untyped(PATHS.cloud_001));
    textures.push(asset_server.load_untyped(PATHS.player_left));
    textures.push(asset_server.load_untyped(PATHS.player_right));
    textures.push(asset_server.load_untyped(PATHS.laser));
    textures.push(asset_server.load_untyped(PATHS.grass));

    commands.insert_resource(LoadingState {
        textures,
        fonts,
        audio,
    });

    commands
        .spawn_bundle(NodeBundle {
            style: Style {
                size: Size::new(Val::Percent(100.), Val::Percent(100.)),
                align_items: AlignItems::Center,
                justify_content: JustifyContent::Center,
                flex_direction: FlexDirection::ColumnReverse,
                ..Default::default()
            },
            material: materials.add(Color::BLACK.into()),
            ..Default::default()
        })
        .insert(LoadingItem)
        .with_children(|node| {
            node.spawn_bundle(TextBundle {
                text: Text {
                    sections: vec![TextSection {
                        value: "Loading".to_string(),
                        style: TextStyle {
                            font: asset_server.get_handle("fonts/FiraSans-Bold.ttf"),
                            font_size: 30.0,
                            color: Color::rgb(0.9, 0.9, 0.9),
                        },
                    }],
                    alignment: Default::default(),
                },
                ..Default::default()
            })
            .insert(LoadingText);
        });
}

fn check_state(
    mut commands: Commands,
    mut state: ResMut<State<GameState>>,
    asset_server: Res<AssetServer>,
    loading_state: Res<LoadingState>,
    mut loading_text: Query<&mut Text, With<LoadingText>>,
    loading_items: Query<Entity, With<LoadingItem>>,
) {
    if LoadState::Loaded
        != asset_server.get_group_load_state(loading_state.fonts.iter().map(|handle| handle.id))
    {
        loading_text.single_mut().unwrap().sections[0].value = "Loading fonts...".into();
        return;
    }
    if LoadState::Loaded
        != asset_server.get_group_load_state(loading_state.textures.iter().map(|handle| handle.id))
    {
        loading_text.single_mut().unwrap().sections[0].value = "Loading textures...".into();
        return;
    }
    if LoadState::Loaded
        != asset_server.get_group_load_state(loading_state.audio.iter().map(|handle| handle.id))
    {
        loading_text.single_mut().unwrap().sections[0].value = "Loading audio...".into();
        return;
    }

    commands.insert_resource(FontAssets {
        fira_sans: asset_server.get_handle(PATHS.fira_sans),
    });
    commands.insert_resource(AudioAssets {
        collect: asset_server.get_handle(PATHS.audio_collect),
        music: asset_server.get_handle(PATHS.audio_music),
        tether_break: asset_server.get_handle(PATHS.audio_game_over),
    });
    commands.insert_resource(TextureAssets {
        cloud_001: asset_server.get_handle(PATHS.cloud_001),
        player_left: asset_server.get_handle(PATHS.player_left),
        player_right: asset_server.get_handle(PATHS.player_right),
        laser: asset_server.get_handle(PATHS.laser),
        grass: asset_server.get_handle(PATHS.grass),
    });

    state.set(GameState::Menu).unwrap();

    for item in loading_items.iter() {
        commands.entity(item).despawn_recursive();
    }
}
mod paths;

use crate::loading::paths::PATHS;
use crate::GameState;
use bevy::asset::LoadState;
use bevy::prelude::*;
use bevy_kira_audio::AudioSource;

pub struct LoadingPlugin;

impl Plugin for LoadingPlugin {
    fn build(&self, app: &mut AppBuilder) {
        app.add_system_set(
            SystemSet::on_enter(GameState::Loading).with_system(start_loading.system()),
        )
        .add_system_set(SystemSet::on_update(GameState::Loading).with_system(check_state.system()));
    }
}

pub struct LoadingState {
    textures: Vec<HandleUntyped>,
    fonts: Vec<HandleUntyped>,
    audio: Vec<HandleUntyped>,
}

pub struct FontAssets {
    pub fira_sans: Handle<Font>,
}

pub struct AudioAssets {
    pub collect: Handle<AudioSource>,
    pub music: Handle<AudioSource>,
    pub tether_break: Handle<AudioSource>,
}

pub struct TextureAssets {
    pub cloud_001: Handle<Texture>,
    pub player_left: Handle<Texture>,
    pub player_right: Handle<Texture>,
    pub laser: Handle<Texture>,
    pub grass: Handle<Texture>,
}

pub struct LoadingItem;
pub struct LoadingText;

fn start_loading(
    mut commands: Commands,
    asset_server: Res<AssetServer>,
    mut materials: ResMut<Assets<ColorMaterial>>,
) {
    let mut fonts: Vec<HandleUntyped> = vec![];
    fonts.push(asset_server.load_untyped(PATHS.fira_sans));

    let mut audio: Vec<HandleUntyped> = vec![];
    audio.push(asset_server.load_untyped(PATHS.audio_collect));
    audio.push(asset_server.load_untyped(PATHS.audio_music));

    let mut textures: Vec<HandleUntyped> = vec![];
    textures.push(asset_server.load_untyped(PATHS.cloud_001));
    textures.push(asset_server.load_untyped(PATHS.player_left));
    textures.push(asset_server.load_untyped(PATHS.player_right));
    textures.push(asset_server.load_untyped(PATHS.laser));
    textures.push(asset_server.load_untyped(PATHS.grass));

    commands.insert_resource(LoadingState {
        textures,
        fonts,
        audio,
    });

    commands
        .spawn_bundle(NodeBundle {
            style: Style {
                size: Size::new(Val::Percent(100.), Val::Percent(100.)),
                align_items: AlignItems::Center,
                justify_content: JustifyContent::Center,
                flex_direction: FlexDirection::ColumnReverse,
                ..Default::default()
            },
            material: materials.add(Color::BLACK.into()),
            ..Default::default()
        })
        .insert(LoadingItem)
        .with_children(|node| {
            node.spawn_bundle(TextBundle {
                text: Text {
                    sections: vec![TextSection {
                        value: "Loading".to_string(),
                        style: TextStyle {
                            font: asset_server.get_handle("fonts/FiraSans-Bold.ttf"),
                            font_size: 30.0,
                            color: Color::rgb(0.9, 0.9, 0.9),
                        },
                    }],
                    alignment: Default::default(),
                },
                ..Default::default()
            })
            .insert(LoadingText);
        });
}
fn check_state(
    mut commands: Commands,
    mut state: ResMut<State<GameState>>,
    asset_server: Res<AssetServer>,
    loading_state: Res<LoadingState>,
    mut loading_text: Query<&mut Text, With<LoadingText>>,
    loading_items: Query<Entity, With<LoadingItem>>,
) {
    if LoadState::Loaded
        != asset_server.get_group_load_state(loading_state.fonts.iter().map(|handle| handle.id))
    {
        loading_text.single_mut().unwrap().sections[0].value = "Loading fonts...".into();
        return;
    }
    if LoadState::Loaded
        != asset_server.get_group_load_state(loading_state.textures.iter().map(|handle| handle.id))
    {
        loading_text.single_mut().unwrap().sections[0].value = "Loading textures...".into();
        return;
    }
    if LoadState::Loaded
        != asset_server.get_group_load_state(loading_state.audio.iter().map(|handle| handle.id))
    {
        loading_text.single_mut().unwrap().sections[0].value = "Loading audio...".into();
        return;
    }

    commands.insert_resource(FontAssets {
        fira_sans: asset_server.get_handle(PATHS.fira_sans),
    });
    commands.insert_resource(AudioAssets {
        collect: asset_server.get_handle(PATHS.audio_collect),
        music: asset_server.get_handle(PATHS.audio_music),
        tether_break: asset_server.get_handle(PATHS.audio_game_over),
    });
    commands.insert_resource(TextureAssets {
        cloud_001: asset_server.get_handle(PATHS.cloud_001),
        player_left: asset_server.get_handle(PATHS.player_left),
        player_right: asset_server.get_handle(PATHS.player_right),
        laser: asset_server.get_handle(PATHS.laser),
        grass: asset_server.get_handle(PATHS.grass),
    });

    state.set(GameState::Menu).unwrap();

    for item in loading_items.iter() {
        commands.entity(item).despawn_recursive();
    }
}
function_block-full_function
[ { "content": "fn spawn_camera(mut commands: Commands) {\n\n commands.spawn_bundle(OrthographicCameraBundle::new_2d());\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 0, "score": 112530.4475544525 }, { "content": "fn play_game_music(audio: Res<Audio>, channels: Res<AudioChannels>) {\n\n audio.set_volume_in_channel(0.3, &channels.music);\n\n}\n", "file_path": "game_plugin/src/audio.rs", "rank": 1, "score": 110875.01887107045 }, { "content": "fn spawn_ui_camera(mut commands: Commands) {\n\n commands.spawn_bundle(UiCameraBundle::default());\n\n}\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 2, "score": 109979.88561682012 }, { "content": "/// Starts the obstacle spawn timer\n\nfn setup_obstacle_spawning(mut commands: Commands) {\n\n commands\n\n .spawn()\n\n .insert(SpawnTimer)\n\n .insert(Timer::from_seconds(3., true));\n\n}\n\n\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 3, "score": 109979.88561682012 }, { "content": "fn play_menu_music(\n\n audio_assets: Res<AudioAssets>,\n\n audio: Res<Audio>,\n\n channels: Res<AudioChannels>,\n\n mut audio_spawned: ResMut<AudioSpawned>,\n\n) {\n\n audio.set_volume_in_channel(0.5, &channels.music);\n\n\n\n if audio_spawned.0 {\n\n return;\n\n }\n\n\n\n audio_spawned.0 = true;\n\n audio.stop_channel(&channels.music);\n\n audio.play_looped_in_channel(audio_assets.music.clone(), &channels.music);\n\n}\n\n\n", "file_path": "game_plugin/src/audio.rs", "rank": 4, "score": 99424.21869122605 }, { "content": "fn despawn_menu(mut commands: Commands, items: Query<Entity, With<MenuItem>>) {\n\n for item in items.iter() {\n\n commands.entity(item).despawn_recursive();\n\n }\n\n}\n", "file_path": "game_plugin/src/menu.rs", "rank": 5, "score": 93384.9736745611 }, { "content": "/// despawns the score ui\n\nfn despawn_score_ui(mut commands: Commands, items: Query<Entity, With<ScoreItem>>) {\n\n commands.remove_resource::<Score>();\n\n\n\n for ent in items.iter() {\n\n commands.entity(ent).despawn_recursive();\n\n }\n\n}\n", "file_path": "game_plugin/src/score.rs", "rank": 6, "score": 91460.09883759586 }, { "content": "fn despawn_background(mut commands: Commands, items: Query<Entity, With<ScrollingBackground>>) {\n\n for item in items.iter() {\n\n commands.entity(item).despawn_recursive();\n\n }\n\n}\n", "file_path": "game_plugin/src/scrolling_background.rs", "rank": 7, "score": 91460.09883759586 }, { "content": "/// check if a player is ded\n\npub fn is_player_dead_checks(\n\n mut commands: Commands,\n\n game_map: Res<GameMap>,\n\n mut ship: ResMut<PlayerShip>,\n\n players: Query<Entity, (With<Player>, Without<IsDead>)>,\n\n ship_side_tx_query: Query<&Transform, With<PlayerShipSide>>,\n\n) {\n\n match players.single() {\n\n Ok(player) => {\n\n // first check if the players bash into each other\n\n let diff = ship_side_tx_query\n\n .iter()\n\n .fold(0., |acc, tx| tx.translation.x - acc);\n\n\n\n if diff.abs() < game_map.sprite_size * 0.75 {\n\n println!(\"Bashed into each other!\");\n\n ship.is_dead = true;\n\n commands\n\n .entity(player)\n\n .insert(IsDead(\"The tractors collided!\".into()));\n", "file_path": "game_plugin/src/player.rs", "rank": 8, "score": 91318.5093434306 }, { "content": "struct AudioSpawned(bool);\n\n\n\nimpl Plugin for InternalAudioPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.insert_resource(AudioChannels {\n\n effects: AudioChannel::new(\"effects\".to_owned()),\n\n music: AudioChannel::new(\"music\".to_owned()),\n\n })\n\n .insert_resource(AudioSpawned(false))\n\n .add_plugin(AudioPlugin)\n\n 
.add_system_set(SystemSet::on_enter(GameState::Menu).with_system(play_menu_music.system()))\n\n .add_system_set(\n\n SystemSet::on_enter(GameState::Playing).with_system(play_game_music.system()),\n\n );\n\n }\n\n}\n\n\n\npub struct AudioChannels {\n\n pub effects: AudioChannel,\n\n pub music: AudioChannel,\n\n}\n\n\n", "file_path": "game_plugin/src/audio.rs", "rank": 9, "score": 90616.89317081023 }, { "content": "struct ButtonMaterials {\n\n normal: Handle<ColorMaterial>,\n\n hovered: Handle<ColorMaterial>,\n\n}\n\n\n\nimpl FromWorld for ButtonMaterials {\n\n fn from_world(world: &mut World) -> Self {\n\n let mut materials = world.get_resource_mut::<Assets<ColorMaterial>>().unwrap();\n\n ButtonMaterials {\n\n normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()),\n\n hovered: materials.add(Color::rgb(0.25, 0.25, 0.25).into()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 10, "score": 87982.36855512683 }, { "content": "fn despawn_game_over_ui(mut commands: Commands, items: Query<Entity, With<GameOverUiItem>>) {\n\n for item in items.iter() {\n\n commands.entity(item).despawn_recursive();\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/game_over_ui.rs", "rank": 11, "score": 87893.31283230412 }, { "content": "fn handle_audio_events(\n\n audio_assets: Res<AudioAssets>,\n\n audio: Res<Audio>,\n\n channels: Res<AudioChannels>,\n\n mut audio_events: EventReader<PlayAudioEffectEvent>,\n\n) {\n\n let mut has_played_collect = false;\n\n\n\n for ev in audio_events.iter() {\n\n match ev.0 {\n\n AudioEffect::Collect => {\n\n if !has_played_collect {\n\n has_played_collect = true;\n\n audio.play_in_channel(audio_assets.collect.clone(), &channels.effects)\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/audio_events.rs", "rank": 12, "score": 80450.30851131858 }, { "content": "fn speed_up_game_over_time(mut game_time: ResMut<GameTime>) {\n\n game_time.multiplier = 1. 
+ game_time.elapsed / GAME_TIME_DOUBLING_TIME;\n\n}\n", "file_path": "game_plugin/src/game_time.rs", "rank": 13, "score": 78090.70862250267 }, { "content": "/// Resets the game timer to start a new game\n\nfn setup_game_time(mut game_time: ResMut<GameTime>) {\n\n game_time.multiplier = 1.;\n\n game_time.elapsed = 0.;\n\n game_time.delta = 0.;\n\n game_time.delta_duration = Duration::from_secs(0);\n\n game_time.fixed_update = false;\n\n game_time.next_fixed_update = 0.5;\n\n}\n\n\n", "file_path": "game_plugin/src/game_time.rs", "rank": 14, "score": 78090.70862250267 }, { "content": "/// Draws and animates \"laser\" between the two ships\n\nfn update_laser(\n\n time: Res<GameTime>,\n\n game_map: Res<GameMap>,\n\n mut ship: ResMut<PlayerShip>,\n\n mut lasers: Query<(&mut Transform, &mut TextureAtlasSprite, &mut Timer), With<Laser>>,\n\n mut ship_sides: Query<(&Transform, &PlayerShipSide), Without<Laser>>,\n\n) {\n\n if ship.is_dead {\n\n return;\n\n }\n\n\n\n let sides = ship_sides\n\n .iter_mut()\n\n .fold((Vec3::ZERO, Vec3::ZERO), |acc, (tx, side)| {\n\n by_side!(side, (tx.translation, acc.1), (acc.0, tx.translation))\n\n });\n\n\n\n // stretch the laser to fit between the two sides\n\n let dx = sides.1.x - sides.0.x;\n\n let x_scale = 0.4 + dx / game_map.sprite_size;\n", "file_path": "game_plugin/src/player.rs", "rank": 15, "score": 77344.48135114444 }, { "content": "/// Gest the ship sides from the PlayerShipSide query\n\nfn get_ship_sides(ship_sides: &mut Query<(&mut Transform, &PlayerShipSide)>) -> (Vec3, Vec3) {\n\n ship_sides\n\n .iter_mut()\n\n .fold((Vec3::ZERO, Vec3::ZERO), |acc, (tx, side)| {\n\n by_side!(side, (tx.translation, acc.1), (acc.0, tx.translation))\n\n })\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 16, "score": 74403.87387560139 }, { "content": "/// Updates the score UI\n\nfn update_score_text_ui(\n\n score: Res<Score>,\n\n ship: Res<PlayerShip>,\n\n mut score_text: Query<&mut Text, With<ScoreText>>,\n\n) {\n\n for mut text in score_text.iter_mut() {\n\n text.sections[0].value = format!(\"{:.0}, tether strain: \", score.current.floor());\n\n\n\n text.sections[1].value = format!(\n\n \"{:.0}%\",\n\n 100. 
* ship.separation_strain / MAX_SEPARATION_STRAIN\n\n );\n\n text.sections[1].style.color = Color::rgb(\n\n 0.3 + 0.5 * (ship.separation_strain / MAX_SEPARATION_STRAIN),\n\n 0.3,\n\n 0.3,\n\n );\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/score.rs", "rank": 17, "score": 72857.78207879819 }, { "content": "fn play_game_over_sound(\n\n audio_assets: Res<AudioAssets>,\n\n audio: Res<Audio>,\n\n channels: Res<AudioChannels>,\n\n) {\n\n println!(\"Playing game over sounds\");\n\n audio.play_in_channel(audio_assets.tether_break.clone(), &channels.effects)\n\n}\n", "file_path": "game_plugin/src/audio_events.rs", "rank": 18, "score": 72585.67045929377 }, { "content": "/// Updates the game timer\n\nfn update_game_time(time: Res<Time>, mut game_time: ResMut<GameTime>) {\n\n let dt = time.delta_seconds() * game_time.multiplier;\n\n\n\n game_time.elapsed += dt;\n\n game_time.delta = dt;\n\n game_time.delta_duration = time.delta().mul_f32(game_time.multiplier);\n\n\n\n if time.seconds_since_startup() > game_time.next_fixed_update {\n\n game_time.next_fixed_update = time.seconds_since_startup() + 0.1;\n\n game_time.fixed_update = true;\n\n } else {\n\n game_time.fixed_update = false;\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/game_time.rs", "rank": 19, "score": 69795.95445285854 }, { "content": "/// Queries actions every frame (allows navigation etc in the menu)\n\nfn set_movement_actions(mut actions: ResMut<Actions>, keyboard_input: Res<Input<KeyCode>>) {\n\n actions.player_left_move = 0;\n\n actions.player_right_move = 0;\n\n\n\n if keyboard_input.pressed(KeyCode::A) {\n\n actions.player_left_move -= 1;\n\n }\n\n\n\n if keyboard_input.pressed(KeyCode::D) {\n\n actions.player_left_move += 1;\n\n }\n\n\n\n if keyboard_input.pressed(KeyCode::J) {\n\n actions.player_right_move -= 1;\n\n }\n\n\n\n if keyboard_input.pressed(KeyCode::L) {\n\n actions.player_right_move += 1;\n\n }\n\n\n\n actions.restart_requested = keyboard_input.just_pressed(KeyCode::Space);\n\n}\n", "file_path": "game_plugin/src/actions.rs", "rank": 20, "score": 68660.26861736747 }, { "content": "/// Increments the score by the time\n\nfn update_score(time: Res<GameTime>, ship: Res<PlayerShip>, mut score: ResMut<Score>) {\n\n if ship.is_dead {\n\n return;\n\n }\n\n\n\n score.current += time.delta * score.multiplier;\n\n}\n\n\n", "file_path": "game_plugin/src/score.rs", "rank": 21, "score": 65631.86919923422 }, { "content": "struct MenuItem;\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 22, "score": 62313.99678670497 }, { "content": "struct PlayButton;\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 23, "score": 62313.99678670497 }, { "content": "struct GameOverUiItem;\n\n\n", "file_path": "game_plugin/src/game_over_ui.rs", "rank": 24, "score": 59488.25267455281 }, { "content": "/// Despawns the player and related objects\n\nfn despawn_level(\n\n mut commands: Commands,\n\n players: Query<Entity, With<Player>>,\n\n lasers: Query<Entity, With<Laser>>,\n\n) {\n\n for player in players.iter() {\n\n commands.entity(player).despawn_recursive();\n\n }\n\n\n\n for laser in lasers.iter() {\n\n commands.entity(laser).despawn_recursive();\n\n }\n\n}\n", "file_path": "game_plugin/src/player.rs", "rank": 25, "score": 51673.042503035176 }, { "content": "fn spawn_player(\n\n mut commands: Commands,\n\n textures: Res<TextureAssets>,\n\n game_map: Res<GameMap>,\n\n mut texture_atlases: ResMut<Assets<TextureAtlas>>,\n\n) {\n\n println!(\"Spawning player\");\n\n\n\n let ship = PlayerShip {\n\n is_dead: false,\n\n speed: 
150.,\n\n\n\n max_separation: 5. * game_map.sprite_size,\n\n separation_strain: 0.,\n\n };\n\n\n\n commands.insert_resource(ship);\n\n\n\n // spawn the player + tractors\n\n commands\n", "file_path": "game_plugin/src/player.rs", "rank": 28, "score": 51673.042503035176 }, { "content": "/// Animates the player sprites\n\nfn animate_player(\n\n game_time: Res<GameTime>,\n\n mut sprites: Query<&mut TextureAtlasSprite, With<PlayerShipSide>>,\n\n) {\n\n if !game_time.fixed_update {\n\n return;\n\n }\n\n\n\n for mut sprite in sprites.iter_mut() {\n\n sprite.index = (sprite.index + 1) % 4;\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 29, "score": 51673.042503035176 }, { "content": "/// Spawns obstacles at the top of the screen\n\nfn spawn_obstacles(\n\n mut commands: Commands,\n\n time: Res<GameTime>,\n\n ship: Res<PlayerShip>,\n\n textures: Res<TextureAssets>,\n\n game_map: Res<GameMap>,\n\n patterns: Res<AvailableSpawnPatterns>,\n\n score: Res<Score>,\n\n mut texture_atlases: ResMut<Assets<TextureAtlas>>,\n\n mut timers: Query<&mut Timer, With<SpawnTimer>>,\n\n) {\n\n if ship.is_dead {\n\n return;\n\n }\n\n\n\n let mut timer = timers.single_mut().unwrap();\n\n timer.tick(time.delta_duration);\n\n if !timer.just_finished() {\n\n return;\n\n }\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 30, "score": 51673.042503035176 }, { "content": "fn setup_menu(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n button_materials: Res<ButtonMaterials>,\n\n) {\n\n commands\n\n .spawn_bundle(NodeBundle {\n\n style: Style {\n\n size: Size::new(Val::Percent(100.), Val::Percent(100.)),\n\n align_items: AlignItems::Center,\n\n justify_content: JustifyContent::Center,\n\n flex_direction: FlexDirection::ColumnReverse,\n\n ..Default::default()\n\n },\n\n material: button_materials.normal.clone(),\n\n ..Default::default()\n\n })\n\n .insert(MenuItem)\n\n .with_children(|node| {\n\n node.spawn_bundle(TextBundle {\n", "file_path": "game_plugin/src/menu.rs", "rank": 31, "score": 51673.042503035176 }, { "content": "/// Moves the obstacles down towards the player\n\nfn move_obstacles(\n\n mut commands: Commands,\n\n time: Res<GameTime>,\n\n mut ship: ResMut<PlayerShip>,\n\n game_map: Res<GameMap>,\n\n mut audio_events: EventWriter<PlayAudioEffectEvent>,\n\n players: Query<Entity, (With<Player>, Without<IsDead>)>,\n\n ship_sides: Query<(&Transform, &PlayerShipSide), Without<Player>>,\n\n mut obstacles: Query<\n\n (\n\n Entity,\n\n &mut Transform,\n\n &mut Visible,\n\n &mut Timer,\n\n &mut TextureAtlasSprite,\n\n ),\n\n (With<Obstacle>, Without<PlayerShipSide>, Without<Player>),\n\n >,\n\n) {\n\n if ship.is_dead {\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 32, "score": 51673.042503035176 }, { "content": "/// Moves a player based on input towards their target position\n\nfn move_player(\n\n time: Res<GameTime>,\n\n actions: Res<Actions>,\n\n game_map: Res<GameMap>,\n\n ship: Res<PlayerShip>,\n\n mut ship_sides: Query<(&mut Transform, &PlayerShipSide)>,\n\n) {\n\n // if we don't have a player, don't move\n\n if ship.is_dead {\n\n return;\n\n }\n\n\n\n // remove the multiplier from delta_move so the ship doesn't get faster over time\n\n let delta_move = ship.speed * 1.5 * (time.delta / time.multiplier);\n\n\n\n // calculate movement\n\n let moves = (actions.player_left_move, actions.player_right_move);\n\n let sides = get_ship_sides(&mut ship_sides);\n\n let x_bound = game_map.get_x_bound();\n\n let target_x = (\n", "file_path": "game_plugin/src/player.rs", 
"rank": 33, "score": 51673.042503035176 }, { "content": "/// despawns all obstacles and the spawn timer\n\nfn despawn_obstacles(\n\n mut commands: Commands,\n\n obstacles: Query<Entity, With<Obstacle>>,\n\n spawn_timers: Query<Entity, With<SpawnTimer>>,\n\n) {\n\n for timer in spawn_timers.iter() {\n\n commands.entity(timer).despawn();\n\n }\n\n\n\n for obstacle in obstacles.iter() {\n\n commands.entity(obstacle).despawn();\n\n }\n\n}\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 34, "score": 51673.042503035176 }, { "content": "fn spawn_background(\n\n mut commands: Commands,\n\n game_map: Res<GameMap>,\n\n textures: Res<TextureAssets>,\n\n mut texture_atlases: ResMut<Assets<TextureAtlas>>,\n\n) {\n\n let texture_atlas =\n\n TextureAtlas::from_grid(textures.grass.clone(), Vec2::new(32., 32.0), 15, 1);\n\n let texture_atlas_handle = texture_atlases.add(texture_atlas);\n\n\n\n let w = game_map.width + 2. * game_map.pad_x;\n\n let h = game_map.height + 2. * game_map.pad_y;\n\n\n\n let offset = Vec3::new(\n\n game_map.sprite_size / 2. - w * game_map.sprite_size / 2.,\n\n game_map.sprite_size / 2. - h * game_map.sprite_size / 2.,\n\n 0.,\n\n );\n\n\n\n let w = w as u32;\n", "file_path": "game_plugin/src/scrolling_background.rs", "rank": 35, "score": 50280.57101013884 }, { "content": "/// Increments the score by the time\n\nfn score_captured_obstacles(\n\n mut commands: Commands,\n\n ship: Res<PlayerShip>,\n\n mut score: ResMut<Score>,\n\n captured_obstacles: Query<Entity, With<CapturedObstacle>>,\n\n) {\n\n if ship.is_dead {\n\n return;\n\n }\n\n\n\n for entity in captured_obstacles.iter() {\n\n score.current += 10.;\n\n // prevent continuously scoring from this obstacle\n\n commands.entity(entity).remove::<CapturedObstacle>();\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/score.rs", "rank": 36, "score": 50280.57101013884 }, { "content": "/// spawns the score UI\n\nfn spawn_score_ui(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n mut materials: ResMut<Assets<ColorMaterial>>,\n\n) {\n\n commands.insert_resource(Score::default());\n\n\n\n commands\n\n .spawn_bundle(NodeBundle {\n\n style: Style {\n\n size: Size::new(Val::Percent(100.0), Val::Percent(100.0)),\n\n justify_content: JustifyContent::FlexStart,\n\n align_items: AlignItems::Center,\n\n flex_direction: FlexDirection::ColumnReverse,\n\n ..Default::default()\n\n },\n\n material: materials.add(Color::NONE.into()),\n\n ..Default::default()\n\n })\n\n .insert(ScoreItem)\n", "file_path": "game_plugin/src/score.rs", "rank": 37, "score": 50280.57101013884 }, { "content": "fn click_play_button(\n\n actions: Res<Actions>,\n\n button_materials: Res<ButtonMaterials>,\n\n mut state: ResMut<State<GameState>>,\n\n mut interaction_query: Query<ButtonInteraction, (Changed<Interaction>, With<Button>)>,\n\n) {\n\n if actions.restart_requested {\n\n state.set(GameState::Playing).unwrap();\n\n return;\n\n }\n\n\n\n for (_, interaction, mut material, _) in interaction_query.iter_mut() {\n\n match *interaction {\n\n Interaction::Clicked => {\n\n state.set(GameState::Playing).unwrap();\n\n }\n\n Interaction::Hovered => {\n\n *material = button_materials.hovered.clone();\n\n }\n\n Interaction::None => {\n\n *material = button_materials.normal.clone();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 38, "score": 50280.57101013884 }, { "content": "/// removes dead obstacles that are off the map\n\nfn remove_dead_obstacles(\n\n game_map: Res<GameMap>,\n\n mut commands: Commands,\n\n mut obstacles: 
Query<(&mut Transform, Entity), With<Obstacle>>,\n\n) {\n\n let min_y = game_map.bottom_y() - 3. * game_map.pad_y * game_map.sprite_size;\n\n\n\n for (tx, entity) in obstacles.iter_mut() {\n\n if tx.translation.y < min_y {\n\n println!(\"Destroying obstacle\");\n\n commands.entity(entity).despawn();\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 39, "score": 50280.57101013884 }, { "content": "fn animate_background(\n\n game_time: Res<GameTime>,\n\n mut tiles: Query<&mut TextureAtlasSprite, With<ScrollingBackground>>,\n\n) {\n\n if !game_time.fixed_update {\n\n return;\n\n }\n\n\n\n for mut tile in tiles.iter_mut() {\n\n tile.index = (tile.index + 1) % 15\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/scrolling_background.rs", "rank": 40, "score": 50280.57101013884 }, { "content": "fn transition_to_game_over(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n score: Res<Score>,\n\n mut materials: ResMut<Assets<ColorMaterial>>,\n\n players: Query<&IsDead>,\n\n mut state: ResMut<State<GameState>>,\n\n) {\n\n let dead_player = players.single();\n\n if dead_player.is_err() {\n\n return;\n\n }\n\n\n\n let reason = dead_player.unwrap().0.clone();\n\n\n\n commands\n\n .spawn_bundle(NodeBundle {\n\n style: Style {\n\n size: Size::new(Val::Percent(100.), Val::Percent(100.)),\n\n align_items: AlignItems::Center,\n", "file_path": "game_plugin/src/game_over_ui.rs", "rank": 41, "score": 50280.57101013884 }, { "content": "fn restart_game(\n\n ship: Res<PlayerShip>,\n\n mut actions: ResMut<Actions>,\n\n mut state: ResMut<State<GameState>>,\n\n) {\n\n if !ship.is_dead {\n\n return;\n\n }\n\n\n\n if actions.restart_requested {\n\n state.set(GameState::Menu).unwrap();\n\n actions.restart_requested = false;\n\n return;\n\n }\n\n}\n", "file_path": "game_plugin/src/game_over_ui.rs", "rank": 42, "score": 50280.57101013884 }, { "content": "use crate::loading::AudioAssets;\n\nuse crate::GameState;\n\nuse bevy::prelude::*;\n\nuse bevy_kira_audio::{Audio, AudioChannel, AudioPlugin};\n\n\n\npub struct InternalAudioPlugin;\n\n\n", "file_path": "game_plugin/src/audio.rs", "rank": 43, "score": 43576.326980355116 }, { "content": "pub struct AssetPaths {\n\n pub fira_sans: &'static str,\n\n pub audio_collect: &'static str,\n\n pub audio_music: &'static str,\n\n pub audio_game_over: &'static str,\n\n pub cloud_001: &'static str,\n\n pub player_left: &'static str,\n\n pub player_right: &'static str,\n\n pub laser: &'static str,\n\n pub grass: &'static str,\n\n}\n\n\n\npub const PATHS: AssetPaths = AssetPaths {\n\n fira_sans: \"fonts/FiraSans-Bold.ttf\",\n\n audio_collect: \"audio/collect.ogg\",\n\n audio_music: \"audio/music.ogg\",\n\n audio_game_over: \"audio/game_over.ogg\",\n\n cloud_001: \"textures/cloud_001.png\",\n\n player_left: \"textures/player_left.png\",\n\n player_right: \"textures/player_right.png\",\n\n laser: \"textures/laser.png\",\n\n grass: \"textures/grass.png\",\n\n};\n", "file_path": "game_plugin/src/loading/paths.rs", "rank": 44, "score": 42187.47727975682 }, { "content": "use bevy::prelude::*;\n\nuse bevy_kira_audio::Audio;\n\n\n\nuse crate::{audio::AudioChannels, loading::AudioAssets, GameState};\n\n\n\npub enum AudioEffect {\n\n Collect,\n\n}\n\n\n\npub struct PlayAudioEffectEvent(pub AudioEffect);\n\n\n\npub struct AudioEventsPlugin;\n\n\n\nimpl Plugin for AudioEventsPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_event::<PlayAudioEffectEvent>()\n\n .add_system_set(\n\n 
SystemSet::on_update(GameState::Playing).with_system(handle_audio_events.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_enter(GameState::GameOver).with_system(play_game_over_sound.system()),\n\n );\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/audio_events.rs", "rank": 45, "score": 41878.51396445001 }, { "content": "fn main() {\n\n let target = env::var(\"TARGET\").unwrap();\n\n if target.contains(\"windows\") {\n\n embed_resource::compile(\"build/windows/icon.rc\");\n\n }\n\n}\n", "file_path": "build.rs", "rank": 46, "score": 40924.49381769767 }, { "content": "fn main() {\n\n let game_map = GameMap::default();\n\n\n\n let mut app = App::build();\n\n app\n\n // .insert_resource(Msaa { samples: 4 })\n\n .insert_resource(ClearColor(Color::rgb(\n\n 121. / 255.,\n\n 179. / 255.,\n\n 206. / 255.,\n\n )))\n\n .insert_resource(WindowDescriptor {\n\n width: (game_map.width + 2. * game_map.pad_x) * game_map.sprite_size,\n\n height: (game_map.height + 2. * game_map.pad_y) * game_map.sprite_size,\n\n title: \"Hyper Farmer\".to_string(),\n\n ..Default::default()\n\n })\n\n .insert_resource(game_map)\n\n .add_plugins(DefaultPlugins)\n\n .add_plugin(GamePlugin);\n\n\n\n #[cfg(target_arch = \"wasm32\")]\n\n app.add_plugin(bevy_webgl2::WebGL2Plugin);\n\n\n\n app.run();\n\n}\n", "file_path": "src/main.rs", "rank": 47, "score": 39751.625415442 }, { "content": " sections: vec![TextSection {\n\n value: \"Hmm... What if (hear me out)... what if we use a big laser?\"\n\n .to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 20.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n\n\n node.spawn_bundle(TextBundle {\n\n style: Style {\n\n margin: Rect {\n\n left: Val::Px(0.),\n\n right: Val::Px(0.),\n\n top: Val::Px(20.),\n", "file_path": "game_plugin/src/menu.rs", "rank": 48, "score": 16146.624678584776 }, { "content": " bottom: Val::Px(0.),\n\n },\n\n ..Default::default()\n\n },\n\n text: Text {\n\n sections: vec![TextSection {\n\n value:\n\n \"Capture the hay with your laser tether, but don't move too far apart\"\n\n .to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 20.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 49, "score": 16146.19458356315 }, { "content": " .with_children(|parent| {\n\n parent\n\n .spawn_bundle(TextBundle {\n\n text: Text {\n\n sections: vec![\n\n TextSection {\n\n value: \"\".to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 30.0,\n\n color: Color::rgb(0.3, 0.3, 0.3),\n\n },\n\n },\n\n TextSection {\n\n value: \"\".to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 30.0,\n\n color: Color::rgb(0.3, 0.3, 0.3),\n\n },\n", "file_path": "game_plugin/src/score.rs", "rank": 52, "score": 16145.346254405062 }, { "content": " right: Val::Px(0.),\n\n top: Val::Px(20.),\n\n bottom: Val::Px(0.),\n\n },\n\n ..Default::default()\n\n },\n\n text: Text {\n\n sections: vec![TextSection {\n\n value:\n\n \"Use A/D to move the left tractor, and J/L to move the right tractor.\"\n\n .to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 20.0,\n\n color: Color::rgb(0.9, 
0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n", "file_path": "game_plugin/src/menu.rs", "rank": 53, "score": 16144.558280768284 }, { "content": " ..Default::default()\n\n })\n\n .insert(PlayButton)\n\n .with_children(|parent| {\n\n parent.spawn_bundle(TextBundle {\n\n text: Text {\n\n sections: vec![TextSection {\n\n value: \"Play\".to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 40.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n });\n\n });\n\n}\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 54, "score": 16144.406656818777 }, { "content": " text: Text {\n\n sections: vec![TextSection {\n\n value: \"HyperFarmer!1!!\".to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 30.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n\n\n node.spawn_bundle(TextBundle {\n\n style: Style {\n\n margin: Rect {\n\n left: Val::Px(0.),\n\n right: Val::Px(0.),\n\n top: Val::Px(20.),\n", "file_path": "game_plugin/src/menu.rs", "rank": 55, "score": 16144.020820550753 }, { "content": " node.spawn_bundle(TextBundle {\n\n text: Text {\n\n sections: vec![TextSection {\n\n value: \"or <insert reason here>, and we all know how bad that could be!\"\n\n .to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 20.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n\n\n node.spawn_bundle(TextBundle {\n\n style: Style {\n\n margin: Rect {\n\n left: Val::Px(0.),\n", "file_path": "game_plugin/src/menu.rs", "rank": 56, "score": 16144.01914728402 }, { "content": " bottom: Val::Px(0.),\n\n },\n\n ..Default::default()\n\n },\n\n text: Text {\n\n sections: vec![TextSection {\n\n value: \"Oh no! 
Its about to rain and we need to save our hay.\".to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 20.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n\n\n node.spawn_bundle(TextBundle {\n\n text: Text {\n", "file_path": "game_plugin/src/menu.rs", "rank": 57, "score": 16144.012293951051 }, { "content": " });\n\n\n\n node.spawn_bundle(TextBundle {\n\n style: Style {\n\n margin: Rect {\n\n left: Val::Px(0.),\n\n right: Val::Px(0.),\n\n top: Val::Px(0.),\n\n bottom: Val::Px(20.),\n\n },\n\n ..Default::default()\n\n },\n\n text: Text {\n\n sections: vec![TextSection {\n\n value: \"Hit the space bar or play below to start.\".to_string(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 20.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n", "file_path": "game_plugin/src/menu.rs", "rank": 61, "score": 16141.999872888047 }, { "content": "mod actions;\n\nmod audio;\n\nmod audio_events;\n\npub mod game_map;\n\nmod game_over_ui;\n\nmod game_time;\n\nmod loading;\n\nmod menu;\n\nmod obstacles;\n\nmod player;\n\nmod score;\n\nmod scrolling_background;\n\n\n\nuse crate::actions::ActionsPlugin;\n\nuse crate::audio::InternalAudioPlugin;\n\nuse crate::audio_events::AudioEventsPlugin;\n\nuse crate::game_over_ui::GameOverPlugin;\n\nuse crate::game_time::GameTimePlugin;\n\nuse crate::loading::LoadingPlugin;\n\nuse crate::menu::MenuPlugin;\n", "file_path": "game_plugin/src/lib.rs", "rank": 62, "score": 16138.547730216234 }, { "content": "use bevy::prelude::*;\n\n\n\npub struct ActionsPlugin;\n\n\n\nimpl Plugin for ActionsPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.init_resource::<Actions>()\n\n .add_system(set_movement_actions.system());\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct Actions {\n\n pub player_left_move: i8,\n\n pub player_right_move: i8,\n\n pub restart_requested: bool,\n\n}\n\n\n\n/// Queries actions every frame (allows navigation etc in the menu)\n", "file_path": "game_plugin/src/actions.rs", "rank": 63, "score": 16137.275258331883 }, { "content": "use crate::{actions::Actions, GameState};\n\nuse bevy::prelude::*;\n\n\n\npub struct MenuPlugin;\n\n\n\nimpl Plugin for MenuPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.init_resource::<ButtonMaterials>()\n\n .add_startup_system(spawn_ui_camera.system())\n\n .add_system_set(SystemSet::on_enter(GameState::Menu).with_system(setup_menu.system()))\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Menu).with_system(click_play_button.system()),\n\n )\n\n .add_system_set(SystemSet::on_exit(GameState::Menu).with_system(despawn_menu.system()));\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 64, "score": 16136.033408168627 }, { "content": "use bevy::prelude::*;\n\nuse rand::{seq::SliceRandom, thread_rng, Rng};\n\n\n\nuse crate::{\n\n audio_events::{AudioEffect, PlayAudioEffectEvent},\n\n by_side,\n\n game_map::GameMap,\n\n game_time::GameTime,\n\n loading::TextureAssets,\n\n player::{IsDead, Player, PlayerShip, PlayerShipSide},\n\n score::{CapturedObstacle, Score},\n\n GameState, SystemLabels,\n\n};\n\n\n\n/// Possible spawn patterns for obstacles, specified as sprite sized offsets from the main\n\n#[derive(Clone)]\n\npub struct SpawnPattern {\n\n pub offsets: Vec<Vec2>,\n\n pub min_score: f32,\n\n}\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 65, "score": 16135.18365428762 }, { 
"content": " });\n\n\n\n // spawn the laser texture atlas\n\n let texture_atlas =\n\n TextureAtlas::from_grid(textures.laser.clone(), Vec2::new(32., 16.0), 10, 1);\n\n let texture_atlas_handle = texture_atlases.add(texture_atlas);\n\n\n\n // spawn the laser between the two ships\n\n let mut laser_tx = Transform::from_translation(Vec3::new(0., game_map.bottom_y(), 0.5));\n\n laser_tx.scale.x = 1.4;\n\n\n\n commands\n\n .spawn_bundle(SpriteSheetBundle {\n\n texture_atlas: texture_atlas_handle,\n\n transform: laser_tx,\n\n ..Default::default()\n\n })\n\n .insert(Laser)\n\n .insert(Timer::from_seconds(0.1, true));\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 67, "score": 16133.728068176883 }, { "content": " }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n\n\n node.spawn_bundle(ButtonBundle {\n\n style: Style {\n\n size: Size::new(Val::Px(120.0), Val::Px(50.0)),\n\n margin: Rect {\n\n left: Val::Px(0.),\n\n right: Val::Px(0.),\n\n top: Val::Px(20.),\n\n bottom: Val::Px(0.),\n\n },\n\n justify_content: JustifyContent::Center,\n\n align_items: AlignItems::Center,\n\n ..Default::default()\n\n },\n\n material: button_materials.normal.clone(),\n", "file_path": "game_plugin/src/menu.rs", "rank": 68, "score": 16133.715067584582 }, { "content": "use bevy::prelude::*;\n\n\n\nuse crate::{\n\n game_time::GameTime,\n\n player::{PlayerShip, MAX_SEPARATION_STRAIN},\n\n GameState, SystemLabels,\n\n};\n\n\n\npub struct CapturedObstacle;\n\n\n\npub struct ScorePlugin;\n\n\n\npub struct Score {\n\n pub current: f32,\n\n pub multiplier: f32,\n\n}\n\n\n\npub struct ScoreItem;\n\npub struct ScoreText;\n\n\n", "file_path": "game_plugin/src/score.rs", "rank": 69, "score": 16133.48069463017 }, { "content": "use crate::obstacles::ObstaclePlugin;\n\nuse crate::player::PlayerPlugin;\n\nuse crate::score::ScorePlugin;\n\nuse crate::scrolling_background::ScrollingBackgroundPlugin;\n\n\n\nuse bevy::app::AppBuilder;\n\n// use bevy::diagnostic::{FrameTimeDiagnosticsPlugin, LogDiagnosticsPlugin};\n\nuse bevy::prelude::*;\n\n\n\n#[derive(Clone, Eq, PartialEq, Debug, Hash)]\n", "file_path": "game_plugin/src/lib.rs", "rank": 70, "score": 16133.02364639345 }, { "content": "impl Default for Score {\n\n fn default() -> Self {\n\n Score {\n\n current: 0.,\n\n multiplier: 1.,\n\n }\n\n }\n\n}\n\n\n\nimpl Plugin for ScorePlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_system_set(\n\n SystemSet::on_enter(GameState::Playing).with_system(spawn_score_ui.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Playing)\n\n .with_system(\n\n update_score\n\n .system()\n\n .label(SystemLabels::UpdateScore)\n", "file_path": "game_plugin/src/score.rs", "rank": 71, "score": 16132.95445734528 }, { "content": "\n\n let mut rng = thread_rng();\n\n let x_extents = -(game_map.width / 2.)..=(game_map.width / 2.);\n\n\n\n let spawn_x = rng.gen_range(x_extents).floor() * game_map.sprite_size;\n\n let spawn_patterns = patterns\n\n .patterns\n\n .iter()\n\n .filter_map(|pattern| {\n\n if pattern.min_score < score.current {\n\n Some(pattern.clone())\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<_>>();\n\n let spawn_pattern = spawn_patterns.choose(&mut rng).unwrap();\n\n\n\n println!(\"Spawning obstacle\");\n\n let texture_atlas =\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 72, "score": 16132.50481925087 }, { "content": " Right,\n\n}\n\n\n\nimpl Plugin for PlayerPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_system_set(\n\n 
SystemSet::on_enter(GameState::Playing)\n\n .with_system(spawn_player.system())\n\n .with_system(spawn_camera.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Playing)\n\n .with_system(\n\n move_player\n\n .system()\n\n .label(SystemLabels::MovePlayer)\n\n .after(SystemLabels::UpdateTime),\n\n )\n\n .with_system(\n\n is_player_dead_checks\n", "file_path": "game_plugin/src/player.rs", "rank": 73, "score": 16131.948177149936 }, { "content": "\n\nimpl Plugin for ObstaclePlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.insert_resource(AvailableSpawnPatterns::new())\n\n .add_system_set(\n\n SystemSet::on_enter(GameState::Playing)\n\n .with_system(setup_obstacle_spawning.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Playing)\n\n .with_system(\n\n spawn_obstacles\n\n .system()\n\n .label(SystemLabels::SpawnObstacles)\n\n .after(SystemLabels::UpdateTime),\n\n )\n\n .with_system(\n\n move_obstacles\n\n .system()\n\n .label(SystemLabels::MoveObstacles)\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 74, "score": 16131.530899914214 }, { "content": " SpawnPattern {\n\n offsets: vec![\n\n Vec2::new(-4., 0.),\n\n Vec2::new(-2., 0.),\n\n Vec2::ZERO,\n\n Vec2::new(2., 0.),\n\n Vec2::new(4., 0.),\n\n ],\n\n min_score: 100.,\n\n },\n\n ],\n\n }\n\n }\n\n}\n\n\n\npub struct Obstacle;\n\n\n\npub struct SpawnTimer;\n\n\n\npub struct ObstaclePlugin;\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 75, "score": 16131.397443962214 }, { "content": "\n\npub struct PlayerPlugin;\n\n\n\npub struct Player;\n\n\n\npub struct IsDead(pub String);\n\n\n\npub struct Laser;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct PlayerShip {\n\n pub is_dead: bool,\n\n pub speed: f32,\n\n\n\n pub max_separation: f32,\n\n pub separation_strain: f32,\n\n}\n\n\n\npub enum PlayerShipSide {\n\n Left,\n", "file_path": "game_plugin/src/player.rs", "rank": 76, "score": 16131.33748132673 }, { "content": "\n\npub struct AvailableSpawnPatterns {\n\n pub patterns: Vec<SpawnPattern>,\n\n}\n\n\n\nimpl AvailableSpawnPatterns {\n\n fn new() -> Self {\n\n AvailableSpawnPatterns {\n\n patterns: vec![\n\n SpawnPattern {\n\n offsets: vec![Vec2::ZERO],\n\n min_score: -1.,\n\n },\n\n SpawnPattern {\n\n offsets: vec![Vec2::new(-1., 0.), Vec2::ZERO, Vec2::new(1., 0.)],\n\n min_score: -1.,\n\n },\n\n SpawnPattern {\n\n offsets: vec![\n\n Vec2::new(-2., 0.),\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 77, "score": 16131.147166756171 }, { "content": " TextureAtlas::from_grid(textures.cloud_001.clone(), Vec2::new(32., 32.0), 4, 1);\n\n let texture_atlas_handle = texture_atlases.add(texture_atlas);\n\n\n\n for offset in spawn_pattern.offsets.iter() {\n\n commands\n\n .spawn_bundle(SpriteSheetBundle {\n\n texture_atlas: texture_atlas_handle.clone(),\n\n transform: Transform::from_translation(Vec3::new(\n\n spawn_x + offset.x * game_map.sprite_size,\n\n game_map.top_y()\n\n + game_map.pad_y * 3. * game_map.sprite_size\n\n + offset.y * game_map.sprite_size, // spawn out of sight\n\n 1.,\n\n )),\n\n ..Default::default()\n\n })\n\n .insert(Timer::from_seconds(0.2, true))\n\n .insert(Obstacle);\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 78, "score": 16130.916474609483 }, { "content": "\n\n // update separation strain\n\n if dx > ship.max_separation {\n\n // increase strain\n\n ship.separation_strain += time.delta;\n\n } else {\n\n // reduce strain\n\n if ship.separation_strain > 0. 
{\n\n ship.separation_strain = (ship.separation_strain - 0.75 * time.delta).max(0.);\n\n }\n\n }\n\n\n\n for (mut laser, mut sprite, mut timer) in lasers.iter_mut() {\n\n // reposition the laser\n\n laser.scale.x = x_scale;\n\n laser.translation.x = sides.0.x + dx / 2.;\n\n\n\n // update the animation frame for the laser\n\n // show flickering if stress > 0.5\n\n timer.tick(time.delta_duration);\n", "file_path": "game_plugin/src/player.rs", "rank": 79, "score": 16130.008076948741 }, { "content": "use bevy::prelude::*;\n\n\n\nuse crate::actions::Actions;\n\nuse crate::game_map::GameMap;\n\nuse crate::game_time::GameTime;\n\nuse crate::loading::TextureAssets;\n\nuse crate::GameState;\n\nuse crate::SystemLabels;\n\n\n\n#[macro_export]\n\nmacro_rules! by_side {\n\n ($side:expr, $left:expr, $right:expr) => {{\n\n match $side {\n\n PlayerShipSide::Left => $left,\n\n PlayerShipSide::Right => $right,\n\n }\n\n }};\n\n}\n\n\n\npub const MAX_SEPARATION_STRAIN: f32 = 7.;\n", "file_path": "game_plugin/src/player.rs", "rank": 80, "score": 16129.980295153879 }, { "content": " // only check visible obstacles to see if they just crossed the tether line\n\n if vis.is_visible && before > by && after < by {\n\n let obs_x = tx.translation.x;\n\n\n\n // crossed over! Check if we collided with player ships or went through the tether\n\n if (obs_x - sides.0).abs() < min_x_sep || (obs_x - sides.1).abs() < min_x_sep {\n\n println!(\"Hit tractor!\");\n\n commands\n\n .entity(players.single().unwrap())\n\n .insert(IsDead(\"A tractor hit an obstacle!\".into()));\n\n ship.is_dead = true;\n\n return;\n\n }\n\n\n\n // otherwise check if we went through the tether\n\n if obs_x > sides.0 && obs_x < sides.1 {\n\n println!(\"Hit tether!\");\n\n vis.is_visible = false;\n\n commands.entity(entity).insert(CapturedObstacle);\n\n audio_events.send(PlayAudioEffectEvent(AudioEffect::Collect));\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 81, "score": 16129.550888780106 }, { "content": " },\n\n ],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ScoreText);\n\n });\n\n}\n\n\n", "file_path": "game_plugin/src/score.rs", "rank": 82, "score": 16129.40461724368 }, { "content": " })\n\n .insert(PlayerShipSide::Left);\n\n\n\n let texture_atlas =\n\n TextureAtlas::from_grid(textures.player_right.clone(), Vec2::new(32., 32.0), 4, 1);\n\n let texture_atlas_handle = texture_atlases.add(texture_atlas);\n\n\n\n parent\n\n .spawn_bundle({\n\n SpriteSheetBundle {\n\n texture_atlas: texture_atlas_handle,\n\n transform: Transform::from_translation(Vec3::new(\n\n game_map.sprite_size / 2.,\n\n game_map.bottom_y(),\n\n 1.,\n\n )),\n\n ..Default::default()\n\n }\n\n })\n\n .insert(PlayerShipSide::Right);\n", "file_path": "game_plugin/src/player.rs", "rank": 83, "score": 16128.818647121834 }, { "content": " .spawn()\n\n .insert(Transform::from_translation(Vec3::ZERO))\n\n .insert(GlobalTransform::from_translation(Vec3::ZERO))\n\n .insert(Player)\n\n .with_children(|parent| {\n\n let texture_atlas =\n\n TextureAtlas::from_grid(textures.player_left.clone(), Vec2::new(32., 32.0), 4, 1);\n\n let texture_atlas_handle = texture_atlases.add(texture_atlas);\n\n\n\n parent\n\n .spawn_bundle({\n\n SpriteSheetBundle {\n\n texture_atlas: texture_atlas_handle,\n\n transform: Transform::from_translation(Vec3::new(\n\n -game_map.sprite_size / 2.,\n\n game_map.bottom_y(),\n\n 1.,\n\n )),\n\n ..Default::default()\n\n }\n", "file_path": "game_plugin/src/player.rs", "rank": 84, "score": 
16128.675470486562 }, { "content": " return;\n\n }\n\n\n\n let by = game_map.bottom_y();\n\n let sides = ship_sides.iter().fold((0., 0.), |acc, (tx, side)| {\n\n by_side!(side, (tx.translation.x, acc.1), (acc.0, tx.translation.x))\n\n });\n\n let min_x_sep = 0.8 * game_map.sprite_size;\n\n\n\n for (entity, mut tx, mut vis, mut timer, mut sprite) in obstacles.iter_mut() {\n\n // update the sprite\n\n timer.tick(time.delta_duration);\n\n if timer.just_finished() {\n\n sprite.index = (sprite.index + 1) % 4;\n\n }\n\n\n\n let before = tx.translation.y;\n\n let after = tx.translation.y - 150. * time.delta;\n\n tx.translation.y = after;\n\n\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 85, "score": 16128.308885895925 }, { "content": " Vec2::new(-1., 0.),\n\n Vec2::ZERO,\n\n Vec2::new(1., 0.),\n\n Vec2::new(2., 0.),\n\n ],\n\n min_score: 50.,\n\n },\n\n SpawnPattern {\n\n offsets: vec![\n\n Vec2::new(-2., 0.),\n\n Vec2::ZERO,\n\n Vec2::new(1., 0.),\n\n Vec2::new(2., 0.),\n\n ],\n\n min_score: 60.,\n\n },\n\n SpawnPattern {\n\n offsets: vec![Vec2::new(-2., -2.), Vec2::ZERO, Vec2::new(2., 2.)],\n\n min_score: 30.,\n\n },\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 86, "score": 16126.323133278464 }, { "content": " } else if ship.separation_strain > MAX_SEPARATION_STRAIN {\n\n println!(\"Tether broke!\");\n\n ship.is_dead = true;\n\n commands\n\n .entity(player)\n\n .insert(IsDead(\"The tether broke!\".into()));\n\n }\n\n }\n\n Err(_) => {}\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 87, "score": 16126.099357780819 }, { "content": " (sides.0.x + (moves.0 as f32) * delta_move).clamp(-x_bound, x_bound),\n\n (sides.1.x + (moves.1 as f32) * delta_move).clamp(-x_bound, x_bound),\n\n );\n\n\n\n let rotations = (\n\n if moves.0 == 0 {\n\n 0.\n\n } else {\n\n moves.0.signum() as f32 * -std::f32::consts::FRAC_PI_8\n\n },\n\n if moves.1 == 0 {\n\n 0.\n\n } else {\n\n moves.1.signum() as f32 * -std::f32::consts::FRAC_PI_8\n\n },\n\n );\n\n\n\n // update the ship side positions and rotations\n\n // TODO could make this a bit nicer with a macro\n\n for (mut tx, side) in ship_sides.iter_mut() {\n\n tx.translation.x = by_side!(side, target_x.0, target_x.1);\n\n tx.rotation = Quat::from_axis_angle(Vec3::Z, by_side!(side, rotations.0, rotations.1));\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 88, "score": 16125.816360783778 }, { "content": " .system()\n\n .after(SystemLabels::MovePlayer),\n\n )\n\n .with_system(animate_player.system())\n\n .with_system(update_laser.system().after(SystemLabels::MovePlayer)),\n\n )\n\n .add_system_set(SystemSet::on_exit(GameState::Playing).with_system(despawn_level.system()));\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 89, "score": 16125.775285264297 }, { "content": " .after(SystemLabels::UpdateTime),\n\n )\n\n .with_system(score_captured_obstacles.system())\n\n .with_system(\n\n update_score_text_ui\n\n .system()\n\n .after(SystemLabels::UpdateScore),\n\n ),\n\n )\n\n .add_system_set(\n\n SystemSet::on_exit(GameState::Playing).with_system(despawn_score_ui.system()),\n\n );\n\n }\n\n}\n\n\n\n/// spawns the score UI\n", "file_path": "game_plugin/src/score.rs", "rank": 90, "score": 16125.574933980699 }, { "content": " .after(SystemLabels::SpawnObstacles),\n\n )\n\n .with_system(\n\n remove_dead_obstacles\n\n .system()\n\n .after(SystemLabels::MoveObstacles),\n\n ),\n\n )\n\n .add_system_set(\n\n SystemSet::on_exit(GameState::Playing).with_system(despawn_obstacles.system()),\n\n );\n\n 
}\n\n}\n\n\n\n/// Starts the obstacle spawn timer\n", "file_path": "game_plugin/src/obstacles.rs", "rank": 91, "score": 16123.680167449671 }, { "content": " if timer.just_finished() {\n\n let frame_count = if dx > ship.max_separation {\n\n if ship.separation_strain > MAX_SEPARATION_STRAIN * 0.66 {\n\n 10\n\n } else if ship.separation_strain > MAX_SEPARATION_STRAIN * 0.33 {\n\n 5\n\n } else {\n\n 3\n\n }\n\n } else {\n\n 2\n\n };\n\n sprite.index = (sprite.index + 1) % frame_count;\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 92, "score": 16123.680167449671 }, { "content": " color: Color::rgb(0.3, 0.3, 0.3),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n\n\n node.spawn_bundle(TextBundle {\n\n text: Text {\n\n sections: vec![TextSection {\n\n value: \"Hit space to return to the menu\".into(),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 20.0,\n\n color: Color::rgb(0.3, 0.3, 0.3),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n });\n\n state.set(GameState::GameOver).unwrap();\n\n}\n\n\n", "file_path": "game_plugin/src/game_over_ui.rs", "rank": 93, "score": 15514.351114970836 }, { "content": " justify_content: JustifyContent::Center,\n\n flex_direction: FlexDirection::ColumnReverse,\n\n ..Default::default()\n\n },\n\n material: materials.add(Color::NONE.into()),\n\n ..Default::default()\n\n })\n\n .insert(GameOverUiItem)\n\n .with_children(|node| {\n\n node.spawn_bundle(TextBundle {\n\n text: Text {\n\n sections: vec![TextSection {\n\n value: format!(\n\n \"Oh Noooo! {} You scored {:.0}\",\n\n reason,\n\n score.current.floor()\n\n ),\n\n style: TextStyle {\n\n font: asset_server.get_handle(\"fonts/FiraSans-Bold.ttf\"),\n\n font_size: 20.0,\n", "file_path": "game_plugin/src/game_over_ui.rs", "rank": 94, "score": 15511.751898803197 }, { "content": "use bevy::prelude::*;\n\n\n\nuse crate::{game_map::GameMap, game_time::GameTime, loading::TextureAssets, GameState};\n\n\n\npub struct ScrollingBackground;\n\n\n\npub struct ScrollingBackgroundPlugin;\n\n\n\nimpl Plugin for ScrollingBackgroundPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_system_set(\n\n SystemSet::on_enter(GameState::Playing).with_system(spawn_background.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Playing).with_system(animate_background.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_exit(GameState::Playing).with_system(despawn_background.system()),\n\n );\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/scrolling_background.rs", "rank": 95, "score": 15506.258329101345 }, { "content": "use crate::{\n\n actions::Actions,\n\n player::{IsDead, PlayerShip},\n\n score::Score,\n\n GameState,\n\n};\n\nuse bevy::prelude::*;\n\n\n\npub struct GameOverPlugin;\n\n\n\nimpl Plugin for GameOverPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_system_set(\n\n SystemSet::on_update(GameState::Playing).with_system(transition_to_game_over.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::GameOver).with_system(restart_game.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_exit(GameState::GameOver).with_system(despawn_game_over_ui.system()),\n\n );\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/game_over_ui.rs", "rank": 96, "score": 15504.740500314587 }, { "content": "use std::time::Duration;\n\n\n\nuse bevy::prelude::*;\n\n\n\nuse crate::{GameState, SystemLabels};\n\n\n\npub 
const GAME_TIME_DOUBLING_TIME: f32 = 60.; // e.g. 60 == double speed every minute\n\n\n\npub struct GameTime {\n\n pub multiplier: f32,\n\n pub elapsed: f32,\n\n pub delta: f32,\n\n pub delta_duration: Duration,\n\n pub fixed_update: bool,\n\n next_fixed_update: f64,\n\n}\n\n\n\npub struct GameTimePlugin;\n\n\n\nimpl Plugin for GameTimePlugin {\n", "file_path": "game_plugin/src/game_time.rs", "rank": 97, "score": 15503.816113143977 }, { "content": "impl GameMap {\n\n /// Determines the \"bottom\" of the map in world coordinates\n\n pub fn bottom_y(&self) -> f32 {\n\n -(self.height / 2.) * self.sprite_size\n\n }\n\n\n\n /// Determines the \"top\" of the map in world coordinates\n\n pub fn top_y(&self) -> f32 {\n\n (self.height / 2.) * self.sprite_size\n\n }\n\n\n\n /// Get the maximum x-value in the positive x-direction (right side).\n\n /// The map is symmetrical so can negate this to find the left boundary.\n\n /// NOTE: this boundary includes the x-padding\n\n pub fn get_x_bound(&self) -> f32 {\n\n (self.pad_x + self.width / 2.) * self.sprite_size - (self.sprite_size / 2.)\n\n }\n\n}\n", "file_path": "game_plugin/src/game_map.rs", "rank": 98, "score": 15502.324996395893 }, { "content": "pub struct GameMap {\n\n pub width: f32,\n\n pub height: f32,\n\n pub pad_x: f32,\n\n pub pad_y: f32,\n\n pub sprite_size: f32,\n\n}\n\n\n\nimpl Default for GameMap {\n\n fn default() -> Self {\n\n GameMap {\n\n width: 16.,\n\n height: 16.,\n\n pad_x: 3.,\n\n pad_y: 3.,\n\n sprite_size: 32.,\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/game_map.rs", "rank": 99, "score": 15501.662784626009 } ]
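Quick sanity check of the GameMap helpers quoted just above, using only the Default values shown in that same snippet (width 16, pad_x 3, sprite_size 32 — nothing outside the data): get_x_bound() = (3 + 16 / 2) * 32 - 32 / 2 = 336.0, which is the bound that the move_player snippet quoted earlier in this list uses to clamp each tractor's x translation to [-336, 336].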
Rust
src/bin/client/gui.rs
CreativeWolfies/sharmat
c139a0adffee32fa6541b284d71934156a57099b
use super::style::SharmatStyleSheet; use super::settings::*; use sharmat::{game::*, player::PlayerColor}; use std::cell::RefCell; use std::collections::HashMap; use std::hash::Hash; use std::rc::Rc; use iced::{ executor, Application, Background, Color, Command, Container, Element, Length, Point, Row, Size, }; use iced_native::{ layout, widget::{svg::Handle, Widget}, MouseCursor, Rectangle, }; use iced_wgpu::{Defaults, Primitive, Renderer}; #[derive(Debug)] pub struct Sharmat { pub game: Rc<RefCell<Game>>, pub stylesheet: SharmatStyleSheet, pub settings: SharmatSettings, pub piece_assets: Rc<HashMap<String, Handle>>, } #[derive(Debug)] pub enum SharmatMessage {} type Message = SharmatMessage; #[derive(Debug)] pub struct GBoard { pub game: Rc<RefCell<Game>>, pub fill_dark: Color, pub fill_light: Color, pub fill_dark_hl: Color, pub fill_light_hl: Color, pub highlight_border_ratio: f32, pub settings: SharmatSettings, pub piece_assets: Rc<HashMap<String, Handle>>, pub flip_board: bool, } impl Application for Sharmat { type Executor = executor::Null; type Message = Message; type Flags = (HashMap<String, Handle>, Game, HashMap<String, SharmatSettingType>); fn new(flags: Self::Flags) -> (Self, Command<Self::Message>) { ( Self { game: Rc::new(RefCell::new(flags.1)), stylesheet: SharmatStyleSheet::default(), piece_assets: Rc::new(flags.0), settings: SharmatSettings::new(flags.2), }, Command::none(), ) } fn title(&self) -> String { String::from("Sharmat") } fn view(&mut self) -> Element<Self::Message> { Container::new( Row::new().push( Container::new::<iced_native::Element<_, _>>( GBoard::new( self.game.clone(), self.piece_assets.clone(), self.settings.clone(), true, ) .into(), ) .width(Length::Units(600)) .height(Length::Units(600)) .padding(10), ), ) .padding(10) .width(Length::Fill) .height(Length::Fill) .center_x() .center_y() .style(self.stylesheet) .into() } fn update(&mut self, _message: Self::Message) -> Command<Self::Message> { Command::none() } } impl GBoard { pub fn new( game: Rc<RefCell<Game>>, piece_assets: Rc<HashMap<String, Handle>>, settings: SharmatSettings, flip_board: bool, ) -> GBoard { GBoard { game, fill_dark: Color::from_rgb8(226, 149, 120), fill_light: Color::from_rgb8(255, 221, 210), fill_dark_hl: Color::from_rgb8(113, 129, 120), fill_light_hl: Color::from_rgb8(128, 165, 165), piece_assets, settings: settings.clone(), highlight_border_ratio: 0.15, flip_board, } } #[inline] pub fn get_board_width(&self) -> usize { self.game.borrow().board().width.get() } #[inline] pub fn get_board_height(&self) -> usize { self.game.borrow().board().height.get() } #[inline] pub fn tile_size(&self, width: f32, height: f32) -> f32 { (width / self.get_board_width() as f32).min(height / self.get_board_height() as f32) } #[inline] pub fn get_raw(&self, x: usize, y: usize) -> Option<(usize, PlayerColor)> { self.game.borrow().board().get(x, y).ok().flatten() } #[inline] pub fn get(&self, x: usize, y: usize) -> Option<(usize, PlayerColor)> { self.game.borrow().board().get(x, y).ok().flatten() } fn get_hints(&self, m_x: usize, m_y: usize) -> Vec<(usize, usize)> { let hovered_piece_raw = if m_x == std::usize::MAX { None } else { self.get(m_x, m_y) }; if hovered_piece_raw.is_some() && (hovered_piece_raw.unwrap().1 == self .game .borrow() .current_player() .expect("No player?") .color || self.render_hints_opponent()) && self.render_hints() { let raw = &hovered_piece_raw.unwrap(); let game = self.game.borrow(); let hovered_piece = game .pieces() .get(raw.0) .expect(&format!("Couldn't find piece {}", 
raw.0)); let hovered_player = game .player(raw.1) .expect(&format!("Couldn't find player {:?}", raw.1)); hovered_piece.movement_type()[0] .flatten(self.game.borrow().board(), hovered_player, m_x, m_y) .unwrap() .into_iter() .map(|(dx, dy)| ((m_x as isize + dx) as usize, (m_y as isize + dy) as usize)) .collect() } else { vec![] } } fn get_mouse_pos(&self, bounds: Rectangle, mouse: Point, tile_size: f32) -> (usize, usize) { if bounds.contains(mouse) { ( ((mouse.x - bounds.x) / tile_size).floor() as usize, ((mouse.y - bounds.y) / tile_size).floor() as usize, ) } else { (std::usize::MAX, std::usize::MAX) } } pub fn render_hints(&self) -> bool { self.settings.get_bool("render_hints").unwrap_or(true) } pub fn render_hints_opponent(&self) -> bool { self.settings.get_bool("render_hints_opponent").unwrap_or(false) } } impl<'a, Message> Widget<Message, Renderer> for GBoard { fn width(&self) -> Length { Length::Fill } fn height(&self) -> Length { Length::Fill } fn layout(&self, _renderer: &Renderer, limits: &layout::Limits) -> layout::Node { layout::Node::new(Size::new( self.tile_size(limits.max().width, limits.max().height) * self.get_board_width() as f32, self.tile_size(limits.max().width, limits.max().height) * self.get_board_height() as f32, )) } fn hash_layout(&self, hasher: &mut iced_native::Hasher) { self.game.borrow().board().hash(hasher); } fn draw( &self, _renderer: &mut Renderer, _defaults: &Defaults, layout: layout::Layout<'_>, mouse: Point, ) -> (Primitive, MouseCursor) { let mut res: Vec<Primitive> = Vec::new(); let tile_size = self.tile_size(layout.bounds().width, layout.bounds().height); let hl_width = tile_size as f32 * self.highlight_border_ratio; let (m_x, m_y) = self.get_mouse_pos(layout.bounds(), mouse, tile_size); let hints = self.get_hints(m_x, m_y); for y in 0..self.get_board_height() { for x in 0..self.get_board_width() { let v_x = layout.bounds().x + tile_size * x as f32; let v_y = layout.bounds().y + tile_size * y as f32; let bounds = Rectangle { x: v_x, y: v_y, width: tile_size, height: tile_size, }; let sub_bounds = Rectangle { x: v_x + hl_width, y: v_y + hl_width, width: tile_size - 2.0 * hl_width, height: tile_size - 2.0 * hl_width, }; if let Some((piece_index, piece_color)) = self.get(x, y) { if let Some(piece) = self.game.borrow().pieces().get(piece_index) { res.push(Primitive::Svg { handle: self .piece_assets .get(if piece_color.white() { piece.display_white() } else { piece.display_black() }) .unwrap() .clone(), bounds, }); } else { panic!("Piece index {} out of bound!", piece_index); } } if hints.iter().find(|(x2, y2)| x == *x2 && y == *y2).is_some() { res.push(Primitive::Quad { bounds: bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light_hl) } else { Background::Color(self.fill_dark_hl) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); res.push(Primitive::Quad { bounds: sub_bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light) } else { Background::Color(self.fill_dark) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); } else { res.push(Primitive::Quad { bounds: bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light) } else { Background::Color(self.fill_dark) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); } } } ( Primitive::Group { primitives: res }, if m_x != std::usize::MAX && self.get(m_x, m_y).is_some() { MouseCursor::Pointer } else { MouseCursor::Idle }, ) } } impl<'a, Message> 
Into<iced_native::Element<'a, Message, Renderer>> for GBoard { fn into(self) -> iced_native::Element<'a, Message, Renderer> { iced_native::Element::new(self) } }
use super::style::SharmatStyleSheet; use super::settings::*; use sharmat::{game::*, player::PlayerColor}; use std::cell::RefCell; use std::collections::HashMap; use std::hash::Hash; use std::rc::Rc; use iced::{ executor, Application, Background, Color, Command, Container, Element, Length, Point, Row, Size, }; use iced_native::{ layout, widget::{svg::Handle, Widget}, MouseCursor, Rectangle, }; use iced_wgpu::{Defaults, Primitive, Renderer}; #[derive(Debug)] pub struct Sharmat { pub game: Rc<RefCell<Game>>, pub stylesheet: SharmatStyleSheet, pub settings: SharmatSettings, pub piece_assets: Rc<HashMap<String, Handle>>, } #[derive(Debug)] pub enum SharmatMessage {} type Message = SharmatMessage; #[derive(Debug)] pub struct GBoard { pub game: Rc<RefCell<Game>>, pub fill_dark: Color, pub fill_light: Color, pub fill_dark_hl: Color, pub fill_light_hl: Color, pub highlight_border_ratio: f32, pub settings: SharmatSettings, pub piece_assets: Rc<HashMap<String, Handle>>, pub flip_board: bool, } impl Application for Sharmat { type Executor = executor::Null; type Message = Message; type Flags = (HashMap<String, Handle>, Game, HashMap<String, SharmatSettingType>); fn new(flags: Self::Flags) -> (Self, Command<Self::Message>) { ( Self { game: Rc::new(RefCell::new(flags.1)), stylesheet: SharmatStyleSheet::default(), piece_assets: Rc::new(flags.0), settings: SharmatSettings::new(flags.2), }, Command::none(), ) } fn title(&self) -> String { String::from("Sharmat") } fn view(&mut self) -> Element<Self::Message> { Container::new( Row::new().push( Container::new::<iced_native::Element<_, _>>( GBoard::new( self.game.clone(), self.piece_assets.clone(), self.settings.clone(), true, ) .into(), ) .width(Length::Units(600)) .height(Length::Units(600)) .padding(10), ), ) .padding(10) .width(Length::Fill) .height(Length::Fill) .center_x() .center_y() .style(self.stylesheet) .into() } fn update(&mut self, _message: Self::Message) -> Command<Self::Message> { Command::none() } } impl GBoard { pub fn new( game: Rc<RefCell<Game>>, piece_assets: Rc<HashMap<String, Handle>>, settings: SharmatSettings, flip_board: bool, ) -> GBoard { GBoard { game, fill_dark: Color::from_rgb8(226, 149, 120), fill_light: Color::from_rgb8(255, 221, 210), fill_dark_hl: Color::from_rgb8(113, 129, 120), fill_light_hl: Color::from_rgb8(128, 165, 165), piece_assets, settings: settings.clone(), highlight_border_ratio: 0.15, flip_board, } } #[inline] pub fn get_board_width(&self) -> usize { self.game.borrow().board().width.get() } #[inline] pub fn get_board_height(&self) -> usize { self.game.borrow().board().height.get() } #[inline] pub fn tile_size(&self, width: f32, height: f32) -> f32 { (width / self.get_board_width() as f32).min(height / self.get_board_height() as f32) } #[inline] pub fn get_raw(&self, x: usize, y: usize) -> Option<(usize, PlayerColor)> { self.game.borrow().board().get(x, y).ok().flatten() } #[inline] pub fn get(&self, x: usize, y: usize) -> Option<(usize, PlayerColor)> { self.game.borrow().board().get(x, y).ok().flatten() } fn get_hints(&self, m_x: usize, m_y: usize) -> Vec<(usize, usize)> { let hovered_piece_raw = if m_x == std::usize::MAX { None } else { self.get(m_x, m_y) }; if hovered_piece_raw.is_some() && (hovered_piece_raw.unwrap().1 == self .game .borrow() .current_player() .expect("No player?") .color || self.render_hints_opponent()) && self.render_hints() { let raw = &hovered_piece_raw.unwrap(); let game = self.game.borrow(); let hovered_piece = game .pieces() .get(raw.0) .expect(&format!("Couldn't find piece {}", 
raw.0)); let hovered_player = game .player(raw.1) .expect(&format!("Couldn't find player {:?}", raw.1)); hovered_piece.movement_type()[0] .flatten(self.game.borrow().board(), hovered_player, m_x, m_y) .unwrap() .into_iter() .map(|(dx, dy)| ((m_x as isize + dx) as usize, (m_y as isize + dy) as usize)) .collect() } else { vec![] } } fn get_mouse_pos(&self, bounds: Rectangle, mouse: Point, tile_size: f32) -> (us
pub fn render_hints(&self) -> bool { self.settings.get_bool("render_hints").unwrap_or(true) } pub fn render_hints_opponent(&self) -> bool { self.settings.get_bool("render_hints_opponent").unwrap_or(false) } } impl<'a, Message> Widget<Message, Renderer> for GBoard { fn width(&self) -> Length { Length::Fill } fn height(&self) -> Length { Length::Fill } fn layout(&self, _renderer: &Renderer, limits: &layout::Limits) -> layout::Node { layout::Node::new(Size::new( self.tile_size(limits.max().width, limits.max().height) * self.get_board_width() as f32, self.tile_size(limits.max().width, limits.max().height) * self.get_board_height() as f32, )) } fn hash_layout(&self, hasher: &mut iced_native::Hasher) { self.game.borrow().board().hash(hasher); } fn draw( &self, _renderer: &mut Renderer, _defaults: &Defaults, layout: layout::Layout<'_>, mouse: Point, ) -> (Primitive, MouseCursor) { let mut res: Vec<Primitive> = Vec::new(); let tile_size = self.tile_size(layout.bounds().width, layout.bounds().height); let hl_width = tile_size as f32 * self.highlight_border_ratio; let (m_x, m_y) = self.get_mouse_pos(layout.bounds(), mouse, tile_size); let hints = self.get_hints(m_x, m_y); for y in 0..self.get_board_height() { for x in 0..self.get_board_width() { let v_x = layout.bounds().x + tile_size * x as f32; let v_y = layout.bounds().y + tile_size * y as f32; let bounds = Rectangle { x: v_x, y: v_y, width: tile_size, height: tile_size, }; let sub_bounds = Rectangle { x: v_x + hl_width, y: v_y + hl_width, width: tile_size - 2.0 * hl_width, height: tile_size - 2.0 * hl_width, }; if let Some((piece_index, piece_color)) = self.get(x, y) { if let Some(piece) = self.game.borrow().pieces().get(piece_index) { res.push(Primitive::Svg { handle: self .piece_assets .get(if piece_color.white() { piece.display_white() } else { piece.display_black() }) .unwrap() .clone(), bounds, }); } else { panic!("Piece index {} out of bound!", piece_index); } } if hints.iter().find(|(x2, y2)| x == *x2 && y == *y2).is_some() { res.push(Primitive::Quad { bounds: bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light_hl) } else { Background::Color(self.fill_dark_hl) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); res.push(Primitive::Quad { bounds: sub_bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light) } else { Background::Color(self.fill_dark) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); } else { res.push(Primitive::Quad { bounds: bounds.clone(), background: if (x + y) % 2 == 0 { Background::Color(self.fill_light) } else { Background::Color(self.fill_dark) }, border_radius: 0, border_width: 0, border_color: Color::TRANSPARENT, }); } } } ( Primitive::Group { primitives: res }, if m_x != std::usize::MAX && self.get(m_x, m_y).is_some() { MouseCursor::Pointer } else { MouseCursor::Idle }, ) } } impl<'a, Message> Into<iced_native::Element<'a, Message, Renderer>> for GBoard { fn into(self) -> iced_native::Element<'a, Message, Renderer> { iced_native::Element::new(self) } }
ize, usize) { if bounds.contains(mouse) { ( ((mouse.x - bounds.x) / tile_size).floor() as usize, ((mouse.y - bounds.y) / tile_size).floor() as usize, ) } else { (std::usize::MAX, std::usize::MAX) } }
function_block-function_prefixed
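For readability, the three long rows above appear to split a single method: the row ending in "-> (us" stops mid-signature, the short row beginning "ize, usize) {" is the missing middle, and the remaining row is the code that follows it. Reassembled (taken verbatim from the full source shown earlier, with one explanatory comment added), the completed helper is:

fn get_mouse_pos(&self, bounds: Rectangle, mouse: Point, tile_size: f32) -> (usize, usize) {
    // Map the cursor position to 0-based board tile coordinates;
    // (usize::MAX, usize::MAX) signals that the cursor lies outside the board.
    if bounds.contains(mouse) {
        (
            ((mouse.x - bounds.x) / tile_size).floor() as usize,
            ((mouse.y - bounds.y) / tile_size).floor() as usize,
        )
    } else {
        (std::usize::MAX, std::usize::MAX)
    }
}

GBoard::draw then treats m_x == std::usize::MAX as "no tile hovered" when choosing the mouse cursor.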
[ { "content": "type RawPiece = Option<(usize, PlayerColor)>;\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone)]\n\npub struct Board {\n\n pub width: NonZeroUsize,\n\n pub height: NonZeroUsize,\n\n board: Vec<Vec<RawPiece>>,\n\n name: String,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum BoardError {\n\n OutOfBounds(usize, usize),\n\n}\n\n\n\npub type BoardResult<T> = Result<T, BoardError>;\n\n\n\nimpl Board {\n\n pub fn new(width: NonZeroUsize, height: NonZeroUsize) -> Self {\n\n let mut board = Vec::with_capacity(width.get());\n", "file_path": "src/board.rs", "rank": 0, "score": 137357.87066629453 }, { "content": "#[inline]\n\nfn is_within_bounds(board: &Board, x: isize, y: isize) -> bool {\n\n x >= 0 && x < board.width.get() as isize && y >= 0 && y < board.height.get() as isize\n\n}\n", "file_path": "src/movement.rs", "rank": 1, "score": 99026.57907109303 }, { "content": "/// Returns `a ⊂ b`\n\n/// O(n²), I don't care\n\nfn set_inclusion<T: PartialEq>(a: &Vec<T>, b: &Vec<T>) -> bool {\n\n a.len() == 0\n\n || a.iter()\n\n .all(|a_elem| b.iter().find(|b_elem| a_elem == *b_elem).is_some())\n\n}\n\n\n", "file_path": "tests/movement.rs", "rank": 2, "score": 92923.71456456697 }, { "content": "/// Returns `a = b <=> a ⊂ b & b ⊂ a`\n\nfn set_equal<T: PartialEq>(a: &Vec<T>, b: &Vec<T>) -> bool {\n\n set_inclusion(a, b) && set_inclusion(b, a)\n\n}\n\n\n", "file_path": "tests/movement.rs", "rank": 3, "score": 92923.71456456697 }, { "content": "#[test]\n\nfn game_get_pieces_with_piece() {\n\n let piece = PieceBuilder::new().build();\n\n let game = GameBuilder::new().piece(piece.clone()).build();\n\n assert_eq!(*game.pieces(), vec![piece]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 4, "score": 75275.39671286414 }, { "content": "#[test]\n\nfn game_get_pieces_with_piece_and_pieces_push() {\n\n let piece = PieceBuilder::new().build();\n\n let piece2 = PieceBuilder::new().build();\n\n let piece3 = PieceBuilder::new().build();\n\n let game = GameBuilder::new()\n\n .piece(piece.clone())\n\n .pieces(vec![piece2.clone(), piece3.clone()])\n\n .build();\n\n assert_eq!(*game.pieces(), vec![piece, piece2, piece3]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 5, "score": 74034.39650287828 }, { "content": "#[test]\n\nfn game_get_pieces_with_pieces_push() {\n\n let piece = PieceBuilder::new().build();\n\n let piece2 = PieceBuilder::new().build();\n\n let game = GameBuilder::new()\n\n .pieces(vec![piece.clone(), piece2.clone()])\n\n .build();\n\n assert_eq!(*game.pieces(), vec![piece, piece2]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 6, "score": 72632.02815895327 }, { "content": "#[test]\n\nfn game_get_pieces_with_piece_push() {\n\n let piece = PieceBuilder::new().build();\n\n let piece2 = PieceBuilder::new().build();\n\n let game = GameBuilder::new()\n\n .piece(piece.clone())\n\n .piece(piece2.clone())\n\n .build();\n\n assert_eq!(*game.pieces(), vec![piece, piece2]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 7, "score": 72632.02815895327 }, { "content": "#[test]\n\nfn game_create_with_piece_and_pieces_push() {\n\n let piece = PieceBuilder::new().build();\n\n let piece2 = PieceBuilder::new().build();\n\n let piece3 = PieceBuilder::new().build();\n\n let _game = GameBuilder::new()\n\n .piece(piece)\n\n .pieces(vec![piece2, piece3])\n\n .build();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 8, "score": 72632.02815895327 }, { "content": "#[test]\n\nfn game_create_with_piece() {\n\n let piece = PieceBuilder::new().build();\n\n let _game = 
GameBuilder::new().piece(piece).build();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 9, "score": 71295.7491095327 }, { "content": "#[test]\n\nfn game_create_with_pieces_push() {\n\n let piece = PieceBuilder::new().build();\n\n let piece2 = PieceBuilder::new().build();\n\n let _game = GameBuilder::new().pieces(vec![piece, piece2]).build();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 10, "score": 68364.00988814552 }, { "content": "#[test]\n\nfn game_create_with_piece_push() {\n\n let piece = PieceBuilder::new().build();\n\n let piece2 = PieceBuilder::new().build();\n\n let _game = GameBuilder::new().piece(piece).piece(piece2).build();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 11, "score": 68364.00988814552 }, { "content": "#[test]\n\nfn game_search_piece_by_id_fail() {\n\n let piece = PieceBuilder::new().build();\n\n let piece2 = PieceBuilder::new().build();\n\n let piece3 = PieceBuilder::new().build();\n\n let game = GameBuilder::new()\n\n .pieces(vec![piece, piece2, piece3])\n\n .build();\n\n assert_eq!(game.search_piece(\"random\"), None);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 12, "score": 65696.72417839675 }, { "content": "#[test]\n\nfn game_search_piece_by_alias_fail() {\n\n let piece = PieceBuilder::new().alias(\"test\").build();\n\n let piece2 = PieceBuilder::new().alias(\"foo\").alias(\"test\").build();\n\n let piece3 = PieceBuilder::new().build();\n\n let game = GameBuilder::new()\n\n .pieces(vec![piece, piece2, piece3])\n\n .build();\n\n assert_eq!(game.search_piece_alias(\"hmm\"), Vec::<&Piece>::new());\n\n}\n", "file_path": "tests/game.rs", "rank": 13, "score": 65696.72417839675 }, { "content": "#[test]\n\nfn game_search_piece_by_id_success() {\n\n let piece = PieceBuilder::new().build();\n\n let piece2 = PieceBuilder::new().build();\n\n let piece3 = PieceBuilder::new().build();\n\n let game = GameBuilder::new()\n\n .pieces(vec![piece.clone(), piece2, piece3])\n\n .build();\n\n assert_eq!(game.search_piece(piece.id()), Some(&piece));\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 14, "score": 65696.72417839675 }, { "content": "#[test]\n\nfn game_search_piece_by_alias_success() {\n\n let piece = PieceBuilder::new().alias(\"test\").build();\n\n let piece2 = PieceBuilder::new().alias(\"foo\").alias(\"test\").build();\n\n let piece3 = PieceBuilder::new().build();\n\n let game = GameBuilder::new()\n\n .pieces(vec![piece.clone(), piece2.clone(), piece3])\n\n .build();\n\n assert_eq!(game.search_piece_alias(\"test\"), vec![&piece, &piece2]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 15, "score": 65696.72417839675 }, { "content": "#[test]\n\n#[allow(unused_must_use)]\n\nfn board_set_piece() {\n\n let mut board = Board::new(NonZeroUsize::new(9).unwrap(), NonZeroUsize::new(8).unwrap());\n\n board.set(0, 0, Some(2));\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 16, "score": 65422.409635312106 }, { "content": "#[test]\n\nfn board_oob_set_piece() {\n\n let mut board = Board::new(NonZeroUsize::new(9).unwrap(), NonZeroUsize::new(8).unwrap());\n\n assert_eq!(\n\n board.set(20, 20, Some(2)).unwrap_err(),\n\n BoardError::OutOfBounds(20, 20)\n\n );\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 17, "score": 62358.91701842799 }, { "content": "fn assert_set_equal<T: PartialEq + std::fmt::Debug>(a: Vec<T>, b: Vec<T>) {\n\n let res = set_equal(&a, &b);\n\n assert!(\n\n res,\n\n \"Sets A and B are not equal:\\nA = {:#?}\\nB = {:#?}\",\n\n a, b\n\n )\n\n}\n\n\n", "file_path": "tests/movement.rs", "rank": 18, "score": 62034.19587679714 }, { 
"content": "#[test]\n\nfn game_create() {\n\n let _game = GameBuilder::new();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 19, "score": 54439.760717823076 }, { "content": "#[test]\n\nfn game_create_with_board() {\n\n let board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let _game = GameBuilder::new().board(board).build();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 20, "score": 52320.1238899854 }, { "content": "#[test]\n\nfn piece_id_override() {\n\n let piece = PieceBuilder::new()\n\n .id(\"piece_name\")\n\n .id(\"other_piece_name\")\n\n .build();\n\n assert_eq!(piece.id(), \"other_piece_name\");\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 21, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_create_with_alias() {\n\n let _piece = PieceBuilder::new()\n\n .id(\"piece_name\")\n\n .alias(\"Piece name\")\n\n .build();\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 22, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_get_id() {\n\n let piece = PieceBuilder::new().id(\"piece_name\").build();\n\n assert_eq!(piece.id(), \"piece_name\");\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 23, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_alias_append() {\n\n let piece = PieceBuilder::new()\n\n .id(\"piece_name\")\n\n .alias(\"Insert text\")\n\n .alias(\"So-called test piece\")\n\n .build();\n\n assert_eq!(piece.alias(), \"Insert text; So-called test piece\");\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 24, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_create_with_id() {\n\n let _piece = PieceBuilder::new().id(\"piece_name\").build();\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 25, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_get_description() {\n\n let piece = PieceBuilder::new()\n\n .id(\"piece_name\")\n\n .desc(\"It's just a test piece\")\n\n .build();\n\n assert_eq!(piece.desc(), \"It's just a test piece\");\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 26, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_create_with_description() {\n\n let _piece = PieceBuilder::new()\n\n .id(\"piece_name\")\n\n .desc(\"It's just a test piece\")\n\n .build();\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 27, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_description_append() {\n\n let piece = PieceBuilder::new()\n\n .id(\"piece_name\")\n\n .desc(\"First line\")\n\n .desc(\"Second line\")\n\n .build();\n\n assert_eq!(piece.desc(), \"First line\\nSecond line\");\n\n}\n", "file_path": "tests/piece.rs", "rank": 28, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_get_alias() {\n\n let piece = PieceBuilder::new()\n\n .id(\"piece_name\")\n\n .alias(\"Piece name\")\n\n .build();\n\n assert_eq!(piece.alias(), \"Piece name\");\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 29, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn piece_create_builder() {\n\n let _piece = PieceBuilder::new();\n\n}\n\n\n", "file_path": "tests/piece.rs", "rank": 30, "score": 51099.38303183214 }, { "content": "#[test]\n\nfn game_create_with_board_push() {\n\n let board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let board2 = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let _game = GameBuilder::new().board(board).board(board2).build();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 31, "score": 50389.54125077267 }, { "content": "#[test]\n\nfn 
game_create_with_boards_push() {\n\n let board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let board2 = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let _game = GameBuilder::new().boards(vec![board, board2]).build();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 32, "score": 50389.54125077267 }, { "content": "#[test]\n\nfn game_create_with_board_and_boards() {\n\n let board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let board2 = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let board3 = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let _game = GameBuilder::new()\n\n .board(board)\n\n .boards(vec![board2, board3])\n\n .build();\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 33, "score": 50389.54125077267 }, { "content": "#[test]\n\nfn game_get_boards_with_board() {\n\n let board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let game = GameBuilder::new().board(board.clone()).build();\n\n assert_eq!(game.boards(), &vec![board]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 34, "score": 50389.54125077267 }, { "content": "#[test]\n\nfn game_get_boards_with_boards_push() {\n\n let board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let board2 = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let game = GameBuilder::new()\n\n .boards(vec![board.clone(), board2.clone()])\n\n .build();\n\n assert_eq!(*game.boards(), vec![board, board2]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 35, "score": 48623.0641780709 }, { "content": "#[test]\n\nfn game_get_boards_with_board_push() {\n\n let board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let board2 = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let game = GameBuilder::new()\n\n .board(board.clone())\n\n .board(board2.clone())\n\n .build();\n\n assert_eq!(*game.boards(), vec![board, board2]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 36, "score": 48623.0641780709 }, { "content": "#[test]\n\nfn game_get_boards_with_board_and_boards_push() {\n\n let board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let board2 = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let board3 = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let game = GameBuilder::new()\n\n .board(board.clone())\n\n .boards(vec![board2.clone(), board3.clone()])\n\n .build();\n\n assert_eq!(*game.boards(), vec![board, board2, board3]);\n\n}\n\n\n", "file_path": "tests/game.rs", "rank": 37, "score": 47000.05609090016 }, { "content": "#[test]\n\nfn board_set_name() {\n\n let mut board = Board::new(NonZeroUsize::new(1).unwrap(), NonZeroUsize::new(1).unwrap());\n\n board.set_name(\"Hello\");\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 38, "score": 46444.21786074854 }, { "content": "#[test]\n\n#[allow(unused_must_use)]\n\nfn board_get_piece() {\n\n for x in 0..9 {\n\n for y in 0..8 {\n\n let mut board =\n\n Board::new(NonZeroUsize::new(9).unwrap(), NonZeroUsize::new(8).unwrap());\n\n board.set(x, y, Some(1));\n\n assert_eq!(board.get(x, y).unwrap(), Some(1));\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 39, "score": 44248.34539207333 }, { "content": "#[test]\n\n#[allow(unused_must_use)]\n\nfn board_clear_piece() {\n\n 
let mut board = Board::new(NonZeroUsize::new(9).unwrap(), NonZeroUsize::new(8).unwrap());\n\n board.set(0, 0, Some(1));\n\n board.clear_pos(0, 0);\n\n assert_eq!(board.get(0, 0).unwrap(), None);\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 40, "score": 44248.34539207333 }, { "content": "#[test]\n\n#[allow(unused_must_use)]\n\nfn board_move_piece() {\n\n let mut board = Board::new(NonZeroUsize::new(9).unwrap(), NonZeroUsize::new(8).unwrap());\n\n board.set(0, 0, Some(1));\n\n board.move_piece(0, 0, 3, 3);\n\n assert_eq!(board.get(3, 3).unwrap(), Some(1));\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 41, "score": 44248.34539207333 }, { "content": "#[test]\n\nfn board_oob_clear_piece() {\n\n let mut board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n assert_eq!(\n\n board.clear_pos(6, 6).unwrap_err(),\n\n BoardError::OutOfBounds(6, 6)\n\n );\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 42, "score": 42301.99930096831 }, { "content": "#[test]\n\nfn board_oob_get_piece() {\n\n let board = Board::new(NonZeroUsize::new(3).unwrap(), NonZeroUsize::new(3).unwrap());\n\n assert_eq!(board.get(5, 5).unwrap_err(), BoardError::OutOfBounds(5, 5));\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 43, "score": 42301.99930096831 }, { "content": "#[test]\n\nfn board_oob_move_piece_scnd_pos() {\n\n let mut board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n assert_eq!(\n\n board.move_piece(0, 0, 6, 6).unwrap_err(),\n\n BoardError::OutOfBounds(6, 6)\n\n );\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 44, "score": 38971.03286661026 }, { "content": "#[test]\n\nfn board_oob_move_piece_first_pos() {\n\n let mut board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n assert_eq!(\n\n board.move_piece(6, 6, 0, 0).unwrap_err(),\n\n BoardError::OutOfBounds(6, 6)\n\n );\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 45, "score": 38971.03286661026 }, { "content": "#[test]\n\nfn movement_range_any() {\n\n let rook_movement = MovementType::RangeAny(Box::new(MovementType::Undirected(1, 0)));\n\n let board = Board::new(NonZeroUsize::new(8).unwrap(), NonZeroUsize::new(8).unwrap());\n\n let player = Player::new(true);\n\n assert_set_equal(\n\n rook_movement.flatten(&board, &player, 4, 3).unwrap(),\n\n vec![\n\n (1, 0),\n\n (2, 0),\n\n (3, 0),\n\n (-1, 0),\n\n (-2, 0),\n\n (-3, 0),\n\n (-4, 0),\n\n (0, 1),\n\n (0, 2),\n\n (0, 3),\n\n (0, 4),\n\n (0, -1),\n\n (0, -2),\n\n (0, -3),\n\n ],\n\n );\n\n}\n", "file_path": "tests/movement.rs", "rank": 46, "score": 26323.97622082086 }, { "content": "#[test]\n\nfn movement_undirected() {\n\n let knight_movement = MovementType::Undirected(2, 1);\n\n let board = Board::new(NonZeroUsize::new(8).unwrap(), NonZeroUsize::new(8).unwrap());\n\n let player = Player::new(true);\n\n assert_set_equal(\n\n knight_movement.flatten(&board, &player, 4, 4).unwrap(),\n\n vec![\n\n (2, 1),\n\n (1, 2),\n\n (-2, 1),\n\n (-1, 2),\n\n (2, -1),\n\n (1, -2),\n\n (-2, -1),\n\n (-1, -2),\n\n ],\n\n );\n\n}\n\n\n", "file_path": "tests/movement.rs", "rank": 47, "score": 26323.97622082086 }, { "content": "#[test]\n\nfn movement_directed() {\n\n let pawn_movement = MovementType::Directed(0, 1);\n\n let board = Board::new(NonZeroUsize::new(8).unwrap(), NonZeroUsize::new(8).unwrap());\n\n let player = Player::new(true);\n\n assert_set_equal(\n\n pawn_movement.flatten(&board, &player, 4, 4).unwrap(),\n\n vec![(0, 1)],\n\n );\n\n}\n\n\n", "file_path": "tests/movement.rs", "rank": 48, "score": 
26323.97622082086 }, { "content": "#[test]\n\nfn movement_range() {\n\n let double_wazir_movement = MovementType::Range(Box::new(MovementType::Undirected(1, 0)), 2);\n\n let board = Board::new(NonZeroUsize::new(8).unwrap(), NonZeroUsize::new(8).unwrap());\n\n let player = Player::new(true);\n\n assert_set_equal(\n\n double_wazir_movement\n\n .flatten(&board, &player, 4, 4)\n\n .unwrap(),\n\n vec![\n\n (1, 0),\n\n (2, 0),\n\n (-1, 0),\n\n (-2, 0),\n\n (0, 1),\n\n (0, 2),\n\n (0, -1),\n\n (0, -2),\n\n ],\n\n );\n\n}\n\n\n", "file_path": "tests/movement.rs", "rank": 49, "score": 26323.97622082086 }, { "content": "#[test]\n\nfn board_create() {\n\n let _board = Board::new(NonZeroUsize::new(9).unwrap(), NonZeroUsize::new(8).unwrap());\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 50, "score": 26323.97622082086 }, { "content": "#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]\n\npub enum PlayerColor {\n\n White,\n\n Black,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Player {\n\n pub color: PlayerColor,\n\n}\n\n\n\nimpl PlayerColor {\n\n pub fn white(&self) -> bool {\n\n *self == PlayerColor::White\n\n }\n\n\n\n pub fn black(&self) -> bool {\n\n *self == PlayerColor::Black\n\n }\n\n}\n\n\n\nimpl Player {\n\n pub fn new(color: PlayerColor) -> Self {\n\n Self { color }\n\n }\n\n}\n", "file_path": "src/player.rs", "rank": 51, "score": 25685.278395896716 }, { "content": "#[test]\n\n#[allow(unused_must_use)]\n\nfn board_clear_board() {\n\n let empty_board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n let mut board = Board::new(NonZeroUsize::new(5).unwrap(), NonZeroUsize::new(5).unwrap());\n\n board.set(0, 0, Some(1));\n\n board.set(0, 3, Some(2));\n\n board.set(3, 0, Some(3));\n\n board.clear();\n\n assert_eq!(board, empty_board);\n\n}\n", "file_path": "tests/board.rs", "rank": 52, "score": 25272.720172526027 }, { "content": "#[test]\n\nfn board_get_name() {\n\n let mut board = Board::new(NonZeroUsize::new(1).unwrap(), NonZeroUsize::new(1).unwrap());\n\n board.set_name(\"Hello\");\n\n assert_eq!(board.name(), \"Hello\");\n\n}\n\n\n", "file_path": "tests/board.rs", "rank": 53, "score": 25270.153617509768 }, { "content": "pub struct GameBuilder {\n\n game_pieces: Vec<Piece>,\n\n game_board: Board,\n\n game_players: Vec<Player>,\n\n}\n\n\n\nimpl Default for GameBuilder {\n\n fn default() -> Self {\n\n GameBuilder {\n\n game_pieces: vec![],\n\n game_board: Board::new(NonZeroUsize::new(1).unwrap(), NonZeroUsize::new(1).unwrap()),\n\n game_players: vec![],\n\n }\n\n }\n\n}\n\n\n\nimpl GameBuilder {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n", "file_path": "src/game.rs", "rank": 54, "score": 23877.35682265116 }, { "content": "use crate::board::Board;\n\nuse crate::piece::Piece;\n\nuse crate::player::{Player, PlayerColor};\n\nuse std::default::Default;\n\nuse std::num::NonZeroUsize;\n\n\n\n#[derive(Debug)]\n\npub struct Game {\n\n pieces: Vec<Piece>,\n\n board: Board,\n\n pub players: Vec<Player>,\n\n current_player: usize,\n\n}\n\n\n\nimpl Game {\n\n pub fn pieces(&self) -> &Vec<Piece> {\n\n &self.pieces\n\n }\n\n\n\n pub fn board(&self) -> &Board {\n", "file_path": "src/game.rs", "rank": 55, "score": 23874.896566693267 }, { "content": "\n\n pub fn board(mut self, board: Board) -> Self {\n\n self.game_board = board;\n\n self\n\n }\n\n\n\n pub fn piece(mut self, piece: Piece) -> Self {\n\n self.game_pieces.push(piece);\n\n self\n\n }\n\n\n\n pub fn pieces(mut self, mut pieces: Vec<Piece>) -> Self {\n\n self.game_pieces.append(&mut pieces);\n\n self\n\n 
}\n\n\n\n pub fn player(mut self, player: Player) -> Self {\n\n self.game_players.push(player);\n\n self\n\n }\n", "file_path": "src/game.rs", "rank": 56, "score": 23872.560902597685 }, { "content": " &self.board\n\n }\n\n\n\n pub fn search_piece<'a>(&'a self, id: &str) -> Option<&'a Piece> {\n\n self.pieces.iter().find(|x| x.id() == id)\n\n }\n\n\n\n pub fn search_piece_alias<'a>(&'a self, alias: &str) -> Vec<&'a Piece> {\n\n self.pieces\n\n .iter()\n\n .filter(|x| x.alias_list().contains(&alias.to_string()))\n\n .collect()\n\n }\n\n\n\n pub fn set(&mut self, x: usize, y: usize, piece: &str, color: PlayerColor) -> Option<()> {\n\n let piece_index = self\n\n .pieces\n\n .iter()\n\n .enumerate()\n\n .find(|(_k, x)| x.id() == piece || x.alias_list().contains(&piece.to_string()))?\n", "file_path": "src/game.rs", "rank": 57, "score": 23872.274081251267 }, { "content": "use sharmat::board::*;\n\nuse sharmat::game::*;\n\nuse sharmat::piece::*;\n\nuse std::num::NonZeroUsize;\n\n\n\n#[test]\n", "file_path": "tests/game.rs", "rank": 58, "score": 23870.29045436327 }, { "content": "\n\n pub fn build(self) -> Game {\n\n Game {\n\n board: self.game_board,\n\n pieces: self.game_pieces,\n\n players: self.game_players,\n\n current_player: 0,\n\n }\n\n }\n\n}\n", "file_path": "src/game.rs", "rank": 59, "score": 23869.634564749733 }, { "content": " .0;\n\n self.board.set(x, y, Some((piece_index, color))).ok()\n\n }\n\n\n\n pub fn player(&self, color: PlayerColor) -> Option<&Player> {\n\n self.players.iter().find(|p| p.color == color)\n\n }\n\n\n\n pub fn current_player(&self) -> Option<&Player> {\n\n self.players.get(self.current_player)\n\n }\n\n\n\n pub fn next_player(&mut self) {\n\n self.current_player += 1;\n\n if self.current_player >= self.players.len() {\n\n self.current_player = 0;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/game.rs", "rank": 60, "score": 23869.20046326982 }, { "content": " }\n\n }\n\n\n\n pub fn desc(&self) -> &str {\n\n &self.desc\n\n }\n\n\n\n pub fn movement_type(&self) -> &Vec<MovementType> {\n\n &self.movement_type\n\n }\n\n}\n\n\n\npub struct PieceBuilder {\n\n piece_id: String,\n\n piece_alias: Vec<String>,\n\n piece_desc: String,\n\n piece_display_white: usize,\n\n piece_display_black: usize,\n\n piece_movement_type: Vec<MovementType>,\n\n}\n", "file_path": "src/piece.rs", "rank": 61, "score": 22795.535762933963 }, { "content": "use super::movement::*;\n\nuse std::default::Default;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Piece {\n\n id: String,\n\n alias: Vec<String>,\n\n display_white: usize,\n\n display_black: usize,\n\n desc: String,\n\n movement_type: Vec<MovementType>,\n\n}\n\n\n\nimpl Piece {\n\n pub fn id(&self) -> &str {\n\n &self.id\n\n }\n\n\n\n pub fn alias(&self) -> String {\n", "file_path": "src/piece.rs", "rank": 62, "score": 22794.989152875794 }, { "content": "\n\nimpl Default for PieceBuilder {\n\n fn default() -> Self {\n\n PieceBuilder {\n\n piece_id: SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Couldn't get the system clock?\")\n\n .as_millis()\n\n .to_string(),\n\n piece_alias: vec![],\n\n piece_desc: String::new(),\n\n piece_display_white: 0,\n\n piece_display_black: 0,\n\n piece_movement_type: vec![MovementType::Stay],\n\n }\n\n }\n\n}\n\n\n\nimpl PieceBuilder {\n\n pub fn new() -> Self {\n", "file_path": "src/piece.rs", "rank": 63, "score": 22794.466559707234 }, { "content": " self.piece_display_black = self.piece_alias.len() + 1;\n\n self.piece_alias.push(id.to_string());\n\n 
self\n\n }\n\n\n\n pub fn desc(mut self, desc: &str) -> Self {\n\n if !self.piece_desc.is_empty() {\n\n self.piece_desc.push('\\n');\n\n }\n\n self.piece_desc.push_str(desc);\n\n self\n\n }\n\n\n\n pub fn movement(mut self, movement_type: Vec<MovementType>) -> Self {\n\n self.piece_movement_type = movement_type;\n\n self\n\n }\n\n\n\n pub fn build(self) -> Piece {\n\n Piece {\n", "file_path": "src/piece.rs", "rank": 64, "score": 22792.631366096433 }, { "content": " Self::default()\n\n }\n\n\n\n pub fn id(mut self, id: &str) -> Self {\n\n self.piece_id = id.to_string();\n\n self\n\n }\n\n\n\n pub fn alias(mut self, alias: &str) -> Self {\n\n self.piece_alias.push(alias.to_string());\n\n self\n\n }\n\n\n\n pub fn display_white(mut self, id: &str) -> Self {\n\n self.piece_display_white = self.piece_alias.len() + 1;\n\n self.piece_alias.push(id.to_string());\n\n self\n\n }\n\n\n\n pub fn display_black(mut self, id: &str) -> Self {\n", "file_path": "src/piece.rs", "rank": 65, "score": 22791.150850888935 }, { "content": "use sharmat::piece::*;\n\n\n\n#[test]\n", "file_path": "tests/piece.rs", "rank": 66, "score": 22790.544283688552 }, { "content": " id: self.piece_id,\n\n alias: self.piece_alias,\n\n desc: self.piece_desc,\n\n display_black: self.piece_display_black,\n\n display_white: self.piece_display_white,\n\n movement_type: self.piece_movement_type,\n\n }\n\n }\n\n}\n", "file_path": "src/piece.rs", "rank": 67, "score": 22787.63177485856 }, { "content": " self.alias.join(\"; \")\n\n }\n\n\n\n pub fn alias_list(&self) -> &Vec<String> {\n\n &self.alias\n\n }\n\n\n\n pub fn display_white(&self) -> &str {\n\n if self.display_white == 0 || self.alias.len() < self.display_white {\n\n &self.id\n\n } else {\n\n &self.alias[self.display_white - 1]\n\n }\n\n }\n\n\n\n pub fn display_black(&self) -> &str {\n\n if self.display_black == 0 || self.alias.len() < self.display_black {\n\n &self.id\n\n } else {\n\n &self.alias[self.display_black - 1]\n", "file_path": "src/piece.rs", "rank": 68, "score": 22786.63311514479 }, { "content": "# Sharmat\n\n\n\nA shogi and chess variants app!\n", "file_path": "README.md", "rank": 69, "score": 14793.709797894728 }, { "content": "# Contributing to Sharmat\n\n\n\nWe're open to suggestions and pull requests. 
So don't hesitate!\n\n\n\nWe just ask for documented and good code.\n\n\n\nAnd you **must** respect the Developer Certificate of Origin:\n\n\n\nDeveloper Certificate of Origin\n\nVersion 1.1\n\n\n\nCopyright (C) 2004, 2006 The Linux Foundation and its contributors.\n\n1 Letterman Drive\n\nSuite D4700\n\nSan Francisco, CA, 94129\n\n\n\nEveryone is permitted to copy and distribute verbatim copies of this\n\nlicense document, but changing it is not allowed.\n\n\n\n\n\nDeveloper's Certificate of Origin 1.1\n\n\n\nBy making a contribution to this project, I certify that:\n\n\n\n(a) The contribution was created in whole or in part by me and I\n\n have the right to submit it under the open source license\n\n indicated in the file; or\n\n\n\n(b) The contribution is based upon previous work that, to the best\n\n of my knowledge, is covered under an appropriate open source\n\n license and I have the right under that license to submit that\n\n work with modifications, whether created in whole or in part\n\n by me, under the same open source license (unless I am\n\n permitted to submit under a different license), as indicated\n\n in the file; or\n\n\n\n(c) The contribution was provided directly to me by some other\n\n person who certified (a), (b) or (c) and I have not modified\n\n it.\n\n\n\n(d) I understand and agree that this project and the contribution\n\n are public and that a record of the contribution (including all\n\n personal information I submit with it, including my sign-off) is\n\n maintained indefinitely and may be redistributed consistent with\n\n this project or the open source license(s) involved.\n", "file_path": "CONTRIBUTING.md", "rank": 70, "score": 14791.482639137592 }, { "content": " let mut res = vec![];\n\n let mut try_append = |dx: isize, dy: isize| {\n\n if is_within_bounds(board, x as isize + dx, y as isize + dy) {\n\n let target_piece = board\n\n .get((x as isize + dx) as usize, (y as isize + dy) as usize)\n\n .ok()\n\n .flatten();\n\n if target_piece.is_none() || target_piece.unwrap().1 != player.color {\n\n res.push((dx, dy));\n\n }\n\n }\n\n };\n\n let dx = *dx as isize;\n\n let dy = *dy as isize;\n\n if dx == dy {\n\n try_append(dx, dy);\n\n try_append(-dx, dy);\n\n try_append(dx, -dy);\n\n try_append(-dx, -dy);\n\n } else {\n", "file_path": "src/movement.rs", "rank": 71, "score": 15.79844791276911 }, { "content": " MovementCondition::AsBlack => player.color.black(),\n\n MovementCondition::Custom(f) => f(board, player, x, y, dx, dy),\n\n }\n\n }\n\n}\n\n\n\nimpl MovementType {\n\n /**\n\n Evaluates a MovementType's branches down into a set of possible, raw movements (dx, dy).\n\n **/\n\n pub fn flatten(\n\n &self,\n\n board: &Board,\n\n player: &Player,\n\n x: usize,\n\n y: usize,\n\n ) -> Option<Vec<RawMovement>> {\n\n match self {\n\n MovementType::Stay => Some(vec![]),\n\n MovementType::Undirected(dx, dy) => {\n", "file_path": "src/movement.rs", "rank": 72, "score": 15.371463572427553 }, { "content": " for x in 0..width.get() {\n\n board.push(Vec::with_capacity(height.get()));\n\n for _y in 0..height.get() {\n\n board[x].push(None);\n\n }\n\n }\n\n Board {\n\n width,\n\n height,\n\n board,\n\n name: String::from(\"Board\"),\n\n }\n\n }\n\n\n\n pub fn set(&mut self, x: usize, y: usize, piece: RawPiece) -> BoardResult<()> {\n\n self.check_pos(x, y)?;\n\n self.board[x][y] = piece;\n\n Ok(())\n\n }\n\n\n", "file_path": "src/board.rs", "rank": 73, "score": 14.089433065450887 }, { "content": " let mut res = vec![];\n\n for child_movement in mv.flatten(board, player, x, 
y)?.into_iter() {\n\n let (dx, dy) = child_movement.clone();\n\n for mult in 1..=(board.width.get().max(board.height.get()) as isize) {\n\n if is_within_bounds(board, x as isize + dx * mult, y as isize + dy * mult) {\n\n let target_piece = board\n\n .get(\n\n (x as isize + dx * mult) as usize,\n\n (y as isize + dy * mult) as usize,\n\n )\n\n .ok()\n\n .flatten();\n\n if target_piece.is_some() && target_piece.unwrap().1 != player.color {\n\n res.push((dx * mult, dy * mult));\n\n break;\n\n } else if target_piece.is_some() {\n\n break;\n\n } else {\n\n res.push((dx * mult, dy * mult));\n\n }\n", "file_path": "src/movement.rs", "rank": 74, "score": 13.747006650542737 }, { "content": " &self,\n\n board: &Board,\n\n player: &Player,\n\n x: usize,\n\n y: usize,\n\n dx: isize,\n\n dy: isize,\n\n ) -> bool {\n\n match self {\n\n MovementCondition::Capture => board\n\n .get((x as isize + dx) as usize, (y as isize + dy) as usize)\n\n .ok()\n\n .flatten()\n\n .is_some(),\n\n MovementCondition::NoCapture => board\n\n .get((x as isize + dx) as usize, (y as isize + dy) as usize)\n\n .ok()\n\n .flatten()\n\n .is_none(),\n\n MovementCondition::AsWhite => player.color.white(),\n", "file_path": "src/movement.rs", "rank": 75, "score": 13.690956990399949 }, { "content": "use sharmat::board::*;\n\nuse sharmat::game::*;\n\nuse sharmat::movement::*;\n\nuse sharmat::piece::*;\n\nuse sharmat::player::*;\n\nuse sharmat::rule::*;\n\nuse std::num::NonZeroUsize;\n\n\n", "file_path": "tests/movement.rs", "rank": 76, "score": 11.892101992277691 }, { "content": " pub fn get(&self, x: usize, y: usize) -> BoardResult<RawPiece> {\n\n self.check_pos(x, y)?;\n\n Ok(self.board[x][y])\n\n }\n\n\n\n pub fn move_piece(&mut self, x: usize, y: usize, dx: usize, dy: usize) -> BoardResult<()> {\n\n self.check_pos(x, y)?;\n\n self.check_pos(dx, dy)?;\n\n self.board[dx][dy] = self.board[x][y];\n\n self.board[x][y] = None;\n\n Ok(())\n\n }\n\n\n\n pub fn clear_pos(&mut self, x: usize, y: usize) -> BoardResult<()> {\n\n self.check_pos(x, y)?;\n\n self.board[x][y] = None;\n\n Ok(())\n\n }\n\n\n\n pub fn clear(&mut self) {\n", "file_path": "src/board.rs", "rank": 77, "score": 11.654350405247394 }, { "content": " } else {\n\n break;\n\n }\n\n }\n\n }\n\n Some(res)\n\n }\n\n MovementType::Range(mv, max_range) => {\n\n let mut res = vec![];\n\n for child_movement in mv.flatten(board, player, x, y)?.into_iter() {\n\n let (dx, dy) = child_movement.clone();\n\n for mult in 1..=(*max_range as isize) {\n\n if is_within_bounds(board, x as isize + dx * mult, y as isize + dy * mult) {\n\n let target_piece = board\n\n .get(\n\n (x as isize + dx * mult) as usize,\n\n (y as isize + dy * mult) as usize,\n\n )\n\n .ok()\n\n .flatten();\n", "file_path": "src/movement.rs", "rank": 78, "score": 11.406675678192483 }, { "content": " // hard-coded permutations; idc we're in 2d\n\n try_append(dx, dy);\n\n try_append(-dx, dy);\n\n try_append(dx, -dy);\n\n try_append(-dx, -dy);\n\n try_append(dy, dx);\n\n try_append(-dy, dx);\n\n try_append(dy, -dx);\n\n try_append(-dy, -dx);\n\n }\n\n Some(res)\n\n }\n\n MovementType::Directed(dx, dy) => {\n\n if is_within_bounds(board, x as isize + *dx, y as isize + *dy) {\n\n Some(vec![(*dx, *dy)])\n\n } else {\n\n Some(vec![])\n\n }\n\n }\n\n MovementType::RangeAny(mv) => {\n", "file_path": "src/movement.rs", "rank": 79, "score": 11.337395131979159 }, { "content": " if target_piece.is_some() && target_piece.unwrap().1 != player.color {\n\n res.push((dx * mult, dy * mult));\n\n break;\n\n } else if target_piece.is_some() {\n\n 
break;\n\n } else {\n\n res.push((dx * mult, dy * mult));\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n Some(res)\n\n }\n\n MovementType::Union(moves) => {\n\n let mut res = vec![];\n\n for mv in moves {\n\n for raw_mv in mv.flatten(board, player, x, y)?.into_iter() {\n\n res.push(raw_mv);\n", "file_path": "src/movement.rs", "rank": 80, "score": 11.30607071692953 }, { "content": " self.board.iter_mut().for_each(|column| {\n\n column.iter_mut().for_each(|cell| {\n\n *cell = None;\n\n });\n\n });\n\n }\n\n\n\n fn check_pos(&self, x: usize, y: usize) -> BoardResult<()> {\n\n if x >= self.width.get() || y >= self.height.get() {\n\n println!(\"{}:{} / {}:{}\", x, self.width.get(), y, self.height.get());\n\n return Err(OutOfBounds(x, y));\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn set_name<'a>(&'a mut self, name: &'a str) {\n\n self.name = name.to_string();\n\n }\n\n\n\n pub fn name<'a>(&'a self) -> String {\n\n self.name.clone()\n\n }\n\n}\n", "file_path": "src/board.rs", "rank": 81, "score": 10.580604868329612 }, { "content": " /// If the target square is not occupied by any piece\n\n NoCapture,\n\n /// If the current player is white\n\n AsWhite,\n\n /// If the current player is black\n\n AsBlack,\n\n /// A custom condition\n\n Custom(&'static (dyn Fn(&Board, &Player, usize, usize, isize, isize) -> bool + 'static)),\n\n}\n\n\n\npub type RawMovement = (isize, isize);\n\n\n\nimpl Copy for MovementCondition {}\n\n\n\nimpl Clone for MovementCondition {\n\n fn clone(&self) -> Self {\n\n match self {\n\n MovementCondition::Capture => MovementCondition::Capture,\n\n MovementCondition::NoCapture => MovementCondition::NoCapture,\n\n MovementCondition::AsWhite => MovementCondition::AsWhite,\n", "file_path": "src/movement.rs", "rank": 82, "score": 10.139818498422816 }, { "content": " /// Describes a piece's unique movement on the (x, y) basis.\n\n /// `Directed(dx, dy)` is equivalent to moving a piece that is on `(x, y)` to `(x + dx, y + dy)`.\n\n ///\n\n /// ## Example:\n\n ///\n\n /// ```rust,ignore\n\n /// let white_pawn_movement = MovementType::Directed(0, 1);\n\n /// ```\n\n Directed(isize, isize),\n\n\n\n /// Turns regular movement types (Undirected, Directed) into a ranging movement type\n\n /// `RangeAny(Directed(dx, dy))` is equivalent to moving a piece that is on `(x, y)` to `(x + n*dx, y + n*dy)`, with any `n > 1`.\n\n /// No other piece must stand in that piece's path; any opponent's piece will be taken and the piece will stop.\n\n ///\n\n /// ## Example:\n\n ///\n\n /// ```rust,ignore\n\n /// let lance_movement = MovementType::RangeAny(Box::new(MovementType::Directed(0, 1)));\n\n /// ```\n\n RangeAny(Box<MovementType>),\n", "file_path": "src/movement.rs", "rank": 83, "score": 9.335780402708526 }, { "content": "#[derive(Debug)]\n\npub struct Rule;\n\n\n\nimpl Rule {\n\n pub fn new() -> Self {\n\n Rule\n\n }\n\n}\n", "file_path": "src/rule.rs", "rank": 84, "score": 9.070518190304034 }, { "content": "use self::BoardError::*;\n\nuse super::player::PlayerColor;\n\nuse std::num::NonZeroUsize;\n\n\n", "file_path": "src/board.rs", "rank": 85, "score": 9.059058367702807 }, { "content": "// The ~engine~ (the thing that handles rules & stuff)\n\n\n\npub mod board;\n\npub mod game;\n\npub mod movement;\n\npub mod piece;\n\npub mod player;\n\npub mod rule;\n", "file_path": "src/lib.rs", "rank": 86, "score": 8.671477668142469 }, { "content": "// The ~tests~ (the thing that tests functions & stuff)\n\n\n\nuse sharmat::player::*;\n\nuse sharmat::rule::*;\n\n\n\n#[cfg(test)]\n\nmod 
board;\n\n\n\n#[cfg(test)]\n\nmod movement;\n\n\n\n// Engine basic tests\n\n\n\n#[cfg(test)]\n\nmod player {\n\n use super::*;\n\n\n\n #[test]\n\n fn player_create() {\n\n let _player = Player::new(true /* UNKNOWN */);\n", "file_path": "tests/test.rs", "rank": 87, "score": 8.567980867030146 }, { "content": "\n\n /// Turns regular movement types (Undirected, Directed) into a limited, ranging movement type\n\n /// `Range(Directed(dx, dy), max)` is equivalent to moving a piece that is on `(x, y)` to `(x + n*dx, y + n*dy)`, with any `1 < n ≤ max`.\n\n /// No other piece must stand in that piece's path; any opponent's piece will be taken and the piece will stop.\n\n ///\n\n /// ## Example:\n\n ///\n\n /// ```rust,ignore\n\n /// let double_pawn_movement = MovementType::Range(Box::new(MovementType::Directed(0, 1)), 2);\n\n /// ```\n\n Range(Box<MovementType>, usize),\n\n\n\n /// Assembles two movement types into a union of both of them\n\n /// `Union(a, b, c, ..., ω)` is equivalent to being able to do `a` OR `b` OR `c` OR ... OR `ω`.\n\n ///\n\n /// ## Example:\n\n ///\n\n /// ```rust,ignore\n\n /// let king_movement = MovementType::Union(vec![MovementType::Undirected(0, 1), MovementType::Undirected(1, 1)]);\n\n /// ```\n", "file_path": "src/movement.rs", "rank": 88, "score": 8.060157808965984 }, { "content": " }\n\n }\n\n Some(res)\n\n }\n\n MovementType::Condition(mv, tags) => {\n\n let mut res = vec![];\n\n for raw_mv in mv.flatten(board, player, x, y)?.into_iter() {\n\n if tags\n\n .iter()\n\n .all(|t| t.validate(board, player, x, y, raw_mv.0, raw_mv.1))\n\n {\n\n res.push(raw_mv);\n\n }\n\n }\n\n Some(res)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[inline]\n", "file_path": "src/movement.rs", "rank": 89, "score": 7.885181723136181 }, { "content": "use super::board::Board;\n\nuse super::player::Player;\n\nuse std::fmt;\n\n\n\n#[derive(Clone, Debug)]\n\npub enum MovementType {\n\n /// The null movement, yields by itself ∅\n\n Stay,\n\n /// Describes a piece's movements based on any orthogonal basis.\n\n /// `Undirected(a, b)` is equivalent to moving `a` squares in any direction and `b` in the other direction.\n\n ///\n\n /// ## Example:\n\n ///\n\n /// ```rust,ignore\n\n /// let knight_movement = MovementType::Undirected(2, 1);\n\n /// let elephant_movement = MovementType::Undirected(2, 2);\n\n /// let wazir_movement = MovementType::Undirected(1, 0); // could also be MovementType::Undirected(0, 1)\n\n /// ```\n\n Undirected(usize, usize),\n\n\n", "file_path": "src/movement.rs", "rank": 90, "score": 7.69685530067466 }, { "content": "use sharmat::board::*;\n\nuse std::num::NonZeroUsize;\n\n\n\n#[test]\n", "file_path": "tests/board.rs", "rank": 91, "score": 6.9724976015199065 }, { "content": " Union(Vec<MovementType>),\n\n\n\n /// Adds one or more conditions to a movement type.\n\n /// See `MovementCondition` for more information on the different, possible conditions.\n\n ///\n\n /// ## Example:\n\n ///\n\n /// ```rust,ignore\n\n /// let pawn_movement = MovementType::Union(vec![\n\n /// MovementType::Condition(Box::new(MovementType::Directed(0, 1)), MovementCondition::AsWhite),\n\n /// MovementType::Condition(Box::new(MovementType::Directed(0, -1)), MovementCondition::AsBlack),\n\n /// ]);\n\n /// ```\n\n Condition(Box<MovementType>, Vec<MovementCondition>),\n\n // Custom?\n\n}\n\n\n\npub enum MovementCondition {\n\n /// If the target square must be occupied by an opponent's piece\n\n Capture,\n", "file_path": "src/movement.rs", "rank": 92, "score": 6.4730297611079655 }, { "content": " 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod rule {\n\n use super::*;\n\n\n\n #[test]\n\n fn rule_create() {\n\n let _rule = Rule::new(/* UNKNOWN */);\n\n }\n\n}\n", "file_path": "tests/test.rs", "rank": 93, "score": 3.7453901930996603 }, { "content": " MovementCondition::AsBlack => MovementCondition::AsBlack,\n\n MovementCondition::Custom(f) => MovementCondition::Custom(*f),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for MovementCondition {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n MovementCondition::Capture => write!(f, \"Capture\"),\n\n MovementCondition::NoCapture => write!(f, \"NoCapture\"),\n\n MovementCondition::AsWhite => write!(f, \"AsWhite\"),\n\n MovementCondition::AsBlack => write!(f, \"AsBlack\"),\n\n MovementCondition::Custom(_) => write!(f, \"Custom(<fn>)\"),\n\n }\n\n }\n\n}\n\n\n\nimpl MovementCondition {\n\n pub fn validate(\n", "file_path": "src/movement.rs", "rank": 94, "score": 2.079801101758331 } ]
Rust
src/auto/misc.rs
talklittle/gtk
b3af34228bef07e0c22829437d73144857fa44d7
use Buildable; use Widget; use ffi; use glib; use glib::StaticType; use glib::Value; use glib::object::Downcast; use glib::object::IsA; use glib::signal::SignalHandlerId; use glib::signal::connect; use glib::translate::*; use glib_ffi; use gobject_ffi; use std::boxed::Box as Box_; use std::mem; use std::mem::transmute; use std::ptr; glib_wrapper! { pub struct Misc(Object<ffi::GtkMisc, ffi::GtkMiscClass>): Widget, Buildable; match fn { get_type => || ffi::gtk_misc_get_type(), } } pub trait MiscExt { #[cfg_attr(feature = "v3_14", deprecated)] fn get_alignment(&self) -> (f32, f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_padding(&self) -> (i32, i32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_alignment(&self, xalign: f32, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_padding(&self, xpad: i32, ypad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xalign(&self, xalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xpad(&self) -> i32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xpad(&self, xpad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_yalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_yalign(&self, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_ypad(&self) -> i32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_ypad(&self, ypad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xpad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_ypad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Misc> + IsA<glib::object::Object>> MiscExt for O { fn get_alignment(&self) -> (f32, f32) { unsafe { let mut xalign = mem::uninitialized(); let mut yalign = mem::uninitialized(); ffi::gtk_misc_get_alignment(self.to_glib_none().0, &mut xalign, &mut yalign); (xalign, yalign) } } fn get_padding(&self) -> (i32, i32) { unsafe { let mut xpad = mem::uninitialized(); let mut ypad = mem::uninitialized(); ffi::gtk_misc_get_padding(self.to_glib_none().0, &mut xpad, &mut ypad); (xpad, ypad) } } fn set_alignment(&self, xalign: f32, yalign: f32) { unsafe { ffi::gtk_misc_set_alignment(self.to_glib_none().0, xalign, yalign); } } fn set_padding(&self, xpad: i32, ypad: i32) { unsafe { ffi::gtk_misc_set_padding(self.to_glib_none().0, xpad, ypad); } } fn get_property_xalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xalign(&self, xalign: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "xalign".to_glib_none().0, Value::from(&xalign).to_glib_none().0); } } fn get_property_xpad(&self) -> i32 { unsafe { let mut value = Value::from_type(<i32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xpad".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xpad(&self, xpad: i32) { unsafe { 
gobject_ffi::g_object_set_property(self.to_glib_none().0, "xpad".to_glib_none().0, Value::from(&xpad).to_glib_none().0); } } fn get_property_yalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "yalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_yalign(&self, yalign: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "yalign".to_glib_none().0, Value::from(&yalign).to_glib_none().0); } } fn get_property_ypad(&self) -> i32 { unsafe { let mut value = Value::from_type(<i32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "ypad".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_ypad(&self, ypad: i32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "ypad".to_glib_none().0, Value::from(&ypad).to_glib_none().0); } } fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xalign", transmute(notify_xalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_xpad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xpad", transmute(notify_xpad_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::yalign", transmute(notify_yalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_ypad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::ypad", transmute(notify_ypad_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } } unsafe extern "C" fn notify_xalign_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_xpad_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_yalign_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_ypad_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) }
use Buildable; use Widget; use ffi; use glib; use glib::StaticType; use glib::Value; use glib::object::Downcast; use glib::object::IsA; use glib::signal::SignalHandlerId; use glib::signal::connect; use glib::translate::*; use glib_ffi; use gobject_ffi; use std::boxed::Box as Box_; use std::mem; use std::mem::transmute; use std::ptr; glib_wrapper! { pub struct Misc(Object<ffi::GtkMisc, ffi::GtkMiscClass>): Widget, Buildable; match fn { get_type => || ffi::gtk_misc_get_type(), } } pub trait MiscExt { #[cfg_attr(feature = "v3_14", deprecated)] fn get_alignment(&self) -> (f32, f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_padding(&self) -> (i32, i32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_alignment(&self, xalign: f32, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_padding(&self, xpad: i32, ypad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xalign(&self, xalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xpad(&self) -> i32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xpad(&self, xpad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_yalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_yalign(&self, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_ypad(&self) -> i32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_ypad(&self, ypad: i32); #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xpad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_ypad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Misc> + IsA<glib::object::Object>> MiscExt for O { fn get_alignment(&self) -> (f32, f32) { unsafe { let mut xalign = mem::uninitialized(); let mut yalign = mem::uninitialized(); ffi::gtk_misc_get_alignment(self.to_glib_none().0, &mut xalign, &mut yalign); (xalign, yalign) } } fn get_padding(&self) -> (i32, i32) { unsafe { let mut xpad = mem::uninitialized(); let mut ypad = mem::uninitialized(); ffi::gtk_misc_get_padding(self.to_glib_none().0, &mut xpad, &mut ypad); (xpad, ypad) } } fn set_alignment(&self, xalign: f32, yalign: f32) { unsafe { ffi::gtk_misc_set_alignment(self.to_glib_none().0, xalign, yalign); } } fn set_padding(&self, xpad: i32, ypad: i32) { unsafe { ffi::gtk_misc_set_padding(self.to_glib_none().0, xpad, ypad); } } fn get_property_xalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xalign(&self, xalign: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "xalign".to_glib_none().0, Value::from(&xalign).to_glib_none().0); } } fn get_property_xpad(&self) -> i32 { unsafe { let mut value = Value::from_type(<i32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xpad".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xpad(&self, xpad: i32) { unsafe { 
gobject_ffi::g_object_set_property(self.to_glib_none().0, "xpad".to_glib_none().0, Value::from(&xpad).to_glib_none().0); } } fn get_property_yalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "yalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_yalign(&self, yalign: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "yalign".to_glib_none().0, Value::from(&yalign).to_glib_none().0); } } fn get_property_ypad(&self) -> i32 { unsafe { let mut value = Value::from_type(<i32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "ypad".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_ypad(&self, ypad: i32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "ypad".to_glib_none().0, Value::from(&ypad).to_glib_none().0); } } fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xalign", transmute(notify_xalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_xpad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xpad", transmute(notify_xpad_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerI
fn connect_property_ypad_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::ypad", transmute(notify_ypad_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } } unsafe extern "C" fn notify_xalign_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_xpad_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_yalign_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_ypad_trampoline<P>(this: *mut ffi::GtkMisc, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Misc> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Misc::from_glib_borrow(this).downcast_unchecked()) }
d { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::yalign", transmute(notify_yalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } }
function_block-function_prefixed
[ { "content": "fn into_raw<F: FnMut() -> Continue + 'static>(func: F) -> gpointer {\n\n let func: Box<RefCell<Box<FnMut() -> Continue + 'static>>> =\n\n Box::new(RefCell::new(Box::new(func)));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n", "file_path": "src/signal.rs", "rank": 0, "score": 312540.24244178244 }, { "content": "pub fn get_event_widget(event: &mut gdk::Event) -> Option<Widget> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_none(ffi::gtk_get_event_widget(event.to_glib_none_mut().0))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 1, "score": 298578.43256408046 }, { "content": "pub fn propagate_event<P: IsA<Widget>>(widget: &P, event: &mut gdk::Event) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_propagate_event(widget.to_glib_none().0, event.to_glib_none_mut().0);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 2, "score": 294971.0691831984 }, { "content": "pub fn test_find_sibling<P: IsA<Widget>>(base_widget: &P, widget_type: glib::types::Type) -> Option<Widget> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_none(ffi::gtk_test_find_sibling(base_widget.to_glib_none().0, widget_type.to_glib()))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 3, "score": 285720.0704260303 }, { "content": "pub fn test_find_widget<P: IsA<Widget>>(widget: &P, label_pattern: &str, widget_type: glib::types::Type) -> Option<Widget> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_none(ffi::gtk_test_find_widget(widget.to_glib_none().0, label_pattern.to_glib_none().0, widget_type.to_glib()))\n\n }\n\n}\n\n\n\n//pub fn test_init(argvp: /*Unimplemented*/Vec<String>, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {\n\n// unsafe { TODO: call ffi::gtk_test_init() }\n\n//}\n\n\n\n//pub fn test_list_all_types() -> /*Unimplemented*/CArray TypeId { ns_id: 0, id: 30 } {\n\n// unsafe { TODO: call ffi::gtk_test_list_all_types() }\n\n//}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 4, "score": 283975.33101636835 }, { "content": "#[cfg_attr(feature = \"v3_20\", deprecated)]\n\npub fn test_slider_get_value<P: IsA<Widget>>(widget: &P) -> f64 {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_test_slider_get_value(widget.to_glib_none().0)\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 5, "score": 277074.1155075176 }, { "content": "pub fn bindings_activate_event<P: IsA<glib::Object>>(object: &P, event: &mut gdk::EventKey) -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gtk_bindings_activate_event(object.to_glib_none().0, event.to_glib_none_mut().0))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 6, "score": 254359.77092766558 }, { "content": "/// Adds a closure to be called by the default main loop when it's idle.\n\n///\n\n/// `func` will be called repeatedly until it returns `Continue(false)`.\n\n///\n\n/// Similar to `glib::idle_add` but only callable from the main thread and\n\n/// doesn't require `Send`.\n\npub fn idle_add<F>(func: F) -> SourceId\n\n where F: FnMut() -> Continue + 'static {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(glib_ffi::g_idle_add_full(glib_ffi::G_PRIORITY_DEFAULT_IDLE, Some(trampoline),\n\n into_raw(func), Some(destroy_closure)))\n\n }\n\n}\n\n\n", "file_path": "src/signal.rs", "rank": 7, "score": 246177.197771133 }, { "content": "#[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\npub fn test_widget_wait_for_draw<P: IsA<Widget>>(widget: &P) {\n\n 
skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_test_widget_wait_for_draw(widget.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 8, "score": 243434.50520761352 }, { "content": "pub fn selection_remove_all<P: IsA<Widget>>(widget: &P) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_selection_remove_all(widget.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 9, "score": 242590.98834403558 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with millisecond granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` milliseconds until it\n\n/// returns `Continue(false)`. Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events. Prefer `timeout_add_seconds` when millisecond\n\n/// precision is not necessary.\n\n///\n\n/// Similar to `glib::timeout_add` but only callable from the main thread and\n\n/// doesn't require `Send`.\n\npub fn timeout_add<F>(interval: u32, func: F) -> SourceId\n\n where F: FnMut() -> Continue + 'static {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(\n\n glib_ffi::g_timeout_add_full(glib_ffi::G_PRIORITY_DEFAULT, interval, Some(trampoline),\n\n into_raw(func), Some(destroy_closure)))\n\n }\n\n}\n\n\n", "file_path": "src/signal.rs", "rank": 10, "score": 235605.97058626017 }, { "content": "/// Tries to initialize GTK+.\n\n///\n\n/// Call either this function or [`Application::new`][new] before using any\n\n/// other GTK+ functions.\n\n///\n\n/// [new]: struct.Application.html#method.new\n\n///\n\n/// Note that this function calls `gtk_init_check()` rather than `gtk_init()`,\n\n/// so will not cause the program to terminate if GTK could not be initialized.\n\n/// Instead, an Ok is returned if the windowing system was successfully\n\n/// initialized otherwise an Err is returned.\n\npub fn init() -> Result<(), glib::BoolError> {\n\n skip_assert_initialized!();\n\n if is_initialized_main_thread() {\n\n return Ok(());\n\n }\n\n else if is_initialized() {\n\n panic!(\"Attempted to initialize GTK from two different threads.\");\n\n }\n\n unsafe {\n\n if pre_init() && from_glib(ffi::gtk_init_check(ptr::null_mut(), ptr::null_mut())) {\n\n set_initialized();\n\n Ok(())\n\n }\n\n else {\n\n Err(glib::BoolError(\"Failed to initialize GTK\"))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rt.rs", "rank": 11, "score": 233684.43136530352 }, { "content": "pub fn grab_get_current() -> Option<Widget> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_none(ffi::gtk_grab_get_current())\n\n }\n\n}\n\n\n\n//pub fn init_check(argv: /*Unimplemented*/Vec<String>) -> bool {\n\n// unsafe { TODO: call ffi::gtk_init_check() }\n\n//}\n\n\n\n//pub fn init_with_args<'a, 'b, P: Into<Option<&'a str>>, Q: Into<Option<&'b str>>>(argv: /*Unimplemented*/Vec<String>, parameter_string: P, entries: /*Ignored*/&[&glib::OptionEntry], translation_domain: Q) -> Result<(), Error> {\n\n// unsafe { TODO: call ffi::gtk_init_with_args() }\n\n//}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 12, "score": 233076.36969293962 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with second granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` seconds until it\n\n/// returns `Continue(false)`. 
Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events.\n\n///\n\n/// Similar to `glib::timeout_add_seconds` but only callable from the main thread and\n\n/// doesn't require `Send`.\n\npub fn timeout_add_seconds<F>(interval: u32, func: F) -> SourceId\n\n where F: FnMut() -> Continue + 'static {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(glib_ffi::g_timeout_add_seconds_full(glib_ffi::G_PRIORITY_DEFAULT, interval,\n\n Some(trampoline), into_raw(func), Some(destroy_closure)))\n\n }\n\n}\n\n\n", "file_path": "src/signal.rs", "rank": 13, "score": 233051.04300298914 }, { "content": "pub fn test_find_label<P: IsA<Widget>>(widget: &P, label_pattern: &str) -> Option<Widget> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_none(ffi::gtk_test_find_label(widget.to_glib_none().0, label_pattern.to_glib_none().0))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 14, "score": 229947.5363402729 }, { "content": "#[cfg_attr(feature = \"v3_20\", deprecated)]\n\npub fn test_text_set<P: IsA<Widget>>(widget: &P, string: &str) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_test_text_set(widget.to_glib_none().0, string.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 15, "score": 229811.75680986122 }, { "content": "#[cfg_attr(feature = \"v3_20\", deprecated)]\n\npub fn test_text_get<P: IsA<Widget>>(widget: &P) -> Option<String> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_test_text_get(widget.to_glib_none().0))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 16, "score": 229811.75680986122 }, { "content": "#[cfg_attr(feature = \"v3_20\", deprecated)]\n\npub fn test_slider_set_perc<P: IsA<Widget>>(widget: &P, percentage: f64) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_test_slider_set_perc(widget.to_glib_none().0, percentage);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 17, "score": 227351.61458502483 }, { "content": "pub fn selection_clear_targets<P: IsA<Widget>>(widget: &P, selection: &gdk::Atom) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_selection_clear_targets(widget.to_glib_none().0, selection.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 18, "score": 225123.0011037981 }, { "content": "pub fn main_do_event(event: &mut gdk::Event) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_main_do_event(event.to_glib_none_mut().0);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 19, "score": 224367.55018470448 }, { "content": "pub trait BuildableExtManual {\n\n fn get_name(&self) -> Option<String>;\n\n\n\n fn set_name(&self, name: &str);\n\n}\n\n\n\nimpl<O: IsA<Buildable>> BuildableExtManual for O {\n\n fn get_name(&self) -> Option<String> {\n\n unsafe {\n\n from_glib_none(ffi::gtk_buildable_get_name(self.to_glib_none().0))\n\n }\n\n }\n\n\n\n fn set_name(&self, name: &str) {\n\n unsafe {\n\n ffi::gtk_buildable_set_name(self.to_glib_none().0, name.to_glib_none().0);\n\n }\n\n }\n\n}\n", "file_path": "src/buildable.rs", "rank": 20, "score": 220676.63745312998 }, { "content": "pub trait BuildableExt {\n\n fn add_child<'a, P: IsA<glib::Object>, Q: Into<Option<&'a str>>>(&self, builder: &Builder, child: &P, type_: Q);\n\n\n\n fn construct_child(&self, builder: &Builder, name: &str) -> Option<glib::Object>;\n\n\n\n //fn custom_finished<'a, P: IsA<glib::Object> + 'a, Q: Into<Option<&'a P>>, R: 
Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, builder: &Builder, child: Q, tagname: &str, data: R);\n\n\n\n //fn custom_tag_end<'a, P: IsA<glib::Object> + 'a, Q: Into<Option<&'a P>>, R: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, builder: &Builder, child: Q, tagname: &str, data: R);\n\n\n\n //fn custom_tag_start<'a, P: IsA<glib::Object> + 'a, Q: Into<Option<&'a P>>>(&self, builder: &Builder, child: Q, tagname: &str, parser: /*Ignored*/glib::MarkupParser, data: /*Unimplemented*/&mut Option<Fundamental: Pointer>) -> bool;\n\n\n\n fn get_internal_child(&self, builder: &Builder, childname: &str) -> Option<glib::Object>;\n\n\n\n fn parser_finished(&self, builder: &Builder);\n\n\n\n fn set_buildable_property(&self, builder: &Builder, name: &str, value: &glib::Value);\n\n}\n\n\n\nimpl<O: IsA<Buildable>> BuildableExt for O {\n\n fn add_child<'a, P: IsA<glib::Object>, Q: Into<Option<&'a str>>>(&self, builder: &Builder, child: &P, type_: Q) {\n", "file_path": "src/auto/buildable.rs", "rank": 21, "score": 220676.63745312998 }, { "content": "pub trait WidgetExtManual {\n\n fn drag_dest_set(&self, flags: DestDefaults, targets: &[TargetEntry], actions: DragAction);\n\n\n\n fn drag_source_set(&self, start_button_mask: ModifierType, targets: &[TargetEntry], actions: DragAction);\n\n\n\n fn intersect(&self, area: &Rectangle, intersection: Option<&mut Rectangle>) -> bool;\n\n\n\n fn override_font(&self, font: &pango::FontDescription);\n\n\n\n #[cfg(any(feature = \"v3_8\", feature = \"dox\"))]\n\n fn add_tick_callback<F>(&self, func: F) -> u32\n\n where\n\n F: FnMut(&Self, &FrameClock) -> Continue + 'static;\n\n\n\n fn connect_map_event<F: Fn(&Self, &Event) -> Inhibit + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_unmap_event<F: Fn(&Self, &Event) -> Inhibit + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Widget> + IsA<Object>> WidgetExtManual for O {\n", "file_path": "src/widget.rs", "rank": 22, "score": 220150.18464054592 }, { "content": "pub trait WidgetExt {\n\n fn activate(&self) -> bool;\n\n\n\n fn add_accelerator(&self, accel_signal: &str, accel_group: &AccelGroup, accel_key: u32, accel_mods: gdk::ModifierType, accel_flags: AccelFlags);\n\n\n\n fn add_device_events<P: IsA<gdk::Device>>(&self, device: &P, events: gdk::EventMask);\n\n\n\n fn add_events(&self, events: i32);\n\n\n\n fn add_mnemonic_label<P: IsA<Widget>>(&self, label: &P);\n\n\n\n //#[cfg(any(feature = \"v3_8\", feature = \"dox\"))]\n\n //fn add_tick_callback<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, callback: /*Unknown conversion*//*Unimplemented*/TickCallback, user_data: P, notify: /*Unknown conversion*//*Unimplemented*/DestroyNotify) -> u32;\n\n\n\n fn can_activate_accel(&self, signal_id: u32) -> bool;\n\n\n\n fn child_focus(&self, direction: DirectionType) -> bool;\n\n\n\n fn child_notify(&self, child_property: &str);\n\n\n", "file_path": "src/auto/widget.rs", "rank": 23, "score": 220150.18464054592 }, { "content": "#[cfg_attr(feature = \"v3_20\", deprecated)]\n\npub fn test_widget_click<P: IsA<Widget>>(widget: &P, button: u32, modifiers: gdk::ModifierType) -> bool {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib(ffi::gtk_test_widget_click(widget.to_glib_none().0, button, modifiers.to_glib()))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 24, "score": 217042.77969453955 }, { "content": "pub fn test_widget_send_key<P: IsA<Widget>>(widget: &P, keyval: u32, modifiers: gdk::ModifierType) -> bool {\n\n 
skip_assert_initialized!();\n\n unsafe {\n\n from_glib(ffi::gtk_test_widget_send_key(widget.to_glib_none().0, keyval, modifiers.to_glib()))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 25, "score": 215052.47575546813 }, { "content": "pub fn device_grab_remove<P: IsA<Widget>, Q: IsA<gdk::Device>>(widget: &P, device: &Q) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_device_grab_remove(widget.to_glib_none().0, device.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 26, "score": 214467.67380405264 }, { "content": "pub fn cairo_transform_to_window<P: IsA<Widget>>(cr: &cairo::Context, widget: &P, window: &gdk::Window) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_cairo_transform_to_window(mut_override(cr.to_glib_none().0), widget.to_glib_none().0, window.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 27, "score": 212246.73049925017 }, { "content": "pub trait ColorChooserWidgetExt {\n\n fn get_property_show_editor(&self) -> bool;\n\n\n\n fn set_property_show_editor(&self, show_editor: bool);\n\n\n\n fn connect_property_show_editor_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<ColorChooserWidget> + IsA<glib::object::Object>> ColorChooserWidgetExt for O {\n\n fn get_property_show_editor(&self) -> bool {\n\n unsafe {\n\n let mut value = Value::from_type(<bool as StaticType>::static_type());\n\n gobject_ffi::g_object_get_property(self.to_glib_none().0, \"show-editor\".to_glib_none().0, value.to_glib_none_mut().0);\n\n value.get().unwrap()\n\n }\n\n }\n\n\n\n fn set_property_show_editor(&self, show_editor: bool) {\n\n unsafe {\n\n gobject_ffi::g_object_set_property(self.to_glib_none().0, \"show-editor\".to_glib_none().0, Value::from(&show_editor).to_glib_none().0);\n", "file_path": "src/auto/color_chooser_widget.rs", "rank": 28, "score": 208079.07031559694 }, { "content": "pub trait AppChooserWidgetExt {\n\n fn get_default_text(&self) -> Option<String>;\n\n\n\n fn get_show_all(&self) -> bool;\n\n\n\n fn get_show_default(&self) -> bool;\n\n\n\n fn get_show_fallback(&self) -> bool;\n\n\n\n fn get_show_other(&self) -> bool;\n\n\n\n fn get_show_recommended(&self) -> bool;\n\n\n\n fn set_default_text(&self, text: &str);\n\n\n\n fn set_show_all(&self, setting: bool);\n\n\n\n fn set_show_default(&self, setting: bool);\n\n\n\n fn set_show_fallback(&self, setting: bool);\n", "file_path": "src/auto/app_chooser_widget.rs", "rank": 29, "score": 208079.07031559694 }, { "content": "pub trait FileChooserWidgetExt {\n\n fn get_property_search_mode(&self) -> bool;\n\n\n\n fn set_property_search_mode(&self, search_mode: bool);\n\n\n\n fn get_property_subtitle(&self) -> Option<String>;\n\n\n\n fn connect_desktop_folder<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn emit_desktop_folder(&self);\n\n\n\n fn connect_down_folder<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn emit_down_folder(&self);\n\n\n\n fn connect_home_folder<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn emit_home_folder(&self);\n\n\n\n fn connect_location_popup<F: Fn(&Self, &str) + 'static>(&self, f: F) -> SignalHandlerId;\n", "file_path": "src/auto/file_chooser_widget.rs", "rank": 30, "score": 208079.07031559694 }, { "content": "pub fn selection_add_target<P: IsA<Widget>>(widget: &P, selection: &gdk::Atom, target: &gdk::Atom, info: u32) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n 
ffi::gtk_selection_add_target(widget.to_glib_none().0, selection.to_glib_none().0, target.to_glib_none().0, info);\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 31, "score": 204513.15761367237 }, { "content": "pub fn device_grab_add<P: IsA<Widget>, Q: IsA<gdk::Device>>(widget: &P, device: &Q, block_others: bool) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_device_grab_add(widget.to_glib_none().0, device.to_glib_none().0, block_others.to_glib());\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 32, "score": 204513.15761367237 }, { "content": "pub fn selection_convert<P: IsA<Widget>>(widget: &P, selection: &gdk::Atom, target: &gdk::Atom, time_: u32) -> bool {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib(ffi::gtk_selection_convert(widget.to_glib_none().0, selection.to_glib_none().0, target.to_glib_none().0, time_))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 33, "score": 202885.56457492246 }, { "content": "#[cfg_attr(feature = \"v3_20\", deprecated)]\n\npub fn test_create_simple_window(window_title: &str, dialog_text: &str) -> Option<Widget> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_none(ffi::gtk_test_create_simple_window(window_title.to_glib_none().0, dialog_text.to_glib_none().0))\n\n }\n\n}\n\n\n\n//#[cfg_attr(feature = \"v3_20\", deprecated)]\n\n//pub fn test_create_widget<'a, P: Into<Option<&'a str>>>(widget_type: glib::types::Type, first_property_name: P, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<Widget> {\n\n// unsafe { TODO: call ffi::gtk_test_create_widget() }\n\n//}\n\n\n\n//#[cfg_attr(feature = \"v3_20\", deprecated)]\n\n//pub fn test_display_button_window(window_title: &str, dialog_text: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<Widget> {\n\n// unsafe { TODO: call ffi::gtk_test_display_button_window() }\n\n//}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 34, "score": 202180.09629148513 }, { "content": "pub fn accel_groups_from_object<P: IsA<glib::Object>>(object: &P) -> Vec<AccelGroup> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n FromGlibPtrContainer::from_glib_none(ffi::gtk_accel_groups_from_object(object.to_glib_none().0))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 35, "score": 200375.3459881057 }, { "content": "pub fn selection_owner_set<'a, P: IsA<Widget> + 'a, Q: Into<Option<&'a P>>>(widget: Q, selection: &gdk::Atom, time_: u32) -> bool {\n\n assert_initialized_main_thread!();\n\n let widget = widget.into();\n\n let widget = widget.to_glib_none();\n\n unsafe {\n\n from_glib(ffi::gtk_selection_owner_set(widget.0, selection.to_glib_none().0, time_))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 37, "score": 195059.38628745882 }, { "content": "#[cfg_attr(feature = \"v3_10\", deprecated)]\n\npub fn render_icon_pixbuf(context: &StyleContext, source: &IconSource, size: i32) -> Option<gdk_pixbuf::Pixbuf> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_render_icon_pixbuf(context.to_glib_none().0, source.to_glib_none().0, size))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 38, "score": 189149.05268618301 }, { "content": "pub fn bindings_activate<P: IsA<glib::Object>>(object: &P, keyval: u32, modifiers: gdk::ModifierType) -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gtk_bindings_activate(object.to_glib_none().0, keyval, modifiers.to_glib()))\n\n }\n\n}\n\n\n", 
"file_path": "src/auto/functions.rs", "rank": 39, "score": 185360.26793767352 }, { "content": "fn into_raw<F>(func: F) -> gpointer\n\n where F: Fn(&TreeModel, &TreeIter) -> bool + 'static {\n\n skip_assert_initialized!();\n\n let func: Box<Box<Fn(&TreeModel, &TreeIter) -> bool + 'static>> =\n\n Box::new(Box::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n", "file_path": "src/tree_model_filter.rs", "rank": 40, "score": 184920.52941920815 }, { "content": "pub fn selection_owner_set_for_display<'a, P: IsA<Widget> + 'a, Q: Into<Option<&'a P>>>(display: &gdk::Display, widget: Q, selection: &gdk::Atom, time_: u32) -> bool {\n\n assert_initialized_main_thread!();\n\n let widget = widget.into();\n\n let widget = widget.to_glib_none();\n\n unsafe {\n\n from_glib(ffi::gtk_selection_owner_set_for_display(display.to_glib_none().0, widget.0, selection.to_glib_none().0, time_))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 41, "score": 183843.28340428992 }, { "content": "fn into_raw<F, T>(func: F) -> gpointer\n\n where F: Fn(&T, &TreeIter, &TreeIter) -> Ordering + 'static {\n\n skip_assert_initialized!();\n\n let func: Box<Box<Fn(&T, &TreeIter, &TreeIter) -> Ordering + 'static>> =\n\n Box::new(Box::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nimpl<O: IsA<TreeModel> + IsA<TreeSortable>> TreeSortableExtManual for O {\n\n #[inline]\n\n fn get_sort_column_id(&self) -> Option<(SortColumn, SortType)> {\n\n unsafe {\n\n let mut sort_column_id = mem::uninitialized();\n\n let mut order = mem::uninitialized();\n\n ffi::gtk_tree_sortable_get_sort_column_id(self.to_glib_none().0, &mut sort_column_id, &mut order);\n\n if sort_column_id != ffi::GTK_TREE_SORTABLE_UNSORTED_SORT_COLUMN_ID {\n\n Some((from_glib(sort_column_id), from_glib(order)))\n\n } else {\n\n None\n\n }\n", "file_path": "src/tree_sortable.rs", "rank": 42, "score": 182759.18724263235 }, { "content": "pub fn accel_groups_activate<P: IsA<glib::Object>>(object: &P, accel_key: u32, accel_mods: gdk::ModifierType) -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gtk_accel_groups_activate(object.to_glib_none().0, accel_key, accel_mods.to_glib()))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 43, "score": 179112.23871248594 }, { "content": "pub fn tree_set_row_drag_data<P: IsA<TreeModel>>(selection_data: &SelectionData, tree_model: &P, path: &mut TreePath) -> bool {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib(ffi::gtk_tree_set_row_drag_data(mut_override(selection_data.to_glib_none().0), tree_model.to_glib_none().0, path.to_glib_none_mut().0))\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 44, "score": 175340.96952749992 }, { "content": "pub fn render_insertion_cursor(context: &StyleContext, cr: &cairo::Context, x: f64, y: f64, layout: &pango::Layout, index: i32, direction: pango::Direction) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_render_insertion_cursor(context.to_glib_none().0, mut_override(cr.to_glib_none().0), x, y, layout.to_glib_none().0, index, direction.to_glib());\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 45, "score": 167532.37427620805 }, { "content": "pub fn main_quit() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n if ffi::gtk_main_level() > 0 {\n\n ffi::gtk_main_quit();\n\n }\n\n else if cfg!(debug_assertions) {\n\n panic!(\"Attempted to quit a GTK main loop when none is running.\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rt.rs", "rank": 46, "score": 
163084.78579710593 }, { "content": "pub fn main() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_main();\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 47, "score": 163084.78579710593 }, { "content": "#[inline]\n\npub fn is_initialized() -> bool {\n\n skip_assert_initialized!();\n\n INITIALIZED.load(Ordering::Acquire)\n\n}\n\n\n\n/// Returns `true` if GTK has been initialized and this is the main thread.\n", "file_path": "src/rt.rs", "rank": 48, "score": 160747.24262042082 }, { "content": "pub fn disable_setlocale() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_disable_setlocale();\n\n }\n\n}\n\n\n\n//pub fn distribute_natural_allocation(extra_space: i32, n_requested_sizes: u32, sizes: /*Ignored*/&mut RequestedSize) -> i32 {\n\n// unsafe { TODO: call ffi::gtk_distribute_natural_allocation() }\n\n//}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 49, "score": 160654.78268305544 }, { "content": "pub trait OverlaySignals {\n\n fn connect_get_child_position<F>(&self, f: F) -> SignalHandlerId\n\n where\n\n F: Fn(&Self, &Widget) -> Option<Rectangle> + 'static;\n\n}\n\n\n\nmod overlay {\n\n use gdk::Rectangle;\n\n use ffi::{GtkOverlay, GtkWidget};\n\n use gdk_ffi::GdkRectangle;\n\n use glib::signal::{connect, SignalHandlerId};\n\n use glib::translate::*;\n\n use glib::object::Downcast;\n\n use std::mem::transmute;\n\n use std::ptr;\n\n use glib_ffi::{gboolean, gpointer};\n\n use IsA;\n\n use Object;\n\n use Overlay;\n\n use Widget;\n", "file_path": "src/signal.rs", "rank": 50, "score": 159096.65473733813 }, { "content": "pub trait EditableSignals {\n\n fn connect_changed<F>(&self, changed_func: F) -> SignalHandlerId\n\n where F: Fn(&Self) + 'static;\n\n fn connect_delete_text<F>(&self, delete_text_func: F) -> SignalHandlerId\n\n where F: Fn(&Self, i32, i32) + 'static;\n\n fn connect_insert_text<F>(&self, insert_text_func: F) -> SignalHandlerId\n\n where F: Fn(&Self, &str, &mut i32) + 'static;\n\n}\n\n\n\nmod editable {\n\n use Editable;\n\n use Object;\n\n use std::mem::transmute;\n\n use ffi::GtkEditable;\n\n use glib::signal::{SignalHandlerId, connect};\n\n use glib::translate::*;\n\n use IsA;\n\n use libc::{c_char, c_int, c_uchar};\n\n use std::ffi::CStr;\n\n use std::str;\n", "file_path": "src/signal.rs", "rank": 51, "score": 159096.65473733813 }, { "content": "pub fn test_register_all_types() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_test_register_all_types();\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 52, "score": 158333.69326649557 }, { "content": "pub fn true_() -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gtk_true())\n\n }\n\n}\n", "file_path": "src/auto/functions.rs", "rank": 53, "score": 158317.23950637027 }, { "content": "pub fn false_() -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gtk_false())\n\n }\n\n}\n\n\n", "file_path": "src/auto/functions.rs", "rank": 54, "score": 158317.23950637027 }, { "content": "pub trait MenuExt {\n\n fn attach<P: IsA<Widget>>(&self, child: &P, left_attach: u32, right_attach: u32, top_attach: u32, bottom_attach: u32);\n\n\n\n //fn attach_to_widget<'a, P: IsA<Widget>, Q: Into<Option<&'a /*Unimplemented*/MenuDetachFunc>>>(&self, attach_widget: &P, detacher: Q);\n\n\n\n fn detach(&self);\n\n\n\n fn get_accel_group(&self) -> Option<AccelGroup>;\n\n\n\n fn get_accel_path(&self) -> Option<String>;\n\n\n\n fn get_active(&self) -> Option<Widget>;\n\n\n\n fn get_attach_widget(&self) 
-> Option<Widget>;\n\n\n\n fn get_monitor(&self) -> i32;\n\n\n\n fn get_reserve_toggle_size(&self) -> bool;\n\n\n\n #[cfg_attr(feature = \"v3_10\", deprecated)]\n", "file_path": "src/auto/menu.rs", "rank": 55, "score": 156670.24458993023 }, { "content": "pub trait LayoutExt {\n\n fn get_bin_window(&self) -> Option<gdk::Window>;\n\n\n\n fn get_size(&self) -> (u32, u32);\n\n\n\n fn move_<P: IsA<Widget>>(&self, child_widget: &P, x: i32, y: i32);\n\n\n\n fn put<P: IsA<Widget>>(&self, child_widget: &P, x: i32, y: i32);\n\n\n\n fn set_size(&self, width: u32, height: u32);\n\n\n\n fn get_property_height(&self) -> u32;\n\n\n\n fn set_property_height(&self, height: u32);\n\n\n\n fn get_property_width(&self) -> u32;\n\n\n\n fn set_property_width(&self, width: u32);\n\n\n\n fn get_child_x<T: IsA<Widget>>(&self, item: &T) -> i32;\n", "file_path": "src/auto/layout.rs", "rank": 56, "score": 156670.24458993023 }, { "content": "pub trait NotebookExt {\n\n #[cfg(any(feature = \"v3_16\", feature = \"dox\"))]\n\n fn detach_tab<P: IsA<Widget>>(&self, child: &P);\n\n\n\n fn get_action_widget(&self, pack_type: PackType) -> Option<Widget>;\n\n\n\n fn get_group_name(&self) -> Option<String>;\n\n\n\n fn get_menu_label<P: IsA<Widget>>(&self, child: &P) -> Option<Widget>;\n\n\n\n fn get_menu_label_text<P: IsA<Widget>>(&self, child: &P) -> Option<String>;\n\n\n\n fn get_scrollable(&self) -> bool;\n\n\n\n fn get_show_border(&self) -> bool;\n\n\n\n fn get_show_tabs(&self) -> bool;\n\n\n\n fn get_tab_detachable<P: IsA<Widget>>(&self, child: &P) -> bool;\n\n\n", "file_path": "src/auto/notebook.rs", "rank": 57, "score": 156670.24458993023 }, { "content": "pub trait StackExt {\n\n #[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\n fn add_named<P: IsA<Widget>>(&self, child: &P, name: &str);\n\n\n\n #[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\n fn add_titled<P: IsA<Widget>>(&self, child: &P, name: &str, title: &str);\n\n\n\n #[cfg(any(feature = \"v3_12\", feature = \"dox\"))]\n\n fn get_child_by_name(&self, name: &str) -> Option<Widget>;\n\n\n\n #[cfg(any(feature = \"v3_16\", feature = \"dox\"))]\n\n fn get_hhomogeneous(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\n fn get_homogeneous(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v3_18\", feature = \"dox\"))]\n\n fn get_interpolate_size(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n", "file_path": "src/auto/stack.rs", "rank": 58, "score": 156670.24458993023 }, { "content": "pub trait ContainerExt {\n\n fn add<P: IsA<Widget>>(&self, widget: &P);\n\n\n\n //fn add_with_properties<P: IsA<Widget>>(&self, widget: &P, first_prop_name: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n fn check_resize(&self);\n\n\n\n //fn child_get<P: IsA<Widget>>(&self, child: &P, first_prop_name: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n //fn child_get_valist<P: IsA<Widget>>(&self, child: &P, first_property_name: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported);\n\n\n\n fn child_notify<P: IsA<Widget>>(&self, child: &P, child_property: &str);\n\n\n\n //#[cfg(any(feature = \"v3_18\", feature = \"dox\"))]\n\n //fn child_notify_by_pspec<P: IsA<Widget>, Q: IsA</*Ignored*/glib::ParamSpec>>(&self, child: &P, pspec: &Q);\n\n\n\n //fn child_set<P: IsA<Widget>>(&self, child: &P, first_prop_name: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n //fn child_set_valist<P: IsA<Widget>>(&self, child: &P, first_property_name: 
&str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported);\n\n\n", "file_path": "src/auto/container.rs", "rank": 59, "score": 156670.24458993023 }, { "content": "pub trait PlugExt {\n\n fn construct(&self, socket_id: xlib::Window);\n\n\n\n fn construct_for_display(&self, display: &gdk::Display, socket_id: xlib::Window);\n\n\n\n fn get_embedded(&self) -> bool;\n\n\n\n fn get_id(&self) -> xlib::Window;\n\n\n\n fn get_socket_window(&self) -> Option<gdk::Window>;\n\n\n\n fn connect_embedded<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_embedded_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_socket_window_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Plug> + IsA<glib::object::Object>> PlugExt for O {\n\n fn construct(&self, socket_id: xlib::Window) {\n", "file_path": "src/auto/plug.rs", "rank": 60, "score": 156670.24458993023 }, { "content": "pub trait BuilderExt {\n\n //#[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\n //fn add_callback_symbol(&self, callback_name: &str, callback_symbol: /*Unknown conversion*//*Unimplemented*/Callback);\n\n\n\n //#[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\n //fn add_callback_symbols(&self, first_callback_name: &str, first_callback_symbol: /*Unknown conversion*//*Unimplemented*/Callback, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n fn add_from_resource(&self, resource_path: &str) -> Result<(), Error>;\n\n\n\n fn add_from_string(&self, buffer: &str) -> Result<(), Error>;\n\n\n\n fn add_objects_from_resource(&self, resource_path: &str, object_ids: &[&str]) -> Result<(), Error>;\n\n\n\n fn add_objects_from_string(&self, buffer: &str, object_ids: &[&str]) -> Result<(), Error>;\n\n\n\n //fn connect_signals<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, user_data: P);\n\n\n\n //fn connect_signals_full<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, func: /*Unknown conversion*//*Unimplemented*/BuilderConnectFunc, user_data: P);\n\n\n\n #[cfg(any(feature = \"v3_8\", feature = \"dox\"))]\n", "file_path": "src/auto/builder.rs", "rank": 61, "score": 156670.24458993023 }, { "content": "pub trait GridExt {\n\n fn attach<P: IsA<Widget>>(&self, child: &P, left: i32, top: i32, width: i32, height: i32);\n\n\n\n fn attach_next_to<'a, P: IsA<Widget>, Q: IsA<Widget> + 'a, R: Into<Option<&'a Q>>>(&self, child: &P, sibling: R, side: PositionType, width: i32, height: i32);\n\n\n\n #[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\n fn get_baseline_row(&self) -> i32;\n\n\n\n fn get_child_at(&self, left: i32, top: i32) -> Option<Widget>;\n\n\n\n fn get_column_homogeneous(&self) -> bool;\n\n\n\n fn get_column_spacing(&self) -> u32;\n\n\n\n #[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\n fn get_row_baseline_position(&self, row: i32) -> BaselinePosition;\n\n\n\n fn get_row_homogeneous(&self) -> bool;\n\n\n\n fn get_row_spacing(&self) -> u32;\n", "file_path": "src/auto/grid.rs", "rank": 62, "score": 156670.24458993023 }, { "content": "pub trait PopoverExt {\n\n #[cfg(any(feature = \"v3_12\", feature = \"dox\"))]\n\n fn bind_model<'a, 'b, P: IsA<gio::MenuModel> + 'a, Q: Into<Option<&'a P>>, R: Into<Option<&'b str>>>(&self, model: Q, action_namespace: R);\n\n\n\n #[cfg(any(feature = \"v3_20\", feature = \"dox\"))]\n\n fn get_constrain_to(&self) -> PopoverConstraint;\n\n\n\n #[cfg(any(feature = \"v3_18\", feature = \"dox\"))]\n\n fn get_default_widget(&self) -> 
Option<Widget>;\n\n\n\n #[cfg(any(feature = \"v3_12\", feature = \"dox\"))]\n\n fn get_modal(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v3_12\", feature = \"dox\"))]\n\n fn get_pointing_to(&self) -> Option<gdk::Rectangle>;\n\n\n\n #[cfg(any(feature = \"v3_12\", feature = \"dox\"))]\n\n fn get_position(&self) -> PositionType;\n\n\n\n #[cfg(any(feature = \"v3_12\", feature = \"dox\"))]\n", "file_path": "src/auto/popover.rs", "rank": 63, "score": 156670.24458993023 }, { "content": "pub trait SwitchExt {\n\n fn get_active(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n fn get_state(&self) -> bool;\n\n\n\n fn set_active(&self, is_active: bool);\n\n\n\n #[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n fn set_state(&self, state: bool);\n\n\n\n fn connect_activate<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn emit_activate(&self);\n\n\n\n #[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n fn connect_state_set<F: Fn(&Self, bool) -> Inhibit + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_active_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n", "file_path": "src/auto/switch.rs", "rank": 64, "score": 156670.24458993023 }, { "content": "pub trait ExpanderExt {\n\n fn get_expanded(&self) -> bool;\n\n\n\n fn get_label(&self) -> Option<String>;\n\n\n\n fn get_label_fill(&self) -> bool;\n\n\n\n fn get_label_widget(&self) -> Option<Widget>;\n\n\n\n fn get_resize_toplevel(&self) -> bool;\n\n\n\n #[cfg_attr(feature = \"v3_20\", deprecated)]\n\n fn get_spacing(&self) -> i32;\n\n\n\n fn get_use_markup(&self) -> bool;\n\n\n\n fn get_use_underline(&self) -> bool;\n\n\n\n fn set_expanded(&self, expanded: bool);\n\n\n", "file_path": "src/auto/expander.rs", "rank": 65, "score": 156670.24458993023 }, { "content": "pub trait FixedExt {\n\n fn move_<P: IsA<Widget>>(&self, widget: &P, x: i32, y: i32);\n\n\n\n fn put<P: IsA<Widget>>(&self, widget: &P, x: i32, y: i32);\n\n}\n\n\n\nimpl<O: IsA<Fixed>> FixedExt for O {\n\n fn move_<P: IsA<Widget>>(&self, widget: &P, x: i32, y: i32) {\n\n unsafe {\n\n ffi::gtk_fixed_move(self.to_glib_none().0, widget.to_glib_none().0, x, y);\n\n }\n\n }\n\n\n\n fn put<P: IsA<Widget>>(&self, widget: &P, x: i32, y: i32) {\n\n unsafe {\n\n ffi::gtk_fixed_put(self.to_glib_none().0, widget.to_glib_none().0, x, y);\n\n }\n\n }\n\n}\n", "file_path": "src/auto/fixed.rs", "rank": 66, "score": 156670.24458993023 }, { "content": "pub trait GestureExt {\n\n #[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n fn get_bounding_box(&self) -> Option<gdk::Rectangle>;\n\n\n\n #[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n fn get_bounding_box_center(&self) -> Option<(f64, f64)>;\n\n\n\n #[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n fn get_device(&self) -> Option<gdk::Device>;\n\n\n\n #[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n fn get_group(&self) -> Vec<Gesture>;\n\n\n\n //#[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n //fn get_last_event(&self, sequence: /*Ignored*/&gdk::EventSequence) -> Option<gdk::Event>;\n\n\n\n //#[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n\n //fn get_last_updated_sequence(&self) -> /*Ignored*/Option<gdk::EventSequence>;\n\n\n\n //#[cfg(any(feature = \"v3_14\", feature = \"dox\"))]\n", "file_path": "src/auto/gesture.rs", "rank": 67, "score": 156670.24458993023 }, { "content": "pub trait BinExt {\n\n fn get_child(&self) -> Option<Widget>;\n\n}\n\n\n\nimpl<O: IsA<Bin>> BinExt for O {\n\n fn get_child(&self) -> Option<Widget> 
{\n\n unsafe {\n\n from_glib_none(ffi::gtk_bin_get_child(self.to_glib_none().0))\n\n }\n\n }\n\n}\n", "file_path": "src/auto/bin.rs", "rank": 68, "score": 156670.24458993023 }, { "content": "pub trait EditableExt {\n\n fn copy_clipboard(&self);\n\n\n\n fn cut_clipboard(&self);\n\n\n\n fn delete_selection(&self);\n\n\n\n fn delete_text(&self, start_pos: i32, end_pos: i32);\n\n\n\n fn get_chars(&self, start_pos: i32, end_pos: i32) -> Option<String>;\n\n\n\n fn get_editable(&self) -> bool;\n\n\n\n fn get_position(&self) -> i32;\n\n\n\n fn get_selection_bounds(&self) -> Option<(i32, i32)>;\n\n\n\n fn insert_text(&self, new_text: &str, position: &mut i32);\n\n\n\n fn paste_clipboard(&self);\n", "file_path": "src/auto/editable.rs", "rank": 69, "score": 156670.24458993023 }, { "content": "pub trait ClipboardExtManual {\n\n fn set_with_data<F: Fn(&Clipboard, &SelectionData, u32) + 'static>(&self, targets: &[TargetEntry], f: F) -> bool;\n\n}\n\n\n\nimpl<O: IsA<Clipboard>> ClipboardExtManual for O {\n\n fn set_with_data<F: Fn(&Clipboard, &SelectionData, u32) + 'static>(&self, targets: &[TargetEntry], f: F) -> bool {\n\n let stashed_targets: Vec<_> = targets.iter().map(|e| e.to_glib_none()).collect();\n\n let mut t = Vec::with_capacity(stashed_targets.len());\n\n for stash in &stashed_targets {\n\n unsafe {\n\n t.push(ffi::GtkTargetEntry {\n\n target: (*stash.0).target,\n\n flags: (*stash.0).flags,\n\n info: (*stash.0).info,\n\n });\n\n }\n\n }\n\n let t_ptr: *mut ffi::GtkTargetEntry = t.as_mut_ptr();\n\n let f: Box_<Box_<Fn(&Clipboard, &SelectionData, u32) + 'static>> = Box_::new(Box_::new(f));\n\n let user_data = Box_::into_raw(f) as *mut _;\n", "file_path": "src/clipboard.rs", "rank": 70, "score": 156670.24458993023 }, { "content": "pub trait ButtonExt {\n\n fn clicked(&self);\n\n\n\n #[cfg_attr(feature = \"v3_14\", deprecated)]\n\n fn get_alignment(&self) -> (f32, f32);\n\n\n\n #[cfg(any(feature = \"v3_6\", feature = \"dox\"))]\n\n fn get_always_show_image(&self) -> bool;\n\n\n\n fn get_event_window(&self) -> Option<gdk::Window>;\n\n\n\n #[cfg_attr(feature = \"v3_20\", deprecated)]\n\n #[cfg(any(not(feature = \"v3_20\"), feature = \"dox\"))]\n\n fn get_focus_on_click(&self) -> bool;\n\n\n\n fn get_image(&self) -> Option<Widget>;\n\n\n\n fn get_image_position(&self) -> PositionType;\n\n\n\n fn get_label(&self) -> Option<String>;\n", "file_path": "src/auto/button.rs", "rank": 71, "score": 156670.24458993023 }, { "content": "pub trait OrientableExt {\n\n fn get_orientation(&self) -> Orientation;\n\n\n\n fn set_orientation(&self, orientation: Orientation);\n\n\n\n fn connect_property_orientation_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Orientable> + IsA<glib::object::Object>> OrientableExt for O {\n\n fn get_orientation(&self) -> Orientation {\n\n unsafe {\n\n from_glib(ffi::gtk_orientable_get_orientation(self.to_glib_none().0))\n\n }\n\n }\n\n\n\n fn set_orientation(&self, orientation: Orientation) {\n\n unsafe {\n\n ffi::gtk_orientable_set_orientation(self.to_glib_none().0, orientation.to_glib());\n\n }\n\n }\n", "file_path": "src/auto/orientable.rs", "rank": 72, "score": 156670.24458993023 }, { "content": "pub trait SpinnerExt {\n\n fn start(&self);\n\n\n\n fn stop(&self);\n\n\n\n fn get_property_active(&self) -> bool;\n\n\n\n fn set_property_active(&self, active: bool);\n\n\n\n fn connect_property_active_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Spinner> + IsA<glib::object::Object>> SpinnerExt for O {\n\n 
fn start(&self) {\n\n unsafe {\n\n ffi::gtk_spinner_start(self.to_glib_none().0);\n\n }\n\n }\n\n\n\n fn stop(&self) {\n", "file_path": "src/auto/spinner.rs", "rank": 73, "score": 156670.24458993023 }, { "content": "pub trait FrameExt {\n\n fn get_label(&self) -> Option<String>;\n\n\n\n fn get_label_align(&self) -> (f32, f32);\n\n\n\n fn get_label_widget(&self) -> Option<Widget>;\n\n\n\n fn get_shadow_type(&self) -> ShadowType;\n\n\n\n fn set_label<'a, P: Into<Option<&'a str>>>(&self, label: P);\n\n\n\n fn set_label_align(&self, xalign: f32, yalign: f32);\n\n\n\n fn set_label_widget<'a, P: IsA<Widget> + 'a, Q: Into<Option<&'a P>>>(&self, label_widget: Q);\n\n\n\n fn set_shadow_type(&self, type_: ShadowType);\n\n\n\n fn get_property_label_xalign(&self) -> f32;\n\n\n\n fn set_property_label_xalign(&self, label_xalign: f32);\n", "file_path": "src/auto/frame.rs", "rank": 74, "score": 156670.24458993023 }, { "content": "pub trait AdjustmentExt {\n\n #[cfg_attr(feature = \"v3_18\", deprecated)]\n\n fn changed(&self);\n\n\n\n fn clamp_page(&self, lower: f64, upper: f64);\n\n\n\n fn configure(&self, value: f64, lower: f64, upper: f64, step_increment: f64, page_increment: f64, page_size: f64);\n\n\n\n fn get_lower(&self) -> f64;\n\n\n\n fn get_minimum_increment(&self) -> f64;\n\n\n\n fn get_page_increment(&self) -> f64;\n\n\n\n fn get_page_size(&self) -> f64;\n\n\n\n fn get_step_increment(&self) -> f64;\n\n\n\n fn get_upper(&self) -> f64;\n\n\n", "file_path": "src/auto/adjustment.rs", "rank": 75, "score": 156670.24458993023 }, { "content": "pub trait AboutDialogExt {\n\n fn add_credit_section(&self, section_name: &str, people: &[&str]);\n\n\n\n fn get_artists(&self) -> Vec<String>;\n\n\n\n fn get_authors(&self) -> Vec<String>;\n\n\n\n fn get_comments(&self) -> Option<String>;\n\n\n\n fn get_copyright(&self) -> Option<String>;\n\n\n\n fn get_documenters(&self) -> Vec<String>;\n\n\n\n fn get_license(&self) -> Option<String>;\n\n\n\n fn get_license_type(&self) -> License;\n\n\n\n fn get_logo(&self) -> Option<gdk_pixbuf::Pixbuf>;\n\n\n\n fn get_logo_icon_name(&self) -> Option<String>;\n", "file_path": "src/auto/about_dialog.rs", "rank": 76, "score": 156670.24458993023 }, { "content": "pub trait DialogExt {\n\n fn add_action_widget<P: IsA<Widget>>(&self, child: &P, response_id: i32);\n\n\n\n fn add_button(&self, button_text: &str, response_id: i32) -> Widget;\n\n\n\n //fn add_buttons(&self, first_button_text: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n #[cfg_attr(feature = \"v3_12\", deprecated)]\n\n fn get_action_area(&self) -> Widget;\n\n\n\n fn get_content_area(&self) -> Box;\n\n\n\n #[cfg(any(feature = \"v3_12\", feature = \"dox\"))]\n\n fn get_header_bar(&self) -> Option<Widget>;\n\n\n\n fn get_response_for_widget<P: IsA<Widget>>(&self, widget: &P) -> i32;\n\n\n\n fn get_widget_for_response(&self, response_id: i32) -> Option<Widget>;\n\n\n\n fn response(&self, response_id: i32);\n", "file_path": "src/auto/dialog.rs", "rank": 77, "score": 156670.24458993023 }, { "content": "pub trait AssistantExt {\n\n fn add_action_widget<P: IsA<Widget>>(&self, child: &P);\n\n\n\n fn append_page<P: IsA<Widget>>(&self, page: &P) -> i32;\n\n\n\n fn commit(&self);\n\n\n\n fn get_current_page(&self) -> i32;\n\n\n\n fn get_n_pages(&self) -> i32;\n\n\n\n fn get_nth_page(&self, page_num: i32) -> Option<Widget>;\n\n\n\n fn get_page_complete<P: IsA<Widget>>(&self, page: &P) -> bool;\n\n\n\n #[cfg(any(feature = \"v3_18\", feature = \"dox\"))]\n\n fn get_page_has_padding<P: 
IsA<Widget>>(&self, page: &P) -> bool;\n\n\n\n fn get_page_title<P: IsA<Widget>>(&self, page: &P) -> Option<String>;\n\n\n", "file_path": "src/auto/assistant.rs", "rank": 78, "score": 156670.24458993023 }, { "content": "pub trait FixedExtManual {\n\n fn get_child_x<T: IsA<Widget>>(&self, item: &T) -> i32;\n\n\n\n fn set_child_x<T: IsA<Widget>>(&self, item: &T, x: i32);\n\n\n\n fn get_child_y<T: IsA<Widget>>(&self, item: &T) -> i32;\n\n\n\n fn set_child_y<T: IsA<Widget>>(&self, item: &T, y: i32);\n\n}\n\n\n\nimpl<O: IsA<Fixed> + IsA<Container>> FixedExtManual for O {\n\n fn get_child_x<T: IsA<Widget>>(&self, item: &T) -> i32 {\n\n assert!(has_widget(self, item), \"this item isn't in the Fixed's widget list\");\n\n let mut value = Value::from(&0);\n\n unsafe {\n\n ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, \"x\".to_glib_none().0, value.to_glib_none_mut().0);\n\n }\n\n value.get().unwrap()\n\n }\n\n\n", "file_path": "src/fixed.rs", "rank": 79, "score": 156670.24458993023 }, { "content": "pub trait EntryExt {\n\n fn get_activates_default(&self) -> bool;\n\n\n\n fn get_alignment(&self) -> f32;\n\n\n\n #[cfg(any(feature = \"v3_6\", feature = \"dox\"))]\n\n fn get_attributes(&self) -> Option<pango::AttrList>;\n\n\n\n fn get_buffer(&self) -> EntryBuffer;\n\n\n\n fn get_completion(&self) -> Option<EntryCompletion>;\n\n\n\n fn get_current_icon_drag_source(&self) -> i32;\n\n\n\n fn get_cursor_hadjustment(&self) -> Option<Adjustment>;\n\n\n\n fn get_has_frame(&self) -> bool;\n\n\n\n fn get_icon_activatable(&self, icon_pos: EntryIconPosition) -> bool;\n\n\n", "file_path": "src/auto/entry.rs", "rank": 80, "score": 156670.24458993023 }, { "content": "pub trait InvisibleExt {\n\n fn set_screen(&self, screen: &gdk::Screen);\n\n\n\n fn connect_property_screen_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Invisible> + IsA<glib::object::Object>> InvisibleExt for O {\n\n fn set_screen(&self, screen: &gdk::Screen) {\n\n unsafe {\n\n ffi::gtk_invisible_set_screen(self.to_glib_none().0, screen.to_glib_none().0);\n\n }\n\n }\n\n\n\n fn connect_property_screen_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe {\n\n let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));\n\n connect(self.to_glib_none().0, \"notify::screen\",\n\n transmute(notify_screen_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)\n\n }\n\n }\n\n}\n\n\n\nunsafe extern \"C\" fn notify_screen_trampoline<P>(this: *mut ffi::GtkInvisible, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)\n\nwhere P: IsA<Invisible> {\n\n let f: &&(Fn(&P) + 'static) = transmute(f);\n\n f(&Invisible::from_glib_borrow(this).downcast_unchecked())\n\n}\n", "file_path": "src/auto/invisible.rs", "rank": 81, "score": 156670.24458993023 }, { "content": "// For some reasons, it's not generated...\n\npub trait InvisibleExtManual {\n\n fn get_screen(&self) -> Option<gdk::Screen>;\n\n}\n\n\n\nimpl InvisibleExtManual for Invisible {\n\n fn get_screen(&self) -> Option<gdk::Screen> {\n\n unsafe {\n\n from_glib_none(ffi::gtk_invisible_get_screen(self.to_glib_none().0))\n\n }\n\n }\n\n}", "file_path": "src/invisible.rs", "rank": 82, "score": 156670.24458993023 }, { "content": "pub trait AlignmentExt {\n\n #[cfg_attr(feature = \"v3_14\", deprecated)]\n\n fn get_padding(&self) -> (u32, u32, u32, u32);\n\n\n\n #[cfg_attr(feature = \"v3_14\", deprecated)]\n\n fn set(&self, xalign: f32, yalign: f32, xscale: f32, yscale: f32);\n\n\n\n #[cfg_attr(feature = 
\"v3_14\", deprecated)]\n\n fn set_padding(&self, padding_top: u32, padding_bottom: u32, padding_left: u32, padding_right: u32);\n\n\n\n #[cfg_attr(feature = \"v3_14\", deprecated)]\n\n fn get_property_bottom_padding(&self) -> u32;\n\n\n\n #[cfg_attr(feature = \"v3_14\", deprecated)]\n\n fn set_property_bottom_padding(&self, bottom_padding: u32);\n\n\n\n #[cfg_attr(feature = \"v3_14\", deprecated)]\n\n fn get_property_left_padding(&self) -> u32;\n\n\n\n #[cfg_attr(feature = \"v3_14\", deprecated)]\n", "file_path": "src/auto/alignment.rs", "rank": 83, "score": 156670.24458993023 }, { "content": "pub trait ImageExt {\n\n fn clear(&self);\n\n\n\n fn get_animation(&self) -> Option<gdk_pixbuf::PixbufAnimation>;\n\n\n\n fn get_gicon(&self) -> (gio::Icon, i32);\n\n\n\n #[cfg_attr(feature = \"v3_10\", deprecated)]\n\n fn get_icon_set(&self) -> (IconSet, i32);\n\n\n\n fn get_pixbuf(&self) -> Option<gdk_pixbuf::Pixbuf>;\n\n\n\n fn get_pixel_size(&self) -> i32;\n\n\n\n fn get_storage_type(&self) -> ImageType;\n\n\n\n fn set_from_animation<P: IsA<gdk_pixbuf::PixbufAnimation>>(&self, animation: &P);\n\n\n\n fn set_from_file<P: AsRef<std::path::Path>>(&self, filename: P);\n\n\n", "file_path": "src/auto/image.rs", "rank": 84, "score": 156670.24458993023 }, { "content": "pub trait DialogExtManual {\n\n fn add_buttons(&self, buttons: &[(&str, i32)]);\n\n}\n\n\n\nimpl<O: DialogExt> DialogExtManual for O {\n\n fn add_buttons(&self, buttons: &[(&str, i32)]) {\n\n for &(text, id) in buttons {\n\n //FIXME: self.add_button don't work on 1.8\n\n O::add_button(self, text, id);\n\n }\n\n }\n\n}\n", "file_path": "src/dialog.rs", "rank": 85, "score": 156670.24458993023 }, { "content": "pub trait PanedExt {\n\n fn add1<P: IsA<Widget>>(&self, child: &P);\n\n\n\n fn add2<P: IsA<Widget>>(&self, child: &P);\n\n\n\n fn get_child1(&self) -> Option<Widget>;\n\n\n\n fn get_child2(&self) -> Option<Widget>;\n\n\n\n fn get_handle_window(&self) -> Option<gdk::Window>;\n\n\n\n fn get_position(&self) -> i32;\n\n\n\n #[cfg(any(feature = \"v3_16\", feature = \"dox\"))]\n\n fn get_wide_handle(&self) -> bool;\n\n\n\n fn pack1<P: IsA<Widget>>(&self, child: &P, resize: bool, shrink: bool);\n\n\n\n fn pack2<P: IsA<Widget>>(&self, child: &P, resize: bool, shrink: bool);\n\n\n", "file_path": "src/auto/paned.rs", "rank": 86, "score": 156670.24458993023 }, { "content": "pub trait AssistantExtManual {\n\n fn set_forward_page_func<F: Fn(i32) -> i32 + 'static>(&self, f: F);\n\n}\n\n\n\nimpl<O: IsA<Assistant>> AssistantExtManual for O {\n\n fn set_forward_page_func<F: Fn(i32) -> i32 + 'static>(&self, f: F) {\n\n unsafe {\n\n let f: Box_<Box_<Fn(i32) -> i32 + 'static>> = Box_::new(Box_::new(f));\n\n ffi::gtk_assistant_set_forward_page_func(self.to_glib_none().0,\n\n Some(forward_page_trampoline), Box_::into_raw(f) as *mut _, Some(destroy_closure))\n\n }\n\n }\n\n}\n\n\n\nunsafe extern \"C\" fn forward_page_trampoline(current_page: i32, f: glib_ffi::gpointer) -> i32 {\n\n let f: &&(Fn(i32) -> i32 + 'static) = transmute(f);\n\n f(current_page)\n\n}\n\n\n\nunsafe extern \"C\" fn destroy_closure(ptr: glib_ffi::gpointer) {\n\n Box_::<Box_<Fn(i32) -> i32 + 'static>>::from_raw(ptr as *mut _);\n\n}\n", "file_path": "src/assistant.rs", "rank": 87, "score": 156670.24458993023 }, { "content": "pub trait SettingsExt {\n\n #[cfg(any(feature = \"v3_20\", feature = \"dox\"))]\n\n fn reset_property(&self, name: &str);\n\n\n\n #[cfg_attr(feature = \"v3_16\", deprecated)]\n\n fn set_double_property(&self, name: &str, v_double: f64, origin: &str);\n\n\n\n 
#[cfg_attr(feature = \"v3_16\", deprecated)]\n\n fn set_long_property(&self, name: &str, v_long: libc::c_long, origin: &str);\n\n\n\n //#[cfg_attr(feature = \"v3_16\", deprecated)]\n\n //fn set_property_value(&self, name: &str, svalue: /*Ignored*/&SettingsValue);\n\n\n\n #[cfg_attr(feature = \"v3_16\", deprecated)]\n\n fn set_string_property(&self, name: &str, v_string: &str, origin: &str);\n\n\n\n //#[cfg_attr(feature = \"v3_8\", deprecated)]\n\n //fn get_property_color_hash(&self) -> /*Unimplemented*/HashTable TypeId { ns_id: 0, id: 28 }/TypeId { ns_id: 10, id: 9 };\n\n\n\n fn get_property_gtk_alternative_button_order(&self) -> bool;\n", "file_path": "src/auto/settings.rs", "rank": 88, "score": 156670.24458993023 }, { "content": "pub trait ArrowExt {\n\n #[cfg_attr(feature = \"v3_14\", deprecated)]\n\n fn set(&self, arrow_type: ArrowType, shadow_type: ShadowType);\n\n\n\n fn get_property_arrow_type(&self) -> ArrowType;\n\n\n\n fn set_property_arrow_type(&self, arrow_type: ArrowType);\n\n\n\n fn get_property_shadow_type(&self) -> ShadowType;\n\n\n\n fn set_property_shadow_type(&self, shadow_type: ShadowType);\n\n\n\n fn connect_property_arrow_type_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_shadow_type_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Arrow> + IsA<glib::object::Object>> ArrowExt for O {\n\n fn set(&self, arrow_type: ArrowType, shadow_type: ShadowType) {\n\n unsafe {\n", "file_path": "src/auto/arrow.rs", "rank": 89, "score": 156670.24458993023 }, { "content": "pub trait ActionableExt {\n\n fn get_action_name(&self) -> Option<String>;\n\n\n\n fn get_action_target_value(&self) -> Option<glib::Variant>;\n\n\n\n fn set_action_name<'a, P: Into<Option<&'a str>>>(&self, action_name: P);\n\n\n\n //fn set_action_target(&self, format_string: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n fn set_action_target_value(&self, target_value: &glib::Variant);\n\n\n\n fn set_detailed_action_name(&self, detailed_action_name: &str);\n\n\n\n fn connect_property_action_name_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Actionable> + IsA<glib::object::Object>> ActionableExt for O {\n\n fn get_action_name(&self) -> Option<String> {\n\n unsafe {\n\n from_glib_none(ffi::gtk_actionable_get_action_name(self.to_glib_none().0))\n", "file_path": "src/auto/actionable.rs", "rank": 90, "score": 156670.24458993023 }, { "content": "pub trait ViewportExt {\n\n fn get_bin_window(&self) -> Option<gdk::Window>;\n\n\n\n fn get_shadow_type(&self) -> ShadowType;\n\n\n\n fn get_view_window(&self) -> Option<gdk::Window>;\n\n\n\n fn set_shadow_type(&self, type_: ShadowType);\n\n\n\n fn connect_property_shadow_type_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Viewport> + IsA<glib::object::Object>> ViewportExt for O {\n\n fn get_bin_window(&self) -> Option<gdk::Window> {\n\n unsafe {\n\n from_glib_none(ffi::gtk_viewport_get_bin_window(self.to_glib_none().0))\n\n }\n\n }\n\n\n\n fn get_shadow_type(&self) -> ShadowType {\n", "file_path": "src/auto/viewport.rs", "rank": 91, "score": 156670.24458993023 }, { "content": "pub trait RangeExt {\n\n fn get_adjustment(&self) -> Adjustment;\n\n\n\n fn get_fill_level(&self) -> f64;\n\n\n\n fn get_flippable(&self) -> bool;\n\n\n\n fn get_inverted(&self) -> bool;\n\n\n\n fn get_lower_stepper_sensitivity(&self) -> SensitivityType;\n\n\n\n #[cfg_attr(feature = \"v3_20\", deprecated)]\n\n 
fn get_min_slider_size(&self) -> i32;\n\n\n\n fn get_range_rect(&self) -> gdk::Rectangle;\n\n\n\n fn get_restrict_to_fill_level(&self) -> bool;\n\n\n\n fn get_round_digits(&self) -> i32;\n\n\n", "file_path": "src/auto/range.rs", "rank": 92, "score": 156670.24458993023 }, { "content": "pub trait ToolbarExt {\n\n fn get_drop_index(&self, x: i32, y: i32) -> i32;\n\n\n\n fn get_icon_size(&self) -> IconSize;\n\n\n\n fn get_item_index<P: IsA<ToolItem>>(&self, item: &P) -> i32;\n\n\n\n fn get_n_items(&self) -> i32;\n\n\n\n fn get_nth_item(&self, n: i32) -> Option<ToolItem>;\n\n\n\n fn get_show_arrow(&self) -> bool;\n\n\n\n fn insert<P: IsA<ToolItem>>(&self, item: &P, pos: i32);\n\n\n\n fn set_drop_highlight_item<'a, P: IsA<ToolItem> + 'a, Q: Into<Option<&'a P>>>(&self, tool_item: Q, index_: i32);\n\n\n\n fn set_icon_size(&self, icon_size: IconSize);\n\n\n\n fn set_show_arrow(&self, show_arrow: bool);\n", "file_path": "src/auto/toolbar.rs", "rank": 93, "score": 156670.24458993023 }, { "content": "pub trait OverlayExt {\n\n fn add_overlay<P: IsA<Widget>>(&self, widget: &P);\n\n\n\n #[cfg(any(feature = \"v3_18\", feature = \"dox\"))]\n\n fn get_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P) -> bool;\n\n\n\n #[cfg(any(feature = \"v3_18\", feature = \"dox\"))]\n\n fn reorder_overlay<P: IsA<Widget>>(&self, child: &P, position: i32);\n\n\n\n #[cfg(any(feature = \"v3_18\", feature = \"dox\"))]\n\n fn set_overlay_pass_through<P: IsA<Widget>>(&self, widget: &P, pass_through: bool);\n\n\n\n fn get_child_index<T: IsA<Widget>>(&self, item: &T) -> i32;\n\n\n\n fn set_child_index<T: IsA<Widget>>(&self, item: &T, index: i32);\n\n\n\n //fn connect_get_child_position<Unsupported or ignored types>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Overlay> + IsA<Container>> OverlayExt for O {\n", "file_path": "src/auto/overlay.rs", "rank": 94, "score": 156670.24458993023 }, { "content": "pub trait CalendarExt {\n\n fn clear_marks(&self);\n\n\n\n fn get_date(&self) -> (u32, u32, u32);\n\n\n\n fn get_day_is_marked(&self, day: u32) -> bool;\n\n\n\n fn get_detail_height_rows(&self) -> i32;\n\n\n\n fn get_detail_width_chars(&self) -> i32;\n\n\n\n fn get_display_options(&self) -> CalendarDisplayOptions;\n\n\n\n fn mark_day(&self, day: u32);\n\n\n\n fn select_day(&self, day: u32);\n\n\n\n fn select_month(&self, month: u32, year: u32);\n\n\n\n //fn set_detail_func<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, func: /*Unknown conversion*//*Unimplemented*/CalendarDetailFunc, data: P, destroy: /*Unknown conversion*//*Unimplemented*/DestroyNotify);\n", "file_path": "src/auto/calendar.rs", "rank": 95, "score": 156670.24458993023 }, { "content": "pub trait BoxExt {\n\n #[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n\n fn get_baseline_position(&self) -> BaselinePosition;\n\n\n\n #[cfg(any(feature = \"v3_12\", feature = \"dox\"))]\n\n fn get_center_widget(&self) -> Option<Widget>;\n\n\n\n fn get_homogeneous(&self) -> bool;\n\n\n\n fn get_spacing(&self) -> i32;\n\n\n\n fn pack_end<P: IsA<Widget>>(&self, child: &P, expand: bool, fill: bool, padding: u32);\n\n\n\n fn pack_start<P: IsA<Widget>>(&self, child: &P, expand: bool, fill: bool, padding: u32);\n\n\n\n fn query_child_packing<P: IsA<Widget>>(&self, child: &P) -> (bool, bool, u32, PackType);\n\n\n\n fn reorder_child<P: IsA<Widget>>(&self, child: &P, position: i32);\n\n\n\n #[cfg(any(feature = \"v3_10\", feature = \"dox\"))]\n", "file_path": "src/auto/box_.rs", "rank": 96, "score": 156670.24458993023 }, { "content": "pub trait LabelExt {\n\n 
fn get_angle(&self) -> f64;\n\n\n\n fn get_attributes(&self) -> Option<pango::AttrList>;\n\n\n\n fn get_current_uri(&self) -> Option<String>;\n\n\n\n fn get_ellipsize(&self) -> pango::EllipsizeMode;\n\n\n\n fn get_justify(&self) -> Justification;\n\n\n\n fn get_label(&self) -> Option<String>;\n\n\n\n fn get_layout(&self) -> Option<pango::Layout>;\n\n\n\n fn get_layout_offsets(&self) -> (i32, i32);\n\n\n\n fn get_line_wrap(&self) -> bool;\n\n\n\n fn get_line_wrap_mode(&self) -> pango::WrapMode;\n", "file_path": "src/auto/label.rs", "rank": 97, "score": 156670.24458993023 }, { "content": "pub trait ClipboardExt {\n\n fn clear(&self);\n\n\n\n fn get_display(&self) -> Option<gdk::Display>;\n\n\n\n fn get_owner(&self) -> Option<glib::Object>;\n\n\n\n #[cfg(any(feature = \"v3_22\", feature = \"dox\"))]\n\n fn get_selection(&self) -> Option<gdk::Atom>;\n\n\n\n //fn request_contents<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, target: &gdk::Atom, callback: /*Unknown conversion*//*Unimplemented*/ClipboardReceivedFunc, user_data: P);\n\n\n\n //fn request_image<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, callback: /*Unknown conversion*//*Unimplemented*/ClipboardImageReceivedFunc, user_data: P);\n\n\n\n //fn request_rich_text<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, buffer: &TextBuffer, callback: /*Unknown conversion*//*Unimplemented*/ClipboardRichTextReceivedFunc, user_data: P);\n\n\n\n //fn request_targets<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, callback: /*Unknown conversion*//*Unimplemented*/ClipboardTargetsReceivedFunc, user_data: P);\n\n\n\n //fn request_text<P: Into<Option</*Unimplemented*/Fundamental: Pointer>>>(&self, callback: /*Unknown conversion*//*Unimplemented*/ClipboardTextReceivedFunc, user_data: P);\n\n\n", "file_path": "src/auto/clipboard.rs", "rank": 98, "score": 156670.24458993023 }, { "content": "pub trait StatusbarExt {\n\n fn get_context_id(&self, context_description: &str) -> u32;\n\n\n\n fn get_message_area(&self) -> Option<Box>;\n\n\n\n fn pop(&self, context_id: u32);\n\n\n\n fn push(&self, context_id: u32, text: &str) -> u32;\n\n\n\n fn remove(&self, context_id: u32, message_id: u32);\n\n\n\n fn remove_all(&self, context_id: u32);\n\n\n\n fn connect_text_popped<F: Fn(&Self, u32, &str) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_text_pushed<F: Fn(&Self, u32, &str) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Statusbar> + IsA<glib::object::Object>> StatusbarExt for O {\n\n fn get_context_id(&self, context_description: &str) -> u32 {\n", "file_path": "src/auto/statusbar.rs", "rank": 99, "score": 156670.24458993023 } ]
Rust
src/arena/views/character_overlay.rs
chamons/ArenaGS
0d3c8d4ebc818198b21a8c99dc853286cc16b7c2
use sdl2::pixels::Color; use sdl2::rect::Point as SDLPoint; use sdl2::rect::Rect as SDLRect; use specs::prelude::*; use super::TILE_SIZE; use crate::after_image::prelude::*; use crate::atlas::prelude::*; use crate::clash::{ShortInfo, StatusInfo, StatusKind}; use crate::props::LifeBar; pub struct CharacterOverlay { cache: IconCache, lifebar: LifeBar, } enum OverlayStatus { Burning, Frozen, Static, Aimed, Armored, Regen, } impl OverlayStatus { fn get_file_name(&self) -> &'static str { match self { OverlayStatus::Burning => "fire.png", OverlayStatus::Frozen => "ice.png", OverlayStatus::Static => "shock.png", OverlayStatus::Aimed => "aimed.png", OverlayStatus::Armored => "armor.png", OverlayStatus::Regen => "regen.png", } } } impl CharacterOverlay { pub fn init(render_context: &RenderContext) -> BoxResult<CharacterOverlay> { Ok(CharacterOverlay { cache: IconCache::init_with_alpha( render_context, IconLoader::init_overlay_icons(), &[ "small_frame.png", "large_frame.png", "fire.png", "ice.png", "shock.png", "regen.png", "aimed.png", "armor.png", ], Some(212), )?, lifebar: LifeBar::init(render_context)?, }) } fn get_overlay_statuses(&self, ecs: &World, entity: Entity) -> Vec<OverlayStatus> { let mut status = vec![]; let temperature = ecs.get_temperature(entity); if temperature.is_burning() { status.push(OverlayStatus::Burning); } if temperature.is_freezing() { status.push(OverlayStatus::Frozen); } if ecs.has_status(entity, StatusKind::StaticCharge) { status.push(OverlayStatus::Static); } if ecs.has_status(entity, StatusKind::Armored) { status.push(OverlayStatus::Armored); } if ecs.has_status(entity, StatusKind::Regen) { status.push(OverlayStatus::Regen); } if ecs.has_status(entity, StatusKind::Aimed) { status.push(OverlayStatus::Aimed); } status } pub fn draw_character_overlay(&self, canvas: &mut RenderCanvas, ecs: &World, entity: Entity, screen_position: SDLPoint) -> BoxResult<()> { let size = { let position = ecs.get_position(entity); if position.width == 1 && position.height == 1 { 1 } else if position.width == 2 && position.height == 2 { 2 } else { panic!(); } }; let life_size = { match size { 1 => TILE_SIZE - 5, 2 => 2 * (TILE_SIZE - 5), _ => panic!("Unknown lifebar size"), } }; let lifebar_rect = SDLRect::new( screen_position.x() - (life_size as i32 / 2) + 2, screen_position.y() + ((4 * TILE_SIZE as i32) / 5) + 2, life_size - 4, 6, ); let defenses = ecs.get_defenses(entity); let health = defenses.health as f64 / defenses.max_health as f64; let absorb = f64::min(defenses.absorb as f64 / defenses.max_health as f64, 1.0); self.lifebar.render(lifebar_rect, canvas, health, absorb)?; canvas.set_draw_color(Color::RGBA(0, 0, 0, 128)); for (i, status) in self.get_overlay_statuses(ecs, entity).iter().enumerate().take(if size == 1 { 2 } else { 4 }) { let offset = { match size { 1 => SDLPoint::new(-17, 22), 2 => SDLPoint::new(-38, 22), _ => panic!("Unknown overlay width"), } }; let status_start = SDLPoint::new(screen_position.x() + offset.x() + (i as i32 * 18), screen_position.y() + offset.y()); canvas.fill_rect(SDLRect::new(status_start.x(), status_start.y(), 17, 17))?; canvas.copy( &self.cache.get(status.get_file_name()), None, SDLRect::new(status_start.x(), status_start.y(), 16, 16), )?; } match size { 1 => self.draw_small_bracket(canvas, screen_position)?, 2 => self.draw_large_bracket(canvas, screen_position)?, _ => panic!("Unknown bracket size"), } Ok(()) } fn draw_large_bracket(&self, canvas: &mut RenderCanvas, screen_position: SDLPoint) -> BoxResult<()> { let image_rect = SDLRect::new(0, 0, 
TILE_SIZE * 2, TILE_SIZE * 2); let screen_rect = SDLRect::new( screen_position.x() - TILE_SIZE as i32, screen_position.y() - TILE_SIZE as i32, TILE_SIZE * 2, TILE_SIZE * 2, ); canvas.copy(self.cache.get("large_frame.png"), image_rect, screen_rect)?; Ok(()) } fn draw_small_bracket(&self, canvas: &mut RenderCanvas, screen_position: SDLPoint) -> BoxResult<()> { let image_rect = SDLRect::new(0, 0, TILE_SIZE, TILE_SIZE); let screen_rect = SDLRect::new(screen_position.x() - (TILE_SIZE as i32 / 2), screen_position.y(), TILE_SIZE, TILE_SIZE); canvas.copy(self.cache.get("small_frame.png"), image_rect, screen_rect)?; Ok(()) } }
use sdl2::pixels::Color; use sdl2::rect::Point as SDLPoint; use sdl2::rect::Rect as SDLRect; use specs::prelude::*; use super::TILE_SIZE; use crate::after_image::prelude::*; use crate::atlas::prelude::*; use crate::clash::{ShortInfo, StatusInfo, StatusKind}; use crate::props::LifeBar; pub struct CharacterOverlay { cache: IconCache, lifebar: LifeBar, } enum OverlayStatus { Burning, Frozen, Static, Aimed, Armored, Regen, } impl OverlayStatus { fn get_file_name(&self) -> &'static str { match self { OverlayStatus::Burning => "fire.png", OverlayStatus::Frozen => "ice.png", OverlayStatus::Static => "shock.png", OverlayStatus::Aimed => "aimed.png", OverlayStatus::Armored => "armor.png", OverlayStatus::Regen => "regen.png", } } } impl CharacterOverlay { pub fn init(render_context: &RenderContext) -> BoxResult<CharacterOverlay> {
} fn get_overlay_statuses(&self, ecs: &World, entity: Entity) -> Vec<OverlayStatus> { let mut status = vec![]; let temperature = ecs.get_temperature(entity); if temperature.is_burning() { status.push(OverlayStatus::Burning); } if temperature.is_freezing() { status.push(OverlayStatus::Frozen); } if ecs.has_status(entity, StatusKind::StaticCharge) { status.push(OverlayStatus::Static); } if ecs.has_status(entity, StatusKind::Armored) { status.push(OverlayStatus::Armored); } if ecs.has_status(entity, StatusKind::Regen) { status.push(OverlayStatus::Regen); } if ecs.has_status(entity, StatusKind::Aimed) { status.push(OverlayStatus::Aimed); } status } pub fn draw_character_overlay(&self, canvas: &mut RenderCanvas, ecs: &World, entity: Entity, screen_position: SDLPoint) -> BoxResult<()> { let size = { let position = ecs.get_position(entity); if position.width == 1 && position.height == 1 { 1 } else if position.width == 2 && position.height == 2 { 2 } else { panic!(); } }; let life_size = { match size { 1 => TILE_SIZE - 5, 2 => 2 * (TILE_SIZE - 5), _ => panic!("Unknown lifebar size"), } }; let lifebar_rect = SDLRect::new( screen_position.x() - (life_size as i32 / 2) + 2, screen_position.y() + ((4 * TILE_SIZE as i32) / 5) + 2, life_size - 4, 6, ); let defenses = ecs.get_defenses(entity); let health = defenses.health as f64 / defenses.max_health as f64; let absorb = f64::min(defenses.absorb as f64 / defenses.max_health as f64, 1.0); self.lifebar.render(lifebar_rect, canvas, health, absorb)?; canvas.set_draw_color(Color::RGBA(0, 0, 0, 128)); for (i, status) in self.get_overlay_statuses(ecs, entity).iter().enumerate().take(if size == 1 { 2 } else { 4 }) { let offset = { match size { 1 => SDLPoint::new(-17, 22), 2 => SDLPoint::new(-38, 22), _ => panic!("Unknown overlay width"), } }; let status_start = SDLPoint::new(screen_position.x() + offset.x() + (i as i32 * 18), screen_position.y() + offset.y()); canvas.fill_rect(SDLRect::new(status_start.x(), status_start.y(), 17, 17))?; canvas.copy( &self.cache.get(status.get_file_name()), None, SDLRect::new(status_start.x(), status_start.y(), 16, 16), )?; } match size { 1 => self.draw_small_bracket(canvas, screen_position)?, 2 => self.draw_large_bracket(canvas, screen_position)?, _ => panic!("Unknown bracket size"), } Ok(()) } fn draw_large_bracket(&self, canvas: &mut RenderCanvas, screen_position: SDLPoint) -> BoxResult<()> { let image_rect = SDLRect::new(0, 0, TILE_SIZE * 2, TILE_SIZE * 2); let screen_rect = SDLRect::new( screen_position.x() - TILE_SIZE as i32, screen_position.y() - TILE_SIZE as i32, TILE_SIZE * 2, TILE_SIZE * 2, ); canvas.copy(self.cache.get("large_frame.png"), image_rect, screen_rect)?; Ok(()) } fn draw_small_bracket(&self, canvas: &mut RenderCanvas, screen_position: SDLPoint) -> BoxResult<()> { let image_rect = SDLRect::new(0, 0, TILE_SIZE, TILE_SIZE); let screen_rect = SDLRect::new(screen_position.x() - (TILE_SIZE as i32 / 2), screen_position.y(), TILE_SIZE, TILE_SIZE); canvas.copy(self.cache.get("small_frame.png"), image_rect, screen_rect)?; Ok(()) } }
Ok(CharacterOverlay {
    cache: IconCache::init_with_alpha(
        render_context,
        IconLoader::init_overlay_icons(),
        &[
            "small_frame.png",
            "large_frame.png",
            "fire.png",
            "ice.png",
            "shock.png",
            "regen.png",
            "aimed.png",
            "armor.png",
        ],
        Some(212),
    )?,
    lifebar: LifeBar::init(render_context)?,
})
call_expression
[ { "content": "pub fn get_elemental_summon_to_use(ecs: &World) -> &'static str {\n\n let mut elements = vec![ElementalKind::Water, ElementalKind::Fire, ElementalKind::Wind, ElementalKind::Earth];\n\n\n\n for e in find_all_characters(ecs).iter().filter(|&&c| is_elemental(ecs, c)) {\n\n match get_elemental_kind(ecs, *e) {\n\n Some(ElementalKind::Water) => {\n\n elements.swap_remove(elements.iter().position(|x| *x == ElementalKind::Water).unwrap());\n\n }\n\n Some(ElementalKind::Fire) => {\n\n elements.swap_remove(elements.iter().position(|x| *x == ElementalKind::Fire).unwrap());\n\n }\n\n Some(ElementalKind::Wind) => {\n\n elements.swap_remove(elements.iter().position(|x| *x == ElementalKind::Wind).unwrap());\n\n }\n\n Some(ElementalKind::Earth) => {\n\n elements.swap_remove(elements.iter().position(|x| *x == ElementalKind::Earth).unwrap());\n\n }\n\n _ => panic!(\"Unexpected item in get_elemental_summon_to_use\"),\n\n }\n\n }\n\n elements.shuffle(&mut ecs.fetch_mut::<RandomComponent>().rand);\n\n match elements[0] {\n\n ElementalKind::Water => \"Summon Elemental (Water)\",\n\n ElementalKind::Fire => \"Summon Elemental (Fire)\",\n\n ElementalKind::Wind => \"Summon Elemental (Wind)\",\n\n ElementalKind::Earth => \"Summon Elemental (Earth)\",\n\n }\n\n}\n\n\n", "file_path": "src/clash/content/elementalist.rs", "rank": 0, "score": 332892.0108660596 }, { "content": "pub fn get_image_for_status(kind: StatusKind) -> &'static str {\n\n match kind {\n\n StatusKind::Ignite => \"b_31_1.png\",\n\n StatusKind::Cyclone => \"b_40_02.png\",\n\n StatusKind::Magnum => \"b_30.png\",\n\n _ => panic!(\"Unknown status {:?} in get_image_for_status\", kind),\n\n }\n\n}\n\n\n", "file_path": "src/clash/content/gunslinger.rs", "rank": 1, "score": 331083.6570420156 }, { "content": "pub fn get_icon_name_for_status(kind: StatusKind) -> &'static str {\n\n match kind {\n\n StatusKind::Burning => \"SpellBook08_130.png\",\n\n StatusKind::Frozen => \"SpellBook08_111.png\",\n\n StatusKind::StaticCharge => \"SpellBook06_89.png\",\n\n StatusKind::Aimed => \"SpellBook08_83.png\",\n\n StatusKind::Armored => \"SpellBook08_122.png\",\n\n StatusKind::Regen => \"SpellBook08_73.png\",\n\n StatusKind::Ignite | StatusKind::Cyclone | StatusKind::Magnum => crate::clash::content::gunslinger::get_image_for_status(kind),\n\n StatusKind::Flying | StatusKind::RegenTick => \"\",\n\n #[cfg(test)]\n\n _ => \"\",\n\n }\n\n}\n\n\n", "file_path": "src/arena/views/status_display.rs", "rank": 2, "score": 325906.5200073358 }, { "content": "pub fn all_icon_filenames() -> &'static [&'static str] {\n\n &[\n\n \"SpellBook08_130.png\",\n\n \"SpellBook08_111.png\",\n\n \"b_31_1.png\",\n\n \"b_40_02.png\",\n\n \"b_30.png\",\n\n \"SpellBook06_89.png\",\n\n \"SpellBook08_83.png\",\n\n \"SpellBook08_122.png\",\n\n \"SpellBook08_73.png\",\n\n ]\n\n}\n", "file_path": "src/arena/views/status_display.rs", "rank": 3, "score": 319170.8672649065 }, { "content": "pub fn summarize_character<'a>(ecs: &'a World, entity: Entity, show_status_effect: bool, use_links: bool, mut on_text: impl FnMut(&str) + 'a) {\n\n let defense_components = &ecs.read_storage::<DefenseComponent>();\n\n let defence_component = defense_components.grab(entity);\n\n let defenses = &defence_component.defenses;\n\n\n\n let linkify = |s: &str| -> String {\n\n if use_links {\n\n format!(\"[[{}]]\", s)\n\n } else {\n\n s.to_string()\n\n }\n\n };\n\n\n\n let health_text = {\n\n if defenses.absorb != 0 {\n\n format!(\n\n \"{}: (+{:.2}) {:.2}/{:.2}\",\n\n linkify(\"Health\"),\n\n defenses.absorb,\n\n 
defenses.health,\n", "file_path": "src/clash/help.rs", "rank": 4, "score": 288839.17047121585 }, { "content": "pub fn draw_selection_frame(canvas: &mut RenderCanvas, icons: &IconCache, mut selection_frame: SDLRect, image: &str) -> BoxResult<()> {\n\n selection_frame.offset(-2, -2);\n\n selection_frame.set_width(selection_frame.width() + 4);\n\n selection_frame.set_height(selection_frame.height() + 4);\n\n canvas.copy(icons.get(image), None, selection_frame)?;\n\n\n\n Ok(())\n\n}\n\n\n\nimpl Scene for RewardScene {\n\n fn handle_key(&mut self, _keycode: Keycode, _keymod: Mod) {}\n\n\n\n fn handle_mouse_click(&mut self, x: i32, y: i32, button: Option<MouseButton>) {\n\n for (i, c) in &mut self.cards.iter_mut().enumerate() {\n\n c.handle_mouse_click(&mut self.ecs, x, y, button);\n\n if c.grabbed.is_some() {\n\n c.grabbed = None;\n\n *self.selection.borrow_mut() = Some(i as u32);\n\n }\n\n }\n", "file_path": "src/intermission/reward_scene.rs", "rank": 5, "score": 250508.56056867537 }, { "content": "pub fn use_skill(ecs: &mut World, enemy: Entity, skill_name: &str) -> bool {\n\n use_skill_core(ecs, enemy, skill_name, None)\n\n}\n\n\n", "file_path": "src/clash/ai.rs", "rank": 6, "score": 241200.18077568855 }, { "content": "pub fn load_image(path: &str, render_context: &RenderContext) -> BoxResult<Texture> {\n\n let dest_path = Path::new(&get_exe_folder()).join(path);\n\n\n\n let texture_creator = render_context.canvas.texture_creator();\n\n Ok(texture_creator.load_texture(dest_path)?)\n\n}\n", "file_path": "src/after_image/image_loader.rs", "rank": 7, "score": 240828.96215921556 }, { "content": "pub fn player_use_skill(ecs: &mut World, name: &str, target: Option<Point>) -> bool {\n\n if !can_act(ecs) {\n\n return false;\n\n }\n\n\n\n let player = find_player(ecs);\n\n invoke_skill(ecs, player, name, target);\n\n true\n\n}\n\n\n", "file_path": "src/clash/actions.rs", "rank": 8, "score": 237085.9801610653 }, { "content": "pub fn use_skill_at_any_enemy_if_in_range(ecs: &mut World, entity: Entity, skill_name: &str) -> bool {\n\n let current_position = ecs.get_position(entity);\n\n let skill_range = ecs.get_skill(skill_name).range.unwrap();\n\n\n\n let mut targets = vec![];\n\n for e in find_enemies_of(ecs, entity) {\n\n if let Some(enemy_position) = ecs.find_position(e) {\n\n if let Some((_, target_point, distance)) = current_position.distance_to_multi_with_endpoints(enemy_position) {\n\n if distance <= skill_range && can_invoke_skill(ecs, entity, skill_name, Some(target_point)) {\n\n targets.push((distance, target_point));\n\n }\n\n }\n\n }\n\n }\n\n if let Some(target) = targets.iter().min_by(|a, b| a.0.cmp(&b.0)) {\n\n invoke_skill(ecs, entity, skill_name, Some(target.1));\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "src/clash/ai.rs", "rank": 9, "score": 236350.3280756406 }, { "content": "fn top_level_topics() -> Vec<&'static str> {\n\n vec![\n\n \"Getting Started\",\n\n \"Damage & Defenses\",\n\n \"Equipment\",\n\n \"Influence\",\n\n \"Gunslinger\",\n\n \"Resources\",\n\n \"Status Effects\",\n\n \"Temperature\",\n\n \"Time\",\n\n ]\n\n}\n\n\n", "file_path": "src/clash/help.rs", "rank": 10, "score": 235506.42615090095 }, { "content": "pub fn use_skill_at_position(ecs: &mut World, enemy: Entity, skill_name: &str, target_point: Point) -> bool {\n\n use_skill_core(ecs, enemy, skill_name, Some(target_point))\n\n}\n\n\n", "file_path": "src/clash/ai.rs", "rank": 11, "score": 228557.56094668887 }, { "content": "pub fn use_skill_with_random_target_near_player(ecs: &mut World, enemy: 
Entity, skill_name: &str, range: u32) -> bool {\n\n let skill = ecs.get_skill(skill_name);\n\n // Early return for lack of resources before trying many target squares\n\n if skill.is_usable(ecs, enemy) != UsableResults::Usable {\n\n return false;\n\n }\n\n\n\n let mut target = ecs.get_position(find_player(ecs));\n\n\n\n let mut range = {\n\n let random = &mut ecs.fetch_mut::<RandomComponent>().rand;\n\n random.gen_range(0, range)\n\n };\n\n\n\n // Try 20 times for a valid target\n\n for attempt in 0..20 {\n\n for _ in 0..range {\n\n let direction = get_random_direction_list(ecs)[0];\n\n if let Some(t) = direction.sized_point_in_direction(&target) {\n\n target = t;\n", "file_path": "src/clash/ai.rs", "rank": 12, "score": 224274.86177605262 }, { "content": "// Some skills have an alternate when not usable (such as reload)\n\npub fn get_current_skill_on_skillbar(ecs: &World, skill_name: &str) -> String {\n\n let skill = ecs.get_skill(skill_name);\n\n\n\n match skill.is_usable(ecs, find_player(&ecs)) {\n\n UsableResults::LacksAmmo if skill.alternate.is_some() => skill.alternate.as_ref().unwrap().to_string(),\n\n _ => skill_name.to_string(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn skill_hotkey_mapping() {\n\n assert_eq!(1, skill_index_to_hotkey(0));\n\n assert_eq!(2, skill_index_to_hotkey(1));\n\n assert_eq!(9, skill_index_to_hotkey(8));\n\n assert_eq!(0, skill_index_to_hotkey(9));\n\n }\n", "file_path": "src/props/skillbar.rs", "rank": 13, "score": 209243.37253890227 }, { "content": "pub fn get_tree_icons(render_context: &RenderContext, tree: &SkillTree) -> BoxResult<IconCache> {\n\n let tree_icons = tree.icons();\n\n Ok(IconCache::init(&render_context, IconLoader::init_icons(), &tree_icons[..])?)\n\n}\n\n\n\nimpl ProfessionTreeView {\n\n pub fn init(render_context: &RenderContext, text_renderer: &Rc<TextRenderer>, ecs: &World) -> BoxResult<ProfessionTreeView> {\n\n let tree = Rc::new(SkillTree::init(&get_tree(ecs)));\n\n\n\n let selection: Rc<RefCell<Option<String>>> = Rc::new(RefCell::new(None));\n\n let accept_button = Button::text(\n\n SDLPoint::new(800, 650),\n\n \"Purchase\",\n\n &render_context,\n\n text_renderer,\n\n ButtonDelegate::init()\n\n .enabled(Box::new(enclose! 
{ (selection, tree) move |ecs| {\n\n let selection = selection.borrow_mut();\n\n if let Some(selection) = (*selection).as_ref() {\n\n if ProfessionTreeView::can_apply_selection(ecs, &tree, &selection) {\n", "file_path": "src/intermission/profession_tree.rs", "rank": 14, "score": 199058.09956826665 }, { "content": "pub fn icons_for_items(render_context: &RenderContext, items: &[EquipmentItem]) -> BoxResult<Rc<IconCache>> {\n\n let icons: Vec<&String> = items.iter().flat_map(|i| &i.image).collect();\n\n Ok(Rc::new(IconCache::init(&render_context, IconLoader::init_icons(), &icons[..])?))\n\n}\n\n\n\nimpl RewardScene {\n\n pub fn init(render_context_holder: &RenderContextHolder, text_renderer: &Rc<TextRenderer>, ecs: World) -> BoxResult<RewardScene> {\n\n let render_context = render_context_holder.borrow();\n\n\n\n let reward = get_reward(&ecs);\n\n let mut items: Vec<EquipmentItem> = {\n\n let equipment = &ecs.read_resource::<EquipmentResource>();\n\n reward.cards.iter().map(|c| equipment.get(&c)).collect()\n\n };\n\n\n\n let icons = icons_for_items(&render_context, &items)?;\n\n let ui = Rc::new(IconCache::init(\n\n &render_context,\n\n IconLoader::init_ui(),\n\n &[\n", "file_path": "src/intermission/reward_scene.rs", "rank": 15, "score": 197264.14861059323 }, { "content": "pub fn invoke_skill(ecs: &mut World, invoker: Entity, name: &str, target: Option<Point>) {\n\n assert_correct_targeting(ecs, invoker, name, target);\n\n let skill = ecs.get_skill(name);\n\n assert!(can_invoke_skill(ecs, invoker, name, target));\n\n\n\n if let Some(invoker_name) = ecs.get_name(invoker) {\n\n ecs.log(format!(\"{} used [[{}]]\", invoker_name.as_str(), name));\n\n }\n\n\n\n if !skill.no_time {\n\n spend_time(ecs, invoker, BASE_ACTION_COST);\n\n }\n\n spend_ammo(ecs, invoker, &skill);\n\n\n\n if let Some(exhaustion) = skill.exhaustion {\n\n spend_exhaustion(ecs, invoker, exhaustion);\n\n }\n\n if let Some(focus_use) = skill.focus_use {\n\n spend_focus(ecs, invoker, focus_use);\n\n }\n\n if skill.cooldown.is_some() {\n\n spend_cooldown(ecs, invoker, &skill);\n\n }\n\n\n\n gain_adrenaline(ecs, invoker, &skill);\n\n\n\n process_skill(ecs, invoker, &skill.effect, target, &skill.name);\n\n}\n\n\n", "file_path": "src/clash/skills.rs", "rank": 16, "score": 196917.68439434347 }, { "content": "pub fn layout_text(text: &str, font: &Font, request: LayoutRequest) -> BoxResult<LayoutResult> {\n\n let mut layout = Layout::init(request);\n\n layout.run(text, font)?;\n\n Ok(layout.results())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::font_test_helpers::*;\n\n use super::*;\n\n\n\n fn get_text(chunk: &LayoutChunkValue) -> &String {\n\n match chunk {\n\n LayoutChunkValue::String(s) => s,\n\n _ => panic!(\"Wrong type?\"),\n\n }\n\n }\n\n\n\n fn get_icon(chunk: &LayoutChunkValue) -> LayoutChunkIcon {\n\n match chunk {\n", "file_path": "src/after_image/text_layout.rs", "rank": 17, "score": 196658.24300990952 }, { "content": "pub fn can_invoke_skill(ecs: &mut World, invoker: Entity, name: &str, target: Option<Point>) -> bool {\n\n let skill = ecs.get_skill(name);\n\n let has_valid_target = target.map_or(true, |x| is_good_target(ecs, invoker, &skill, x));\n\n has_valid_target && skill.is_usable(ecs, invoker) == UsableResults::Usable\n\n}\n\n\n", "file_path": "src/clash/skills.rs", "rank": 18, "score": 191886.95898356102 }, { "content": "pub fn test_eq(name: &str, kind: EquipmentKinds, effect: &[EquipmentEffect], index: usize) -> (EquipmentKinds, EquipmentItem, usize) {\n\n (kind, EquipmentItem::init(name, None, 
kind, EquipmentRarity::Common, effect), index)\n\n}\n\n\n", "file_path": "src/clash/test_helpers.rs", "rank": 19, "score": 185502.87565315986 }, { "content": "pub fn begin_field(ecs: &mut World, source: Entity, target: Point, effect: FieldEffect, name: &str, kind: FieldKind) {\n\n ecs.shovel(\n\n source,\n\n FieldCastComponent::init(effect, name, kind, SizedPoint::from(target), is_player_or_ally(ecs, source)),\n\n );\n\n ecs.raise_event(EventKind::Field(FieldState::BeginCastAnimation), Some(source));\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 20, "score": 184125.75162604888 }, { "content": "pub fn create_damage_field(ecs: &mut World, name: &str, position: SizedPoint, attack: AttackComponent, fields: FieldComponent) -> Entity {\n\n ecs.create_entity()\n\n .with(PositionComponent::init(position))\n\n .with(NamedComponent::init(name))\n\n .with(attack)\n\n .with(BehaviorComponent::init(BehaviorKind::Explode))\n\n .with(fields)\n\n .with(TimeComponent::init(-BASE_ACTION_COST))\n\n .marked::<SimpleMarker<ToSerialize>>()\n\n .build()\n\n}\n\n\n", "file_path": "src/clash/content/spawner.rs", "rank": 21, "score": 182544.9805195231 }, { "content": "pub fn take_player_action(ecs: &mut World) {\n\n let player = find_player(ecs);\n\n for d in get_random_direction_list(ecs) {\n\n if let Some(potential) = d.point_in_direction(&ecs.get_position(player).origin) {\n\n if can_move_character(ecs, player, SizedPoint::from(potential)) {\n\n move_character_action(ecs, player, SizedPoint::from(potential));\n\n return;\n\n }\n\n }\n\n }\n\n wait(ecs, player);\n\n}\n\n\n\n#[cfg(feature = \"profile_self_play\")]\n\npub mod tests {\n\n use std::time::Instant;\n\n\n\n use crate::conductor::StageDirection;\n\n\n\n pub fn self_play_10000_games() {\n", "file_path": "src/arena/self_play.rs", "rank": 22, "score": 180437.55245284174 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn begin_orb(ecs: &mut World, source: Entity, target_position: Point, strength: Damage, kind: OrbKind, speed: u32, duration: u32, name: &str) {\n\n let source_position = ecs.get_position(source);\n\n let path = source_position.line_to(target_position).unwrap();\n\n let path = extend_line_along_path(&path, duration);\n\n ecs.shovel(source, OrbComponent::init(path, speed, duration, name));\n\n ecs.shovel(\n\n source,\n\n AttackComponent::init(target_position, strength, AttackKind::Orb(kind), Some(source_position.origin)),\n\n );\n\n ecs.raise_event(EventKind::Orb(OrbState::BeginCastAnimation), Some(source));\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 23, "score": 173048.72722478944 }, { "content": "pub fn screen_rect_for_map_grid(x: u32, y: u32) -> SDLRect {\n\n SDLRect::from((\n\n (MAP_CORNER_X + x * TILE_SIZE) as i32,\n\n (MAP_CORNER_Y + y * TILE_SIZE) as i32,\n\n TILE_SIZE as u32,\n\n TILE_SIZE as u32,\n\n ))\n\n}\n\n\n", "file_path": "src/arena/views/map.rs", "rank": 24, "score": 171335.4729490809 }, { "content": "pub fn regen_event(ecs: &mut World, kind: EventKind, target: Option<Entity>) {\n\n match kind {\n\n EventKind::StatusAdded(kind) => {\n\n if matches!(kind, StatusKind::Regen) {\n\n ecs.add_status(target.unwrap(), StatusKind::RegenTick, REGEN_DURATION);\n\n }\n\n }\n\n EventKind::StatusExpired(kind) => {\n\n if matches!(kind, StatusKind::RegenTick) {\n\n if ecs.has_status(target.unwrap(), StatusKind::Regen) {\n\n ecs.add_status(target.unwrap(), StatusKind::RegenTick, REGEN_DURATION);\n\n } else {\n\n ecs.log(format!(\"{} stops regenerating.\", ecs.get_name(target.unwrap()).unwrap()));\n\n 
}\n\n\n\n apply_healing_to_character(ecs, Strength::init(HEALTH_REGEN_PER_TICK), target.unwrap());\n\n }\n\n }\n\n _ => {}\n\n }\n", "file_path": "src/clash/damage.rs", "rank": 25, "score": 166908.84017817065 }, { "content": "fn use_skill_core(ecs: &mut World, enemy: Entity, skill_name: &str, target_point: Option<Point>) -> bool {\n\n if can_invoke_skill(ecs, enemy, skill_name, target_point) {\n\n invoke_skill(ecs, enemy, skill_name, target_point);\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/clash/ai.rs", "rank": 26, "score": 165606.58502562606 }, { "content": "fn get_skill_name_under_ammo(base_name: &str, ammo: GunslingerAmmo) -> &str {\n\n match ammo {\n\n GunslingerAmmo::Magnum => base_name,\n\n GunslingerAmmo::Ignite => match base_name {\n\n \"Snap Shot\" => \"Spark Shot\",\n\n \"Aimed Shot\" => \"Explosive Blast\",\n\n \"Triple Shot\" => \"Dragon's Breath\",\n\n \"Quick Shot\" => \"Hot Hands\",\n\n _ => panic!(\"Unknown skill template {}\", base_name),\n\n },\n\n GunslingerAmmo::Cyclone => match base_name {\n\n \"Snap Shot\" => \"Airburst Shot\",\n\n \"Aimed Shot\" => \"Air Lance\",\n\n \"Triple Shot\" => \"Tornado Shot\",\n\n \"Quick Shot\" => \"Lightning Speed\",\n\n _ => panic!(\"Unknown skill template {}\", base_name),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/clash/content/gunslinger.rs", "rank": 27, "score": 159637.51214769756 }, { "content": "fn copy_all_with_extension(src: &Path, dest: &str, extension: &str) -> Result<(), std::io::Error> {\n\n let mut created_folder = false;\n\n\n\n for entry in fs::read_dir(src)? {\n\n let entry = entry?;\n\n let path = entry.path();\n\n\n\n if path.is_dir() {\n\n copy_all_with_extension(&path, Path::new(&dest).join(path.file_name().unwrap()).stringify(), extension)?;\n\n } else if let Some(file_name) = path.file_name() {\n\n if let Some(file_extension) = path.extension() {\n\n if file_extension.stringify().to_ascii_lowercase() == extension || extension == \"*\" {\n\n let dest_file = Path::new(&dest).join(file_name);\n\n //println!(\"{}\", format!(\"cargo:rerun-if-changed={}\", path.stringify()));\n\n\n\n if !dest_file.exists() {\n\n if !created_folder {\n\n //print(format!(\"Creating {}\", dest));\n\n fs::create_dir_all(dest).expect(\"Unable to create output dir\");\n\n created_folder = true;\n", "file_path": "build.rs", "rank": 28, "score": 156818.2049696042 }, { "content": "pub fn any_ally_without_buff_in_range(ecs: &World, entity: Entity, buff: StatusKind, range: u32) -> Option<Entity> {\n\n let position = ecs.get_position(entity);\n\n find_allies_of(ecs, entity)\n\n .iter()\n\n .filter(|&&c| !ecs.has_status(c, buff))\n\n .find(|&&c| position.distance_to_multi(ecs.get_position(c)).unwrap_or(std::u32::MAX) <= range)\n\n .copied()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::*;\n\n use super::*;\n\n\n\n #[test]\n\n fn no_behavior() {\n\n let mut ecs = create_test_state().with_character(2, 2, 100).build();\n\n let character = find_at(&ecs, 2, 2);\n\n ecs.shovel(character, BehaviorComponent::init(BehaviorKind::None));\n\n\n", "file_path": "src/clash/ai.rs", "rank": 29, "score": 155540.83539332313 }, { "content": "#[cfg(target_os = \"windows\")]\n\n#[cfg(debug_assertions)]\n\nfn open_url(url: &str) -> bool {\n\n if let Ok(mut child) = std::process::Command::new(\"cmd.exe\").arg(\"/C\").arg(\"code\").arg(\"\").arg(&url).spawn() {\n\n std::thread::sleep(std::time::Duration::new(1, 0));\n\n if let Ok(status) = child.wait() {\n\n return status.success();\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": 
"src/atlas/crash.rs", "rank": 30, "score": 153231.75813422148 }, { "content": "pub fn main() -> BoxResult<()> {\n\n #[cfg(feature = \"profile_self_play\")]\n\n {\n\n crate::arena::self_play::tests::self_play_10000_games();\n\n return Ok(());\n\n }\n\n\n\n std::env::set_var(\"RUST_BACKTRACE\", \"1\");\n\n\n\n #[cfg(debug_assertions)]\n\n {\n\n let default_hook = std::panic::take_hook();\n\n panic::set_hook(Box::new(move |panic_info| {\n\n on_crash(&panic_info);\n\n default_hook(&panic_info);\n\n }));\n\n }\n\n\n\n #[cfg(feature = \"crash_reporting\")]\n\n let _guard = sentry::init(include_str!(\"../lib/sentry.key\"));\n\n\n\n let render_context = Rc::new(RefCell::new(RenderContext::initialize()?));\n\n let mut director = Director::init(get_storyteller(&render_context)?);\n\n director.run(render_context)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 31, "score": 148746.61293301842 }, { "content": "pub fn create_world() -> World {\n\n let mut ecs = World::new();\n\n ecs.register::<PositionComponent>();\n\n ecs.register::<FieldComponent>();\n\n ecs.register::<PlayerComponent>();\n\n ecs.register::<IsCharacterComponent>();\n\n ecs.register::<TemperatureComponent>();\n\n ecs.register::<SkillPowerComponent>();\n\n ecs.register::<DefenseComponent>();\n\n ecs.register::<NamedComponent>();\n\n ecs.register::<MapComponent>();\n\n ecs.register::<FrameComponent>();\n\n ecs.register::<TimeComponent>();\n\n ecs.register::<LogComponent>();\n\n ecs.register::<SkillsComponent>();\n\n ecs.register::<AttackComponent>();\n\n ecs.register::<MovementComponent>();\n\n ecs.register::<SkillResourceComponent>();\n\n ecs.register::<BehaviorComponent>();\n\n ecs.register::<RandomComponent>();\n", "file_path": "src/clash/components.rs", "rank": 32, "score": 147091.83496882848 }, { "content": "fn select_skill(ecs: &mut World, name: &str) {\n\n let skill = ecs.get_skill(name);\n\n\n\n match skill.is_usable(ecs, find_player(&ecs)) {\n\n UsableResults::Usable => {}\n\n _ => return,\n\n }\n\n\n\n let target_required = skill.target;\n\n if target_required.is_none() {\n\n player_use_skill(ecs, name, None);\n\n } else {\n\n match target_required {\n\n TargetType::AnyoneButSelf | TargetType::Any | TargetType::Enemy | TargetType::Tile => {\n\n set_action_state(ecs, BattleSceneState::Targeting(BattleTargetSource::Skill(name.to_string())))\n\n }\n\n TargetType::Player => panic!(\"TargetType::Player should not have reached here in select_skill\"),\n\n TargetType::None => panic!(\"TargetType::None should not have reached here in select_skill\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/arena/battle_actions.rs", "rank": 33, "score": 145930.81184293795 }, { "content": "#[allow(dead_code)]\n\npub fn get_exe_folder() -> String {\n\n let exe = std::env::current_exe().unwrap();\n\n let exe_path = exe.to_str().unwrap();\n\n let mut bits: Vec<&str> = exe_path.split(MAIN_SEPARATOR).collect();\n\n bits.pop();\n\n\n\n #[cfg(test)]\n\n bits.pop();\n\n\n\n bits.join(&MAIN_SEPARATOR.to_string())\n\n}\n\n\n", "file_path": "src/atlas/paths.rs", "rank": 34, "score": 145497.93569905066 }, { "content": "pub fn create_sustained_damage_field(\n\n ecs: &mut World,\n\n name: &str,\n\n position: SizedPoint,\n\n attack: AttackComponent,\n\n fields: FieldComponent,\n\n duration: u32,\n\n) -> Entity {\n\n ecs.create_entity()\n\n .with(PositionComponent::init(position))\n\n .with(NamedComponent::init(name))\n\n .with(attack)\n\n .with(BehaviorComponent::init(BehaviorKind::TickDamage))\n\n .with(fields)\n\n 
.with(DurationComponent::init(duration))\n\n .with(TimeComponent::init(-BASE_ACTION_COST))\n\n .marked::<SimpleMarker<ToSerialize>>()\n\n .build()\n\n}\n", "file_path": "src/clash/content/spawner.rs", "rank": 35, "score": 145333.82420913118 }, { "content": "pub fn has_test_font() -> bool {\n\n let font_path = Path::new(&get_exe_folder()).join(\"fonts\").join(\"LibreFranklin-Regular.ttf\");\n\n font_path.exists()\n\n}\n\n\n", "file_path": "src/after_image/font_test_helpers.rs", "rank": 36, "score": 143961.6162475623 }, { "content": "pub fn render_text_layout<'a>(\n\n layout: &'a LayoutResult,\n\n canvas: &mut RenderCanvas,\n\n text: &TextRenderer,\n\n options: RenderTextOptions,\n\n mut on_hittest_text: impl FnMut(SDLRect, HitTestResult) + 'a,\n\n) -> BoxResult<()> {\n\n for chunk in &layout.chunks {\n\n match &chunk.value {\n\n LayoutChunkValue::String(s) => {\n\n let (size, y_font_offset) = if chunk.attributes.contains(LayoutChunkAttributes::SMALLER_TEXT) {\n\n (options.font_size.smaller(), 2)\n\n } else {\n\n (options.font_size, 0)\n\n };\n\n\n\n if let Some(width) = options.centered {\n\n text.render_text_centered(\n\n &s,\n\n chunk.position.x as i32,\n", "file_path": "src/props/text_render_helper.rs", "rank": 37, "score": 143961.6162475623 }, { "content": "pub fn load_from_disk() -> BoxResult<World> {\n\n let data = read_to_string(\"./savegame.sav\")?;\n\n load(data)\n\n}\n\n\n", "file_path": "src/arena/saveload.rs", "rank": 38, "score": 142649.0078044897 }, { "content": "#[allow(dead_code)]\n\npub fn dump_all_position(ecs: &World) {\n\n let positions = ecs.read_storage::<PositionComponent>();\n\n let is_characters = ecs.read_storage::<IsCharacterComponent>();\n\n let orb_components = ecs.read_storage::<OrbComponent>();\n\n let attack_components = ecs.read_storage::<AttackComponent>();\n\n let fields = ecs.read_storage::<FieldComponent>();\n\n let times = ecs.read_storage::<TimeComponent>();\n\n for (position, is_character, orb, attack, field, time) in (\n\n &positions,\n\n (&is_characters).maybe(),\n\n (&orb_components).maybe(),\n\n (&attack_components).maybe(),\n\n (&fields).maybe(),\n\n (&times).maybe(),\n\n )\n\n .join()\n\n {\n\n let mut description = format!(\"{}\", position.position);\n\n if is_character.is_some() {\n\n description.push_str(\" (Char)\");\n", "file_path": "src/clash/test_helpers.rs", "rank": 39, "score": 142649.0078044897 }, { "content": "pub fn new_game_intermission_state() -> World {\n\n let mut base_state = World::new();\n\n base_state.insert(ProgressionComponent::init(ProgressionState::init_gunslinger()));\n\n\n\n create_intermission_state(&base_state, None)\n\n}\n\n\n", "file_path": "src/clash/new_game.rs", "rank": 40, "score": 142479.81185416417 }, { "content": "pub fn get_test_skills() -> SkillsResource {\n\n let mut m = SkillsResource::init();\n\n\n\n m.add(SkillInfo::init(\"TestNone\", None, TargetType::None, SkillEffect::None));\n\n m.add(SkillInfo::init(\"TestTile\", None, TargetType::Tile, SkillEffect::None));\n\n m.add(SkillInfo::init(\"TestEnemy\", None, TargetType::Enemy, SkillEffect::None));\n\n m.add(SkillInfo::init_with_distance(\n\n \"TestWithRange\",\n\n None,\n\n TargetType::Tile,\n\n SkillEffect::None,\n\n Some(2),\n\n false,\n\n ));\n\n m.add(SkillInfo::init_with_distance(\n\n \"TestMove\",\n\n None,\n\n TargetType::Tile,\n\n SkillEffect::Move,\n\n Some(2),\n", "file_path": "src/clash/content/test.rs", "rank": 41, "score": 142479.81185416417 }, { "content": "pub fn create_test_state() -> StateBuilder {\n\n let mut ecs = create_world();\n\n 
ecs.insert(super::content::test::get_test_skills());\n\n\n\n StateBuilder { ecs }\n\n}\n\n\n", "file_path": "src/clash/test_helpers.rs", "rank": 42, "score": 142479.81185416417 }, { "content": "pub fn get_test_font() -> Font {\n\n let font_path = Path::new(&get_exe_folder()).join(\"fonts\").join(\"LibreFranklin-Regular.ttf\");\n\n let mut font = TTF_CONTEXT.load_font(font_path, 14).unwrap();\n\n font.set_style(sdl2::ttf::FontStyle::NORMAL);\n\n font\n\n}\n\n\n", "file_path": "src/after_image/font_test_helpers.rs", "rank": 43, "score": 142479.81185416417 }, { "content": "pub fn accelerate_animations(ecs: &mut World) {\n\n let mut to_speedup = vec![];\n\n {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n let animations = ecs.read_storage::<AnimationComponent>();\n\n for (entity, _) in (&entities, &animations).join() {\n\n to_speedup.push(entity);\n\n }\n\n }\n\n\n\n for a in to_speedup {\n\n ecs.write_storage::<AnimationComponent>().grab_mut(a).animation.duration /= 4;\n\n }\n\n ecs.write_resource::<AccelerateAnimationsComponent>().state = true;\n\n}\n\n\n", "file_path": "src/arena/animations.rs", "rank": 44, "score": 141392.19850575825 }, { "content": "pub fn find_player(ecs: &World) -> Entity {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n let players = ecs.read_storage::<PlayerComponent>();\n\n let (entity, _) = (&entities, &players).join().next().expect(\"No player in world?\");\n\n entity\n\n}\n\n\n", "file_path": "src/clash/actions.rs", "rank": 45, "score": 141392.19850575825 }, { "content": "pub fn save_to_disk(ecs: &mut World) {\n\n let mut writer = File::create(\"./savegame.sav\").unwrap();\n\n save(ecs, &mut writer);\n\n}\n\n\n", "file_path": "src/arena/saveload.rs", "rank": 46, "score": 141392.19850575825 }, { "content": "#[cfg(test)]\n\npub fn wait_for_animations(ecs: &mut World) {\n\n crate::arena::force_complete_animations(ecs);\n\n}\n\n\n", "file_path": "src/clash/physics.rs", "rank": 47, "score": 141392.19850575825 }, { "content": "pub fn can_act(ecs: &World) -> bool {\n\n let player = find_player(ecs);\n\n let is_player = if let Some(actor) = get_next_actor(ecs) { actor == player } else { false };\n\n let is_ready = get_ticks(ecs, player) == BASE_ACTION_COST;\n\n is_player && is_ready\n\n}\n\n\n", "file_path": "src/clash/actions.rs", "rank": 48, "score": 141392.19850575825 }, { "content": "pub fn reap_killed(ecs: &mut World) {\n\n let mut dead = vec![];\n\n let mut player_dead = false;\n\n {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n let defenses = ecs.read_storage::<DefenseComponent>();\n\n let players = ecs.read_storage::<PlayerComponent>();\n\n\n\n for (entity, defense, player) in (&entities, &defenses, (&players).maybe()).join() {\n\n if defense.defenses.health == 0 {\n\n // We do not remove the player on death, as the UI assumes existence (and may paint before tick)\n\n if player.is_some() {\n\n player_dead = true;\n\n } else {\n\n dead.push(entity);\n\n }\n\n }\n\n }\n\n }\n\n\n\n if player_dead {\n\n ecs.insert(PlayerDeadComponent::init());\n\n }\n\n for d in dead {\n\n ecs.delete_entity(d).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 49, "score": 141392.19850575825 }, { "content": "pub fn get_equipment() -> Vec<EquipmentItem> {\n\n let mut equipment = get_armor();\n\n equipment.append(&mut get_accessories());\n\n equipment.append(&mut get_masteries());\n\n equipment\n\n}\n\n\n", "file_path": "src/clash/content/items.rs", "rank": 50, "score": 
141167.20341109158 }, { "content": "pub fn get_tiny_test_font() -> Font {\n\n let font_path = Path::new(&get_exe_folder()).join(\"fonts\").join(\"LibreFranklin-Regular.ttf\");\n\n let mut font = TTF_CONTEXT.load_font(font_path, 9).unwrap();\n\n font.set_style(sdl2::ttf::FontStyle::NORMAL);\n\n font\n\n}\n", "file_path": "src/after_image/font_test_helpers.rs", "rank": 51, "score": 141049.67146809676 }, { "content": "#[cfg(test)]\n\npub fn assert_points_not_equal(a: Point, b: Point) {\n\n assert!(a.x != b.x || a.y != b.y);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::assert_points_equal;\n\n use super::*;\n\n\n\n #[test]\n\n fn all_positions() {\n\n // (2,0) (3,0)\n\n // (2,1) (3,1)\n\n // (2,2) (3,2)\n\n let point = SizedPoint::init_multi(2, 2, 2, 3);\n\n let all = point.all_positions();\n\n assert_eq!(6, all.len());\n\n assert_eq!(all[0], Point::init(2, 2));\n\n assert_eq!(all[1], Point::init(3, 2));\n\n assert_eq!(all[2], Point::init(2, 1));\n", "file_path": "src/atlas/point.rs", "rank": 52, "score": 140187.70436118607 }, { "content": "#[cfg(test)]\n\npub fn assert_points_equal(a: Point, b: Point) {\n\n assert_eq!(a.x, b.x);\n\n assert_eq!(a.y, b.y);\n\n}\n\n\n", "file_path": "src/atlas/point.rs", "rank": 53, "score": 140187.70436118607 }, { "content": "pub fn golem_skills(m: &mut SkillsResource) {\n\n m.add(SkillInfo::init_with_distance(\n\n \"Golem Punch\",\n\n None,\n\n TargetType::Player,\n\n SkillEffect::MeleeAttack(Damage::init(3, DamageElement::PHYSICAL), WeaponKind::Sword),\n\n Some(1),\n\n false,\n\n ));\n\n\n\n m.add(\n\n SkillInfo::init_with_distance(\n\n \"Ground Slam\",\n\n None,\n\n TargetType::Player,\n\n SkillEffect::Field(FieldEffect::Damage(Damage::init(4, DamageElement::PHYSICAL), 1), FieldKind::Earth),\n\n Some(5),\n\n false,\n\n )\n\n .with_cooldown(300),\n\n );\n\n}\n\n\n", "file_path": "src/clash/content/tutorial.rs", "rank": 54, "score": 139910.3941123601 }, { "content": "pub fn force_complete_animations(ecs: &mut World) {\n\n loop {\n\n let current_frame = {\n\n ecs.write_resource::<crate::clash::FrameComponent>().current_frame += 1;\n\n ecs.read_resource::<crate::clash::FrameComponent>().current_frame\n\n };\n\n\n\n super::battle_scene::process_tick_events(ecs, current_frame);\n\n\n\n let animations = ecs.read_storage::<AnimationComponent>();\n\n if (animations).join().count() == 0 {\n\n break;\n\n }\n\n }\n\n}\n", "file_path": "src/arena/animations.rs", "rank": 55, "score": 139910.3941123601 }, { "content": "pub fn elementalist_skills(m: &mut SkillsResource) {\n\n m.add(\n\n SkillInfo::init_with_distance(\n\n \"Tidal Surge\",\n\n None,\n\n TargetType::AnyoneButSelf,\n\n SkillEffect::ConeAttack(\n\n Damage::init(3, DamageElement::PHYSICAL).with_option(DamageOptions::KNOCKBACK),\n\n ConeKind::Water,\n\n TIDAL_SURGE_SIZE,\n\n ),\n\n Some(1),\n\n false,\n\n )\n\n .with_cooldown(200),\n\n );\n\n\n\n m.add(SkillInfo::init_with_distance(\n\n \"Ice Shard\",\n\n None,\n", "file_path": "src/clash/content/elementalist.rs", "rank": 56, "score": 139910.3941123601 }, { "content": "pub fn add_ui_extension(ecs: &mut World) {\n\n ecs.register::<RenderComponent>();\n\n ecs.register::<BattleSceneStateComponent>();\n\n ecs.register::<MousePositionComponent>();\n\n ecs.register::<AnimationComponent>();\n\n ecs.register::<super::saveload::SerializationHelper>();\n\n // If you add additional components remember to update saveload.rs\n\n\n\n ecs.subscribe(super::battle_scene::create_view_event);\n\n ecs.subscribe(super::battle_animations::move_event);\n\n 
ecs.subscribe(super::battle_animations::battle_animation_event);\n\n ecs.subscribe(super::battle_animations::melee_cone_event);\n\n ecs.subscribe(super::battle_animations::field_event);\n\n ecs.subscribe(super::battle_animations::explode_event);\n\n ecs.subscribe(super::views::log_event);\n\n\n\n ecs.insert(BattleSceneStateComponent::init());\n\n ecs.insert(MousePositionComponent::init());\n\n ecs.insert(BufferedInputComponent::init());\n\n ecs.insert(AccelerateAnimationsComponent::init());\n\n ecs.insert(LogIndexPosition::init());\n\n}\n\n\n", "file_path": "src/arena/components.rs", "rank": 57, "score": 139910.3941123601 }, { "content": "pub fn bird_skills(m: &mut SkillsResource) {\n\n // All skills will be boosted by default +1 skill_power on main bird\n\n m.add(SkillInfo::init_with_distance(\n\n \"Wing Blast\",\n\n None,\n\n TargetType::Player,\n\n SkillEffect::RangedAttack(Damage::init(1, DamageElement::PHYSICAL), BoltKind::AirBullet),\n\n Some(2),\n\n true,\n\n ));\n\n m.add(\n\n SkillInfo::init_with_distance(\n\n \"Feather Orb\",\n\n None,\n\n TargetType::Player,\n\n SkillEffect::Orb(Damage::init(3, DamageElement::PHYSICAL), OrbKind::Feather, 2, 12),\n\n Some(12),\n\n true,\n\n )\n\n .with_ammo(AmmoKind::Feathers, 1),\n", "file_path": "src/clash/content/bird.rs", "rank": 58, "score": 139910.3941123601 }, { "content": "#[cfg(debug_assertions)]\n\npub fn on_crash(panic_info: &panic::PanicInfo) {\n\n let mut debug_spew = String::new();\n\n if let Some(location) = panic_info.location() {\n\n debug_spew.push_str(&format!(\"{} Line: {}\\n\", location.file(), location.line())[..]);\n\n }\n\n let payload = panic_info.payload();\n\n if let Some(s) = payload.downcast_ref::<&str>() {\n\n debug_spew.push_str(s);\n\n } else if let Some(s) = payload.downcast_ref::<String>() {\n\n debug_spew.push_str(s);\n\n }\n\n\n\n let _ = fs::write(\"debug.txt\", debug_spew);\n\n\n\n #[cfg(target_os = \"windows\")]\n\n open_url(\"debug.txt\");\n\n}\n", "file_path": "src/atlas/crash.rs", "rank": 59, "score": 139910.3941123601 }, { "content": "// Prevents actions when animations in progress. 
actions::can_act handles world state\n\npub fn has_animations_blocking(ecs: &World) -> bool {\n\n let animations = ecs.read_storage::<AnimationComponent>();\n\n (&animations).join().count() > 0\n\n}\n\n\n", "file_path": "src/arena/battle_actions.rs", "rank": 60, "score": 139910.3941123601 }, { "content": "pub fn hotkey_to_skill_index(hotkey: usize) -> usize {\n\n assert!(hotkey <= 10);\n\n if hotkey == 0 {\n\n 9\n\n } else {\n\n hotkey - 1\n\n }\n\n}\n\n\n", "file_path": "src/props/skillbar.rs", "rank": 61, "score": 139910.3941123601 }, { "content": "fn select_skill_with_target(ecs: &mut World, name: &str, position: &Point) {\n\n // Selection has been made, drop out of targeting state\n\n reset_action_state(ecs);\n\n\n\n let skill = ecs.get_skill(name);\n\n\n\n match skill.target {\n\n TargetType::AnyoneButSelf | TargetType::Enemy | TargetType::Tile | TargetType::Any => {\n\n let player = find_player(&ecs);\n\n if can_invoke_skill(ecs, player, name, Some(*position)) {\n\n player_use_skill(ecs, name, Some(*position));\n\n }\n\n }\n\n TargetType::Player => panic!(\"TargetType::Player should not have reached select_skill_with_target\"),\n\n TargetType::None => panic!(\"TargetType::None should not have reached select_skill_with_target\"),\n\n }\n\n}\n\n\n", "file_path": "src/arena/battle_actions.rs", "rank": 62, "score": 139301.42747066257 }, { "content": "fn find_filenames(filenames: &mut Vec<String>, location: &str) -> BoxResult<()> {\n\n let entries = fs::read_dir(location)?;\n\n for entry in entries {\n\n let path = entry?.path();\n\n if path.is_dir() {\n\n find_filenames(filenames, &Path::new(location).join(path).stringify())?;\n\n } else {\n\n filenames.push(path.file_name().unwrap().stringify().to_ascii_lowercase().to_string());\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/after_image/icon_loader.rs", "rank": 63, "score": 139301.42747066257 }, { "content": "pub fn coin_flip(ecs: &mut World) -> bool {\n\n let random = &mut ecs.fetch_mut::<RandomComponent>().rand;\n\n random.gen_bool(0.5)\n\n}\n\n\n", "file_path": "src/clash/ai.rs", "rank": 64, "score": 138705.89996778793 }, { "content": "pub fn find_enemies(ecs: &World) -> Vec<Entity> {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n let is_characters = ecs.read_storage::<IsCharacterComponent>();\n\n let players = ecs.read_storage::<PlayerComponent>();\n\n let player_ally = ecs.read_storage::<PlayerAlly>();\n\n\n\n let mut enemies = vec![];\n\n for (entity, _, player, ally) in (&entities, &is_characters, (&players).maybe(), (&player_ally).maybe()).join() {\n\n if player.is_none() && ally.is_none() {\n\n enemies.push(entity);\n\n }\n\n }\n\n enemies\n\n}\n\n\n", "file_path": "src/clash/actions.rs", "rank": 65, "score": 138705.89996778793 }, { "content": "#[cfg(test)]\n\npub fn save_to_string(ecs: &mut World) -> String {\n\n let mut writer = vec![];\n\n save(ecs, &mut writer);\n\n\n\n let mut out = Vec::new();\n\n let mut c = writer.as_slice();\n\n c.read_to_end(&mut out).unwrap();\n\n String::from_utf8(out).unwrap()\n\n}\n\n\n", "file_path": "src/arena/saveload.rs", "rank": 66, "score": 138705.89996778793 }, { "content": "pub fn find_all_characters(ecs: &World) -> Vec<Entity> {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n let is_characters = ecs.read_storage::<IsCharacterComponent>();\n\n\n\n let mut all = vec![];\n\n for (entity, _) in (&entities, &is_characters).join() {\n\n all.push(entity);\n\n }\n\n all\n\n}\n\n\n", "file_path": "src/clash/physics.rs", "rank": 67, "score": 
138705.89996778793 }, { "content": "pub fn process_any_queued_action(ecs: &mut World) {\n\n assert!(!has_animations_blocking(ecs));\n\n let buffered_input = ecs.read_resource::<BufferedInputComponent>().input.clone();\n\n if let Some(buffered_input) = buffered_input {\n\n process_action(ecs, buffered_input);\n\n ecs.write_resource::<BufferedInputComponent>().input = None;\n\n }\n\n}\n\n\n", "file_path": "src/arena/battle_actions.rs", "rank": 68, "score": 138480.2537262927 }, { "content": "pub fn get_reward(ecs: &World) -> RewardsComponent {\n\n let rewards = ecs.read_storage::<RewardsComponent>();\n\n (&rewards).join().next().unwrap().clone()\n\n}\n\n\n", "file_path": "src/intermission/reward_scene.rs", "rank": 69, "score": 138480.2537262927 }, { "content": "pub fn reset_action_state(ecs: &mut World) {\n\n let mut state = ecs.write_resource::<BattleSceneStateComponent>();\n\n state.state = BattleSceneState::Default();\n\n}\n\n\n", "file_path": "src/arena/battle_actions.rs", "rank": 70, "score": 138480.2537262927 }, { "content": "pub fn find_first_entity(ecs: &World) -> Entity {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n (&entities).join().next().unwrap()\n\n}\n\n\n", "file_path": "src/clash/test_helpers.rs", "rank": 71, "score": 138480.2537262927 }, { "content": "pub fn get_item_skills(m: &mut SkillsResource) {\n\n m.add(SkillInfo::init_with_distance(\n\n \"Shadow Shot\",\n\n None,\n\n TargetType::Enemy,\n\n SkillEffect::RangedAttack(Damage::init(4, DamageElement::DARKNESS), BoltKind::Bullet),\n\n Some(5),\n\n true,\n\n ));\n\n\n\n m.add(\n\n SkillInfo::init_with_distance(\n\n \"Summon Shadow\",\n\n Some(\"SpellBook03_76.png\"),\n\n TargetType::Tile,\n\n SkillEffect::Spawn(SpawnKind::ShadowGunSlinger, Some(5)),\n\n Some(5),\n\n true,\n\n )\n\n .with_focus_use(0.5)\n\n .with_cooldown(1500),\n\n );\n\n}\n\n\n\nuse crate::try_behavior;\n\nuse specs::prelude::*;\n\n\n", "file_path": "src/clash/content/items.rs", "rank": 72, "score": 138480.2537262927 }, { "content": "pub fn set_map_background(ecs: &mut World) {\n\n ecs.create_entity()\n\n .with(RenderComponent::init(RenderInfo::init_with_order(\n\n SpriteKinds::BeachBackground,\n\n RenderOrder::Background,\n\n )))\n\n .marked::<SimpleMarker<ToSerialize>>()\n\n .build();\n\n}\n\n\n", "file_path": "src/arena/battle_scene.rs", "rank": 73, "score": 138480.2537262927 }, { "content": "pub fn selection_cost(equip: &EquipmentItem) -> u32 {\n\n match equip.rarity {\n\n EquipmentRarity::Standard => panic!(\"Standard should never be found in merchant\"),\n\n EquipmentRarity::Common => 20,\n\n EquipmentRarity::Uncommon => 50,\n\n EquipmentRarity::Rare => 100,\n\n }\n\n}\n\n\n", "file_path": "src/clash/progression/sales.rs", "rank": 74, "score": 138480.2537262927 }, { "content": "#[allow(dead_code)]\n\npub fn is_enemy(ecs: &World, entity: Entity) -> bool {\n\n ecs.read_storage::<PlayerComponent>().get(entity).is_none() && ecs.read_storage::<PlayerAlly>().get(entity).is_none()\n\n}\n\n\n", "file_path": "src/clash/actions.rs", "rank": 75, "score": 137550.52111843132 }, { "content": "pub fn wait(ecs: &mut World, entity: Entity) {\n\n spend_time(ecs, entity, BASE_ACTION_COST);\n\n}\n\n\n\npub const MAX_EXHAUSTION: f64 = 100.0;\n", "file_path": "src/clash/physics.rs", "rank": 76, "score": 137550.52111843132 }, { "content": "#[cfg(test)]\n\npub fn load_from_string(data: String) -> BoxResult<World> {\n\n load(data)\n\n}\n\n\n", "file_path": "src/arena/saveload.rs", "rank": 77, "score": 137275.75958172051 }, { "content": "pub fn 
find_player_and_allies(ecs: &World) -> Vec<Entity> {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n let is_characters = ecs.read_storage::<IsCharacterComponent>();\n\n let players = ecs.read_storage::<PlayerComponent>();\n\n let player_ally = ecs.read_storage::<PlayerAlly>();\n\n\n\n let mut enemies = vec![];\n\n for (entity, _, player, ally) in (&entities, &is_characters, (&players).maybe(), (&player_ally).maybe()).join() {\n\n if player.is_some() || ally.is_some() {\n\n enemies.push(entity);\n\n }\n\n }\n\n enemies\n\n}\n\n\n\npub enum Allegiance {\n\n Player,\n\n Opposition,\n\n}\n\n\n", "file_path": "src/clash/actions.rs", "rank": 78, "score": 137275.75958172051 }, { "content": "pub fn find_all_entities(ecs: &World) -> Vec<Entity> {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n\n\n let mut all = vec![];\n\n for entity in (&entities).join() {\n\n all.push(entity);\n\n }\n\n all\n\n}\n\n\n", "file_path": "src/clash/test_helpers.rs", "rank": 79, "score": 137275.75958172051 }, { "content": "pub fn maybe_find_player(ecs: &World) -> Option<Entity> {\n\n let players = ecs.read_storage::<PlayerComponent>();\n\n if (&players).join().count() > 0 {\n\n Some(find_player(ecs))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/clash/actions.rs", "rank": 80, "score": 137275.75958172051 }, { "content": "pub fn get_next_actor(ecs: &World) -> Option<Entity> {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n let times = ecs.read_storage::<TimeComponent>();\n\n\n\n let mut time_map = BTreeMap::new();\n\n for (entity, time) in (&entities, &times).join() {\n\n time_map.insert(time.ticks, entity);\n\n }\n\n\n\n if let Some((_, entity)) = time_map.iter().last() {\n\n Some(*entity)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\npub const EXHAUSTION_PER_100_TICKS: f64 = 10.0;\n\npub const EXHAUSTION_COST_PER_MOVE: f64 = 5.0;\n\npub const FOCUS_PER_100_TICKS: f64 = 0.1;\n\n\n", "file_path": "src/clash/time.rs", "rank": 81, "score": 137275.75958172051 }, { "content": "// Returns if player can act\n\npub fn tick_next_action(ecs: &mut World) -> bool {\n\n if let Some(next) = wait_for_next(ecs) {\n\n if find_player(ecs) != next {\n\n take_enemy_action(ecs, next);\n\n false\n\n } else {\n\n true\n\n }\n\n } else {\n\n false\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn move_not_current_actor() {\n\n let mut ecs = create_test_state().with_player(2, 2, 0).with_timed(10).with_map().build();\n", "file_path": "src/clash/actions.rs", "rank": 82, "score": 137275.75958172051 }, { "content": "pub fn battle_stage_direction(ecs: &World) -> StageDirection {\n\n if ecs.try_fetch::<PlayerDeadComponent>().is_some() {\n\n return StageDirection::BattlePlayerDeath(\"This is where detailed death info goes\".to_string());\n\n }\n\n\n\n let non_player_character_count = find_enemies(ecs).len();\n\n if non_player_character_count == 0 {\n\n ecs.write_resource::<ProgressionComponent>().state.phase += 1;\n\n return StageDirection::BattleEnemyDefeated(new_game::create_intermission_state(&ecs, Some(generate_rewards(ecs))));\n\n }\n\n StageDirection::Continue\n\n}\n\n\n", "file_path": "src/arena/battle_scene.rs", "rank": 83, "score": 137099.121697634 }, { "content": "pub fn can_purchase_expansion(progression: &ProgressionComponent) -> bool {\n\n let influence = progression.state.influence;\n\n influence >= 100\n\n}\n\n\n", "file_path": "src/clash/progression/sales.rs", "rank": 84, "score": 137099.121697634 }, { "content": "pub 
fn new_turn_wait_characters(ecs: &mut World) {\n\n add_ticks(ecs, 100);\n\n for c in find_all_characters(ecs) {\n\n wait(ecs, c);\n\n }\n\n\n\n tick_next_action(ecs);\n\n wait_for_animations(ecs);\n\n}\n\n\n", "file_path": "src/clash/test_helpers.rs", "rank": 85, "score": 137099.121697634 }, { "content": "pub fn apply_melee(ecs: &mut World, character: Entity) {\n\n let attack = ecs.read_storage::<AttackComponent>().grab(character).clone();\n\n apply_damage_to_location(ecs, attack.target, attack.source, attack.damage);\n\n\n\n ecs.write_storage::<AttackComponent>().remove(character);\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 86, "score": 136120.3807323639 }, { "content": "pub fn start_field(ecs: &mut World, source: Entity) {\n\n let cast = ecs.read_storage::<FieldCastComponent>().grab(source).clone();\n\n ecs.write_storage::<FieldCastComponent>().remove(source);\n\n\n\n // Fields can be fired by flying entities, skip animation if there is no Position\n\n // NotConvertSaveload - These entities only last the duration of the animation\n\n if ecs.read_storage::<PositionComponent>().get(source).is_none() {\n\n let field_projectile = ecs.create_entity().with(cast).build();\n\n apply_field(ecs, field_projectile);\n\n } else {\n\n let source_position = ecs.get_position(source);\n\n let field_projectile = ecs.create_entity().with(PositionComponent::init(source_position)).with(cast).build();\n\n ecs.raise_event(EventKind::Field(FieldState::BeginFlyingAnimation), Some(field_projectile));\n\n }\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 87, "score": 136120.3807323639 }, { "content": "pub fn start_bolt(ecs: &mut World, source: Entity) {\n\n // We must re-create a position using the origin so multi-sized\n\n // monsters don't create bolts with large widths\n\n let caster_origin = ecs.get_position(source).origin;\n\n let source_position = SizedPoint::from(caster_origin);\n\n\n\n let attack = ecs.read_storage::<AttackComponent>().grab(source).clone();\n\n\n\n // NotConvertSaveload - Bolts only last during an animation\n\n let bolt = ecs.create_entity().with(PositionComponent::init(source_position)).with(attack).build();\n\n\n\n ecs.write_storage::<AttackComponent>().remove(source);\n\n ecs.raise_event(EventKind::Bolt(BoltState::BeginFlyingAnimation), Some(bolt));\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 88, "score": 136120.3807323639 }, { "content": "pub fn begin_explode(ecs: &mut World, source: Entity) {\n\n ecs.raise_event(EventKind::Explode(ExplodeState::BeginAnimation), Some(source));\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 89, "score": 136120.3807323639 }, { "content": "pub fn cone_hits(ecs: &mut World, entity: Entity) {\n\n let attack = ecs.read_storage::<AttackComponent>().grab(entity).clone();\n\n let position = ecs.get_position(entity);\n\n apply_damage_to_location(ecs, position.single_position(), attack.source, attack.damage);\n\n ecs.delete_entity(entity).unwrap();\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 90, "score": 136120.3807323639 }, { "content": "pub fn apply_bolt(ecs: &mut World, bolt: Entity) {\n\n let attack = ecs.read_storage::<AttackComponent>().grab(bolt).clone();\n\n apply_damage_to_location(ecs, attack.target, attack.source, attack.damage);\n\n ecs.delete_entity(bolt).unwrap();\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 91, "score": 136120.3807323639 }, { "content": "pub fn get_ticks(ecs: &World, entity: Entity) -> i32 {\n\n ecs.read_storage::<TimeComponent>().grab(entity).ticks\n\n}\n\n\n", 
"file_path": "src/clash/time.rs", "rank": 92, "score": 136120.3807323639 }, { "content": "pub fn tick_animations(ecs: &mut World, frame: u64) {\n\n let mut completed = vec![];\n\n let mut needs_events = vec![];\n\n {\n\n let entities = ecs.read_resource::<specs::world::EntitiesRes>();\n\n let animations = ecs.read_storage::<AnimationComponent>();\n\n\n\n for (entity, animation_component) in (&entities, &animations).join() {\n\n let animation = &animation_component.animation;\n\n if animation.is_complete(frame) {\n\n completed.push(entity);\n\n if let Some(post_event) = animation.post_event {\n\n needs_events.push(post_event);\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Remove must occur before notification, in case a notification\n\n // adds a new animation\n\n for entity in &completed {\n\n ecs.write_storage::<AnimationComponent>().remove(*entity);\n\n }\n\n\n\n for need_events in needs_events {\n\n ecs.raise_event(need_events.kind, need_events.target);\n\n }\n\n}\n\n\n", "file_path": "src/arena/animations.rs", "rank": 93, "score": 136120.3807323639 }, { "content": "pub fn tick_damage(ecs: &mut World, entity: Entity) {\n\n let attack = ecs.read_storage::<AttackComponent>().grab(entity).clone();\n\n for p in ecs.get_position(entity).all_positions() {\n\n if let Some(target) = find_character_at_location(ecs, p) {\n\n apply_damage_to_character(ecs, attack.damage, target, Some(p));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 94, "score": 136120.3807323639 }, { "content": "pub fn apply_explode(ecs: &mut World, source: Entity) {\n\n let (damage, range, source_position) = {\n\n let attack_info = ecs.read_storage::<AttackComponent>().grab(source).clone();\n\n match attack_info.kind {\n\n AttackKind::Explode(_, range) => (attack_info.damage, range, attack_info.source),\n\n _ => panic!(\"Explode with wrong AttackKind\"),\n\n }\n\n };\n\n\n\n for in_blast in ecs.get_position(source).origin.get_burst(range) {\n\n if let Some(target) = find_character_at_location(ecs, in_blast) {\n\n if target != source {\n\n apply_damage_to_location(ecs, in_blast, source_position, damage);\n\n }\n\n }\n\n }\n\n\n\n ecs.delete_entity(source).unwrap();\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 95, "score": 136120.3807323639 }, { "content": "pub fn apply_cone(ecs: &mut World, character: Entity) {\n\n let attack = ecs.read_storage::<AttackComponent>().grab(character).clone();\n\n let size = match attack.kind {\n\n AttackKind::Cone(_, size) => size,\n\n _ => panic!(\"Unexpected kind in apply_cone\"),\n\n };\n\n let cone_direction = Direction::from_two_points(&attack.source.unwrap(), &attack.target);\n\n for p in attack.source.unwrap().get_cone(cone_direction, size) {\n\n // NotConvertSaveload - Hits only last during an animation\n\n let hit = ecs\n\n .create_entity()\n\n .with(PositionComponent::init(SizedPoint::from(p)))\n\n .with(attack.clone())\n\n .build();\n\n ecs.raise_event(EventKind::Cone(ConeState::BeginHitAnimation), Some(hit));\n\n }\n\n\n\n ecs.write_storage::<AttackComponent>().remove(character);\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 96, "score": 136120.3807323639 }, { "content": "pub fn wait_for_next(ecs: &mut World) -> Option<Entity> {\n\n if let Some(next) = get_next_actor(ecs) {\n\n let time = get_ticks(ecs, next);\n\n if time < BASE_ACTION_COST {\n\n let missing = BASE_ACTION_COST - time;\n\n add_ticks(ecs, missing);\n\n }\n\n return Some(next);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/clash/time.rs", "rank": 97, "score": 136120.3807323639 }, { 
"content": "pub fn apply_field(ecs: &mut World, projectile: Entity) {\n\n let cast = ecs.read_storage::<FieldCastComponent>().grab(projectile).clone();\n\n ecs.delete_entity(projectile).unwrap();\n\n\n\n match cast.effect {\n\n FieldEffect::Damage(damage, explosion_size) => {\n\n let ((r, g, b), explosion_kind) = match cast.kind {\n\n FieldKind::Fire => ((255, 0, 0), ExplosionKind::Fire),\n\n FieldKind::Hail => ((0, 43, 102), ExplosionKind::Water),\n\n FieldKind::Lightning => ((166, 171, 35), ExplosionKind::Lightning),\n\n FieldKind::Earth => ((122, 72, 60), ExplosionKind::Earth),\n\n };\n\n\n\n let attack = AttackComponent::init(cast.target.origin, damage, AttackKind::Explode(explosion_kind, explosion_size), None);\n\n let fields = cast\n\n .target\n\n .origin\n\n .get_burst(explosion_size)\n\n .iter()\n\n .map(|p| (Some(*p), (r, g, b, 140)))\n", "file_path": "src/clash/combat.rs", "rank": 98, "score": 136120.3807323639 }, { "content": "pub fn start_orb(ecs: &mut World, source: Entity) {\n\n let orb = create_orb(ecs, source);\n\n ecs.write_storage::<AttackComponent>().remove(source);\n\n ecs.write_storage::<OrbComponent>().remove(source);\n\n ecs.raise_event(EventKind::Orb(OrbState::Created), Some(orb));\n\n}\n\n\n", "file_path": "src/clash/combat.rs", "rank": 99, "score": 136120.3807323639 } ]
Rust
tachyon-core/src/routers/api/object.rs
LGU-Web3-0/Project-Tachyon
156b5af1dc6e2c84c0818f38d81c42a4886510e1
use crate::session::UserInfo; use crate::{IntoAnyhow, State, StatusCode}; use actix_multipart::Multipart; use actix_session::Session; use actix_web::error::{ErrorBadRequest, ErrorInternalServerError}; use actix_web::http::header::{ContentDisposition, ContentType, DispositionParam, DispositionType}; use actix_web::web::Bytes; use actix_web::{error, web, HttpResponse, Result}; use entity::sea_orm::DatabaseBackend::Postgres; use entity::sea_orm::QueryFilter; use entity::sea_orm::{ActiveModelTrait, ConnectionTrait, Statement}; use entity::sea_orm::{ActiveValue, ColumnTrait, EntityTrait}; use futures::{StreamExt, TryFutureExt}; use sled::IVec; use std::pin::Pin; use std::task::{Context, Poll}; use uuid::Uuid; #[derive(Debug, serde::Deserialize, serde::Serialize)] pub struct ObjectRequest { uuid: Option<Uuid>, name: Option<String>, } struct ObjectData { inner: Option<IVec>, } const CHUNK_SIZE: usize = 1024 * 1024; impl futures::Stream for ObjectData { type Item = Result<Bytes>; fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.get_mut(); if this.inner.is_none() { return Poll::Ready(None); } unsafe { let length = this.inner.as_ref().unwrap_unchecked().len(); match this .inner .as_ref() .unwrap_unchecked() .chunks(CHUNK_SIZE) .next() { None => Poll::Ready(None), Some(x) => { let result = Poll::Ready(Some(Ok(Bytes::copy_from_slice(x)))); if x.len() < length { let slice = this .inner .as_ref() .unwrap_unchecked() .subslice(x.len(), length - x.len()); this.inner.replace(slice); } else { this.inner = None; } result } } } } } #[derive(serde::Deserialize, serde::Serialize, Debug)] struct ObjectResult { success: bool, message: Option<String>, } pub async fn upload( session: Session, data: web::Data<State>, mut payload: Multipart, ) -> Result<HttpResponse> { async fn parse_data(payload: &mut Multipart) -> Result<(entity::object::ActiveModel, Vec<u8>)> { let mut model = entity::object::ActiveModel { uuid: ActiveValue::NotSet, name: ActiveValue::NotSet, visibility: ActiveValue::Set(false), upload_time: ActiveValue::Set(chrono::Utc::now()), mimetype: ActiveValue::NotSet, }; let mut content = Vec::new(); while let Some(item) = payload.next().await { let mut field = item?; match field.name() { "file" => { while let Some(x) = field.next().await { content.extend(x?); } model.mimetype = ActiveValue::Set(field.content_type().to_string()); } "visibility" => { let mut data = Vec::new(); while let Some(x) = field.next().await { data.extend(x?); } log::error!("{}", String::from_utf8(data.clone()).unwrap()); model.visibility = ActiveValue::Set(data.as_slice() == b"on"); } "filename" => { let mut data = Vec::new(); while let Some(x) = field.next().await { data.extend(x?); } model.name = ActiveValue::Set( String::from_utf8(data) .map_err(ErrorInternalServerError) .and_then(|x| { if x.trim().is_empty() { Err(ErrorBadRequest("filename cannot be empty")) } else { Ok(x.trim().to_string()) } })?, ); } _ => (), } } Ok((model, content)) } async fn insert_kv( mut model: entity::object::ActiveModel, content: Vec<u8>, data: &web::Data<State>, ) -> Result<entity::object::ActiveModel> { let mut uuid; loop { uuid = Uuid::new_v4(); match data .kv_db .compare_and_swap( uuid.as_bytes(), Option::<&[u8]>::None, Some(content.as_slice()), ) .map_err(ErrorInternalServerError) { Ok(Ok(_)) => { model.uuid = ActiveValue::Set(uuid); break; } Ok(Err(_)) => tokio::task::yield_now().await, Err(e) => return Err(e), } } if let Err(e) = data.kv_db.flush_async().await { log::error!("sled insertion 
error: {}", e); return Err(ErrorInternalServerError(e)); }; Ok(model) } match session.get::<UserInfo>("user")? { None => simd_json::to_string(&ObjectResult { success: false, message: Some("unauthorized".to_string()), }) .map_err(ErrorInternalServerError) .map(|x| { HttpResponse::Ok() .content_type("application/json") .status(StatusCode::UNAUTHORIZED) .json(ObjectResult { success: false, message: Some(x), }) }), _ => Ok(parse_data(&mut payload) .and_then(|(model, content)| insert_kv(model, content, &data)) .and_then(|model| model.insert(&data.sql_db).map_err(ErrorInternalServerError)) .await .map(|_| { HttpResponse::Created() .content_type("application/json") .json(ObjectResult { success: true, message: None, }) }) .unwrap_or_else(|e| { HttpResponse::BadRequest() .content_type("application/json") .json(ObjectResult { success: false, message: Some(e.to_string()), }) })), } } #[derive(serde::Deserialize, serde::Serialize)] pub struct VisibilityChange { pub uuid: Uuid, } pub async fn change_visibility( info: web::Json<VisibilityChange>, data: web::Data<State>, session: Session, ) -> Result<HttpResponse> { if session.get::<UserInfo>("user")?.is_none() { return Ok(HttpResponse::Unauthorized().json(ObjectResult { success: false, message: Some("unauthorized".to_string()), })); } Ok( match data .sql_db .execute(Statement::from_string( Postgres, format!( r#"UPDATE object SET visibility = NOT visibility WHERE uuid = '{}'"#, info.uuid ), )) .await { Ok(_) => HttpResponse::Ok().json(ObjectResult { success: true, message: None, }), Err(e) => HttpResponse::BadRequest().json(ObjectResult { success: false, message: Some(e.to_string()), }), }, ) } #[derive(serde::Deserialize, serde::Serialize)] pub struct DeleteRequest { pub uuid: Uuid, } pub async fn delete( info: web::Json<DeleteRequest>, data: web::Data<State>, session: Session, ) -> Result<HttpResponse> { if session.get::<UserInfo>("user")?.is_none() { return Ok(HttpResponse::Unauthorized().json(ObjectResult { success: false, message: Some("unauthorized".to_string()), })); } Ok( match data .sql_db .execute(Statement::from_string( Postgres, format!(r#"DELETE FROM object WHERE uuid = '{}'"#, info.uuid), )) .await .anyhow() .and_then(|x| { if x.rows_affected() != 0 { data.kv_db.remove(info.uuid.as_bytes()).anyhow().and(Ok(())) } else { Ok(()) } }) { Ok(_) => HttpResponse::Ok().json(ObjectResult { success: true, message: None, }), Err(e) => HttpResponse::BadRequest().json(ObjectResult { success: false, message: Some(e.to_string()), }), }, ) } pub async fn get_handler( info: web::Query<ObjectRequest>, data: web::Data<State>, ) -> Result<HttpResponse> { let metadata: entity::object::Model = if let Some(uuid) = info.uuid { entity::object::Entity::find_by_id(uuid) .one(&data.sql_db) .await .map_err(error::ErrorNotFound)? .ok_or_else(|| error::ErrorNotFound("not found"))? } else if let Some(name) = &info.name { entity::object::Entity::find() .filter(entity::object::Column::Name.eq(name.as_str())) .one(&data.sql_db) .await .map_err(error::ErrorNotFound)? .ok_or_else(|| error::ErrorNotFound("not found"))? } else { return Err(error::ErrorBadRequest("invalid request")); }; if !metadata.visibility { return Err(error::ErrorUnauthorized("target not authorized")); } let inner = data .kv_db .get(metadata.uuid.as_bytes()) .map_err(error::ErrorInternalServerError)? 
.ok_or_else(|| error::ErrorNotFound("not found"))?; let stream = ObjectData { inner: Some(inner) }; Ok(HttpResponse::Ok() .insert_header(ContentType( metadata .mimetype .parse() .map_err(error::ErrorInternalServerError)?, )) .insert_header(ContentDisposition { disposition: DispositionType::Attachment, parameters: vec![DispositionParam::Filename( metadata.name.as_str().to_string(), )], }) .streaming(stream)) } #[cfg(test)] mod test { #[cfg(all(not(miri), test))] #[actix_rt::test] async fn it_polls_fully() { use crate::routers::api::object::ObjectData; use futures::StreamExt; use rand::distributions::Alphanumeric; use rand::prelude::*; let uuid = uuid::Uuid::new_v4(); let db = sled::open(format!("/tmp/tachyon-ut-{}", uuid)).unwrap(); for i in [1, 2, 123, 555, 5261, 114514, 1024000] { let rand_string: String = thread_rng() .sample_iter(&Alphanumeric) .take(i) .map(char::from) .collect(); db.insert(format!("test-{}", i), rand_string.as_bytes()) .unwrap(); let data = db.get(format!("test-{}", i)).unwrap().unwrap(); let data = ObjectData { inner: Some(data) }; let data: Vec<u8> = data .collect::<Vec<_>>() .await .into_iter() .filter_map(Result::ok) .map(|x| x.to_vec()) .flatten() .collect(); assert_eq!(data, rand_string.as_bytes()) } std::fs::remove_dir_all(format!("/tmp/tachyon-ut-{}", uuid)).unwrap(); } }
use crate::session::UserInfo; use crate::{IntoAnyhow, State, StatusCode}; use actix_multipart::Multipart; use actix_session::Session; use actix_web::error::{ErrorBadRequest, ErrorInternalServerError}; use actix_web::http::header::{ContentDisposition, ContentType, DispositionParam, DispositionType}; use actix_web::web::Bytes; use actix_web::{error, web, HttpResponse, Result}; use entity::sea_orm::DatabaseBackend::Postgres; use entity::sea_orm::QueryFilter; use entity::sea_orm::{ActiveModelTrait, ConnectionTrait, Statement}; use entity::sea_orm::{ActiveValue, ColumnTrait, EntityTrait}; use futures::{StreamExt, TryFutureExt}; use sled::IVec; use std::pin::Pin; use std::task::{Context, Poll}; use uuid::Uuid; #[derive(Debug, serde::Deserialize, serde::Serialize)] pub struct ObjectRequest { uuid: Option<Uuid>, name: Option<String>, } struct ObjectData { inner: Option<IVec>, } const CHUNK_SIZE: usize = 1024 * 1024; impl futures::Stream for ObjectData { type Item = Result<Bytes>; fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.get_mut(); if this.inner.is_none() { return Poll::Ready(None); } unsafe { let length = this.inner.as_ref().unwrap_unchecked().len(); match this .inner .as_ref() .unwrap_unchecked() .chunks(CHUNK_SIZE) .next() { None => Poll::Ready(None), Some(x) => { let result = Poll::Ready(Some(Ok(Bytes::copy_from_slice(x)))); if x.len() < length { let slice = this .inner .as_ref() .unwrap_unchecked() .subslice(x.len(), length - x.len()); this.inner.replace(slice); } else { this.inner = None; } result } } } } } #[derive(serde::Deserialize, serde::Serialize, Debug)] struct ObjectResult { success: bool, message: Option<String>, } pub async fn upload( session: Session, data: web::Data<State>, mut payload: Multipart, ) -> Result<HttpResponse> { async fn parse_data(payload: &mut Multipart) -> Result<(entity::object::ActiveModel, Vec<u8>)> { let mut model = entity::object::ActiveModel { uuid: ActiveValue::NotSet, name: ActiveValue::NotSet, visibility: ActiveValue::Set(false), upload_time: ActiveValue::Set(chrono::Utc::now()), mimetype: ActiveValue::NotSet, }; let mut content = Vec::new(); while let Some(item) = payload.next().await { let mut field = item?; match field.name() { "file" => { while let Some(x) = field.next().await { content.extend(x?); } model.mimetype = ActiveValue::Set(field.content_type().to_string()); } "visibility" => { let mut data = Vec::new(); while let Some(x) = field.next().await { data.extend(x?); } log::error!("{}", String::from_utf8(data.clone()).unwrap()); model.visibility = ActiveValue::Set(data.as_slice() == b"on"); } "filename" => { let mut data = Vec::new(); while let Some(x) = field.next().await { data.extend(x?); } model.name = ActiveValue::Set( String::from_utf8(data) .map_err(ErrorInternalServerError) .and_then(|x| { if x.trim().is_empty() { Err(ErrorBadRequest("filename cannot be empty")) } else { Ok(x.trim().to_string()) } })?, ); } _ => (), } } Ok((model, content)) } async fn insert_kv( mut model: entity::object::ActiveModel, content: Vec<u8>, data: &web::Data<State>, ) -> Result<entity::object::ActiveModel> { let mut uuid; loop { uuid = Uuid::new_v4(); match data .kv_db .compare_and_swap( uuid.as_bytes(), Option::<&[u8]>::None, Some(content.as_slice()), ) .map_err(ErrorInternalServerError) { Ok(Ok(_)) => { model.uuid = ActiveValue::Set(uuid); break; } Ok(Err(_)) => tokio::task::yield_now().await, Err(e) => return Err(e), } } if let Err(e) = data.kv_db.flush_async().await { log::error!("sled insertion 
error: {}", e); return Err(ErrorInternalServerError(e)); }; Ok(model) } match session.get::<UserInfo>("user")? { None => simd_json::to_string(&ObjectResult { success: false, message: Some("unauthorized".to_string()), }) .map_err(ErrorInternalServerError) .map(|x| { HttpResponse::Ok() .content_type("application/json") .status(StatusCode::UNAUTHORIZED) .json(ObjectResult { success: false, message: Some(x), }) }), _ => Ok(parse_data(&mut payload) .and_then(|(model, content)| insert_kv(model, content, &data)) .and_then(|model| model.insert(&data.sql_db).map_err(ErrorInternalServerError)) .await .map(|_| { HttpResponse::Created() .content_type("application/json") .json(ObjectResult { success: true, message: None, }) }) .unwrap_or_else(|e| { HttpResponse::BadRequest() .content_type("application/json") .json(ObjectResult { success: false, message: Some(e.to_string()), }) })), } } #[derive(serde::Deserialize, serde::Serialize)] pub struct VisibilityChange { pub uuid: Uuid, } pub async fn change_visibility( info: web::Json<VisibilityChange>, data: web::Data<State>, session: Session, ) -> Result<HttpResponse> { if session.get::<UserInfo>("user")?.is_none() { return Ok(HttpResponse::Unauthorized().json(ObjectResult { success: false, message: Some("unauthorized".to_string()), })); } Ok( match data .sql_db .execute(Statement::from_string( Postgres, format!( r#"UPDATE object SET visibility = NOT visibility WHERE uuid = '{}'"#, info.uuid ), )) .await { Ok(_) => HttpResponse::Ok().json(ObjectResult { success: true, message: None, }), Err(e) => HttpResponse::BadRequest().json(ObjectResult { success: false, message: Some(e.to_string()), }), }, ) } #[derive(serde::Deserialize, serde::Serialize)] pub struct DeleteRequest { pub uuid: Uuid, } pub async fn delete( info: web::Json<DeleteRequest>, data: web::Data<State>, session: Session, ) -> Result<HttpResponse> { if session.get::<UserInfo>("user")?.is_none() { return Ok(HttpResponse::Unauthorized().json(ObjectResult { success: false, message: Some("unauthorized".to_string()), })); } Ok( match data .sql_db .execute(Statement::from_string( Postgres, format!(r#"DELETE FROM object WHERE uuid = '{}'"#, info.uuid), )) .await .anyhow() .and_then(|x| { if x.rows_affected() != 0 { data.kv_db.remove(info.uuid.as_bytes()).anyhow().and(Ok(())) } else { Ok(()) } }) { Ok(_) => HttpResponse::Ok().json(ObjectResult { success: true, message: None, }), Err(e) => HttpResponse::BadRequest().json(ObjectResult { success: false, message: Some(e.to_string()), }), }, ) } pub async fn get_handler( info: web::Query<ObjectRequest>, data: web::Data<State>, ) -> Result<HttpResponse> { let metadata: entity::object::Model = if let Some(uuid) = info.uuid { entity::object::Entity::find_by_id(uuid) .one(&data.sql_db) .await .map_err(error::ErrorNotFound)? .ok_or_else(|| error::ErrorNotFound("not found"))? } else if let Some(name) = &info.name { entity::object::Entity::find() .filter(entity::object::Column::Name.eq(name.as_str())) .one(&data.sql_db) .await .map_err(error::ErrorNotFound)? .ok_or_else(|| error::ErrorNotFound("not found"))? } else { return Err(error::ErrorBadRequest("invalid request")); }; if !metadata.visibility { return Err(error::ErrorUnauthorized("target not authorized")); } let inner = data .kv_db .get(metadata.uuid.as_bytes()) .map_err(error::ErrorInternalServerError)? .ok_or_else(|| error::ErrorNotFound("not found"))?; let stream = ObjectData { inner: Some(inner) }; Ok(HttpResponse::Ok() .insert_header(
) .insert_header(ContentDisposition { disposition: DispositionType::Attachment, parameters: vec![DispositionParam::Filename( metadata.name.as_str().to_string(), )], }) .streaming(stream)) } #[cfg(test)] mod test { #[cfg(all(not(miri), test))] #[actix_rt::test] async fn it_polls_fully() { use crate::routers::api::object::ObjectData; use futures::StreamExt; use rand::distributions::Alphanumeric; use rand::prelude::*; let uuid = uuid::Uuid::new_v4(); let db = sled::open(format!("/tmp/tachyon-ut-{}", uuid)).unwrap(); for i in [1, 2, 123, 555, 5261, 114514, 1024000] { let rand_string: String = thread_rng() .sample_iter(&Alphanumeric) .take(i) .map(char::from) .collect(); db.insert(format!("test-{}", i), rand_string.as_bytes()) .unwrap(); let data = db.get(format!("test-{}", i)).unwrap().unwrap(); let data = ObjectData { inner: Some(data) }; let data: Vec<u8> = data .collect::<Vec<_>>() .await .into_iter() .filter_map(Result::ok) .map(|x| x.to_vec()) .flatten() .collect(); assert_eq!(data, rand_string.as_bytes()) } std::fs::remove_dir_all(format!("/tmp/tachyon-ut-{}", uuid)).unwrap(); } }
ContentType(
    metadata
        .mimetype
        .parse()
        .map_err(error::ErrorInternalServerError)?,
)
call_expression
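The call_expression strategy marks the middle span above as the hole cut from the response-builder chain: the prefix ends with .insert_header( and the suffix begins with the matching ), so concatenating prefix + middle + suffix restores the statement seen in file_code. A minimal sketch of that join follows; reconstruct is a hypothetical helper and the string literals are abbreviated excerpts of this sample's own fields.

// Rebuild a fill-in-the-middle sample by joining its three spans.
fn reconstruct(prefix: &str, middle: &str, suffix: &str) -> String {
    format!("{prefix}{middle}{suffix}")
}

fn main() {
    let prefix = "Ok(HttpResponse::Ok().insert_header(";
    let middle = "ContentType(metadata.mimetype.parse().map_err(error::ErrorInternalServerError)?,)";
    let suffix = ").insert_header(/* ContentDisposition ... */).streaming(stream))";
    println!("{}", reconstruct(prefix, middle, suffix));
}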
[ { "content": "pub fn error_handler<B>(res: dev::ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>>\n\nwhere\n\n B: MessageBody + 'static,\n\n{\n\n if res.request().method() != Method::GET\n\n || res\n\n .response()\n\n .headers()\n\n .get(\"content-type\")\n\n .and_then(|x| x.to_str().ok())\n\n .map(|x| x.contains(\"application/json\") || x.contains(\"application/javascript\"))\n\n .unwrap_or(false)\n\n {\n\n return Ok(ErrorHandlerResponse::Response(\n\n res.map_into_boxed_body().map_into_right_body(),\n\n ));\n\n }\n\n let status = res.status();\n\n let (req, res) = res.into_parts();\n\n let mut res = res.set_body(\n", "file_path": "tachyon-core/src/routers/view/error.rs", "rank": 2, "score": 123045.27140303292 }, { "content": "fn forbidden_index(_: &Directory, req: &HttpRequest) -> std::io::Result<ServiceResponse> {\n\n Ok(ServiceResponse::new(\n\n req.clone(),\n\n HttpResponse::Forbidden().body(()),\n\n ))\n\n}\n\n\n\nasync fn frontend(path: web::Path<String>) -> Result<HttpResponse> {\n\n let path = path.into_inner();\n\n match tachyon_frontend::TARGETS.get(path.as_str()) {\n\n None => Err(ErrorNotFound(\"not found\")),\n\n Some(e) => Ok(HttpResponse::Ok().body(*e)),\n\n }\n\n}\n\n\n\nasync fn index(session: Session) -> Result<HttpResponse> {\n\n if let Ok(Some(_)) = session.get::<UserInfo>(\"user\") {\n\n HttpResponse::TemporaryRedirect()\n\n .append_header((\"Location\", \"/view/dashboard\"))\n\n .await\n\n } else {\n\n tachyon_template::IndexTemplate::new(\"Project Tachyon\")\n\n .render_response()\n\n .await\n\n }\n\n}\n\n\n", "file_path": "tachyon-core/src/routers/mod.rs", "rank": 3, "score": 96017.54548526733 }, { "content": "pub fn routers() -> Scope {\n\n web::scope(\"/api\")\n\n .route(\"/hello\", web::get().to(hello_world::handler))\n\n .route(\"/status\", web::get().to(status::handler))\n\n .route(\"/object/get\", web::get().to(object::get_handler))\n\n .route(\"/object/upload\", web::post().to(object::upload))\n\n .route(\n\n \"/object/visibility\",\n\n web::post().to(object::change_visibility),\n\n )\n\n .route(\"/object/delete\", web::post().to(object::delete))\n\n .route(\"/user/add\", web::post().to(user::add))\n\n .route(\"/user/login\", web::post().to(user::login))\n\n .route(\"/user/logout\", web::post().to(user::logout))\n\n .route(\"/user/lock\", web::post().to(user::lock))\n\n .route(\"/user/unlock\", web::post().to(user::unlock))\n\n .route(\"/user/delete\", web::delete().to(user::delete))\n\n .route(\"/user/edit\", web::post().to(user::edit))\n\n .route(\"/task/add\", web::post().to(task::add_task))\n\n .route(\"/task/edit\", web::post().to(task::edit_task))\n\n .route(\"/task/resolve\", web::post().to(task::resolve_task))\n\n .route(\"/task/delete\", web::post().to(task::delete_task))\n\n .route(\"/task/comment/add\", web::post().to(task::add_comment))\n\n .route(\"/task/comment/delete\", web::post().to(task::delete_comment))\n\n .route(\"/task/assign\", web::post().to(task::assign))\n\n}\n", "file_path": "tachyon-core/src/routers/api/mod.rs", "rank": 4, "score": 84774.19424170416 }, { "content": "pub fn routers() -> Scope {\n\n web::scope(\"/view\")\n\n .route(\"/hello\", web::get().to(hello::handler))\n\n .route(\"/dashboard\", web::get().to(dashboard::handler))\n\n .route(\"/user\", web::get().to(user::handler))\n\n .route(\"/object\", web::get().to(object::handler))\n\n .route(\"/task/{id}/detail\", web::get().to(task::detail))\n\n .route(\"/task\", web::get().to(task::handler))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[cfg(all(not(miri), test, 
feature = \"integration-test\"))]\n\n #[actix_rt::test]\n\n #[serial_test::serial]\n\n async fn it_renders_view() {\n\n use crate::StatusCode;\n\n use actix_web::cookie::Cookie;\n\n use actix_web::dev::ServiceResponse;\n\n use actix_web::test;\n", "file_path": "tachyon-core/src/routers/view/mod.rs", "rank": 5, "score": 84774.19424170416 }, { "content": "pub trait IntoAnyhow {\n\n type Output;\n\n fn anyhow(self) -> Result<Self::Output>;\n\n}\n\n\n\nimpl<T> LoggedUnwrap for Result<T> {\n\n type Output = T;\n\n\n\n fn logged_unwrap(self) -> Self::Output {\n\n match self {\n\n Ok(x) => x,\n\n Err(y) => {\n\n log::error!(\"{}\", y);\n\n panic!(\"server panics due to above error: {}\", y);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T, E: Into<anyhow::Error>> IntoAnyhow for core::result::Result<T, E> {\n\n type Output = T;\n\n\n\n fn anyhow(self) -> Result<Self::Output> {\n\n self.map_err(Into::into)\n\n }\n\n}\n", "file_path": "tachyon-core/src/utils.rs", "rank": 6, "score": 78918.80276342429 }, { "content": "fn convert_user_info<I>(user: I) -> Vec<tachyon_template::view::UserItem>\n\nwhere\n\n I: Iterator<Item = entity::user::Model>,\n\n{\n\n user.filter_map(|user| {\n\n user.fingerprint()\n\n .map(|f| {\n\n tachyon_template::view::UserItem::new(\n\n user.id,\n\n user.email,\n\n user.name,\n\n user.wrong_pass_attempt >= WRONG_PASS_ATTEMPT_THRESHOLD,\n\n f,\n\n )\n\n })\n\n .ok()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "tachyon-core/src/routers/view/user.rs", "rank": 7, "score": 77522.93441924348 }, { "content": "#[async_trait]\n\npub trait AsyncRender {\n\n async fn render(&self) -> RenderResult;\n\n async fn render_response(&self) -> Result<HttpResponse> {\n\n let res = self.render().await.map_err(ErrorInternalServerError)?;\n\n Ok(HttpResponse::Ok()\n\n .content_type(\"text/html; charset=utf-8\")\n\n .body(res))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<T> AsyncRender for T\n\nwhere\n\n T: Sync + Send + Template,\n\n{\n\n async fn render(&self) -> RenderResult {\n\n Template::render(self)\n\n }\n\n}\n\n\n", "file_path": "tachyon-template/src/lib.rs", "rank": 8, "score": 76681.37841650892 }, { "content": "fn convert_task_info<I>(task: I, email: &str) -> Vec<tachyon_template::view::TaskItem>\n\nwhere\n\n I: Iterator<Item = entity::task::Model>,\n\n{\n\n task.map(|mut t| {\n\n t.description.truncate(64);\n\n tachyon_template::view::TaskItem::new(t.id, email.to_owned(), t.name, t.description)\n\n })\n\n .collect()\n\n}\n\n\n\n#[derive(serde::Serialize, serde::Deserialize)]\n\npub struct TaskDetailRequest {\n\n id: i64,\n\n}\n\n\n\npub async fn detail(\n\n info: Path<TaskDetailRequest>,\n\n session: Session,\n\n state: Data<State>,\n", "file_path": "tachyon-core/src/routers/view/task.rs", "rank": 9, "score": 72908.57929802223 }, { "content": "#[async_trait]\n\npub trait AsyncRenderOnce: Sized {\n\n async fn render(self) -> RenderResult;\n\n async fn render_response(self) -> Result<HttpResponse> {\n\n let res = self.render().await.map_err(ErrorInternalServerError)?;\n\n Ok(HttpResponse::Ok()\n\n .content_type(\"text/html; charset=utf-8\")\n\n .body(res))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<T> AsyncRenderOnce for T\n\nwhere\n\n T: Sync + Send + TemplateOnce,\n\n{\n\n async fn render(self) -> RenderResult {\n\n self.render_once()\n\n }\n\n}\n\n\n", "file_path": "tachyon-template/src/lib.rs", "rank": 10, "score": 72855.3038170039 }, { "content": "fn generate_rust(items: Vec<CSSTarget>) {\n\n let file = File::create(\".tmp/summary.rs\").unwrap();\n\n let mut writer = 
std::io::BufWriter::new(file);\n\n\n\n write!(\n\n &mut writer,\n\n \"pub const TARGETS: phf::Map<&'static str, &'static str> = \"\n\n )\n\n .unwrap();\n\n let mut map = &mut phf_codegen::Map::new();\n\n\n\n for i in &items {\n\n let data = std::fs::read_to_string(&i.path).unwrap();\n\n map = map.entry(\n\n i.path.to_str().unwrap().trim_start_matches(\"dist/\"),\n\n &format!(\"{:?}\", data),\n\n );\n\n }\n\n\n\n writeln!(&mut writer, \"{};\", map.build()).unwrap();\n\n}\n\n\n", "file_path": "tachyon-frontend/build.rs", "rank": 11, "score": 68791.35779189377 }, { "content": "pub fn routers<S: AsRef<Path>>(static_path: S) -> Scope {\n\n web::scope(\"\")\n\n .service(api::routers())\n\n .service(view::routers())\n\n .route(\"/frontend/{path}\", web::get().to(frontend))\n\n .route(\"/\", web::get().to(index))\n\n .service(\n\n Files::new(\"/static\", static_path.as_ref())\n\n .show_files_listing()\n\n .files_listing_renderer(forbidden_index)\n\n .default_handler(fn_service(forbidden)),\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use actix_web::dev::ServiceResponse;\n\n use actix_web::test;\n\n\n\n #[cfg(all(not(miri), test, feature = \"integration-test\"))]\n", "file_path": "tachyon-core/src/routers/mod.rs", "rank": 12, "score": 66796.1111228306 }, { "content": "struct CSSTarget {\n\n path: PathBuf,\n\n}\n\n\n", "file_path": "tachyon-frontend/build.rs", "rank": 13, "score": 51878.216060558094 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=src\");\n\n println!(\"cargo:rerun-if-changed=.tmp\");\n\n println!(\"cargo:rerun-if-changed=gulpfile.js\");\n\n println!(\"cargo:rerun-if-changed=package.json\");\n\n println!(\"cargo:rerun-if-changed=tsconfig.json\");\n\n println!(\"cargo:rerun-if-changed=webpack.config.js\");\n\n std::process::Command::new(\"yarn\")\n\n .arg(\"install\")\n\n .spawn()\n\n .unwrap()\n\n .wait()\n\n .unwrap()\n\n .exit_ok()\n\n .unwrap();\n\n std::process::Command::new(\"gulp\")\n\n .arg(\"default\")\n\n .spawn()\n\n .unwrap()\n\n .wait()\n", "file_path": "tachyon-frontend/build.rs", "rank": 14, "score": 51397.96645957425 }, { "content": "fn deserialize_pubkey(\n\n slice: &[u8],\n\n) -> anyhow::Result<sequoia_openpgp::packet::Key<PublicParts, PrimaryRole>> {\n\n let key = sequoia_openpgp::parse::PacketParserBuilder::from_bytes(slice)?\n\n .dearmor(Dearmor::Disabled)\n\n .build()?;\n\n match key {\n\n PacketParserResult::Some(p) => match p.packet {\n\n Packet::PublicKey(key) => Ok(key),\n\n _ => Err(anyhow!(\"unexpected packet type when reading public key\")),\n\n },\n\n PacketParserResult::EOF(_) => Err(anyhow!(\"unexpected EOF when reading public key\")),\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"test\"))]\n\nmod test {\n\n pub const KEY_BLOCK: &str = r#\"-----BEGIN PGP PUBLIC KEY BLOCK-----\n\nComment: User-ID:\tSchrodingerZhu <i@zhuyi.fan>\n\nComment: Created:\t4/12/20 6:30 PM\n", "file_path": "entity/src/user.rs", "rank": 15, "score": 50111.44141081534 }, { "content": "struct EmailConstraint;\n\n\n\nimpl StatementBuilder for EmailConstraint {\n\n fn build(&self, db_backend: &DbBackend) -> Statement {\n\n match db_backend {\n\n DbBackend::Postgres => {\n\n const STMT: &str = r#\"ALTER TABLE \"user\" ADD CONSTRAINT proper_email CHECK (email ~* '^[A-Za-z0-9._+%-]+@[A-Za-z0-9.-]+[.][A-Za-z]+$');\"#;\n\n Statement::from_string(DbBackend::Postgres, STMT.to_string())\n\n }\n\n _ => panic!(\"db other than PG is not supported\"),\n\n }\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl MigrationTrait for Migration {\n\n async fn up(&self, manager: 
&SchemaManager) -> Result<(), DbErr> {\n\n manager\n\n .create_table(\n\n sea_query::Table::create()\n", "file_path": "migration/src/m20220101_000002_create_user_table.rs", "rank": 16, "score": 48534.22082989663 }, { "content": "pub trait LoggedUnwrap {\n\n type Output;\n\n fn logged_unwrap(self) -> Self::Output;\n\n}\n\n\n", "file_path": "tachyon-core/src/utils.rs", "rank": 17, "score": 46513.09857927082 }, { "content": "}\n\n\n\n#[derive(Copy, Clone, Debug, EnumIter)]\n\npub enum Relation {}\n\n\n\nimpl ColumnTrait for Column {\n\n type EntityName = Entity;\n\n fn def(&self) -> ColumnDef {\n\n match self {\n\n Self::Uuid => ColumnType::Uuid.def().unique().indexed(),\n\n Self::Name => ColumnType::String(None).def().unique().indexed(),\n\n Self::Mimetype => ColumnType::String(None).def(),\n\n Self::UploadTime => ColumnType::TimestampWithTimeZone.def(),\n\n Self::Visibility => ColumnType::Boolean.def(),\n\n }\n\n }\n\n}\n\n\n\nimpl RelationTrait for Relation {\n\n fn def(&self) -> RelationDef {\n\n panic!(\"No RelationDef\")\n\n }\n\n}\n\n\n\nimpl ActiveModelBehavior for ActiveModel {}\n", "file_path": "entity/src/object.rs", "rank": 18, "score": 34604.580599473564 }, { "content": "use sea_orm::entity::prelude::*;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Copy, Clone, Default, Debug, DeriveEntity)]\n\npub struct Entity;\n\n\n\nimpl EntityName for Entity {\n\n fn table_name(&self) -> &str {\n\n \"object\"\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, DeriveModel, DeriveActiveModel, Serialize, Deserialize)]\n\npub struct Model {\n\n pub uuid: Uuid,\n\n pub name: String,\n\n pub mimetype: String,\n\n pub upload_time: DateTimeUtc,\n\n pub visibility: bool,\n\n}\n", "file_path": "entity/src/object.rs", "rank": 19, "score": 34600.448110449885 }, { "content": "\n\n#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]\n\npub enum Column {\n\n Uuid,\n\n Name,\n\n Mimetype,\n\n UploadTime,\n\n Visibility,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, EnumIter, DerivePrimaryKey)]\n\npub enum PrimaryKey {\n\n Uuid,\n\n}\n\n\n\nimpl PrimaryKeyTrait for PrimaryKey {\n\n type ValueType = Uuid;\n\n fn auto_increment() -> bool {\n\n false\n\n }\n", "file_path": "entity/src/object.rs", "rank": 20, "score": 34596.70981016585 }, { "content": "fn email_hash<S: AsRef<str>>(email: S) -> String {\n\n let mut email_hasher = md5::Md5::new();\n\n email_hasher.update(email.as_ref().as_bytes());\n\n format!(\"{:x}\", email_hasher.finalize())\n\n}\n", "file_path": "tachyon-template/src/view/mod.rs", "rank": 21, "score": 34536.735442155084 }, { "content": "fn list_css<P: AsRef<Path>>(dir: P) -> Vec<CSSTarget> {\n\n let mut res = Vec::new();\n\n for i in fs::read_dir(dir).unwrap().map(|x| x.unwrap()) {\n\n let meta = i.metadata().unwrap();\n\n let path = i.path();\n\n if meta.is_dir() {\n\n let mut vec = list_css(path.as_path());\n\n res.append(&mut vec);\n\n } else {\n\n res.push(CSSTarget { path })\n\n }\n\n }\n\n res\n\n}\n\n\n", "file_path": "tachyon-frontend/build.rs", "rank": 22, "score": 33853.22998925623 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse uuid::Uuid;\n\n/// This is the struct of management struct, which basically cover the Permissions management\n\n/// functionality. For the admin, he/she can have user management priviledge. 
So the corresponding\n\n/// bool is true.\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Permissions {\n\n pub task_management: bool,\n\n pub file_management: bool,\n\n pub team_management: bool,\n\n pub user_management: bool,\n\n pub system_management: bool,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct UserInfo {\n\n pub id: i64,\n\n pub name: String,\n\n pub email: String,\n\n pub perms: Permissions,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct PasswordForgot {\n\n pub email: String,\n\n pub token: Uuid,\n\n}\n", "file_path": "tachyon-core/src/session.rs", "rank": 23, "score": 33794.11627196501 }, { "content": "//! Session middleware is responsible for pass user information to the server\n\n//!Cookies are small text files that a website stores on your device (smartphones, computers etc.) when you browse the internet. They are created when your browser loads a particular website, and the site sends information to your browser which then creates a text file. Cookies can store a range of information, including personal data (such as name, home address, email address) and information about your preferred language or location etc. that allows the site to present you with information customized to fit your needs.\n\n//! * What are session cookies?\n\n//!\n\n//! Session cookies are cookies that last for a session. A session starts when you launch a website or web app and ends when you leave the website or close your browser window. Session cookies contain information that is stored in a temporary memory location which is deleted after the session ends. Unlike other cookies, session cookies are never stored on your device. Therefore, they are also known as transient cookies, non-persistent cookies, or temporary cookies.\n\n//! * How do session cookies work?\n\n//!\n\n//!The session cookie is a server-specific cookie that cannot be passed to any machine other than the one that generated the cookie. The server creates a “session ID” which is a randomly generated number that temporarily stores the session cookie. This cookie stores information such as the user’s input and tracks the movements of the user within the website. There is no other information stored in the session cookie.\n\n//!session cookies working\n\n//!Session cookies are set on a device’s temporary memory when a browser session starts.\n\n//!What is the purpose of session cookies?\n\n//!\n\n//!A website itself cannot track a user’s movement on its webpage and treats each new page request as a new request from a new user. Session cookies allow websites to remember users within a website when they move between web pages. These cookies tell the server what pages to show the user so the user doesn’t have to remember where they left off or start navigating the site all over again. Therefore, without session cookies, websites have no memory. Session cookies are vital for user experience on online shops and websites when the functionalities depend on users’ activities.\n\n//!What are session cookies examples?\n\n//!\n\n//!The most common example of a session cookie in action is the shopping cart on eCommerce websites. When you visit an online shop and add items to your shopping cart, the session cookie remembers your selection so your shopping cart will have the items you selected when you are ready to checkout. 
Without session cookies, the checkout page will not remember your selection and your shopping cart will be empty.\n\n//!\n\n//!Session cookies also help users to browse and add items to the shopping cart without logging in on an eCommerce site. Only when users checkout, do they have to add their name, address, and payment information.\n\n//!\n\n\n", "file_path": "tachyon-core/src/session.rs", "rank": 24, "score": 33784.04776272511 }, { "content": " }),\n\n })\n\n }\n\n\n\n #[cfg(all(not(miri), test, feature = \"integration-test\"))]\n\n pub async fn mocked(uuid: uuid::Uuid) -> Result<Self> {\n\n use migration::Migrator;\n\n use migration::MigratorTrait;\n\n\n\n let sql_db = Database::connect(crate::test::DB_ADDRESS).await?;\n\n Migrator::down(&sql_db, Some(999)).await?;\n\n Migrator::up(&sql_db, Some(999)).await?;\n\n let kv_db = sled::Config::new()\n\n .path(format!(\"/tmp/tachyon-mock-test-{}\", uuid))\n\n .open()?;\n\n let key = Key::try_generate().ok_or_else(|| anyhow!(\"unable to generate cookie key\"))?;\n\n Ok(State {\n\n sql_db,\n\n kv_db,\n\n key,\n\n admin_name: \"Schrodinger ZHU\".to_string(),\n\n lettre: None,\n\n })\n\n }\n\n}\n", "file_path": "tachyon-core/src/state.rs", "rank": 25, "score": 33779.31067126814 }, { "content": "use crate::configs::Configs;\n\nuse crate::utils::Result;\n\nuse actix_web::cookie::Key;\n\nuse anyhow::anyhow;\n\nuse entity::sea_orm::{Database, DatabaseConnection};\n\nuse lettre::transport::smtp::authentication::Credentials;\n\nuse lettre::{AsyncSmtpTransport, Tokio1Executor};\n\n\n\npub struct State {\n\n pub sql_db: DatabaseConnection,\n\n pub kv_db: sled::Db,\n\n pub key: Key,\n\n pub admin_name: String,\n\n pub lettre: Option<(String, AsyncSmtpTransport<Tokio1Executor>)>,\n\n}\n\n\n\nimpl State {\n\n pub async fn from_configs(configs: &Configs) -> Result<Self> {\n\n let sql_db = Database::connect(&configs.db_uri).await?;\n\n let kv_db = sled::Config::new().path(&configs.sled_dir).open()?;\n", "file_path": "tachyon-core/src/state.rs", "rank": 26, "score": 33778.894659071884 }, { "content": "\n\n let key = if let Some(key) = configs.fixed_key.as_ref() {\n\n Key::derive_from(key.as_bytes())\n\n } else {\n\n Key::try_generate().ok_or_else(|| anyhow!(\"unable to generate cookie key\"))?\n\n };\n\n Ok(State {\n\n sql_db,\n\n kv_db,\n\n key,\n\n admin_name: configs.admin_name.clone(),\n\n lettre: configs.smtp.as_ref().map(|x| {\n\n (\n\n x.userinfo.clone(),\n\n AsyncSmtpTransport::<Tokio1Executor>::starttls_relay(x.host.as_str())\n\n .unwrap()\n\n .port(x.port)\n\n .credentials(Credentials::new(x.username.clone(), x.password.clone()))\n\n .build(),\n\n )\n", "file_path": "tachyon-core/src/state.rs", "rank": 27, "score": 33763.216628598784 }, { "content": "use actix_web::http::StatusCode;\n\nuse sailfish::TemplateOnce;\n\n\n\n#[derive(TemplateOnce)]\n\n#[template(path = \"error.stpl\")]\n\npub struct ErrorTemplate {\n\n pub title: String,\n\n pub status_code: StatusCode,\n\n}\n\n\n\nimpl ErrorTemplate {\n\n pub fn new(title: String, status_code: StatusCode) -> Self {\n\n Self { title, status_code }\n\n }\n\n pub fn reason(status_code: StatusCode) -> &'static str {\n\n status_code.canonical_reason().unwrap_or(\"Unknown Error\")\n\n }\n\n}\n", "file_path": "tachyon-template/src/error.rs", "rank": 28, "score": 33750.33725836732 }, { "content": "use super::{LeftBar, LeftBarItem};\n\nuse crate::view::email_hash;\n\nuse sailfish::TemplateOnce;\n\n\n\npub struct ObjectItem {\n\n pub name: String,\n\n pub uploaded_at: chrono::DateTime<chrono::Utc>,\n\n pub uuid: 
uuid::Uuid,\n\n pub mimetype: String,\n\n pub visibility: bool,\n\n}\n\n\n\n#[derive(TemplateOnce)]\n\n#[template(path = \"view/object.stpl\")]\n\npub struct ObjectTemplate {\n\n pub is_admin: bool,\n\n pub title: String,\n\n pub email_hash: String,\n\n pub objects: Vec<ObjectItem>,\n\n pub page_number: usize,\n", "file_path": "tachyon-template/src/view/object.rs", "rank": 29, "score": 31952.125455559715 }, { "content": " pub next_page_number: Option<usize>,\n\n pub prev_page_number: Option<usize>,\n\n}\n\n\n\nimpl ObjectTemplate {\n\n pub fn new<S: AsRef<str>, E: AsRef<str>>(\n\n is_admin: bool,\n\n title: S,\n\n email: E,\n\n objects: Vec<ObjectItem>,\n\n page_number: usize,\n\n next_page_number: Option<usize>,\n\n prev_page_number: Option<usize>,\n\n ) -> Self {\n\n let email_hash = email_hash(email);\n\n Self {\n\n is_admin,\n\n title: title.as_ref().to_string(),\n\n email_hash,\n\n objects,\n", "file_path": "tachyon-template/src/view/object.rs", "rank": 30, "score": 31946.521961946357 }, { "content": " page_number,\n\n next_page_number,\n\n prev_page_number,\n\n }\n\n }\n\n}\n\n\n\nimpl LeftBar for ObjectTemplate {\n\n const ACTIVE_ITEM: LeftBarItem = LeftBarItem::Objects;\n\n}\n", "file_path": "tachyon-template/src/view/object.rs", "rank": 31, "score": 31933.37553574223 }, { "content": " ErrorTemplate::new(\n\n format!(\n\n \"{} | Tachyon Project\",\n\n status.canonical_reason().unwrap_or(\"Unknown Error\")\n\n ),\n\n status,\n\n )\n\n .render_once()\n\n .anyhow()\n\n .logged_unwrap(),\n\n );\n\n\n\n res.headers_mut().insert(\n\n actix_web::http::header::CONTENT_TYPE,\n\n actix_web::http::header::HeaderValue::from_static(\"text/html;charset=utf-8\"),\n\n );\n\n\n\n let res = ServiceResponse::new(req, res)\n\n .map_into_boxed_body()\n\n .map_into_right_body();\n\n\n\n Ok(ErrorHandlerResponse::Response(res))\n\n}\n", "file_path": "tachyon-core/src/routers/view/error.rs", "rank": 32, "score": 31263.33716716975 }, { "content": "use crate::utils::{IntoAnyhow, LoggedUnwrap};\n\nuse actix_web::body::MessageBody;\n\nuse actix_web::dev::ServiceResponse;\n\nuse actix_web::http::Method;\n\nuse actix_web::middleware::ErrorHandlerResponse;\n\nuse actix_web::{dev, Result};\n\nuse tachyon_template::{ErrorTemplate, TemplateOnce};\n\n\n", "file_path": "tachyon-core/src/routers/view/error.rs", "rank": 33, "score": 31262.575081651692 }, { "content": " data: Data<State>,\n\n request: Query<ObjectRequest>,\n\n) -> Result<HttpResponse> {\n\n match session.get::<UserInfo>(\"user\")? 
{\n\n None => Err(ErrorUnauthorized(\"login info not found\")),\n\n Some(user) => {\n\n let paginator = entity::object::Entity::find()\n\n .order_by_asc(entity::object::Column::Uuid)\n\n .paginate(&data.sql_db, 5);\n\n let current_page = request.page.unwrap_or(0);\n\n let current = paginator\n\n .fetch_page(current_page)\n\n .await\n\n .unwrap_or_default()\n\n .into_iter()\n\n .map(|object| ObjectItem {\n\n uuid: object.uuid,\n\n name: object.name,\n\n mimetype: object.mimetype,\n\n uploaded_at: object.upload_time,\n", "file_path": "tachyon-core/src/routers/view/object.rs", "rank": 40, "score": 30789.796089510284 }, { "content": "use crate::session::UserInfo;\n\nuse crate::State;\n\nuse actix_session::Session;\n\nuse actix_web::error::ErrorUnauthorized;\n\nuse actix_web::web::{Data, Query};\n\nuse actix_web::HttpResponse;\n\nuse actix_web::Result;\n\nuse entity::sea_orm::EntityTrait;\n\nuse entity::sea_orm::PaginatorTrait;\n\nuse entity::sea_orm::QueryOrder;\n\nuse tachyon_template::view::{ObjectItem, ObjectTemplate};\n\nuse tachyon_template::AsyncRenderOnce;\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\npub struct ObjectRequest {\n\n page: Option<usize>,\n\n}\n\n\n\npub async fn handler(\n\n session: Session,\n", "file_path": "tachyon-core/src/routers/view/object.rs", "rank": 47, "score": 30780.994811024495 }, { "content": "use sea_schema::migration::prelude::*;\n\n\n\npub struct Migration;\n\n\n\nimpl MigrationName for Migration {\n\n fn name(&self) -> &str {\n\n \"m20220101_000001_create_object_table\"\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl MigrationTrait for Migration {\n\n async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {\n\n manager\n\n .create_table(\n\n sea_query::Table::create()\n\n .table(entity::object::Entity)\n\n .col(\n\n ColumnDef::new(entity::object::Column::Uuid)\n\n .uuid()\n", "file_path": "migration/src/m20220101_000001_create_object_table.rs", "rank": 50, "score": 30766.498006392023 }, { "content": " visibility: object.visibility,\n\n })\n\n .collect::<Vec<ObjectItem>>();\n\n let next = paginator\n\n .num_pages()\n\n .await\n\n .map(|num_pages| {\n\n if current_page + 1 < num_pages {\n\n Some(current_page + 1)\n\n } else {\n\n None\n\n }\n\n })\n\n .unwrap_or(None);\n\n let prev = if current_page != 0 {\n\n Some(current_page - 1)\n\n } else {\n\n None\n\n };\n\n ObjectTemplate::new(\n", "file_path": "tachyon-core/src/routers/view/object.rs", "rank": 52, "score": 30761.359019841962 }, { "content": " .table(entity::object::Entity)\n\n .col(entity::object::Column::Name)\n\n .to_owned(),\n\n )\n\n .await?;\n\n Ok(())\n\n }\n\n\n\n async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {\n\n manager\n\n .drop_table(\n\n sea_query::Table::drop()\n\n .table(entity::object::Entity)\n\n .to_owned(),\n\n )\n\n .await\n\n }\n\n}\n", "file_path": "migration/src/m20220101_000001_create_object_table.rs", "rank": 54, "score": 30758.5397643635 }, { "content": " user.perms.user_management,\n\n \"Object | Project Tachyon\",\n\n user.email,\n\n current,\n\n current_page,\n\n next,\n\n prev,\n\n )\n\n .render_response()\n\n .await\n\n }\n\n }\n\n}\n", "file_path": "tachyon-core/src/routers/view/object.rs", "rank": 55, "score": 30750.758812558215 }, { "content": " .col(\n\n ColumnDef::new(entity::object::Column::Visibility)\n\n .boolean()\n\n .not_null(),\n\n )\n\n .if_not_exists()\n\n .to_owned(),\n\n )\n\n .await?;\n\n manager\n\n .create_index(\n\n sea_query::Index::create()\n\n .table(entity::object::Entity)\n\n 
.col(entity::object::Column::Uuid)\n\n .to_owned(),\n\n )\n\n .await?;\n\n manager\n\n .create_index(\n\n sea_query::Index::create()\n", "file_path": "migration/src/m20220101_000001_create_object_table.rs", "rank": 56, "score": 30749.91261317892 }, { "content": " .not_null()\n\n .unique_key()\n\n .primary_key(),\n\n )\n\n .col(\n\n ColumnDef::new(entity::object::Column::Name)\n\n .string()\n\n .not_null()\n\n .unique_key(),\n\n )\n\n .col(\n\n ColumnDef::new(entity::object::Column::Mimetype)\n\n .string()\n\n .not_null(),\n\n )\n\n .col(\n\n ColumnDef::new(entity::object::Column::UploadTime)\n\n .timestamp_with_time_zone()\n\n .not_null(),\n\n )\n", "file_path": "migration/src/m20220101_000001_create_object_table.rs", "rank": 57, "score": 30748.895072835778 }, { "content": "#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug)]\n\nenum LeftBarItem {\n\n Dashboard,\n\n MyTasks,\n\n User,\n\n Objects,\n\n}\n\n\n", "file_path": "tachyon-template/src/view/mod.rs", "rank": 58, "score": 29098.438487550982 }, { "content": " .content_type(\"application/json\")\n\n .status(status)\n\n .body(x)\n\n })\n\n}\n\n\n\npub async fn delete_task(\n\n request: Json<DeleteTaskRequest>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n match session.get::<UserInfo>(\"user\").unwrap_or(None) {\n\n None => Ok(HttpResponse::Unauthorized().finish()),\n\n Some(_user_info) => {\n\n let task = entity::task::Entity::find_by_id(request.id)\n\n .one(&data.sql_db)\n\n .await\n\n .map_err(ErrorBadRequest)?\n\n .ok_or_else(|| ErrorNotFound(\"no such task\"))?;\n\n task.delete(&data.sql_db)\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 59, "score": 50.8654031934637 }, { "content": " success: false,\n\n message: unsafe { Some(format!(\"{}\", prepared.unwrap_err_unchecked())) },\n\n }\n\n }\n\n }\n\n let mut status = http::StatusCode::OK;\n\n let json = match session.get::<UserInfo>(\"user\") {\n\n Err(e) => {\n\n status = http::StatusCode::INTERNAL_SERVER_ERROR;\n\n UserAddResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n }\n\n }\n\n Ok(Some(user)) if user.perms.user_management => insert_user(&request, &data.sql_db).await,\n\n #[cfg(feature = \"integration-test\")]\n\n Ok(None) if matches!(request.no_session, Some(true)) => {\n\n insert_user(&request, &data.sql_db).await\n\n }\n\n Ok(_) => {\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 60, "score": 47.476434972495305 }, { "content": " .await\n\n .map_err(ErrorBadRequest)?;\n\n Ok(HttpResponse::Ok().finish())\n\n}\n\n\n\npub async fn delete(\n\n request: Json<UserIdentification>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n match session.get::<UserInfo>(\"user\").unwrap_or(None) {\n\n None => Ok(HttpResponse::Unauthorized().finish()),\n\n Some(e) if !e.perms.user_management => Ok(HttpResponse::Unauthorized().finish()),\n\n Some(user_info) => {\n\n let user = entity::user::Entity::find_by_id(request.id)\n\n .one(&data.sql_db)\n\n .await\n\n .map_err(ErrorBadRequest)?\n\n .ok_or_else(|| ErrorNotFound(\"no such user\"))?;\n\n user.delete(&data.sql_db)\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 61, "score": 46.96965475052247 }, { "content": " content: ActiveValue::Set(request.content),\n\n };\n\n comment\n\n .insert(&data.sql_db)\n\n .await\n\n .map_err(ErrorInternalServerError)?;\n\n Ok(HttpResponse::Ok().finish())\n\n }\n\n }\n\n}\n\n\n\n#[derive(serde::Serialize, serde::Deserialize)]\n\npub struct 
DeleteCommentRequest {\n\n pub comment_id: i64,\n\n}\n\n\n\npub async fn delete_comment(\n\n request: Json<DeleteCommentRequest>,\n\n session: Session,\n\n data: web::Data<State>,\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 62, "score": 46.362986168124635 }, { "content": " ))\n\n .await\n\n .map_err(ErrorBadRequest)?;\n\n Ok(HttpResponse::Ok().finish())\n\n}\n\n\n\npub async fn unlock(\n\n request: Json<UserIdentification>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n if session.get::<UserInfo>(\"user\").unwrap_or(None).is_none() {\n\n return Ok(HttpResponse::Unauthorized().finish());\n\n }\n\n data.sql_db\n\n .execute(Statement::from_sql_and_values(\n\n Postgres,\n\n r#\"UPDATE \"user\" SET wrong_pass_attempt = 0 WHERE id = $1\"#,\n\n vec![request.id.into()],\n\n ))\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 63, "score": 45.95048393561836 }, { "content": " session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n let mut status = http::StatusCode::OK;\n\n let json = match session.get::<UserInfo>(\"user\") {\n\n Err(e) => {\n\n status = http::StatusCode::INTERNAL_SERVER_ERROR;\n\n EditTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n }\n\n }\n\n Ok(Some(user)) if user.perms.task_management => {\n\n let task = entity::task::Entity::find_by_id(request.id)\n\n .one(&data.sql_db)\n\n .await\n\n .map_err(ErrorBadRequest)?\n\n .ok_or_else(|| ErrorNotFound(\"no such task\"))?;\n\n\n\n let mut active_task: entity::task::ActiveModel = task.into();\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 64, "score": 44.693148404149426 }, { "content": " task.name,\n\n task.create_date,\n\n task.finish_date,\n\n task.due_date,\n\n assigned_users,\n\n comment_and_user,\n\n task.description,\n\n );\n\n template.render_response().await\n\n}\n\n\n\npub async fn handler(\n\n request: actix_web::web::Query<TaskRequest>,\n\n data: Data<State>,\n\n session: Session,\n\n) -> Result<HttpResponse> {\n\n match session.get::<UserInfo>(\"user\")? 
{\n\n None => Err(ErrorUnauthorized(\"login info not found\")),\n\n Some(user) => {\n\n let mut page = entity::task::Entity::find();\n", "file_path": "tachyon-core/src/routers/view/task.rs", "rank": 65, "score": 44.10764667745366 }, { "content": " .await\n\n .map_err(ErrorInternalServerError)?;\n\n if request.id == user_info.id {\n\n session.remove(\"user\");\n\n }\n\n Ok(HttpResponse::Ok().finish())\n\n }\n\n }\n\n}\n\n\n\npub async fn add(\n\n request: Json<UserAddRequest>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n async fn insert_user(req: &Json<UserAddRequest>, db: &DatabaseConnection) -> UserAddResult {\n\n match req.validate() {\n\n Ok(_) => {}\n\n Err(e) => {\n\n return UserAddResult {\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 66, "score": 43.9071357986745 }, { "content": "\n\nimpl UserLoginResult {\n\n fn from_error<E: Into<anyhow::Error>>(e: E, signature_requirement: Option<Uuid>) -> Self {\n\n Self {\n\n success: false,\n\n signature_requirement,\n\n message: Some(format!(\"{}\", e.into())),\n\n }\n\n }\n\n fn to_reply(&self, status: StatusCode) -> Result<HttpResponse> {\n\n simd_json::to_string(self)\n\n .map_err(ErrorInternalServerError)\n\n .map(|x| HttpResponse::Ok().status(status).body(x))\n\n }\n\n}\n\n\n\npub async fn logout(session: Session) -> HttpResponse {\n\n match session.remove(\"user\").ok_or(\"already logged out\") {\n\n Ok(_) => HttpResponse::Ok().json(UserLogoutResult {\n\n success: true,\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 67, "score": 43.75987615687125 }, { "content": " .map_err(ErrorBadRequest)?\n\n .ok_or_else(|| ErrorNotFound(\"no such task\"))?;\n\n\n\n let mut active_task: entity::task::ActiveModel = task.into();\n\n let finish: DateTimeUtc = request.finish_date;\n\n active_task.finish_date = ActiveValue::Set(Some(finish));\n\n match active_task\n\n .update(&data.sql_db)\n\n .await\n\n .map_err(ErrorInternalServerError)\n\n {\n\n Ok(_) => EditTaskResult {\n\n success: true,\n\n message: None,\n\n },\n\n Err(e) => EditTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n },\n\n };\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 68, "score": 43.66358742629539 }, { "content": "}\n\n\n\npub async fn resolve_task(\n\n request: Json<ResolveTaskRequest>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n let mut status = http::StatusCode::OK;\n\n let json = match session.get::<UserInfo>(\"user\") {\n\n Err(e) => {\n\n status = http::StatusCode::INTERNAL_SERVER_ERROR;\n\n EditTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n }\n\n }\n\n Ok(Some(user)) if user.perms.task_management => {\n\n let task = entity::task::Entity::find_by_id(request.id)\n\n .one(&data.sql_db)\n\n .await\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 69, "score": 43.382169302133114 }, { "content": "#[derive(serde::Serialize, serde::Deserialize)]\n\npub struct AddCommentRequest {\n\n pub task_id: i64,\n\n pub content: String,\n\n}\n\n\n\npub async fn add_comment(\n\n request: Json<AddCommentRequest>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n match session.get::<UserInfo>(\"user\").unwrap_or(None) {\n\n None => Err(ErrorUnauthorized(\"no login info\")),\n\n Some(user_info) => {\n\n let request = request.into_inner();\n\n let comment = entity::task_discussion::ActiveModel {\n\n id: ActiveValue::NotSet,\n\n task_id: 
ActiveValue::Set(request.task_id),\n\n update_time: ActiveValue::Set(chrono::Utc::now()),\n\n user_id: ActiveValue::Set(user_info.id),\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 70, "score": 43.23491323426847 }, { "content": " );\n\n if let Ok(model) = prepared {\n\n match model.insert(db).await {\n\n Ok(_) => AddTaskResult {\n\n success: true,\n\n message: None,\n\n },\n\n Err(e) => AddTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n },\n\n }\n\n } else {\n\n AddTaskResult {\n\n success: false,\n\n message: unsafe { Some(format!(\"{}\", prepared.unwrap_err_unchecked())) },\n\n }\n\n }\n\n }\n\n let mut status = http::StatusCode::OK;\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 71, "score": 41.853427800673195 }, { "content": " .map_err(ErrorInternalServerError)?\n\n {\n\n None => Ok(HttpResponse::BadRequest().json(UserEditResult {\n\n success: false,\n\n message: Some(\"no such user\".to_string()),\n\n })),\n\n Some(user) => {\n\n let mut active_user: entity::user::ActiveModel = user.into();\n\n match request.into_inner().apply_patch(&mut active_user) {\n\n Err(e) => Ok(HttpResponse::BadRequest().json(UserEditResult {\n\n success: false,\n\n message: Some(e.to_string()),\n\n })),\n\n Ok(_) => active_user\n\n .update(&data.sql_db)\n\n .await\n\n .map_err(ErrorInternalServerError)\n\n .map(|_| {\n\n HttpResponse::Ok().json(UserEditResult {\n\n success: true,\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 72, "score": 41.82693297385323 }, { "content": " );\n\n }\n\n if let Some(pgp_key) = self.pgp_key {\n\n target.pgp_key = ActiveValue::Set(\n\n entity::user::Model::get_public_key(pgp_key).map_err(ErrorBadRequest)?,\n\n );\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\npub async fn edit(\n\n request: Json<UserEditRequest>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n if session\n\n .get::<UserInfo>(\"user\")\n\n .unwrap_or(None)\n\n .and_then(|x| {\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 73, "score": 41.20891895620606 }, { "content": ") -> Result<HttpResponse> {\n\n match session.get::<UserInfo>(\"user\").unwrap_or(None) {\n\n None => Err(ErrorUnauthorized(\"no login info\")),\n\n Some(user_info) => {\n\n let request = request.into_inner();\n\n let comment = entity::task_discussion::Entity::find_by_id(request.comment_id)\n\n .one(&data.sql_db)\n\n .await\n\n .map_err(ErrorBadRequest)?\n\n .ok_or_else(|| ErrorNotFound(\"no such comment\"))?;\n\n if comment.user_id != user_info.id {\n\n return Err(ErrorUnauthorized(\"no permission\"));\n\n }\n\n comment\n\n .delete(&data.sql_db)\n\n .await\n\n .map_err(ErrorInternalServerError)?;\n\n Ok(HttpResponse::Ok().finish())\n\n }\n\n }\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 74, "score": 40.734818061700174 }, { "content": "}\n\n\n\n#[derive(serde::Serialize, serde::Deserialize, Debug)]\n\npub struct UserIdentification {\n\n pub id: i64,\n\n}\n\n\n\npub async fn lock(\n\n request: Json<UserIdentification>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n if session.get::<UserInfo>(\"user\").unwrap_or(None).is_none() {\n\n return Ok(HttpResponse::Unauthorized().finish());\n\n }\n\n data.sql_db\n\n .execute(Statement::from_sql_and_values(\n\n Postgres,\n\n r#\"UPDATE \"user\" SET wrong_pass_attempt = 100 WHERE id = $1\"#,\n\n vec![request.id.into()],\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 75, "score": 40.62744139655828 }, { 
"content": " request: Json<AddTaskRequest>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n async fn insert_task(req: &Json<AddTaskRequest>, db: &DatabaseConnection) -> AddTaskResult {\n\n match req.validate() {\n\n Ok(_) => {}\n\n Err(e) => {\n\n return AddTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n };\n\n }\n\n }\n\n\n\n let prepared = entity::task::Model::prepare(\n\n &req.name,\n\n &req.create_date,\n\n &req.due_date,\n\n &req.description,\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 76, "score": 40.185443979466264 }, { "content": "pub async fn handler(\n\n request: actix_web::web::Query<UserRequest>,\n\n data: Data<State>,\n\n session: Session,\n\n) -> Result<HttpResponse> {\n\n match session.get::<UserInfo>(\"user\")? {\n\n None => Err(ErrorUnauthorized(\"login info not found\")),\n\n Some(user) => {\n\n let mut page = entity::user::Entity::find();\n\n\n\n if let Some(keywords) = &request.search_string {\n\n log::debug!(\"search string: {}\", keywords);\n\n let expr = Expr::cust_with_values(\n\n \"user_search_vector @@ plainto_tsquery(?)\",\n\n vec![keywords.to_string()],\n\n );\n\n page = page.filter(expr);\n\n }\n\n let page_size = request.page_size.unwrap_or(10);\n\n let paginator = page\n", "file_path": "tachyon-core/src/routers/view/user.rs", "rank": 77, "score": 39.78663070436389 }, { "content": " let upd_des: String = request.updated_description.clone();\n\n active_task.description = ActiveValue::Set(upd_des);\n\n match active_task\n\n .update(&data.sql_db)\n\n .await\n\n .map_err(ErrorInternalServerError)\n\n {\n\n Ok(_) => EditTaskResult {\n\n success: true,\n\n message: None,\n\n },\n\n Err(e) => EditTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n },\n\n };\n\n\n\n EditTaskResult {\n\n success: true,\n\n message: None,\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 78, "score": 39.477080299954494 }, { "content": " .await\n\n .map_err(ErrorInternalServerError)?;\n\n res[i as usize] = count;\n\n }\n\n Ok(res)\n\n}\n\npub async fn handler(session: Session, data: Data<State>) -> Result<HttpResponse> {\n\n match session.get::<UserInfo>(\"user\")? 
{\n\n None => Err(ErrorUnauthorized(\"login info not found\")),\n\n Some(user) => {\n\n let total = entity::task::Entity::find()\n\n .count(&data.sql_db)\n\n .await\n\n .map_err(ErrorInternalServerError)?;\n\n let finished = entity::task::Entity::find()\n\n .filter(entity::task::Column::FinishDate.is_not_null())\n\n .count(&data.sql_db)\n\n .await\n\n .map_err(ErrorInternalServerError)?;\n\n let tasks = get_related_tasks(&user, &data).await?;\n", "file_path": "tachyon-core/src/routers/view/dashboard.rs", "rank": 79, "score": 39.45162055915562 }, { "content": "}\n\n\n\npub async fn assign(\n\n request: Json<AssignTaskRequest>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n async fn assign_task(\n\n req: &Json<AssignTaskRequest>,\n\n db: &DatabaseConnection,\n\n ) -> AssignTaskResult {\n\n match req.validate() {\n\n Ok(_) => {}\n\n Err(e) => {\n\n return AssignTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n };\n\n }\n\n }\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 80, "score": 39.19298906463972 }, { "content": "/// pub key: Key,\n\n/// pub admin_name: String,\n\n/// pub lettre: Option<(String, AsyncSmtpTransport<Tokio1Executor>)>,\n\n/// }\n\n///\n\n/// ```\n\n/// Which contains the important connections\n\n\n\npub async fn login(\n\n request: Json<UserLogin>,\n\n session: Session,\n\n data: web::Data<State>,\n\n) -> Result<HttpResponse> {\n\n if let Ok(Some(_)) = session.get::<UserInfo>(\"user\") {\n\n UserLoginResult {\n\n success: false,\n\n signature_requirement: None,\n\n message: Some(\"already logged in\".to_string()),\n\n }\n\n .to_reply(StatusCode::BAD_REQUEST)\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 81, "score": 38.952792341320155 }, { "content": "//! This is the core api in our project. Which manages the user registry, user login, user logout,\n\n//! user add,\n\n\n\nuse crate::session::UserInfo;\n\nuse crate::{session, IntoAnyhow, State};\n\nuse actix_session::Session;\n\nuse actix_web::error::{ErrorBadRequest, ErrorInternalServerError, ErrorNotFound};\n\nuse actix_web::http::StatusCode;\n\nuse actix_web::web::Json;\n\nuse actix_web::{http, web, HttpResponse, Result};\n\nuse anyhow::anyhow;\n\nuse entity::sea_orm::DatabaseBackend::Postgres;\n\nuse entity::sea_orm::{\n\n ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, DatabaseBackend,\n\n DatabaseConnection, EntityTrait, ModelTrait, QueryFilter, Statement,\n\n};\n\nuse uuid::Uuid;\n\nuse validator::Validate;\n\n\n\npub const WRONG_PASS_ATTEMPT_THRESHOLD: i64 = 5;\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 82, "score": 38.26623496160612 }, { "content": " if target_user.wrong_pass_attempt >= WRONG_PASS_ATTEMPT_THRESHOLD {\n\n match &request.signature {\n\n None => {\n\n let uuid = Uuid::new_v4();\n\n match session.insert(\"verification\", uuid) {\n\n Ok(_) => UserLoginResult {\n\n success: false,\n\n signature_requirement: Some(uuid),\n\n message: Some(\"account locked\".to_string()),\n\n }\n\n .to_reply(StatusCode::LOCKED),\n\n Err(e) => UserLoginResult::from_error(e, None)\n\n .to_reply(StatusCode::INTERNAL_SERVER_ERROR),\n\n }\n\n }\n\n Some(sig) => {\n\n match session\n\n .get::<Uuid>(\"verification\")\n\n .anyhow()\n\n .and_then(|x| x.ok_or_else(|| anyhow!(\"verification not initiated\")))\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 83, "score": 38.222993994615365 }, { "content": "//! This is the most important component. 
One of its important usage is to assign to a user.\n\n//! The user can assign resolve cancel ...\n\n//!\n\n\n\nuse crate::session::UserInfo;\n\nuse crate::State;\n\nuse actix_session::Session;\n\nuse actix_web::error::{\n\n ErrorBadRequest, ErrorInternalServerError, ErrorNotFound, ErrorUnauthorized,\n\n};\n\nuse actix_web::web::Json;\n\nuse actix_web::{http, web, HttpResponse, Result};\n\nuse entity::sea_orm::entity::prelude::*;\n\nuse entity::sea_orm::{ActiveModelTrait, ActiveValue, DatabaseConnection};\n\nuse validator::Validate;\n\n\n\n#[derive(serde::Serialize, serde::Deserialize, Debug)]\n\npub struct AddTaskResult {\n\n success: bool,\n\n message: Option<String>,\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 84, "score": 37.81272252462267 }, { "content": " success: false,\n\n message: Some(format!(\"{}\", e)),\n\n };\n\n }\n\n }\n\n let prepared =\n\n entity::user::Model::prepare(&req.name, &req.email, &req.password, &req.gpg_key);\n\n if let Ok(model) = prepared {\n\n match model.insert(db).await {\n\n Ok(_) => UserAddResult {\n\n success: true,\n\n message: None,\n\n },\n\n Err(e) => UserAddResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n },\n\n }\n\n } else {\n\n UserAddResult {\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 85, "score": 37.28047402164108 }, { "content": "\n\n let prepared =\n\n entity::task_user_assignment::Model::prepare(req.task_id, req.user_id, req.assign_date);\n\n if let Ok(model) = prepared {\n\n match model.insert(db).await {\n\n Ok(_) => AssignTaskResult {\n\n success: true,\n\n message: None,\n\n },\n\n Err(e) => AssignTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n },\n\n }\n\n } else {\n\n AssignTaskResult {\n\n success: false,\n\n message: unsafe { Some(format!(\"{}\", prepared.unwrap_err_unchecked())) },\n\n }\n\n }\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 86, "score": 36.80751914774677 }, { "content": "use crate::session::UserInfo;\n\nuse crate::State;\n\nuse actix_session::Session;\n\nuse actix_web::error::{ErrorInternalServerError, ErrorUnauthorized};\n\nuse actix_web::web::Data;\n\nuse actix_web::{HttpResponse, Result};\n\nuse entity::sea_orm::{ColumnTrait, EntityTrait, PaginatorTrait, QueryFilter};\n\nuse entity::sea_orm::{DatabaseBackend, Statement};\n\nuse std::ops::Add;\n\nuse tachyon_template::view::RelatedTask;\n\nuse tachyon_template::{view::DashboardTemplate, AsyncRenderOnce};\n\n\n\npub async fn get_related_tasks(\n\n user_info: &UserInfo,\n\n data: &Data<State>,\n\n) -> Result<Vec<RelatedTask>> {\n\n let tasks: Vec<entity::task::Model> = entity::task::Entity::find()\n\n .from_raw_sql(Statement::from_sql_and_values(\n\n DatabaseBackend::Postgres,\n\n r#\"\n", "file_path": "tachyon-core/src/routers/view/dashboard.rs", "rank": 87, "score": 36.76435181267346 }, { "content": " let json = match session.get::<UserInfo>(\"user\") {\n\n Err(e) => {\n\n status = http::StatusCode::INTERNAL_SERVER_ERROR;\n\n AddTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n }\n\n }\n\n Ok(Some(user)) if user.perms.task_management => insert_task(&request, &data.sql_db).await,\n\n\n\n Ok(Some(user)) => {\n\n status = http::StatusCode::FORBIDDEN;\n\n AddTaskResult {\n\n success: false,\n\n message: Some(format!(\n\n \"User {} does not have permission to add tasks\",\n\n user.name\n\n )),\n\n }\n\n }\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 88, "score": 36.525343730613976 }, { "content": " }\n\n let mut 
status = http::StatusCode::OK;\n\n let json = match session.get::<UserInfo>(\"user\") {\n\n Err(e) => {\n\n status = http::StatusCode::INTERNAL_SERVER_ERROR;\n\n AssignTaskResult {\n\n success: false,\n\n message: Some(format!(\"{}\", e)),\n\n }\n\n }\n\n Ok(Some(user)) if user.perms.team_management => assign_task(&request, &data.sql_db).await,\n\n\n\n Ok(Some(user)) => {\n\n status = http::StatusCode::FORBIDDEN;\n\n AssignTaskResult {\n\n success: false,\n\n message: Some(format!(\n\n \"User {} does not have permission to add tasks\",\n\n user.name\n\n )),\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 89, "score": 35.546759222087296 }, { "content": "use crate::session::UserInfo;\n\nuse crate::State;\n\nuse actix_session::Session;\n\nuse actix_web::error::{ErrorInternalServerError, ErrorNotFound, ErrorUnauthorized};\n\nuse actix_web::web::Data;\n\nuse actix_web::web::Path;\n\nuse actix_web::{HttpResponse, Result};\n\nuse entity::sea_orm::{DbBackend, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, Statement};\n\nuse sea_query::{Expr, Order};\n\nuse tachyon_template::view::{Comment, TaskDetailTemplate, UserData};\n\nuse tachyon_template::{view::TaskTemplate, AsyncRenderOnce};\n\n\n\n#[derive(serde::Serialize, serde::Deserialize)]\n\npub struct TaskRequest {\n\n search_string: Option<String>,\n\n page_no: Option<usize>,\n\n page_size: Option<usize>,\n\n}\n\n\n", "file_path": "tachyon-core/src/routers/view/task.rs", "rank": 90, "score": 35.461653014921204 }, { "content": "//!\n\n//! ```\n\n//! the router then parse the path to get parameter from it.\n\n//!\n\n\n\nmod api;\n\nmod view;\n\n\n\nuse crate::session::UserInfo;\n\nuse actix_files::{Directory, Files};\n\nuse actix_session::Session;\n\nuse actix_web::dev::{fn_service, ServiceRequest, ServiceResponse};\n\nuse actix_web::error::ErrorNotFound;\n\nuse actix_web::{web, HttpRequest, HttpResponse, Result, Scope};\n\nuse std::path::Path;\n\nuse tachyon_template::AsyncRenderOnce;\n\npub use view::error::error_handler;\n\n\n\nasync fn forbidden(req: ServiceRequest) -> Result<ServiceResponse> {\n\n Ok(ServiceResponse::new(\n\n req.into_parts().0,\n\n HttpResponse::Forbidden().body(()),\n\n ))\n\n}\n\n\n", "file_path": "tachyon-core/src/routers/mod.rs", "rank": 91, "score": 35.383681649768604 }, { "content": " {\n\n Ok(uuid) => {\n\n let message = uuid.to_string();\n\n match target_user.verify_signature(&sig, &message) {\n\n Ok(false) => UserLoginResult {\n\n success: false,\n\n signature_requirement: Some(uuid),\n\n message: Some(\"failed to verify signature\".to_string()),\n\n }\n\n .to_reply(StatusCode::LOCKED),\n\n Ok(true) => {\n\n verify_pass_and_login(\n\n &target_user,\n\n &session,\n\n &data.sql_db,\n\n &request.password,\n\n &data.admin_name,\n\n )\n\n .await\n\n }\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 92, "score": 35.15972956826709 }, { "content": " .await\n\n .map_err(ErrorInternalServerError)?;\n\n let json = DeleteTaskResult {\n\n success: true,\n\n message: Some(\"delete task successfully!\".to_string()),\n\n };\n\n\n\n let status = http::StatusCode::OK;\n\n simd_json::to_string(&json)\n\n .map_err(ErrorInternalServerError)\n\n .map(|x| {\n\n HttpResponse::Ok()\n\n .content_type(\"application/json\")\n\n .status(status)\n\n .body(x)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tachyon-core/src/routers/api/task.rs", "rank": 93, "score": 34.015395232185014 }, { "content": " .order_by(entity::user::Column::Id, Order::Asc)\n\n .paginate(&data.sql_db, page_size);\n\n let items = 
paginator\n\n .fetch_page(request.page_no.unwrap_or(0))\n\n .await\n\n .map_err(ErrorInternalServerError)?;\n\n let num_pages = paginator\n\n .num_pages()\n\n .await\n\n .map_err(ErrorInternalServerError)?;\n\n let prev_page = match request.page_no {\n\n None | Some(0) => None,\n\n Some(no) => Some(no - 1),\n\n };\n\n let next_page = match request.page_no.unwrap_or(0) {\n\n n if n + 1 >= num_pages => None,\n\n n => Some(n + 1),\n\n };\n\n log::debug!(\"select {} items\", items.len());\n\n let converted = convert_user_info(items.into_iter());\n", "file_path": "tachyon-core/src/routers/view/user.rs", "rank": 94, "score": 33.25015471046515 }, { "content": "\n\n let target_user: anyhow::Result<entity::user::Model> = entity::user::Entity::find()\n\n .filter(entity::user::Column::Email.eq(request.email.as_str()))\n\n .one(&data.sql_db)\n\n .await\n\n .anyhow()\n\n .and_then(|x| {\n\n x.ok_or_else(|| anyhow!(\"user associated with {} not found\", request.email))\n\n });\n\n\n\n if target_user.is_err() {\n\n return UserLoginResult::from_error(\n\n unsafe { target_user.unwrap_err_unchecked() },\n\n None,\n\n )\n\n .to_reply(StatusCode::BAD_REQUEST);\n\n }\n\n\n\n let target_user = unsafe { target_user.unwrap_unchecked() };\n\n\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 95, "score": 33.13982685148044 }, { "content": " status = http::StatusCode::UNAUTHORIZED;\n\n UserAddResult {\n\n success: false,\n\n message: Some(\"unauthorized\".to_string()),\n\n }\n\n }\n\n };\n\n if json.success {\n\n if let Some((info, smtp)) = data.lettre.as_ref() {\n\n use lettre::{AsyncTransport, Message};\n\n let email = Message::builder()\n\n .from(info.parse().map_err(ErrorInternalServerError)?)\n\n .to(format!(\"{} <{}>\", request.name, request.email)\n\n .parse()\n\n .map_err(ErrorInternalServerError)?)\n\n .subject(\"Tachyon Credential\")\n\n .body(format!(\n\n \"Email: {}\\nPassword: {}\",\n\n request.email, request.password\n\n ))\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 96, "score": 32.85758072621163 }, { "content": " );\n\n match db\n\n .execute(Statement::from_string(DatabaseBackend::Postgres, query))\n\n .await\n\n {\n\n Ok(_) => UserLoginResult {\n\n success: false,\n\n signature_requirement: None,\n\n message: Some(\"password mismatch\".to_string()),\n\n }\n\n .to_reply(StatusCode::UNAUTHORIZED),\n\n Err(e) => UserLoginResult::from_error(e, None)\n\n .to_reply(StatusCode::INTERNAL_SERVER_ERROR),\n\n }\n\n }\n\n Err(e) => {\n\n UserLoginResult::from_error(e, None).to_reply(StatusCode::INTERNAL_SERVER_ERROR)\n\n }\n\n }\n\n }\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 97, "score": 32.344559244155136 }, { "content": " Err(e) => UserLoginResult::from_error(e, Some(uuid))\n\n .to_reply(StatusCode::BAD_REQUEST),\n\n }\n\n }\n\n Err(e) => UserLoginResult::from_error(e, None)\n\n .to_reply(StatusCode::INTERNAL_SERVER_ERROR),\n\n }\n\n }\n\n }\n\n } else {\n\n verify_pass_and_login(\n\n &target_user,\n\n &session,\n\n &data.sql_db,\n\n &request.password,\n\n &data.admin_name,\n\n )\n\n .await\n\n }\n\n }\n", "file_path": "tachyon-core/src/routers/api/user.rs", "rank": 98, "score": 31.96904567312047 }, { "content": ") -> Result<HttpResponse> {\n\n let user = session\n\n .get::<UserInfo>(\"user\")\n\n .map_err(ErrorInternalServerError)\n\n .and_then(|data| data.ok_or_else(|| ErrorUnauthorized(\"no login info\")))?;\n\n let info = info.into_inner();\n\n let task = entity::task::Entity::find_by_id(info.id)\n\n .one(&state.sql_db)\n\n .await\n\n 
.map_err(ErrorInternalServerError)?\n\n .ok_or_else(|| ErrorNotFound(\"no such task\"))?;\n\n let assigned_users = entity::user::Entity::find()\n\n .from_raw_sql(Statement::from_sql_and_values(\n\n DbBackend::Postgres,\n\n r#\"SELECT * \n\n FROM \"user\" \n\n JOIN task_user_assignment ON task_user_assignment.task_id = $1 AND task_user_assignment.user_id = \"user\".id\n\n ORDER BY \"user\".id\"#,\n\n vec![info.id.into()],\n\n ))\n", "file_path": "tachyon-core/src/routers/view/task.rs", "rank": 99, "score": 31.925945672929434 } ]
Rust
metalmq-client/src/channel_api.rs
jonasrichard/metalmq
6196238344e95bfc0d76a4b1363ba4b7e2bc8157
use crate::client_api::{ClientRequest, ClientRequestSink, Param, WaitFor};
use crate::model::ChannelNumber;
use crate::processor;
use anyhow::Result;
use metalmq_codec::frame;
use std::collections::HashMap;

#[derive(Debug)]
pub struct Channel {
    pub(crate) channel: ChannelNumber,
    pub(crate) sink: ClientRequestSink,
    pub(crate) consumers: HashMap<String, ClientRequest>,
}

#[derive(Debug)]
pub struct Message {
    pub channel: ChannelNumber,
    pub consumer_tag: String,
    pub delivery_tag: u64,
    pub length: usize,
    pub body: Vec<u8>,
}

#[derive(Debug)]
pub(crate) struct DeliveredContent {
    channel: ChannelNumber,
    consumer_tag: String,
    delivery_tag: u64,
    exchange_name: String,
    routing_key: String,
    body_size: Option<u64>,
    body: Option<Vec<u8>>,
}

impl Channel {
    pub(crate) fn new(channel: ChannelNumber, sink: ClientRequestSink) -> Channel {
        Channel {
            channel,
            sink,
            consumers: HashMap::new(),
        }
    }

    pub async fn exchange_declare(
        &self,
        exchange_name: &str,
        exchange_type: &str,
        flags: Option<frame::ExchangeDeclareFlags>,
    ) -> Result<()> {
        let frame = frame::exchange_declare(self.channel, exchange_name, exchange_type, flags);
        processor::call(&self.sink, frame).await
    }

    pub async fn exchange_delete(&self, exchange_name: &str, if_unused: bool) -> Result<()> {
        let mut flags = frame::ExchangeDeleteFlags::default();
        if if_unused {
            flags.toggle(frame::ExchangeDeleteFlags::IF_UNUSED);
        }
        let frame = frame::exchange_delete(self.channel, exchange_name, Some(flags));
        processor::call(&self.sink, frame).await
    }

    pub async fn queue_declare(&self, queue_name: &str, flags: Option<frame::QueueDeclareFlags>) -> Result<()> {
        let frame = frame::queue_declare(self.channel, queue_name, flags);
        processor::call(&self.sink, frame).await
    }

    pub async fn queue_bind(&self, queue_name: &str, exchange_name: &str, routing_key: &str) -> Result<()> {
        let frame = frame::queue_bind(self.channel, queue_name, exchange_name, routing_key);
        processor::call(&self.sink, frame).await
    }

    pub async fn queue_unbind(&self, queue_name: &str, exchange_name: &str, routing_key: &str) -> Result<()> {
        let frame = frame::queue_unbind(self.channel, queue_name, exchange_name, routing_key);
        processor::call(&self.sink, frame).await
    }

    pub async fn queue_purge(&self, queue_name: &str) -> Result<()> {
        Ok(())
    }

    pub async fn queue_delete(&self, queue_name: &str, if_unused: bool, if_empty: bool) -> Result<()> {
        let mut flags = frame::QueueDeleteFlags::empty();
        flags.set(frame::QueueDeleteFlags::IF_UNUSED, if_unused);
        flags.set(frame::QueueDeleteFlags::IF_EMPTY, if_empty);
        let frame = frame::queue_delete(self.channel, queue_name, Some(flags));
        processor::call(&self.sink, frame).await
    }

    pub async fn basic_publish(
        &self,
        exchange_name: &str,
        routing_key: &str,
        payload: String,
        mandatory: bool,
        immediate: bool,
    ) -> Result<()> {
        let mut flags = frame::BasicPublishFlags::empty();
        flags.set(frame::BasicPublishFlags::MANDATORY, mandatory);
        flags.set(frame::BasicPublishFlags::IMMEDIATE, immediate);
        let frame = frame::basic_publish(self.channel, exchange_name, routing_key, Some(flags));
        self.sink
            .send(ClientRequest {
                param: Param::Publish(frame, payload.as_bytes().to_vec()),
                response: WaitFor::Nothing,
            })
            .await?;
        Ok(())
    }

    pub async fn close(&self) -> Result<()> {
        let (cid, mid) = frame::split_class_method(frame::CHANNEL_CLOSE);
        processor::call(
            &self.sink,
            frame::channel_close(self.channel, 200, "Normal close", cid, mid),
        )
        .await
    }
}
use crate::client_api::{ClientRequest, ClientRequestSink, Param, WaitFor};
use crate::model::ChannelNumber;
use crate::processor;
use anyhow::Result;
use metalmq_codec::frame;
use std::collections::HashMap;

#[derive(Debug)]
pub struct Channel {
    pub(crate) channel: ChannelNumber,
    pub(crate) sink: ClientRequestSink,
    pub(crate) consumers: HashMap<String, ClientRequest>,
}

#[derive(Debug)]
pub struct Message {
    pub channel: ChannelNumber,
    pub consumer_tag: String,
    pub delivery_tag: u64,
    pub length: usize,
    pub body: Vec<u8>,
}

#[derive(Debug)]
pub(crate) struct DeliveredContent {
    channel: ChannelNumber,
    consumer_tag: String,
    delivery_tag: u64,
    exchange_name: String,
    routing_key: String,
    body_size: Option<u64>,
    body: Option<Vec<u8>>,
}

impl Channel {
    pub(crate) fn new(channel: ChannelNumber, sink: ClientRequestSink) -> Channel {
        Channel {
            channel,
            sink,
            consumers: HashMap::new(),
        }
    }

    pub async fn exchange_declare(
        &self,
        exchange_name: &str,
        exchange_type: &str,
        flags: Option<frame::ExchangeDeclareFlags>,
    ) -> Result<()> {
        let frame = frame::exchange_declare(self.channel, exchange_name, exchange_type, flags);
        processor::call(&self.sink, frame).await
    }

    pub async fn exchange_delete(&self, exchange_name: &str, if_unused: bool) -> Result<()> {
        let mut flags = frame::ExchangeDeleteFlags::default();
        if if_unused {
            flags.toggle(frame::ExchangeDeleteFlags::IF_UNUSED);
        }
        let frame = frame::exchange_delete(self.channel, exchange_name, Some(flags));
        processor::call(&self.sink, frame).await
    }

    pub async fn queue_declare(&self, queue_name: &str, flags: Option<frame::QueueDeclareFlags>) -> Result<()> {
        let frame = frame::queue_declare(self.channel, queue_name, flags);
        processor::call(&self.sink, frame).await
    }

    pub async fn queue_bind(&self, queue_name: &str, exchange_name: &str, routing_key: &str) -> Result<()> {
        let frame = frame::queue_bind(self.channel, queue_name, exchange_name, routing_key);
        processor::call(&self.sink, frame).await
    }

    pub async fn queue_unbind(&self, queue_name: &str, exchange_name: &str, routing_key: &str) -> Result<()> {
        let frame = frame::queue_unbind(self.channel, queue_name, exchange_name, routing_key);
        processor::call(&self.sink, frame).await
    }

    pub async fn queue_purge(&self, queue_name: &str) -> Result<()> {
        Ok(())
    }
    pub async fn basic_publish(
        &self,
        exchange_name: &str,
        routing_key: &str,
        payload: String,
        mandatory: bool,
        immediate: bool,
    ) -> Result<()> {
        let mut flags = frame::BasicPublishFlags::empty();
        flags.set(frame::BasicPublishFlags::MANDATORY, mandatory);
        flags.set(frame::BasicPublishFlags::IMMEDIATE, immediate);
        let frame = frame::basic_publish(self.channel, exchange_name, routing_key, Some(flags));
        self.sink
            .send(ClientRequest {
                param: Param::Publish(frame, payload.as_bytes().to_vec()),
                response: WaitFor::Nothing,
            })
            .await?;
        Ok(())
    }

    pub async fn close(&self) -> Result<()> {
        let (cid, mid) = frame::split_class_method(frame::CHANNEL_CLOSE);
        processor::call(
            &self.sink,
            frame::channel_close(self.channel, 200, "Normal close", cid, mid),
        )
        .await
    }
}
    pub async fn queue_delete(&self, queue_name: &str, if_unused: bool, if_empty: bool) -> Result<()> {
        let mut flags = frame::QueueDeleteFlags::empty();
        flags.set(frame::QueueDeleteFlags::IF_UNUSED, if_unused);
        flags.set(frame::QueueDeleteFlags::IF_EMPTY, if_empty);
        let frame = frame::queue_delete(self.channel, queue_name, Some(flags));
        processor::call(&self.sink, frame).await
    }
function_block-full_function
[ { "content": "pub fn basic_consume_ok(channel: u16, consumer_tag: &str) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_CONSUME_OK,\n\n MethodFrameArgs::BasicConsumeOk(BasicConsumeOkArgs {\n\n consumer_tag: consumer_tag.to_string(),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 0, "score": 288843.6940005553 }, { "content": "pub fn basic_cancel(channel: u16, consumer_tag: &str, no_wait: bool) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_CANCEL,\n\n MethodFrameArgs::BasicCancel(BasicCancelArgs {\n\n consumer_tag: consumer_tag.to_string(),\n\n no_wait,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 1, "score": 280700.9456993119 }, { "content": "pub fn basic_cancel_ok(channel: u16, consumer_tag: &str) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_CANCEL_OK,\n\n MethodFrameArgs::BasicCancelOk(BasicCancelOkArgs {\n\n consumer_tag: consumer_tag.to_string(),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 2, "score": 278354.0452866597 }, { "content": "pub fn basic_ack(channel: u16, delivery_tag: u64, multiple: bool) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_ACK,\n\n MethodFrameArgs::BasicAck(BasicAckArgs { delivery_tag, multiple }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 3, "score": 257039.40360836967 }, { "content": "/// Create content header and content body frames from a message\n\npub fn message_to_content_frames(channel: frame::Channel, content: MessageContent) -> Vec<frame::AMQPFrame> {\n\n let header = frame::ContentHeaderFrame {\n\n channel,\n\n class_id: content.class_id, // TODO ???\n\n weight: content.weight,\n\n body_size: content.body_size,\n\n prop_flags: content.prop_flags,\n\n cluster_id: content.cluster_id,\n\n app_id: content.app_id,\n\n user_id: content.user_id,\n\n message_type: content.message_type,\n\n timestamp: content.timestamp,\n\n message_id: content.message_id,\n\n expiration: content.expiration,\n\n reply_to: content.reply_to,\n\n correlation_id: content.correlation_id,\n\n priority: content.priority,\n\n delivery_mode: content.delivery_mode,\n\n headers: None,\n\n content_encoding: content.content_encoding,\n", "file_path": "metalmq/src/message.rs", "rank": 4, "score": 243662.23401984025 }, { "content": "pub fn queue_declare_ok(channel: u16, queue_name: String, message_count: u32, consumer_count: u32) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n QUEUE_DECLARE_OK,\n\n MethodFrameArgs::QueueDeclareOk(QueueDeclareOkArgs {\n\n name: queue_name,\n\n message_count,\n\n consumer_count,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 5, "score": 239435.66698212823 }, { "content": "/// Helper to create channel error frames.\n\npub fn channel_error<T>(channel: frame::Channel, cm: u32, code: ChannelError, text: &str) -> Result<T> {\n\n let (class_id, method_id) = frame::split_class_method(cm);\n\n\n\n Err(Box::new(RuntimeError {\n\n scope: ErrorScope::Channel,\n\n channel,\n\n code: code as u16,\n\n text: text.to_string(),\n\n class_id,\n\n method_id,\n\n }))\n\n}\n\n\n", "file_path": "metalmq/src/client/mod.rs", "rank": 6, "score": 237579.79761791322 }, { "content": "/// Convert ChannelError to channel close frame.\n\npub fn channel_error_frame(channel: frame::Channel, cm: u32, code: ChannelError, text: &str) -> Frame {\n\n let (class_id, method_id) = frame::split_class_method(cm);\n\n\n\n Frame::Frame(frame::channel_close(channel, code as u16, text, 
class_id, method_id))\n\n}\n\n\n", "file_path": "metalmq/src/client/mod.rs", "rank": 7, "score": 235587.13678097853 }, { "content": "pub fn channel_close_ok(channel: Channel) -> AMQPFrame {\n\n AMQPFrame::Method(channel, CHANNEL_CLOSE_OK, MethodFrameArgs::ChannelCloseOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 8, "score": 232946.15480637405 }, { "content": "pub fn queue_unbind(channel: u16, queue_name: &str, exchange_name: &str, routing_key: &str) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n QUEUE_UNBIND,\n\n MethodFrameArgs::QueueUnbind(QueueUnbindArgs {\n\n queue_name: queue_name.to_string(),\n\n exchange_name: exchange_name.to_string(),\n\n routing_key: routing_key.to_string(),\n\n args: None,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 9, "score": 224390.2082047092 }, { "content": "pub fn queue_bind(channel: u16, queue_name: &str, exchange_name: &str, routing_key: &str) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n QUEUE_BIND,\n\n MethodFrameArgs::QueueBind(QueueBindArgs {\n\n queue_name: queue_name.to_string(),\n\n exchange_name: exchange_name.to_string(),\n\n routing_key: routing_key.to_string(),\n\n no_wait: false,\n\n args: None,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 10, "score": 224390.2082047092 }, { "content": "pub fn channel_open_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, CHANNEL_OPEN_OK, MethodFrameArgs::ChannelOpenOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 11, "score": 218367.41632137867 }, { "content": "pub fn queue_declare(channel: u16, queue_name: &str, flags: Option<QueueDeclareFlags>) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n QUEUE_DECLARE,\n\n MethodFrameArgs::QueueDeclare(QueueDeclareArgs {\n\n name: queue_name.to_string(),\n\n flags: flags.unwrap_or_default(),\n\n args: None,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 12, "score": 216934.58286274236 }, { "content": "pub fn queue_delete(channel: u16, queue_name: &str, flags: Option<QueueDeleteFlags>) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n QUEUE_DELETE,\n\n MethodFrameArgs::QueueDelete(QueueDeleteArgs {\n\n queue_name: queue_name.to_string(),\n\n flags: flags.unwrap_or_default(),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 13, "score": 216934.58286274236 }, { "content": "pub fn exchange_delete(channel: u16, exchange_name: &str, flags: Option<ExchangeDeleteFlags>) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n EXCHANGE_DELETE,\n\n MethodFrameArgs::ExchangeDelete(ExchangeDeleteArgs {\n\n exchange_name: exchange_name.to_string(),\n\n flags: flags.unwrap_or_default(),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 14, "score": 216934.58286274236 }, { "content": "pub fn confirm_select(channel: u16, no_wait: bool) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n CONFIRM_SELECT,\n\n MethodFrameArgs::ConfirmSelect(ConfirmSelectArgs { no_wait }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 15, "score": 216077.5923591613 }, { "content": "pub fn queue_delete_ok(channel: u16, message_count: u32) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n QUEUE_DELETE_OK,\n\n MethodFrameArgs::QueueDeleteOk(QueueDeleteOkArgs { message_count }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 16, "score": 213526.01948449138 }, { "content": "pub fn content_header(channel: u16, size: u64) -> 
ContentHeaderFrame {\n\n ContentHeaderFrame {\n\n channel,\n\n class_id: 0x003C,\n\n weight: 0,\n\n body_size: size,\n\n prop_flags: HeaderPropertyFlags::empty(),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 17, "score": 212471.07314874424 }, { "content": "pub fn connection_open(channel: u16, virtual_host: &str) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n CONNECTION_OPEN,\n\n MethodFrameArgs::ConnectionOpen(ConnectionOpenArgs {\n\n virtual_host: virtual_host.to_string(),\n\n insist: true,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 18, "score": 212015.27314569766 }, { "content": "pub fn connection_tune_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n CONNECTION_TUNE_OK,\n\n MethodFrameArgs::ConnectionTuneOk(ConnectionTuneOkArgs {\n\n channel_max: 2047,\n\n frame_max: 131_072,\n\n heartbeat: 60,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 19, "score": 207952.92635327735 }, { "content": "pub fn exchange_delete_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, EXCHANGE_DELETE_OK, MethodFrameArgs::ExchangeDeleteOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 20, "score": 207952.92635327735 }, { "content": "pub fn queue_bind_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, QUEUE_BIND_OK, MethodFrameArgs::QueueBindOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 21, "score": 207952.92635327735 }, { "content": "pub fn connection_open_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, CONNECTION_OPEN_OK, MethodFrameArgs::ConnectionOpenOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 22, "score": 207952.92635327735 }, { "content": "pub fn confirm_select_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, CONFIRM_SELECT_OK, MethodFrameArgs::ConfirmSelectOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 23, "score": 207952.9263532773 }, { "content": "pub fn queue_unbind_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, QUEUE_UNBIND_OK, MethodFrameArgs::QueueUnbindOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 24, "score": 207952.92635327735 }, { "content": "pub fn connection_close_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, CONNECTION_CLOSE_OK, MethodFrameArgs::ConnectionCloseOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 25, "score": 207952.92635327735 }, { "content": "pub fn exchange_declare_ok(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, EXCHANGE_DECLARE_OK, MethodFrameArgs::ExchangeDeclareOk)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 26, "score": 207952.92635327735 }, { "content": "fn has_connection_close_ok(frame: &Frame) -> bool {\n\n match frame {\n\n Frame::Frame(f) => is_connection_close_ok(f),\n\n Frame::Frames(fs) => fs.iter().any(is_connection_close_ok),\n\n }\n\n}\n\n\n", "file_path": "metalmq/src/client/conn.rs", "rank": 27, "score": 204265.38694892003 }, { "content": "pub fn content_body(channel: u16, payload: &[u8]) -> ContentBodyFrame {\n\n ContentBodyFrame {\n\n channel,\n\n body: payload.to_vec(),\n\n }\n\n}\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 28, "score": 202926.56565726025 }, { "content": "// TODO here will be an Authentication enum with the different possibilities\n\npub fn connection_start_ok(username: &str, password: &str, capabilities: FieldTable) -> AMQPFrame {\n\n let mut 
client_properties = FieldTable::new();\n\n\n\n client_properties.insert(\"product\".into(), AMQPFieldValue::LongString(\"metalmq-client\".into()));\n\n client_properties.insert(\"platform\".into(), AMQPFieldValue::LongString(\"Rust\".into()));\n\n client_properties.insert(\n\n \"capabilities\".into(),\n\n AMQPFieldValue::FieldTable(Box::new(capabilities)),\n\n );\n\n // TODO get the version from the build vars or an external file\n\n client_properties.insert(\"version\".into(), AMQPFieldValue::LongString(\"0.1.0\".into()));\n\n\n\n let mut auth = vec![0x00];\n\n auth.extend_from_slice(username.as_bytes());\n\n auth.push(0x00);\n\n auth.extend_from_slice(password.as_bytes());\n\n\n\n let auth_string = String::from_utf8(auth).unwrap();\n\n\n\n AMQPFrame::Method(\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 29, "score": 195669.13471567963 }, { "content": "pub fn validate_exchange_type(exchange_type: &str) -> Result<()> {\n\n match ExchangeType::from_str(exchange_type) {\n\n Ok(_) => Ok(()),\n\n Err(_) => client::connection_error(\n\n frame::EXCHANGE_DECLARE,\n\n ConnectionError::CommandInvalid,\n\n \"COMMAND_INVALID - Exchange type is invalid\",\n\n ),\n\n }\n\n}\n\n\n\nimpl Default for Exchange {\n\n fn default() -> Exchange {\n\n Exchange {\n\n name: \"default\".to_string(),\n\n exchange_type: ExchangeType::Direct,\n\n durable: false,\n\n auto_delete: false,\n\n internal: false,\n\n }\n", "file_path": "metalmq/src/exchange/mod.rs", "rank": 30, "score": 195576.71530349553 }, { "content": "pub fn channel_open(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(channel, CHANNEL_OPEN, MethodFrameArgs::ChannelOpen)\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 31, "score": 194458.72353147107 }, { "content": "pub fn channel_close(channel: Channel, code: u16, text: &str, class_id: u16, method_id: u16) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n CHANNEL_CLOSE,\n\n MethodFrameArgs::ChannelClose(ChannelCloseArgs {\n\n code,\n\n text: text.into(),\n\n class_id,\n\n method_id,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 32, "score": 193983.40878150336 }, { "content": "fn encode_heartbeat_frame(buf: &mut BytesMut, channel: Channel) {\n\n buf.put_u16(channel);\n\n buf.put_u32(0);\n\n buf.put_u8(0xCE);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 33, "score": 192148.12133257283 }, { "content": "fn is_connection_close_ok(frame: &AMQPFrame) -> bool {\n\n matches!(frame, &AMQPFrame::Method(_, frame::CONNECTION_CLOSE_OK, _))\n\n}\n\n\n\nasync fn handle_client_frame(conn: &mut Connection, f: AMQPFrame) -> Result<()> {\n\n use AMQPFrame::*;\n\n\n\n match f {\n\n Header => conn.send_frame(Frame::Frame(frame::connection_start(0))).await,\n\n Method(ch, _, mf) => handle_method_frame(conn, ch, mf).await,\n\n ContentHeader(ch) => conn.receive_content_header(ch).await,\n\n ContentBody(cb) => conn.receive_content_body(cb).await,\n\n Heartbeat(_) => Ok(()),\n\n }\n\n\n\n // Convert runtime error to AMQP frame\n\n //match result {\n\n // Err(e) => match e.downcast::<RuntimeError>() {\n\n // Ok(conn_err) => Ok(Some(Frame::Frame((*conn_err).into()))),\n\n // Err(e2) => Err(e2),\n", "file_path": "metalmq/src/client/conn.rs", "rank": 34, "score": 191554.02068989558 }, { "content": "fn validate_exchange_name(channel: u16, exchange_name: &str) -> Result<()> {\n\n let spec = String::from(\"_-:.\");\n\n\n\n for c in exchange_name.chars() {\n\n if !c.is_alphanumeric() && spec.find(c).is_none() {\n\n return client::channel_error(\n\n 
channel,\n\n frame::EXCHANGE_DECLARE,\n\n ChannelError::PreconditionFailed,\n\n \"PRECONDITION_FAILED - Exchange contains invalid character\",\n\n );\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "metalmq/src/exchange/manager.rs", "rank": 35, "score": 186503.23343131572 }, { "content": "pub fn connection_start(channel: u16) -> AMQPFrame {\n\n let mut capabilities = FieldTable::new();\n\n\n\n capabilities.insert(\"publisher_confirms\".into(), AMQPFieldValue::Bool(true));\n\n capabilities.insert(\"exchange_exchange_bindings\".into(), AMQPFieldValue::Bool(true));\n\n capabilities.insert(\"basic.nack\".into(), AMQPFieldValue::Bool(true));\n\n capabilities.insert(\"consumer_cancel_notify\".into(), AMQPFieldValue::Bool(true));\n\n capabilities.insert(\"connection.blocked\".into(), AMQPFieldValue::Bool(true));\n\n capabilities.insert(\"consumer_priorities\".into(), AMQPFieldValue::Bool(true));\n\n capabilities.insert(\"authentication_failure_close\".into(), AMQPFieldValue::Bool(true));\n\n capabilities.insert(\"per_consumer_qos\".into(), AMQPFieldValue::Bool(true));\n\n capabilities.insert(\"direct_reply_to\".into(), AMQPFieldValue::Bool(true));\n\n\n\n let mut server_properties = FieldTable::new();\n\n\n\n server_properties.insert(\n\n \"capabilities\".into(),\n\n AMQPFieldValue::FieldTable(Box::new(capabilities)),\n\n );\n\n server_properties.insert(\"product\".into(), AMQPFieldValue::LongString(\"MetalMQ server\".into()));\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 36, "score": 184084.1842895148 }, { "content": "pub fn connection_tune(channel: u16) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n CONNECTION_TUNE,\n\n MethodFrameArgs::ConnectionTune(ConnectionTuneArgs {\n\n channel_max: 2047,\n\n frame_max: 131_072,\n\n heartbeat: 60,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 37, "score": 184084.1842895148 }, { "content": "pub fn basic_consume(\n\n channel: u16,\n\n queue_name: &str,\n\n consumer_tag: &str,\n\n flags: Option<BasicConsumeFlags>,\n\n) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_CONSUME,\n\n MethodFrameArgs::BasicConsume(BasicConsumeArgs {\n\n queue: queue_name.to_string(),\n\n consumer_tag: consumer_tag.to_string(),\n\n flags: flags.unwrap_or_default(),\n\n args: None,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 38, "score": 179356.63132026242 }, { "content": "fn encode_short_string(buf: &mut BytesMut, s: &str) {\n\n // TODO assert! 
that size is below 256\n\n buf.put_u8(s.len() as u8);\n\n buf.put(s.as_bytes());\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 39, "score": 177558.45076387477 }, { "content": "fn encode_long_string(buf: &mut BytesMut, s: &str) {\n\n buf.put_u32(s.len() as u32);\n\n buf.put(s.as_bytes());\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 40, "score": 177558.45076387475 }, { "content": "fn decode_basic_consume_ok(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = BasicConsumeOkArgs {\n\n consumer_tag: decode_short_string(src),\n\n };\n\n\n\n MethodFrameArgs::BasicConsumeOk(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 41, "score": 173892.2518403796 }, { "content": "fn decode_channel_open_ok(src: &mut BytesMut) -> MethodFrameArgs {\n\n let _ = decode_long_string(src);\n\n\n\n MethodFrameArgs::ChannelOpenOk\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 42, "score": 173783.47220228866 }, { "content": "// TODO have an Error type here, and it should be result<>\n\nfn decode_method_frame(src: &mut BytesMut, channel: u16) -> AMQPFrame {\n\n let class_method = src.get_u32();\n\n\n\n let method_frame_args = match class_method {\n\n CONNECTION_START => decode_connection_start(src),\n\n CONNECTION_START_OK => decode_connection_start_ok(src),\n\n CONNECTION_TUNE => decode_connection_tune(src),\n\n CONNECTION_TUNE_OK => decode_connection_tune_ok(src),\n\n CONNECTION_OPEN => decode_connection_open(src),\n\n CONNECTION_OPEN_OK => decode_connection_open_ok(src),\n\n CONNECTION_CLOSE => decode_connection_close(src),\n\n CONNECTION_CLOSE_OK => MethodFrameArgs::ConnectionCloseOk,\n\n CHANNEL_OPEN => decode_channel_open(src),\n\n CHANNEL_OPEN_OK => decode_channel_open_ok(src),\n\n CHANNEL_CLOSE => decode_channel_close(src),\n\n CHANNEL_CLOSE_OK => MethodFrameArgs::ChannelCloseOk,\n\n EXCHANGE_DECLARE => decode_exchange_declare(src),\n\n EXCHANGE_DECLARE_OK => MethodFrameArgs::ExchangeDeclareOk,\n\n EXCHANGE_DELETE => decode_exchange_delete(src),\n\n EXCHANGE_DELETE_OK => MethodFrameArgs::ExchangeDeleteOk,\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 43, "score": 173312.67179751725 }, { "content": "/// Convert ConnectionError to connection close frame.\n\npub fn connection_error_frame(cm: u32, code: ConnectionError, text: &str) -> Frame {\n\n let (class_id, method_id) = frame::split_class_method(cm);\n\n\n\n Frame::Frame(frame::connection_close(0, code as u16, text, class_id, method_id))\n\n}\n\n\n\n//pub fn connection_error_frame(err: RuntimeError) -> Option<Frame> {\n\n// if err.scope == ErrorScope::Channel {\n\n// return None;\n\n// }\n\n//\n\n// Some(Frame::Frame(frame::connection_close(\n\n// 0,\n\n// err.code,\n\n// &err.text,\n\n// err.class_id,\n\n// err.method_id,\n\n// )))\n\n//}\n\n\n", "file_path": "metalmq/src/client/mod.rs", "rank": 44, "score": 172897.76291401417 }, { "content": "fn encode_method_frame(buf: &mut BytesMut, channel: Channel, cm: ClassMethod, args: &MethodFrameArgs) {\n\n buf.put_u8(1u8);\n\n buf.put_u16(channel);\n\n\n\n let mut fr = BytesMut::with_capacity(4096);\n\n fr.put_u32(cm);\n\n\n\n match args {\n\n MethodFrameArgs::ConnectionStart(args) => encode_connection_start(&mut fr, args),\n\n MethodFrameArgs::ConnectionStartOk(args) => encode_connection_start_ok(&mut fr, args),\n\n MethodFrameArgs::ConnectionTune(args) => encode_connection_tune(&mut fr, args),\n\n MethodFrameArgs::ConnectionTuneOk(args) => encode_connection_tune_ok(&mut fr, args),\n\n MethodFrameArgs::ConnectionOpen(args) => 
encode_connection_open(&mut fr, args),\n\n MethodFrameArgs::ConnectionOpenOk => encode_connection_open_ok(&mut fr),\n\n MethodFrameArgs::ConnectionClose(args) => encode_connection_close(&mut fr, args),\n\n MethodFrameArgs::ConnectionCloseOk => (),\n\n MethodFrameArgs::ChannelOpen => encode_channel_open(&mut fr),\n\n MethodFrameArgs::ChannelOpenOk => encode_channel_open_ok(&mut fr),\n\n MethodFrameArgs::ChannelClose(args) => encode_channel_close(&mut fr, args),\n\n MethodFrameArgs::ChannelCloseOk => (),\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 45, "score": 171011.17272285785 }, { "content": "fn decode_content_header_frame(src: &mut BytesMut, channel: u16) -> AMQPFrame {\n\n let class_id = src.get_u16();\n\n let weight = src.get_u16();\n\n let body_size = src.get_u64();\n\n let property_flags = HeaderPropertyFlags::from_bits(src.get_u16()).unwrap_or_default();\n\n\n\n let content_type = decode_short_string_flag(src, property_flags, HeaderPropertyFlags::CONTENT_TYPE);\n\n let content_encoding = decode_short_string_flag(src, property_flags, HeaderPropertyFlags::CONTENT_ENCODING);\n\n let headers = None;\n\n let delivery_mode = if property_flags.contains(HeaderPropertyFlags::DELIVERY_MODE) {\n\n Some(src.get_u8())\n\n } else {\n\n None\n\n };\n\n let priority = if property_flags.contains(HeaderPropertyFlags::PRIORITY) {\n\n Some(src.get_u8())\n\n } else {\n\n None\n\n };\n\n let correlation_id = decode_short_string_flag(src, property_flags, HeaderPropertyFlags::CORRELATION_ID);\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 46, "score": 170647.81597664277 }, { "content": "pub fn connection_close(channel: u16, code: u16, text: &str, class_id: u16, method_id: u16) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n CONNECTION_CLOSE,\n\n MethodFrameArgs::ConnectionClose(ConnectionCloseArgs {\n\n code,\n\n text: text.into(),\n\n class_id,\n\n method_id,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 47, "score": 169543.65154681136 }, { "content": "fn decode_short_string(buf: &mut BytesMut) -> String {\n\n let len = buf.get_u8() as usize;\n\n let sb = buf.split_to(len);\n\n\n\n String::from_utf8(sb.to_vec()).unwrap()\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 48, "score": 165586.60133585733 }, { "content": "fn decode_long_string(buf: &mut BytesMut) -> String {\n\n let len = buf.get_u32() as usize;\n\n let sb = buf.split_to(len);\n\n\n\n String::from_utf8(sb.to_vec()).unwrap()\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 49, "score": 165586.60133585733 }, { "content": "fn encode_content_body_frame(buf: &mut BytesMut, bf: &ContentBodyFrame) {\n\n // TODO buf.reserve()\n\n buf.put_u8(3u8);\n\n buf.put_u16(bf.channel);\n\n\n\n let mut fr_buf = BytesMut::with_capacity(bf.body.len());\n\n fr_buf.put(bf.body.as_slice());\n\n\n\n buf.put_u32(fr_buf.len() as u32);\n\n buf.put(fr_buf);\n\n buf.put_u8(0xCE);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 50, "score": 164692.51369717202 }, { "content": "fn encode_basic_consume_ok(buf: &mut BytesMut, args: &BasicConsumeOkArgs) {\n\n encode_short_string(buf, &args.consumer_tag);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 51, "score": 162644.39063401692 }, { "content": "fn encode_channel_open_ok(buf: &mut BytesMut) {\n\n // encode empty long string\n\n buf.put_u32(0);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 52, "score": 161878.72599123645 }, { "content": "fn encode_amqp_frame(buf: &mut BytesMut, frame: 
AMQPFrame) {\n\n match frame {\n\n AMQPFrame::Header => buf.put(&b\"AMQP\\x00\\x00\\x09\\x01\"[..]),\n\n\n\n AMQPFrame::Method(ch, cm, args) => encode_method_frame(buf, ch, cm, &args),\n\n\n\n AMQPFrame::ContentHeader(header_frame) => encode_content_header_frame(buf, &header_frame),\n\n\n\n AMQPFrame::ContentBody(body_frame) => encode_content_body_frame(buf, &body_frame),\n\n\n\n AMQPFrame::Heartbeat(channel) => encode_heartbeat_frame(buf, channel),\n\n }\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 53, "score": 160656.29810460255 }, { "content": "/// Check if the buffer contains the full frame. We can do that easily since\n\n/// most of the time the frame contains the length information.\n\nfn is_full_frame(src: &BytesMut) -> bool {\n\n match src[0] {\n\n FRAME_AMQP_VERSION => src.len() >= 8,\n\n _ => {\n\n let mut bs = [0u8; 4];\n\n bs.copy_from_slice(&src[3..7]);\n\n\n\n let len = u32::from_be_bytes(bs) as usize;\n\n\n\n src.len() >= len + 8\n\n }\n\n }\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 54, "score": 159683.39600460508 }, { "content": "pub fn runtime_error_to_frame(rte: &RuntimeError) -> Frame {\n\n let amqp_frame = match rte.scope {\n\n ErrorScope::Connection => frame::connection_close(0, rte.code, &rte.text, rte.class_id, rte.method_id),\n\n ErrorScope::Channel => frame::channel_close(rte.channel, rte.code, &rte.text, rte.class_id, rte.method_id),\n\n };\n\n\n\n Frame::Frame(amqp_frame)\n\n}\n\n\n", "file_path": "metalmq/src/client/mod.rs", "rank": 55, "score": 154426.86277558614 }, { "content": "/// Helper to create connection error frames.\n\npub fn connection_error<T>(cm: u32, code: ConnectionError, text: &str) -> Result<T> {\n\n let (class_id, method_id) = frame::split_class_method(cm);\n\n\n\n Err(Box::new(RuntimeError {\n\n scope: ErrorScope::Connection,\n\n channel: 0,\n\n code: code as u16,\n\n text: text.to_string(),\n\n class_id,\n\n method_id,\n\n }))\n\n}\n\n\n", "file_path": "metalmq/src/client/mod.rs", "rank": 56, "score": 154248.45394763592 }, { "content": "fn decode_basic_consume(src: &mut BytesMut) -> MethodFrameArgs {\n\n let mut args = BasicConsumeArgs::default();\n\n let _ = src.get_u16();\n\n args.queue = decode_short_string(src);\n\n args.consumer_tag = decode_short_string(src);\n\n args.flags = BasicConsumeFlags::from_bits(src.get_u8()).unwrap_or_default();\n\n args.args = decode_field_table(src);\n\n\n\n MethodFrameArgs::BasicConsume(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 57, "score": 151791.9774862432 }, { "content": "fn decode_channel_open(src: &mut BytesMut) -> MethodFrameArgs {\n\n let _ = decode_short_string(src);\n\n\n\n MethodFrameArgs::ChannelOpen\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 58, "score": 151680.08221190644 }, { "content": "fn decode_channel_close(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = ChannelCloseArgs {\n\n code: src.get_u16(),\n\n text: decode_short_string(src),\n\n class_id: src.get_u16(),\n\n method_id: src.get_u16(),\n\n };\n\n\n\n MethodFrameArgs::ChannelClose(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 59, "score": 151680.08221190644 }, { "content": "fn decode_queue_delete_ok(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = QueueDeleteOkArgs {\n\n message_count: src.get_u32(),\n\n };\n\n\n\n MethodFrameArgs::QueueDeleteOk(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 60, "score": 149115.81484712823 }, { "content": "fn decode_connection_tune_ok(src: &mut 
BytesMut) -> MethodFrameArgs {\n\n let args = ConnectionTuneOkArgs {\n\n channel_max: src.get_u16(),\n\n frame_max: src.get_u32(),\n\n heartbeat: src.get_u16(),\n\n };\n\n\n\n MethodFrameArgs::ConnectionTuneOk(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 61, "score": 149115.81484712823 }, { "content": "fn decode_queue_declare_ok(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = QueueDeclareOkArgs {\n\n name: decode_short_string(src),\n\n message_count: src.get_u32(),\n\n consumer_count: src.get_u32(),\n\n };\n\n\n\n MethodFrameArgs::QueueDeclareOk(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 62, "score": 149115.81484712823 }, { "content": "fn decode_basic_cancel_ok(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = BasicCancelOkArgs {\n\n consumer_tag: decode_short_string(src),\n\n };\n\n\n\n MethodFrameArgs::BasicCancelOk(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 63, "score": 149115.81484712823 }, { "content": "fn decode_connection_open_ok(src: &mut BytesMut) -> MethodFrameArgs {\n\n let _ = decode_short_string(src);\n\n\n\n MethodFrameArgs::ConnectionOpenOk\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 64, "score": 149115.81484712823 }, { "content": "fn decode_connection_start_ok(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = ConnectionStartOkArgs {\n\n properties: decode_field_table(src),\n\n mechanism: decode_short_string(src),\n\n response: decode_long_string(src),\n\n locale: decode_short_string(src),\n\n ..Default::default()\n\n };\n\n\n\n // TODO init capabilities!\n\n\n\n MethodFrameArgs::ConnectionStartOk(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 65, "score": 149115.81484712823 }, { "content": "pub fn basic_publish(\n\n channel: u16,\n\n exchange_name: &str,\n\n routing_key: &str,\n\n flags: Option<BasicPublishFlags>,\n\n) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_PUBLISH,\n\n MethodFrameArgs::BasicPublish(BasicPublishArgs {\n\n exchange_name: exchange_name.to_string(),\n\n routing_key: routing_key.to_string(),\n\n flags: flags.unwrap_or_default(),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 66, "score": 147479.4284085065 }, { "content": "pub fn basic_deliver(\n\n channel: u16,\n\n consumer_tag: &str,\n\n delivery_tag: u64,\n\n redelivered: bool,\n\n exchange_name: &str,\n\n routing_key: &str,\n\n) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_DELIVER,\n\n MethodFrameArgs::BasicDeliver(BasicDeliverArgs {\n\n consumer_tag: consumer_tag.to_string(),\n\n delivery_tag,\n\n redelivered,\n\n exchange_name: exchange_name.to_string(),\n\n routing_key: routing_key.to_string(),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 67, "score": 147479.4284085065 }, { "content": "pub fn exchange_declare(\n\n channel: u16,\n\n exchange_name: &str,\n\n exchange_type: &str,\n\n flags: Option<ExchangeDeclareFlags>,\n\n) -> AMQPFrame {\n\n AMQPFrame::Method(\n\n channel,\n\n EXCHANGE_DECLARE,\n\n MethodFrameArgs::ExchangeDeclare(ExchangeDeclareArgs {\n\n exchange_name: exchange_name.to_string(),\n\n exchange_type: exchange_type.to_string(),\n\n flags: flags.unwrap_or_default(),\n\n args: None,\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 68, "score": 147479.4284085065 }, { "content": "pub fn basic_return(\n\n channel: u16,\n\n reply_code: u16,\n\n reply_text: &str,\n\n exchange_name: &str,\n\n routing_key: &str,\n\n) -> AMQPFrame 
{\n\n AMQPFrame::Method(\n\n channel,\n\n BASIC_RETURN,\n\n MethodFrameArgs::BasicReturn(BasicReturnArgs {\n\n reply_code,\n\n reply_text: reply_text.to_string(),\n\n exchange_name: exchange_name.to_string(),\n\n routing_key: routing_key.to_string(),\n\n }),\n\n )\n\n}\n\n\n", "file_path": "metalmq-codec/src/frame.rs", "rank": 69, "score": 147479.4284085065 }, { "content": "fn register_wait_for(feedback: &Arc<Mutex<HashMap<u16, FrameResponse>>>, channel: u16, wf: WaitFor) -> Result<()> {\n\n match wf {\n\n WaitFor::Nothing => (),\n\n WaitFor::SentOut(tx) => {\n\n // Since the previous block has run, we sent out the frame.\n\n // TODO we need to send back send errors which we swallowed with ? operator\n\n // DOUBT can we send back here to the thread which basically sent a clientrequest\n\n // which is waiting for us to complete\n\n //if let Err(e) = tx.send(Ok(())) {\n\n // error!(\"Error {:?}\", e);\n\n //}\n\n }\n\n WaitFor::FrameResponse(tx) => {\n\n feedback.lock().unwrap().insert(channel, tx);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "metalmq-client/src/processor.rs", "rank": 70, "score": 146169.0536697455 }, { "content": "/// Unblock the client by sending a `Response`. If there is no error on the channel or\n\n/// in the connection the result will be a unit type. If there is an AMQP channel error,\n\n/// it sends back to the client call who is blocked on that channel, so the client API\n\n/// will receive the `ClientError`. If there is a connection error, it notifies all\n\n/// the calls who are waiting on channels (otherwise the client API would remain blocked)\n\n/// and sends back the error to a random waiter. (Sorry, if I have a better idea, I fix this.)\n\nfn notify_waiter(frame: &frame::AMQPFrame, feedback: &Arc<Mutex<HashMap<u16, FrameResponse>>>) -> Result<()> {\n\n use frame::AMQPFrame;\n\n\n\n trace!(\"Notify waiter by {:?}\", frame);\n\n\n\n match frame {\n\n AMQPFrame::Method(_, frame::CONNECTION_CLOSE, frame::MethodFrameArgs::ConnectionClose(args)) => {\n\n let err = crate::error::ClientError {\n\n channel: None,\n\n code: args.code,\n\n message: args.text.clone(),\n\n class_method: frame::unify_class_method(args.class_id, args.method_id),\n\n };\n\n\n\n for (_, fb) in feedback.lock().unwrap().drain() {\n\n if fb.send(Err(anyhow::Error::new(err.clone()))).is_err() {\n\n // TODO what to do here?\n\n }\n\n }\n\n\n", "file_path": "metalmq-client/src/processor.rs", "rank": 71, "score": 139744.07462842323 }, { "content": "pub fn start() -> QueueManagerSink {\n\n let (sink, stream) = mpsc::channel(1);\n\n\n\n tokio::spawn(async move {\n\n let mut manager = QueueManagerState {\n\n command_stream: stream,\n\n queues: HashMap::new(),\n\n };\n\n\n\n if let Err(e) = manager.command_loop().await {\n\n error!(\"Queue manager exited {:?}\", e);\n\n }\n\n });\n\n\n\n sink\n\n}\n\n\n\npub async fn declare_queue(mgr: &QueueManagerSink, cmd: QueueDeclareCommand) -> Result<()> {\n\n let (tx, rx) = oneshot::channel();\n\n\n", "file_path": "metalmq/src/queue/manager.rs", "rank": 72, "score": 137739.28592763614 }, { "content": "/// Start exchange manager which manages the exchanges, exchange and queue bindings\n\n/// via `ExchangeManagerCommand`.\n\npub fn start() -> ExchangeManagerSink {\n\n let (sink, stream) = mpsc::channel(1);\n\n\n\n tokio::spawn(async move {\n\n let mut manager = ExchangeManagerState {\n\n command_stream: stream,\n\n exchanges: HashMap::new(),\n\n };\n\n\n\n if let Err(e) = manager.command_loop().await {\n\n error!(\"Exchange manager exited {:?}\", e);\n\n 
}\n\n });\n\n\n\n sink\n\n}\n\n\n\npub async fn declare_exchange(mgr: &ExchangeManagerSink, cmd: DeclareExchangeCommand) -> Result<ExchangeCommandSink> {\n\n let (tx, rx) = oneshot::channel();\n\n\n", "file_path": "metalmq/src/exchange/manager.rs", "rank": 73, "score": 137739.28592763614 }, { "content": "fn encode_channel_open(buf: &mut BytesMut) {\n\n // encode empty short string\n\n buf.put_u8(0);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 74, "score": 137178.0441922693 }, { "content": "fn encode_connection_open_ok(buf: &mut BytesMut) {\n\n // encode empty short string\n\n buf.put_u8(0);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 75, "score": 134962.7018218768 }, { "content": "fn encode_basic_consume(buf: &mut BytesMut, args: &BasicConsumeArgs) {\n\n buf.put_u16(0);\n\n encode_short_string(buf, &args.queue);\n\n encode_short_string(buf, &args.consumer_tag);\n\n buf.put_u8(args.flags.bits());\n\n encode_empty_field_table(buf);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 76, "score": 132501.34021181645 }, { "content": "fn encode_channel_close(buf: &mut BytesMut, args: &ChannelCloseArgs) {\n\n buf.put_u16(args.code);\n\n encode_short_string(buf, &args.text);\n\n buf.put_u16(args.class_id);\n\n buf.put_u16(args.method_id);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 77, "score": 132346.50632504112 }, { "content": "fn encode_method_frame(b: &mut Bencher) {\n\n let mut codec = metalmq_codec::codec::AMQPCodec {};\n\n\n\n b.iter(move || {\n\n let frame = generate_frame();\n\n let mut buf = BytesMut::with_capacity(1024);\n\n\n\n codec.encode(frame, &mut buf)\n\n });\n\n}\n\n\n", "file_path": "metalmq-codec/benches/encode.rs", "rank": 78, "score": 131717.46180395622 }, { "content": "/// Decode a field table\n\n///\n\n/// The buffer points to the beginning of the field table which is a `u32` length\n\n/// information.\n\nfn decode_field_table(buf: &mut BytesMut) -> Option<HashMap<String, AMQPFieldValue>> {\n\n let ft_len = buf.get_u32() as usize;\n\n\n\n if ft_len == 0 {\n\n return None;\n\n }\n\n\n\n let mut ft_buf = buf.split_to(ft_len);\n\n let mut table = HashMap::new();\n\n\n\n while ft_buf.has_remaining() {\n\n let field_name = decode_short_string(&mut ft_buf);\n\n let field_value = decode_value(&mut ft_buf);\n\n\n\n table.insert(field_name, field_value);\n\n }\n\n\n\n Some(table)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 79, "score": 130826.28253977785 }, { "content": "fn encode_field_table2(buf: &mut BytesMut, ft: &HashMap<String, AMQPFieldValue>) {\n\n let mut ft_buf = BytesMut::with_capacity(4096);\n\n\n\n for (name, value) in ft {\n\n encode_short_string(&mut ft_buf, name);\n\n\n\n match value {\n\n AMQPFieldValue::Bool(v) => {\n\n ft_buf.put_u8(b't');\n\n ft_buf.put_u8(if *v { 1 } else { 0 });\n\n }\n\n AMQPFieldValue::LongString(v) => {\n\n ft_buf.put_u8(b'S');\n\n ft_buf.put_u32(v.len() as u32);\n\n ft_buf.put(v.as_bytes());\n\n }\n\n AMQPFieldValue::EmptyFieldTable => encode_empty_field_table(&mut ft_buf),\n\n AMQPFieldValue::FieldTable(v) => {\n\n ft_buf.put_u8(b'F');\n\n\n\n // TODO we are copying here\n\n encode_field_table2(&mut ft_buf, v);\n\n }\n\n }\n\n }\n\n\n\n buf.put_u32(ft_buf.len() as u32);\n\n buf.put(ft_buf);\n\n}\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 80, "score": 130822.45101558734 }, { "content": "fn encode_content_header_frame(buf: &mut BytesMut, hf: &ContentHeaderFrame) {\n\n buf.put_u8(2u8);\n\n buf.put_u16(hf.channel);\n\n\n\n let mut fr_buf = 
BytesMut::with_capacity(4096);\n\n fr_buf.put_u16(hf.class_id);\n\n fr_buf.put_u16(hf.weight);\n\n fr_buf.put_u64(hf.body_size);\n\n fr_buf.put_u16(hf.prop_flags.bits());\n\n\n\n if let Some(s) = hf.content_type.as_ref() {\n\n encode_short_string(&mut fr_buf, s);\n\n }\n\n if let Some(s) = hf.content_encoding.as_ref() {\n\n encode_short_string(&mut fr_buf, s);\n\n }\n\n // TODO write headers\n\n if let Some(v) = hf.delivery_mode {\n\n fr_buf.put_u8(v);\n\n }\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 81, "score": 129585.41692038032 }, { "content": "fn encode_connection_tune_ok(buf: &mut BytesMut, args: &ConnectionTuneOkArgs) {\n\n buf.put_u16(args.channel_max);\n\n buf.put_u32(args.frame_max);\n\n buf.put_u16(args.heartbeat);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 82, "score": 128722.71186687131 }, { "content": "fn encode_connection_start_ok(buf: &mut BytesMut, args: &ConnectionStartOkArgs) {\n\n encode_field_table(buf, args.properties.as_ref());\n\n encode_short_string(buf, &args.mechanism);\n\n encode_long_string(buf, &args.response);\n\n encode_short_string(buf, &args.locale);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 83, "score": 128722.71186687131 }, { "content": "fn encode_queue_declare_ok(buf: &mut BytesMut, args: &QueueDeclareOkArgs) {\n\n encode_short_string(buf, &args.name);\n\n buf.put_u32(args.message_count);\n\n buf.put_u32(args.consumer_count);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 84, "score": 128722.71186687131 }, { "content": "fn encode_basic_cancel_ok(buf: &mut BytesMut, args: &BasicCancelOkArgs) {\n\n encode_short_string(buf, &args.consumer_tag);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 85, "score": 128722.71186687128 }, { "content": "fn encode_queue_delete_ok(buf: &mut BytesMut, args: &QueueDeleteOkArgs) {\n\n buf.put_u32(args.message_count);\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 86, "score": 128722.71186687131 }, { "content": "pub fn new(context: Context, outgoing: mpsc::Sender<Frame>) -> Connection {\n\n let conn_id = Uuid::new_v4().to_hyphenated().to_string();\n\n\n\n info!(\"Client connected id = {}\", conn_id);\n\n\n\n Connection {\n\n id: conn_id,\n\n qm: context.queue_manager,\n\n em: context.exchange_manager,\n\n open_channels: vec![],\n\n exchanges: HashMap::new(),\n\n auto_delete_exchanges: vec![],\n\n consumed_queues: vec![],\n\n in_flight_contents: HashMap::new(),\n\n outgoing,\n\n }\n\n}\n\n\n\nimpl Connection {\n\n /// Send frame out to client asynchronously.\n", "file_path": "metalmq/src/client/state.rs", "rank": 87, "score": 127013.25673832773 }, { "content": "fn generate_frame() -> Frame {\n\n let args = frame::QueueDeclareArgs {\n\n name: \"test queue\".into(),\n\n ..Default::default()\n\n };\n\n\n\n Frame::Frame(AMQPFrame::Method(\n\n 12,\n\n frame::QUEUE_DECLARE,\n\n MethodFrameArgs::QueueDeclare(args),\n\n ))\n\n}\n\n\n\nbencher::benchmark_group!(encoder, encode_method_frame);\n\n\n\nbencher::benchmark_main!(encoder);\n", "file_path": "metalmq-codec/benches/encode.rs", "rank": 88, "score": 126489.93100795773 }, { "content": "fn decode_queue_declare(src: &mut BytesMut) -> MethodFrameArgs {\n\n let mut args = QueueDeclareArgs::default();\n\n let _ = src.get_u16();\n\n args.name = decode_short_string(src);\n\n args.flags = QueueDeclareFlags::from_bits(src.get_u8()).unwrap_or_default();\n\n args.args = decode_field_table(src);\n\n\n\n MethodFrameArgs::QueueDeclare(args)\n\n}\n\n\n", "file_path": 
"metalmq-codec/src/codec.rs", "rank": 89, "score": 126305.90065144948 }, { "content": "fn decode_connection_tune(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = ConnectionTuneArgs {\n\n channel_max: src.get_u16(),\n\n frame_max: src.get_u32(),\n\n heartbeat: src.get_u16(),\n\n };\n\n\n\n MethodFrameArgs::ConnectionTune(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 90, "score": 126305.90065144948 }, { "content": "fn decode_exchange_delete(src: &mut BytesMut) -> MethodFrameArgs {\n\n let mut args = ExchangeDeleteArgs::default();\n\n let _ = src.get_u16();\n\n args.exchange_name = decode_short_string(src);\n\n args.flags = ExchangeDeleteFlags::from_bits(src.get_u8()).unwrap_or_default();\n\n\n\n MethodFrameArgs::ExchangeDelete(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 91, "score": 126305.90065144948 }, { "content": "fn decode_queue_bind(src: &mut BytesMut) -> MethodFrameArgs {\n\n let mut args = QueueBindArgs::default();\n\n let _ = src.get_u16();\n\n args.queue_name = decode_short_string(src);\n\n args.exchange_name = decode_short_string(src);\n\n args.routing_key = decode_short_string(src);\n\n\n\n args.no_wait = src.get_u8() != 0;\n\n args.args = decode_field_table(src);\n\n\n\n MethodFrameArgs::QueueBind(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 92, "score": 126305.90065144948 }, { "content": "fn decode_exchange_declare(src: &mut BytesMut) -> MethodFrameArgs {\n\n let mut args = ExchangeDeclareArgs::default();\n\n let _ = src.get_u16();\n\n args.exchange_name = decode_short_string(src);\n\n args.exchange_type = decode_short_string(src);\n\n args.flags = ExchangeDeclareFlags::from_bits(src.get_u8()).unwrap_or_default();\n\n args.args = decode_field_table(src);\n\n\n\n MethodFrameArgs::ExchangeDeclare(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 93, "score": 126305.90065144948 }, { "content": "fn decode_connection_close(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = ConnectionCloseArgs {\n\n code: src.get_u16(),\n\n text: decode_short_string(src),\n\n class_id: src.get_u16(),\n\n method_id: src.get_u16(),\n\n };\n\n\n\n MethodFrameArgs::ConnectionClose(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 94, "score": 126305.90065144948 }, { "content": "fn decode_connection_start(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = ConnectionStartArgs {\n\n version_major: src.get_u8(),\n\n version_minor: src.get_u8(),\n\n properties: decode_field_table(src),\n\n mechanisms: decode_long_string(src),\n\n locales: decode_long_string(src),\n\n ..Default::default()\n\n };\n\n\n\n //if let Some(ref table) = args.properties {\n\n // if let Some(AMQPFieldValue::FieldTable(cap)) = table.get(\"capabilities\".into()) {\n\n // args.capabilities = Some(**cap.clone());\n\n // }\n\n //}\n\n\n\n MethodFrameArgs::ConnectionStart(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 95, "score": 126305.90065144948 }, { "content": "fn decode_queue_unbind(src: &mut BytesMut) -> MethodFrameArgs {\n\n let mut args = QueueUnbindArgs::default();\n\n let _ = src.get_u16();\n\n args.queue_name = decode_short_string(src);\n\n args.exchange_name = decode_short_string(src);\n\n args.routing_key = decode_short_string(src);\n\n args.args = decode_field_table(src);\n\n\n\n MethodFrameArgs::QueueUnbind(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 96, "score": 126305.90065144948 }, { "content": "fn decode_queue_delete(src: &mut BytesMut) -> 
MethodFrameArgs {\n\n let mut args = QueueDeleteArgs::default();\n\n let _ = src.get_u16();\n\n args.queue_name = decode_short_string(src);\n\n args.flags = QueueDeleteFlags::from_bits(src.get_u8()).unwrap_or_default();\n\n\n\n MethodFrameArgs::QueueDelete(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 97, "score": 126305.90065144948 }, { "content": "fn decode_connection_open(src: &mut BytesMut) -> MethodFrameArgs {\n\n let virtual_host = decode_short_string(src);\n\n let _reserved = decode_short_string(src);\n\n let flags = src.get_u8();\n\n\n\n MethodFrameArgs::ConnectionOpen(ConnectionOpenArgs {\n\n virtual_host,\n\n insist: flags & 0x01 != 0,\n\n })\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 98, "score": 126305.90065144948 }, { "content": "fn decode_basic_cancel(src: &mut BytesMut) -> MethodFrameArgs {\n\n let args = BasicCancelArgs {\n\n consumer_tag: decode_short_string(src),\n\n no_wait: src.get_u8() != 0,\n\n };\n\n\n\n MethodFrameArgs::BasicCancel(args)\n\n}\n\n\n", "file_path": "metalmq-codec/src/codec.rs", "rank": 99, "score": 126305.90065144948 } ]
Rust
src/model/area/foundation.rs
nakidixon/klondike-rs
b017ed3cc4645df7e48f137e0197252feef44b09
use crate::{ model::{ card::{Card, Rank, Suit}, settings::GameSettings, stack::{Orientation, Stack, StackDetails, StackSelection}, }, utils::vec::SplitOffBounded, }; use super::{ Action, Area, AreaId, Held, InvalidCard, MoveResult, NotSupported, NothingToSelect, Result, SelectedArea, SnafuSelectorExt, TooManyCards, UnselectedArea, }; #[derive(Copy, Clone, Debug)] pub struct Selection { held_from: Option<AreaId>, } #[derive(Debug)] pub struct Foundation<S> { suit: Suit, cards: Vec<Card>, take_from_foundation: bool, selection: S, } pub type UnselectedFoundation = Foundation<()>; pub type SelectedFoundation = Foundation<Selection>; impl<S> Foundation<S> { fn id(&self) -> AreaId { AreaId::Foundation(self.suit) } fn validate_cards(&self, held: &Held) -> Result { if held.source == self.id() { Ok(()) } else if let [card] = held.cards.as_slice() { ensure!( self.suit == card.suit, InvalidCard { message: format!("Wrong suit: card: {:?}, suit: {:?}", card, self.suit), } ); if let Some(foundation_card) = self.cards.last() { ensure!( foundation_card.rank.is_followed_by(card.rank), InvalidCard { message: format!( "Card does not follow: card: {:?}, top: {:?}", card, foundation_card ), } ); Ok(()) } else { ensure!( card.rank == Rank::Ace, InvalidCard { message: format!("Card does not follow: card: {:?}, top: empty", card), } ); Ok(()) } } else { ensure!( held.cards.is_empty(), TooManyCards { message: "Expected only one card", } ); Ok(()) } } fn give_cards(&mut self, mut held: Held) -> MoveResult<(), Held> { match self.validate_cards(&held) { Ok(_) => { self.cards.append(&mut held.cards); MoveResult::Moved(()) } Err(error) => MoveResult::Unmoved(held, error), } } fn take_cards(&mut self, len: usize, source: AreaId) -> Held { let cards = self.cards.split_off_bounded(len); Held { source, cards } } fn as_stack(&self, selection: Option<Selection>) -> Stack<'_> { let cards_len = self.cards.len(); Stack { cards: &self.cards, details: StackDetails { orientation: Orientation::Horizontal, len: cards_len, face_up_len: cards_len, visible_len: 2, spread_len: 1, selection: selection.map(|selection| StackSelection { len: 1, held: selection.held_from.is_some(), }), }, } } fn with_selection<T>(self, selection: T) -> Foundation<T> { Foundation { suit: self.suit, cards: self.cards, take_from_foundation: self.take_from_foundation, selection, } } } impl UnselectedFoundation { pub fn create( suit: Suit, cards: Vec<Card>, settings: &GameSettings, ) -> Box<dyn UnselectedArea> { Box::new(Foundation { suit, cards, take_from_foundation: settings.take_from_foundation, selection: (), }) } } impl<'a> Area for UnselectedFoundation { fn id(&self) -> AreaId { Foundation::id(self) } fn is_selected(&self) -> bool { false } fn is_held(&self) -> bool { false } fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> { Foundation::give_cards(self, held) } fn take_cards(&mut self, len: usize) -> Held { self.take_cards(len, self.id()) } fn take_all_cards(&mut self) -> Held { self.take_cards(self.cards.len(), self.id()) } fn peek_top_card(&self) -> Option<&Card> { self.cards.last() } fn as_stack(&self) -> Stack<'_> { self.as_stack(None) } fn as_area(&self) -> &dyn Area { self } fn as_area_mut(&mut self) -> &mut dyn Area { self } } impl<'a> Area for SelectedFoundation { fn id(&self) -> AreaId { Foundation::id(self) } fn is_selected(&self) -> bool { true } fn is_held(&self) -> bool { self.selection.held_from.is_some() } fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> { self.selection.held_from = None; Foundation::give_cards(self, 
held) } fn take_cards(&mut self, len: usize) -> Held { let source = self.selection.held_from.take().unwrap_or_else(|| self.id()); self.take_cards(len, source) } fn take_all_cards(&mut self) -> Held { let source = self.selection.held_from.take().unwrap_or_else(|| self.id()); self.take_cards(self.cards.len(), source) } fn peek_top_card(&self) -> Option<&Card> { self.cards.last() } fn as_stack(&self) -> Stack<'_> { self.as_stack(Some(self.selection)) } fn as_area(&self) -> &dyn Area { self } fn as_area_mut(&mut self) -> &mut dyn Area { self } } impl UnselectedArea for UnselectedFoundation { fn select(self: Box<Self>) -> MoveResult<Box<dyn SelectedArea>, Box<dyn UnselectedArea>> { if !self.cards.is_empty() { MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: None }))) } else { NothingToSelect { message: "Empty area", } .fail_move(self) } } fn select_with_held( mut self: Box<Self>, held: Held, ) -> MoveResult<Box<dyn SelectedArea>, (Box<dyn UnselectedArea>, Held)> { let source = held.source; match self.give_cards(held) { MoveResult::Moved(()) => MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: Some(source), }))), MoveResult::Unmoved(held, error) => MoveResult::Unmoved((self, held), error), } } } impl SelectedArea for SelectedFoundation { fn deselect(mut self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>) { let held = if let Some(source) = self.selection.held_from { Some(self.take_cards(1, source)) } else { None }; let unselected = Box::new(self.with_selection(())); (unselected, held) } fn activate(&mut self) -> Result<Option<Action>> { if self.selection.held_from.is_some() { self.put_down()?; } else { self.pick_up()?; } Ok(None) } fn pick_up(&mut self) -> Result { if self.take_from_foundation { self.selection.held_from = Some(self.id()); } Ok(()) } fn put_down(&mut self) -> Result { self.selection.held_from = None; Ok(()) } fn select_more(&mut self) -> Result { NotSupported { message: "Selection cannot be changed", } .fail() } fn select_less(&mut self) -> Result { NotSupported { message: "Selection cannot be changed", } .fail() } fn held_from(&self) -> Option<AreaId> { self.selection.held_from } }
use crate::{ model::{ card::{Card, Rank, Suit}, settings::GameSettings, stack::{Orientation, Stack, StackDetails, StackSelection}, }, utils::vec::SplitOffBounded, }; use super::{ Action, Area, AreaId, Held, InvalidCard, MoveResult, NotSupported, NothingToSelect, Result, SelectedArea, SnafuSelectorExt, TooManyCards, UnselectedArea, }; #[derive(Copy, Clone, Debug)] pub struct Selection { held_from: Option<AreaId>, } #[derive(Debug)] pub struct Foundation<S> { suit: Suit, cards: Vec<Card>, take_from_foundation: bool, selection: S, } pub type UnselectedFoundation = Foundation<()>; pub type SelectedFoundation = Foundation<Selection>; impl<S> Foundation<S> { fn id(&self) -> AreaId { AreaId::Foundation(self.suit) } fn validate_cards(&self, held: &Held) -> Result { if held.source == self.id() { Ok(()) } else if let [card] = held.cards.as_slice() { ensure!( self.suit == card.suit, InvalidCard { message: format!("Wrong suit: card: {:?}, suit: {:?}", card, self.suit), } ); if let Some(foundation_card) = self.cards.last() { ensure!( foundation_card.rank.is_followed_by(card.rank), InvalidCard { message: format!( "Card does not follow: card: {:?}, top: {:?}", card, foundation_card ), } ); Ok(()) } else { ensure!( card.rank == Rank::Ace, InvalidCard { message: format!("Card does not follow: card: {:?}, top: empty", card), } ); Ok(()) } } else { ensure!( held.cards.is_empty(), TooManyCards { message: "Expected only one card", } ); Ok(()) } } fn give_cards(&mut self, mut held: Held) -> MoveResult<(), Held> { match self.validate_cards(&held) { Ok(_) => { self.cards.append(&mut held.cards); MoveResult::Moved(()) } Err(error) => MoveResult::Unmoved(held, error), } } fn take_cards(&mut self, len: usize, source: AreaId) -> Held { let cards = self.cards.split_off_bounded(len); Held { source, cards } } fn as_stack(&self, selection: Option<Selection>) -> Stack<'_> { let cards_len = self.cards.len(); Stack { cards: &self.cards, details: StackDetails { orientation: Orientation::Horizontal, len: cards_len, face_up_len: cards_len, visible_len: 2, spread_len: 1, selection: selection.map(|selection| StackSelection { len: 1, held: selection.held_from.is_some(), }), }, } } fn with_selection<T>(self, selection: T) -> Foundation<T> { Foundation { suit: self.suit, cards: self.cards, take_from_foundation: self.take_from_foundation, selection, } } } impl UnselectedFoundation { pub fn create( suit: Suit, cards: Vec<Card>, settings: &GameSettings, ) -> Box<dyn UnselectedArea> { Box::new(Foundation { suit, cards, take_from_foundation: settings.take_from_foundation, selection: (), }) } } impl<'a> Area for UnselectedFoundation { fn id(&self) -> AreaId { Foundation::id(self) } fn is_selected(&self) -> bool { false } fn is_held(&self) -> bool { false } fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> { Foundation::give_cards(self, held)
p_or_else(|| self.id()); self.take_cards(len, source) } fn take_all_cards(&mut self) -> Held { let source = self.selection.held_from.take().unwrap_or_else(|| self.id()); self.take_cards(self.cards.len(), source) } fn peek_top_card(&self) -> Option<&Card> { self.cards.last() } fn as_stack(&self) -> Stack<'_> { self.as_stack(Some(self.selection)) } fn as_area(&self) -> &dyn Area { self } fn as_area_mut(&mut self) -> &mut dyn Area { self } } impl UnselectedArea for UnselectedFoundation { fn select(self: Box<Self>) -> MoveResult<Box<dyn SelectedArea>, Box<dyn UnselectedArea>> { if !self.cards.is_empty() { MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: None }))) } else { NothingToSelect { message: "Empty area", } .fail_move(self) } } fn select_with_held( mut self: Box<Self>, held: Held, ) -> MoveResult<Box<dyn SelectedArea>, (Box<dyn UnselectedArea>, Held)> { let source = held.source; match self.give_cards(held) { MoveResult::Moved(()) => MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: Some(source), }))), MoveResult::Unmoved(held, error) => MoveResult::Unmoved((self, held), error), } } } impl SelectedArea for SelectedFoundation { fn deselect(mut self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>) { let held = if let Some(source) = self.selection.held_from { Some(self.take_cards(1, source)) } else { None }; let unselected = Box::new(self.with_selection(())); (unselected, held) } fn activate(&mut self) -> Result<Option<Action>> { if self.selection.held_from.is_some() { self.put_down()?; } else { self.pick_up()?; } Ok(None) } fn pick_up(&mut self) -> Result { if self.take_from_foundation { self.selection.held_from = Some(self.id()); } Ok(()) } fn put_down(&mut self) -> Result { self.selection.held_from = None; Ok(()) } fn select_more(&mut self) -> Result { NotSupported { message: "Selection cannot be changed", } .fail() } fn select_less(&mut self) -> Result { NotSupported { message: "Selection cannot be changed", } .fail() } fn held_from(&self) -> Option<AreaId> { self.selection.held_from } }
} fn take_cards(&mut self, len: usize) -> Held { self.take_cards(len, self.id()) } fn take_all_cards(&mut self) -> Held { self.take_cards(self.cards.len(), self.id()) } fn peek_top_card(&self) -> Option<&Card> { self.cards.last() } fn as_stack(&self) -> Stack<'_> { self.as_stack(None) } fn as_area(&self) -> &dyn Area { self } fn as_area_mut(&mut self) -> &mut dyn Area { self } } impl<'a> Area for SelectedFoundation { fn id(&self) -> AreaId { Foundation::id(self) } fn is_selected(&self) -> bool { true } fn is_held(&self) -> bool { self.selection.held_from.is_some() } fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> { self.selection.held_from = None; Foundation::give_cards(self, held) } fn take_cards(&mut self, len: usize) -> Held { let source = self.selection.held_from.take().unwra
random
[ { "content": "pub trait SelectedArea: Area {\n\n fn deselect(self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>);\n\n\n\n fn activate(&mut self) -> Result<Option<Action>>;\n\n fn pick_up(&mut self) -> Result;\n\n fn put_down(&mut self) -> Result;\n\n fn select_more(&mut self) -> Result;\n\n fn select_less(&mut self) -> Result;\n\n\n\n fn held_from(&self) -> Option<AreaId>;\n\n}\n\n\n\npub struct SelectionMove {\n\n pub selected: Box<dyn SelectedArea>,\n\n pub unselected: Box<dyn UnselectedArea>,\n\n}\n\n\n\nimpl fmt::Debug for SelectionMove {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.debug_struct(\"SelectionMove\")\n\n .field(\"selected\", &format_args!(\"<{:?}>\", self.selected.id()))\n\n .field(\"unselected\", &format_args!(\"<{:?}>\", self.unselected.id()))\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "src/model/area/mod.rs", "rank": 0, "score": 175745.38345421595 }, { "content": "pub fn move_selection(\n\n source: Box<dyn SelectedArea>,\n\n target: Box<dyn UnselectedArea>,\n\n) -> MoveResult<SelectionMove, SelectionMove> {\n\n let (source_unselected, held) = source.deselect();\n\n\n\n if let Some(held) = held {\n\n match target.select_with_held(held) {\n\n MoveResult::Moved(target_selected) => MoveResult::Moved(SelectionMove {\n\n selected: target_selected,\n\n unselected: source_unselected,\n\n }),\n\n\n\n MoveResult::Unmoved((target_unselected, held), error) => {\n\n let source_selected = source_unselected\n\n .select_with_held(held)\n\n .into_result()\n\n .unwrap();\n\n MoveResult::Unmoved(\n\n SelectionMove {\n", "file_path": "src/model/area/mod.rs", "rank": 1, "score": 174467.08554455644 }, { "content": "pub fn card_coords(\n\n origin: geometry::Point2D<u16>,\n\n index: usize,\n\n offsets: &Offsets,\n\n stack_details: &StackDetails,\n\n) -> Option<geometry::Point2D<u16>> {\n\n let visible_index = stack_details.visible_index() + offsets.collapse_unspread_len;\n\n let collapsed_spread_index = stack_details.spread_index();\n\n let uncollapsed_spread_index = collapsed_spread_index + offsets.collapse_spread_len;\n\n\n\n if index >= uncollapsed_spread_index {\n\n let unspread_len = (collapsed_spread_index - visible_index).to_i16().unwrap();\n\n let collapsed_spread_len = (uncollapsed_spread_index - collapsed_spread_index)\n\n .to_i16()\n\n .unwrap();\n\n let uncollapsed_spread_len = (index - uncollapsed_spread_index).to_i16().unwrap();\n\n Some(\n\n (origin.cast::<i16>()\n\n + offsets.unspread * unspread_len\n\n + offsets.collapsed_spread * collapsed_spread_len\n", "file_path": "src/display/stack/common.rs", "rank": 2, "score": 150129.51584024273 }, { "content": "pub trait UnselectedArea: Area {\n\n fn select(self: Box<Self>) -> MoveResult<Box<dyn SelectedArea>, Box<dyn UnselectedArea>>;\n\n fn select_with_held(\n\n self: Box<Self>,\n\n held: Held,\n\n ) -> MoveResult<Box<dyn SelectedArea>, (Box<dyn UnselectedArea>, Held)>;\n\n}\n\n\n", "file_path": "src/model/area/mod.rs", "rank": 3, "score": 147362.48220343044 }, { "content": "pub fn card_iter<'a>(\n\n widget: &'a StackWidget<'a>,\n\n offsets: &'a Offsets,\n\n) -> impl Iterator<Item = (usize, geometry::Point2D<u16>, &'a Card)> {\n\n let coords = widget.bounds.origin;\n\n\n\n widget\n\n .stack\n\n .into_iter()\n\n .enumerate()\n\n .filter_map(move |(index, card)| {\n\n card_coords(coords, index, offsets, &widget.stack.details)\n\n .map(|coords| (index, coords, card))\n\n })\n\n}\n\n\n", "file_path": "src/display/stack/common.rs", "rank": 4, "score": 147322.56626418786 }, { "content": "pub fn 
card_widget_iter<'a>(\n\n widget: &'a StackWidget<'_>,\n\n offsets: &'a Offsets,\n\n) -> impl Iterator<Item = CardWidget<'a>> {\n\n let details = &widget.stack.details;\n\n\n\n // Index at which the collapsed unspread cards will be represented.\n\n let collapsed_unspread_index = details.visible_index() + offsets.collapse_unspread_len;\n\n\n\n let uncollapsed_spread_index = details.spread_index() + offsets.collapse_spread_len;\n\n\n\n // First index of a face up card. All cards before this are face down.\n\n let face_up_index = widget.stack.details.face_up_index();\n\n\n\n card_iter(widget, offsets).map(move |(index, coords, card)| {\n\n let mode = {\n\n if offsets.collapse_unspread_len > 0 && index <= collapsed_unspread_index {\n\n // Add 1 for the one visible card.\n\n let count = offsets.collapse_unspread_len + 1;\n\n CardWidgetMode::SliceFaceDown(count)\n", "file_path": "src/display/stack/vertical.rs", "rank": 5, "score": 143507.55702246496 }, { "content": "pub fn card_widget_iter<'a>(\n\n widget: &'a StackWidget<'a>,\n\n offsets: &'a Offsets,\n\n) -> impl Iterator<Item = CardWidget<'a>> {\n\n let face_up_index = widget.stack.details.face_up_index();\n\n\n\n card_iter(widget, offsets).map(move |(index, coords, card)| {\n\n let mode = {\n\n if index < face_up_index {\n\n CardWidgetMode::FullFaceDown\n\n } else {\n\n CardWidgetMode::FullFaceUp\n\n }\n\n };\n\n\n\n CardWidget {\n\n card,\n\n origin: coords,\n\n mode,\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/display/stack/horizontal.rs", "rank": 6, "score": 143507.55702246496 }, { "content": "pub fn create_dealer(mode: settings::DealerMode) -> Box<dyn Dealer> {\n\n match mode {\n\n settings::DealerMode::AutoWin => Box::new(AutoWinDealer),\n\n settings::DealerMode::InOrder => Box::new(StandardDealer {\n\n shuffle: InOrderShuffle,\n\n }),\n\n settings::DealerMode::Random => Box::new(StandardDealer {\n\n shuffle: RandomShuffle,\n\n }),\n\n }\n\n}\n\n\n", "file_path": "src/model/dealer.rs", "rank": 7, "score": 140665.70166327048 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n WriteLogger::init(\n\n LevelFilter::Debug,\n\n ConfigBuilder::new().set_time_to_local(true).build(),\n\n fs::File::create(LOG_FILE)?,\n\n )?;\n\n log_panics::init();\n\n\n\n info!(\"STARTING KLONDIKE\");\n\n\n\n let input = TtyInput::new()?;\n\n let output = TtyOutput::new()?;\n\n\n\n let settings = Settings::read_from_system()?;\n\n\n\n let mut engine = GameEngineBuilder::builder(&settings.game, input.keys(), output)\n\n .input_mapper(DisplayState::Playing, handle_playing_input)\n\n .input_mapper(DisplayState::HelpMessageOpen, handle_help_input)\n\n .input_mapper(DisplayState::WinMessageOpen, handle_win_input)\n\n .start()?;\n\n\n\n while engine.tick()? 
{}\n\n\n\n info!(\"QUITTING KLONDIKE\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 134713.42733846285 }, { "content": "pub trait Area {\n\n fn id(&self) -> AreaId;\n\n\n\n fn is_selected(&self) -> bool;\n\n fn is_held(&self) -> bool;\n\n\n\n fn give_cards(&mut self, held: Held) -> MoveResult<(), Held>;\n\n fn take_cards(&mut self, len: usize) -> Held;\n\n fn take_all_cards(&mut self) -> Held;\n\n\n\n fn peek_top_card(&self) -> Option<&Card>;\n\n\n\n fn as_stack(&self) -> Stack<'_>;\n\n\n\n fn as_area(&self) -> &dyn Area;\n\n fn as_area_mut(&mut self) -> &mut dyn Area;\n\n}\n\n\n", "file_path": "src/model/area/mod.rs", "rank": 9, "score": 131919.03722453417 }, { "content": "pub fn terminal_bounds() -> io::Result<geometry::Size2D<u16>> {\n\n let (cols, rows) = terminal_size()?;\n\n Ok(geometry::size2(cols, rows))\n\n}\n", "file_path": "src/display/mod.rs", "rank": 10, "score": 113267.18857907847 }, { "content": "pub fn offsets(widget: &StackWidget<'_>) -> Offsets {\n\n let details = &widget.stack.details;\n\n\n\n let mut offsets = UNCOLLAPSED_OFFSETS.clone();\n\n let mut collapse_len: usize = collapse_len(widget, &offsets).into();\n\n\n\n debug!(\"collapse_len: {}\", collapse_len);\n\n\n\n if collapse_len > 0 {\n\n let reserve_unspread_len = if details.spread_len > 0 { 0 } else { 1 };\n\n let unspread_len = details.unspread_len();\n\n let collapse_unspread_len = unspread_len.saturating_sub(reserve_unspread_len + 1);\n\n debug!(\n\n \"unspread_len: {}, collapse_unspread_len: {}\",\n\n unspread_len, collapse_unspread_len\n\n );\n\n\n\n offsets.collapse_unspread_len = collapse_unspread_len;\n\n collapse_len = collapse_len.saturating_sub(collapse_unspread_len);\n\n }\n\n\n\n if collapse_len > 0 {\n\n offsets.collapse_spread_len = min(details.spread_len.saturating_sub(1), collapse_len);\n\n }\n\n\n\n offsets\n\n}\n\n\n", "file_path": "src/display/stack/vertical.rs", "rank": 11, "score": 113168.22148246599 }, { "content": "pub fn offsets(_widget: &StackWidget<'_>) -> Offsets {\n\n OFFSETS.clone()\n\n}\n\n\n", "file_path": "src/display/stack/horizontal.rs", "rank": 12, "score": 113168.22148246599 }, { "content": "fn card_shift(\n\n index: usize,\n\n offsets: &Offsets,\n\n stack_details: &StackDetails,\n\n) -> geometry::Vector2D<i16> {\n\n stack_details\n\n .selection_index()\n\n .filter(|_| stack_details.held())\n\n .filter(|&selection_index| index >= selection_index)\n\n .map(|_| offsets.selected)\n\n .unwrap_or_default()\n\n}\n", "file_path": "src/display/stack/common.rs", "rank": 13, "score": 107916.31602541867 }, { "content": "pub fn selector_widget(widget: &StackWidget<'_>, offsets: &Offsets) -> Option<SelectorWidget> {\n\n let coords = widget.bounds.origin;\n\n let details = &widget.stack.details;\n\n\n\n details.selection.as_ref().map(|selection| {\n\n let selection_index = details.selection_index().unwrap_or_default();\n\n\n\n // Be careful about getting the last index. 
It's possible for the stack to actually be\n\n // empty, in which case we can't subtract from a 0 usize.\n\n let end_index = details.len.saturating_sub(1);\n\n\n\n let held_offset = if selection.held {\n\n -UNCOLLAPSED_OFFSETS.selected\n\n } else {\n\n Default::default()\n\n };\n\n\n\n let start_coords = card_coords(coords, selection_index, offsets, details)\n\n .unwrap_or(coords)\n\n .cast::<i16>()\n", "file_path": "src/display/stack/vertical.rs", "rank": 14, "score": 98627.26030773974 }, { "content": "pub fn selector_widget(widget: &StackWidget<'_>, offsets: &Offsets) -> Option<SelectorWidget> {\n\n let coords = widget.bounds.origin;\n\n let details = &widget.stack.details;\n\n\n\n details.selection.as_ref().map(|_| {\n\n let selection_index = details.selection_index().unwrap_or_default();\n\n\n\n debug!(\"selection_index: {}\", selection_index);\n\n\n\n /* Be careful about getting the last index. It's possible for the stack to actually be empty,\n\n * in which case we can't subtract from a 0 usize. */\n\n let end_index = details.len.saturating_sub(1);\n\n\n\n let start_coords = card_coords(coords, selection_index, offsets, details)\n\n .unwrap_or(coords)\n\n .cast::<i16>()\n\n + geometry::vec2(0, CARD_SIZE.height).cast::<i16>();\n\n let end_coords = card_coords(coords, end_index, offsets, details)\n\n .unwrap_or(coords)\n\n .cast::<i16>()\n", "file_path": "src/display/stack/horizontal.rs", "rank": 15, "score": 98627.26030773972 }, { "content": "fn collapse_len(widget: &StackWidget<'_>, offsets: &Offsets) -> u16 {\n\n if widget.stack.cards.is_empty() {\n\n return 0;\n\n }\n\n\n\n let origin = widget.bounds.origin;\n\n let maximum_y = widget.bounds.max_y();\n\n\n\n let last_card_coords = card_coords(\n\n origin,\n\n widget.stack.cards.len() - 1,\n\n offsets,\n\n &widget.stack.details,\n\n )\n\n .unwrap_or_default();\n\n\n\n let uncollapsed_bounds = geometry::Rect::new(last_card_coords, *CARD_SIZE);\n\n let uncollapsed_y = uncollapsed_bounds.max_y();\n\n\n\n uncollapsed_y.saturating_sub(maximum_y)\n\n}\n\n\n", "file_path": "src/display/stack/vertical.rs", "rank": 16, "score": 95057.09055870281 }, { "content": "fn reset_style() -> impl fmt::Display {\n\n color::Fg(color::Reset)\n\n}\n\n\n", "file_path": "src/display/help.rs", "rank": 33, "score": 88467.41339775239 }, { "content": "fn key_style() -> impl fmt::Display {\n\n color::Fg(color::Cyan)\n\n}\n\n\n", "file_path": "src/display/help.rs", "rank": 34, "score": 88467.41339775239 }, { "content": "fn description_style() -> impl fmt::Display {\n\n color::Fg(color::White)\n\n}\n", "file_path": "src/display/help.rs", "rank": 35, "score": 88467.41339775239 }, { "content": "fn coords_for_area(area_id: AreaId) -> geometry::Point2D<u16> {\n\n let card_offset = geometry::vec2(CARD_SIZE.width, 0);\n\n\n\n match area_id {\n\n AreaId::Stock => *STOCK_COORDS,\n\n AreaId::Talon => *TALON_COORDS,\n\n AreaId::Foundation(suit) => {\n\n *FOUNDATION_COORDS + (card_offset + *COLUMN_OFFSET) * u16::from(u8::from(suit))\n\n }\n\n AreaId::Tableaux(index) => {\n\n *TABLEAUX_COORDS + (card_offset + *COLUMN_OFFSET) * u16::from(index)\n\n }\n\n }\n\n}\n", "file_path": "src/display/game.rs", "rank": 36, "score": 87646.1730495135 }, { "content": "#[derive(Debug)]\n\nstruct RandomShuffle;\n\n\n\nimpl Shuffle for RandomShuffle {\n\n fn create_deck() -> Vec<Card> {\n\n let mut deck = InOrderShuffle::create_deck();\n\n deck.shuffle(&mut thread_rng());\n\n deck\n\n }\n\n}\n", "file_path": "src/model/dealer.rs", "rank": 37, "score": 82412.76205037526 }, { "content": 
"#[derive(Debug)]\n\nstruct InOrderShuffle;\n\n\n\nimpl Shuffle for InOrderShuffle {\n\n fn create_deck() -> Vec<Card> {\n\n Suit::values()\n\n .flat_map(|suit| Rank::values().map(move |rank| Card { rank, suit }))\n\n .collect::<Vec<_>>()\n\n }\n\n}\n\n\n", "file_path": "src/model/dealer.rs", "rank": 38, "score": 82412.76205037526 }, { "content": "// TODO: What to do if filler doesn't divide evenly?\n\nfn format_with_title(\n\n Title(text, direction): &Title,\n\n width: u16,\n\n filler: &str,\n\n title_left: &str,\n\n title_right: &str,\n\n) -> String {\n\n let white = color::Fg(color::White);\n\n let formatted_title = FormattedString::new_with_content(title_left)\n\n .push_formatted_content(text)\n\n .push_formatting(white)\n\n .push_content(title_right);\n\n\n\n let available_len: u16 = usize::from(width)\n\n .saturating_sub(formatted_title.len())\n\n .try_into()\n\n .unwrap();\n\n\n\n let (left_len, right_len) = match direction {\n\n Direction::Left => (0, available_len),\n", "file_path": "src/display/frame.rs", "rank": 39, "score": 80577.3860965932 }, { "content": "#[derive(Debug)]\n\nstruct AutoWinDealer;\n\n\n\nimpl Dealer for AutoWinDealer {\n\n fn deal_game(&self, settings: &settings::GameSettings) -> Game {\n\n let stock = area::stock::UnselectedStock::create(vec![], settings);\n\n let talon = area::talon::UnselectedTalon::create(vec![], 0);\n\n\n\n let mut tableaux_areas = (0..settings.tableaux_len)\n\n .map(|index| area::tableaux::UnselectedTableaux::create(index, 0, vec![]))\n\n .collect::<Vec<_>>();\n\n\n\n let mut foundation_areas = Suit::values()\n\n .map(|suit| {\n\n let cards = Rank::values()\n\n .map(|rank| Card { suit, rank })\n\n .collect::<Vec<_>>();\n\n area::foundation::UnselectedFoundation::create(suit, cards, settings)\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let mut areas: Vec<Box<dyn area::UnselectedArea>> = vec![stock, talon];\n\n areas.append(&mut foundation_areas);\n\n areas.append(&mut tableaux_areas);\n\n\n\n let areas = area_list::AreaList::new(areas).expect(\"Unable to create AreaList\");\n\n Game::new(areas)\n\n }\n\n}\n\n\n", "file_path": "src/model/dealer.rs", "rank": 40, "score": 80318.6023490606 }, { "content": "#[derive(Debug)]\n\nstruct StandardDealer<S>\n\nwhere\n\n S: Shuffle + fmt::Debug,\n\n{\n\n shuffle: S,\n\n}\n\n\n\nimpl<S> Dealer for StandardDealer<S>\n\nwhere\n\n S: Shuffle + fmt::Debug,\n\n{\n\n fn deal_game(&self, settings: &settings::GameSettings) -> Game {\n\n let mut deck = S::create_deck();\n\n\n\n let mut tableaux_areas = {\n\n let len: usize = settings.tableaux_len.into();\n\n\n\n let mut piles: Vec<Vec<Card>> = vec![vec![]; len];\n\n let indexes = (0..7).flat_map(|level| level..7);\n\n\n", "file_path": "src/model/dealer.rs", "rank": 41, "score": 78272.92743312949 }, { "content": "fn bounds_for_area(area_id: AreaId, widget_bounds: geometry::Rect<u16>) -> geometry::Rect<u16> {\n\n let top_left = coords_for_area(area_id);\n\n\n\n match area_id {\n\n AreaId::Stock => {\n\n let right = coords_for_area(AreaId::Talon).x - 1;\n\n let bottom = coords_for_area(AreaId::Tableaux(0)).y - 1;\n\n let bottom_right = geometry::point2(right, bottom);\n\n\n\n geometry::Box2D::new(top_left, bottom_right).to_rect()\n\n }\n\n AreaId::Talon => {\n\n let first_suit = Suit::try_from(0).unwrap();\n\n let right = coords_for_area(AreaId::Foundation(first_suit)).x - 1;\n\n let bottom = coords_for_area(AreaId::Tableaux(0)).y - 1;\n\n let bottom_right = geometry::point2(right, bottom);\n\n\n\n geometry::Box2D::new(top_left, bottom_right).to_rect()\n\n }\n\n 
AreaId::Foundation(suit) => {\n", "file_path": "src/display/game.rs", "rank": 42, "score": 77367.68171413998 }, { "content": "pub trait Dealer {\n\n fn deal_game(&self, settings: &settings::GameSettings) -> Game;\n\n}\n\n\n", "file_path": "src/model/dealer.rs", "rank": 43, "score": 76748.77617897294 }, { "content": "pub fn goto(point: Point2D<u16>) -> cursor::Goto {\n\n let (x, y) = point.to_tuple();\n\n cursor::Goto(x + 1, y + 1)\n\n}\n", "file_path": "src/display/geometry.rs", "rank": 44, "score": 74797.84561204474 }, { "content": "trait SnafuSelectorExt<E> {\n\n fn fail_move<T, U>(self, value: U) -> MoveResult<T, U, E>;\n\n}\n\n\n\nimpl<S, E> SnafuSelectorExt<E> for S\n\nwhere\n\n E: error::Error + snafu::ErrorCompat,\n\n S: snafu::IntoError<E, Source = snafu::NoneError>,\n\n{\n\n fn fail_move<T, U>(self, value: U) -> MoveResult<T, U, E> {\n\n MoveResult::Unmoved(value, self.into_error(snafu::NoneError))\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub enum AreaId {\n\n Stock,\n\n Talon,\n\n Foundation(Suit),\n\n Tableaux(u8),\n", "file_path": "src/model/area/mod.rs", "rank": 45, "score": 72618.73396551592 }, { "content": "pub fn both<T, U>(t: Option<T>, u: Option<U>) -> Option<(T, U)> {\n\n if let (Some(t), Some(u)) = (t, u) {\n\n Some((t, u))\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/utils/tuple.rs", "rank": 46, "score": 65461.71944184397 }, { "content": "#[derive(Debug, Clone, Copy, Deserialize, Serialize)]\n\npub enum DealerMode {\n\n AutoWin,\n\n InOrder,\n\n Random,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct GameSettings {\n\n pub dealer: DealerMode,\n\n pub draw_from_stock_len: usize,\n\n pub tableaux_len: u8,\n\n pub take_from_foundation: bool,\n\n}\n\n\n\nimpl Default for GameSettings {\n\n fn default() -> Self {\n\n GameSettings {\n\n dealer: DealerMode::Random,\n\n draw_from_stock_len: 3,\n\n tableaux_len: 7,\n\n take_from_foundation: true,\n\n }\n\n }\n\n}\n", "file_path": "src/model/settings.rs", "rank": 47, "score": 63887.11691521642 }, { "content": " config.try_into()\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct DisplaySettings {\n\n pub color: bool,\n\n pub unicode: bool,\n\n}\n\n\n\nimpl Default for DisplaySettings {\n\n fn default() -> Self {\n\n DisplaySettings {\n\n color: true,\n\n unicode: true,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/model/settings.rs", "rank": 48, "score": 63879.22430401225 }, { "content": "\n\nimpl Settings {\n\n // TODO: Return a snafu-defined error type\n\n pub fn read_from_system() -> Result<Settings, ConfigError> {\n\n let mut config = Config::new();\n\n\n\n if let Some(user_dirs) = UserDirs::new() {\n\n let mut path = user_dirs.home_dir().to_path_buf();\n\n path.push(HOME_CONFIG_FILE);\n\n config.merge(File::from(path).format(FileFormat::Toml).required(false))?;\n\n }\n\n\n\n if let Some(project_dirs) = ProjectDirs::from(QUALIFIER, ORGANIZATION, APPLICATION) {\n\n let mut path = project_dirs.config_dir().to_path_buf();\n\n path.push(CONFIG_FILE);\n\n config.merge(File::from(path).format(FileFormat::Toml).required(false))?;\n\n }\n\n\n\n config.merge(Environment::with_prefix(ENV_PREFIX).separator(ENV_SEPARATOR))?;\n\n\n", "file_path": "src/model/settings.rs", "rank": 49, "score": 63875.246707432736 }, { "content": "use config::{Config, ConfigError, Environment, File, FileFormat};\n\nuse directories::{ProjectDirs, UserDirs};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nstatic QUALIFIER: 
&str = \"net\";\n\nstatic ORGANIZATION: &str = \"upflitinglemma\";\n\nstatic APPLICATION: &str = \"klondike-rs\";\n\n\n\nstatic CONFIG_FILE: &str = \"config.toml\";\n\nstatic HOME_CONFIG_FILE: &str = \".klondike-rs.toml\";\n\n\n\nstatic ENV_PREFIX: &str = \"klondike_\";\n\nstatic ENV_SEPARATOR: &str = \"__\";\n\n\n\n#[derive(Default, Debug, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct Settings {\n\n pub display: DisplaySettings,\n\n pub game: GameSettings,\n\n}\n", "file_path": "src/model/settings.rs", "rank": 50, "score": 63874.86087991519 }, { "content": "\n\n pub fn values() -> impl Iterator<Item = Suit> {\n\n (0u8..4).map(|value| value.try_into().unwrap())\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]\n\npub struct Card {\n\n pub suit: Suit,\n\n pub rank: Rank,\n\n}\n\n\n\nimpl Card {\n\n pub fn color(&self) -> Color {\n\n self.suit.color()\n\n }\n\n}\n", "file_path": "src/model/card.rs", "rank": 51, "score": 63543.551751226114 }, { "content": " Ten,\n\n Jack,\n\n Queen,\n\n King,\n\n}\n\n\n\nimpl Rank {\n\n pub fn is_followed_by(self, other: Rank) -> bool {\n\n u8::from(self) + 1 == u8::from(other)\n\n }\n\n\n\n pub fn values() -> impl Iterator<Item = Rank> {\n\n (1u8..14).map(|value| value.try_into().unwrap())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Rank {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Rank::Ace => write!(fmt, \"A\"),\n", "file_path": "src/model/card.rs", "rank": 52, "score": 63543.53115139569 }, { "content": "use std::{convert::TryInto, fmt};\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub enum Color {\n\n Black,\n\n Red,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd, IntoPrimitive, TryFromPrimitive)]\n\n#[repr(u8)]\n\npub enum Rank {\n\n Ace = 1,\n\n Two,\n\n Three,\n\n Four,\n\n Five,\n\n Six,\n\n Seven,\n\n Eight,\n\n Nine,\n", "file_path": "src/model/card.rs", "rank": 53, "score": 63534.6242347393 }, { "content": ")]\n\n#[repr(u8)]\n\npub enum Suit {\n\n #[display(fmt = \"♠\")]\n\n Spades,\n\n #[display(fmt = \"♥\")]\n\n Hearts,\n\n #[display(fmt = \"♦\")]\n\n Diamonds,\n\n #[display(fmt = \"♣\")]\n\n Clubs,\n\n}\n\n\n\nimpl Suit {\n\n pub fn color(self) -> Color {\n\n match self {\n\n Suit::Clubs | Suit::Spades => Color::Black,\n\n Suit::Diamonds | Suit::Hearts => Color::Red,\n\n }\n\n }\n", "file_path": "src/model/card.rs", "rank": 54, "score": 63532.93158779998 }, { "content": " Rank::Jack => write!(fmt, \"J\"),\n\n Rank::Queen => write!(fmt, \"Q\"),\n\n Rank::King => write!(fmt, \"K\"),\n\n &rank => write!(fmt, \"{}\", u8::from(rank)),\n\n }\n\n }\n\n}\n\n\n\n#[derive(\n\n Copy,\n\n Clone,\n\n Debug,\n\n Eq,\n\n Hash,\n\n PartialEq,\n\n Ord,\n\n PartialOrd,\n\n IntoPrimitive,\n\n TryFromPrimitive,\n\n Display,\n", "file_path": "src/model/card.rs", "rank": 55, "score": 63529.51444076914 }, { "content": "use std::slice::Iter;\n\n\n\nuse super::card::Card;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum Orientation {\n\n Horizontal,\n\n Vertical,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct StackSelection {\n\n pub len: usize,\n\n pub held: bool,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct StackDetails {\n\n pub orientation: Orientation,\n\n pub len: usize,\n", "file_path": "src/model/stack.rs", "rank": 56, "score": 63346.046237191054 }, { "content": " self.selection\n\n .as_ref()\n\n .map(|selection| self.len.saturating_sub(selection.len))\n\n }\n\n\n\n pub fn unspread_len(&self) -> usize {\n\n 
self.visible_len.saturating_sub(self.spread_len)\n\n }\n\n\n\n pub fn held(&self) -> bool {\n\n self.selection\n\n .as_ref()\n\n .map(|selection| selection.held)\n\n .unwrap_or_default()\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Stack<'a> {\n\n pub cards: &'a [Card],\n", "file_path": "src/model/stack.rs", "rank": 57, "score": 63331.16952318721 }, { "content": " pub face_up_len: usize,\n\n pub visible_len: usize,\n\n pub spread_len: usize,\n\n pub selection: Option<StackSelection>,\n\n}\n\n\n\nimpl StackDetails {\n\n pub fn face_up_index(&self) -> usize {\n\n self.len.saturating_sub(self.face_up_len)\n\n }\n\n\n\n pub fn visible_index(&self) -> usize {\n\n self.len.saturating_sub(self.visible_len)\n\n }\n\n\n\n pub fn spread_index(&self) -> usize {\n\n self.len.saturating_sub(self.spread_len)\n\n }\n\n\n\n pub fn selection_index(&self) -> Option<usize> {\n", "file_path": "src/model/stack.rs", "rank": 58, "score": 63326.083441747374 }, { "content": " pub details: StackDetails,\n\n}\n\n\n\nimpl<'a, 'b> IntoIterator for &'b Stack<'a> {\n\n type Item = &'a Card;\n\n type IntoIter = Iter<'a, Card>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.cards.iter()\n\n }\n\n}\n", "file_path": "src/model/stack.rs", "rank": 59, "score": 63323.36598270516 }, { "content": "use crate::{\n\n model::{\n\n area::{AlreadyHeld, MaxSelection, MinSelection, NothingHeld},\n\n card::{Card, Rank},\n\n stack::{Orientation, Stack, StackDetails, StackSelection},\n\n },\n\n utils::vec::SplitOffBounded,\n\n};\n\n\n\nuse super::{\n\n Action, Area, AreaId, Held, InvalidCard, MoveResult, NothingToSelect, Result, SelectedArea,\n\n SnafuSelectorExt, UnselectedArea,\n\n};\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Selection {\n\n held_from: Option<AreaId>,\n\n len: usize,\n\n}\n\n\n", "file_path": "src/model/area/tableaux.rs", "rank": 60, "score": 58859.10393584156 }, { "content": "use crate::{\n\n model::{\n\n card::Card,\n\n settings::GameSettings,\n\n stack::{Orientation, Stack, StackDetails, StackSelection},\n\n },\n\n utils::vec::SplitOffBounded,\n\n};\n\n\n\nuse super::{\n\n Action, Area, AreaId, Held, MoveResult, NotSupported, Result, SelectedArea, SnafuSelectorExt,\n\n UnselectedArea,\n\n};\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Selection;\n\n\n\n#[derive(Debug)]\n\npub struct Stock<S> {\n\n cards: Vec<Card>,\n", "file_path": "src/model/area/stock.rs", "rank": 61, "score": 58857.48750252593 }, { "content": "use crate::{\n\n model::{\n\n card::Card,\n\n stack::{Orientation, Stack, StackDetails, StackSelection},\n\n },\n\n utils::vec::SplitOffBounded,\n\n};\n\n\n\nuse super::{\n\n Action, Area, AreaId, Held, MoveResult, NotSupported, NothingToSelect, Result, SelectedArea,\n\n SnafuSelectorExt, UnselectedArea,\n\n};\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Selection {\n\n held_from: Option<AreaId>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Talon<S> {\n", "file_path": "src/model/area/talon.rs", "rank": 62, "score": 58854.88290817471 }, { "content": "\n\nimpl SelectedArea for SelectedStock {\n\n fn deselect(self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>) {\n\n let unselected = Box::new(self.with_selection(()));\n\n (unselected, None)\n\n }\n\n\n\n fn activate(&mut self) -> Result<Option<Action>> {\n\n if self.cards.is_empty() {\n\n Ok(Some(Action::Restock))\n\n } else {\n\n Ok(Some(Action::Draw(self.draw_from_stock_len)))\n\n }\n\n }\n\n\n\n fn pick_up(&mut self) -> Result {\n\n NotSupported {\n\n message: \"Cards in this area cannot be held\",\n\n }\n\n .fail()\n", 
"file_path": "src/model/area/stock.rs", "rank": 63, "score": 58847.29510810332 }, { "content": "\n\n fn pick_up(&mut self) -> Result {\n\n ensure!(self.selection.held_from.is_none(), AlreadyHeld);\n\n\n\n ensure!(\n\n !self.cards.is_empty(),\n\n NothingToSelect {\n\n message: \"Empty area\",\n\n }\n\n );\n\n\n\n ensure!(\n\n self.revealed_len > 0,\n\n NothingToSelect {\n\n message: \"No visible cards\",\n\n }\n\n );\n\n\n\n self.selection.held_from = Some(self.id());\n\n Ok(())\n", "file_path": "src/model/area/tableaux.rs", "rank": 64, "score": 58845.13301453336 }, { "content": " message: format!(\"Cannot place cards from area: {:?}\", held.source),\n\n }\n\n .fail()\n\n }\n\n }\n\n\n\n fn give_cards(&mut self, mut held: Held) -> MoveResult<(), Held> {\n\n match self.validate_cards(&held) {\n\n Ok(_) => {\n\n self.cards.append(&mut held.cards);\n\n MoveResult::Moved(())\n\n }\n\n Err(error) => MoveResult::Unmoved(held, error),\n\n }\n\n }\n\n\n\n fn take_cards(&mut self, len: usize) -> Held {\n\n let cards = self.cards.split_off_bounded(len);\n\n\n\n Held {\n", "file_path": "src/model/area/stock.rs", "rank": 65, "score": 58844.80783667115 }, { "content": " match self.give_cards(held) {\n\n MoveResult::Moved(()) => MoveResult::Moved(Box::new(self.with_selection(Selection {\n\n held_from: Some(source),\n\n len,\n\n }))),\n\n MoveResult::Unmoved(held, error) => MoveResult::Unmoved((self, held), error),\n\n }\n\n }\n\n}\n\n\n\nimpl SelectedArea for SelectedTableaux {\n\n fn deselect(mut self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>) {\n\n let held = if let Some(source) = self.selection.held_from {\n\n Some(self.take_cards(self.selection.len, source))\n\n } else {\n\n None\n\n };\n\n\n\n let unselected = Box::new(self.with_selection(()));\n\n\n", "file_path": "src/model/area/tableaux.rs", "rank": 66, "score": 58844.80694116617 }, { "content": " if !self.cards.is_empty() {\n\n MoveResult::Moved(Box::new(self.with_selection(Selection {\n\n held_from: None,\n\n len: 1,\n\n })))\n\n } else {\n\n NothingToSelect {\n\n message: \"Empty area\",\n\n }\n\n .fail_move(self)\n\n }\n\n }\n\n\n\n fn select_with_held(\n\n mut self: Box<Self>,\n\n held: Held,\n\n ) -> MoveResult<Box<dyn SelectedArea>, (Box<dyn UnselectedArea>, Held)> {\n\n let source = held.source;\n\n let len = held.cards.len();\n\n\n", "file_path": "src/model/area/tableaux.rs", "rank": 67, "score": 58844.42565012631 }, { "content": " fn take_cards(&mut self, len: usize, source: AreaId) -> Held {\n\n let cards = self.cards.split_off_bounded(len);\n\n self.revealed_len -= cards.len();\n\n\n\n Held { source, cards }\n\n }\n\n\n\n fn as_stack(&self, mode: Option<Selection>) -> Stack<'_> {\n\n Stack {\n\n cards: &self.cards,\n\n details: StackDetails {\n\n orientation: Orientation::Vertical,\n\n len: self.cards.len(),\n\n face_up_len: self.revealed_len,\n\n visible_len: self.cards.len(),\n\n spread_len: self.revealed_len,\n\n selection: mode.map(|selection| StackSelection {\n\n len: selection.len,\n\n held: selection.held_from.is_some(),\n\n }),\n", "file_path": "src/model/area/tableaux.rs", "rank": 68, "score": 58844.281092782 }, { "content": " NothingToSelect {\n\n message: \"Empty area\",\n\n }\n\n .fail_move(self)\n\n }\n\n }\n\n\n\n fn select_with_held(\n\n mut self: Box<Self>,\n\n held: Held,\n\n ) -> MoveResult<Box<dyn SelectedArea>, (Box<dyn UnselectedArea>, Held)> {\n\n let source = held.source;\n\n\n\n match self.give_cards(held) {\n\n MoveResult::Moved(()) => MoveResult::Moved(Box::new(self.with_selection(Selection {\n\n 
held_from: Some(source),\n\n }))),\n\n MoveResult::Unmoved(held, error) => MoveResult::Unmoved((self, held), error),\n\n }\n\n }\n", "file_path": "src/model/area/talon.rs", "rank": 69, "score": 58844.272138384295 }, { "content": " NotSupported {\n\n message: format!(\"Cannot place cards from area: {:?}\", held.source),\n\n }\n\n .fail()\n\n }\n\n }\n\n\n\n fn give_cards(&mut self, mut held: Held) -> MoveResult<(), Held> {\n\n match self.validate_cards(&held) {\n\n Ok(_) => {\n\n if held.source == AreaId::Stock {\n\n self.fanned_len = held.cards.len();\n\n } else {\n\n self.fanned_len += held.cards.len();\n\n }\n\n\n\n self.cards.append(&mut held.cards);\n\n MoveResult::Moved(())\n\n }\n\n Err(error) => MoveResult::Unmoved(held, error),\n", "file_path": "src/model/area/talon.rs", "rank": 70, "score": 58843.94858173009 }, { "content": " (unselected, held)\n\n }\n\n\n\n fn activate(&mut self) -> Result<Option<Action>> {\n\n if self.selection.held_from.is_some() {\n\n self.put_down()?;\n\n Ok(None)\n\n } else if self.revealed_len > 0 {\n\n self.pick_up()?;\n\n Ok(None)\n\n } else if !self.cards.is_empty() {\n\n self.revealed_len += 1;\n\n Ok(None)\n\n } else {\n\n NothingToSelect {\n\n message: \"Empty area\",\n\n }\n\n .fail()\n\n }\n\n }\n", "file_path": "src/model/area/tableaux.rs", "rank": 71, "score": 58843.23623316667 }, { "content": " }\n\n }\n\n\n\n fn take_cards(&mut self, len: usize, source: AreaId) -> Held {\n\n let cards = self.cards.split_off_bounded(len);\n\n self.fanned_len = self.fanned_len.saturating_sub(cards.len());\n\n\n\n Held { source, cards }\n\n }\n\n\n\n fn as_stack(&self, mode: Option<Selection>) -> Stack<'_> {\n\n let cards_len = self.cards.len();\n\n\n\n Stack {\n\n cards: &self.cards,\n\n details: StackDetails {\n\n orientation: Orientation::Horizontal,\n\n len: cards_len,\n\n face_up_len: cards_len,\n\n visible_len: self.fanned_len + 1,\n", "file_path": "src/model/area/talon.rs", "rank": 72, "score": 58842.70470633859 }, { "content": " target_vec.push(target_area);\n\n target_vec.extend(areas_to_move.into_iter());\n\n\n\n Err(error).context(SelectionError {\n\n area_id: target_area_id,\n\n })\n\n }\n\n }\n\n }\n\n\n\n pub fn activate_selected(&mut self) -> Result<Vec<AreaId>> {\n\n let selected_area = self.selected_mut();\n\n let action = selected_area.activate().context(UnableToActivate {\n\n area_id: selected_area.id(),\n\n })?;\n\n\n\n match action {\n\n Some(Action::Draw(len)) => {\n\n // Take the next `len` cards from the stock. 
We reverse the held cards because they're\n\n // being drawn one-by-one into the talon, so the first drawn is at the bottom of the\n", "file_path": "src/model/area_list.rs", "rank": 73, "score": 58842.39907852755 }, { "content": "#[derive(Debug)]\n\npub struct Tableaux<S> {\n\n index: u8,\n\n cards: Vec<Card>,\n\n revealed_len: usize,\n\n selection: S,\n\n}\n\n\n\npub type UnselectedTableaux = Tableaux<()>;\n\npub type SelectedTableaux = Tableaux<Selection>;\n\n\n\nimpl<S> Tableaux<S> {\n\n fn id(&self) -> AreaId {\n\n AreaId::Tableaux(self.index)\n\n }\n\n\n\n fn accepts_cards(&self, held: &Held) -> Result {\n\n if held.source == self.id() {\n\n // We'll always take back our own cards.\n\n Ok(())\n", "file_path": "src/model/area/tableaux.rs", "rank": 74, "score": 58842.02005404961 }, { "content": "use snafu;\n\nuse std::{error, fmt};\n\n\n\nuse super::{\n\n card::{Card, Suit},\n\n stack::Stack,\n\n};\n\n\n\npub mod foundation;\n\npub mod stock;\n\npub mod tableaux;\n\npub mod talon;\n\n\n\n#[derive(Debug, Snafu)]\n\npub enum Error {\n\n #[snafu(display(\"Invalid card: {}\", message))]\n\n InvalidCard { message: String },\n\n\n\n #[snafu(display(\"Too many cards: {}\", message))]\n\n TooManyCards { message: String },\n", "file_path": "src/model/area/mod.rs", "rank": 75, "score": 58840.61053630597 }, { "content": "\n\nimpl Area for UnselectedStock {\n\n fn id(&self) -> AreaId {\n\n Stock::id(self)\n\n }\n\n\n\n fn is_selected(&self) -> bool {\n\n false\n\n }\n\n\n\n fn is_held(&self) -> bool {\n\n false\n\n }\n\n\n\n fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> {\n\n Stock::give_cards(self, held)\n\n }\n\n\n\n fn take_cards(&mut self, len: usize) -> Held {\n\n Stock::take_cards(self, len)\n", "file_path": "src/model/area/stock.rs", "rank": 76, "score": 58840.3460724463 }, { "content": " draw_from_stock_len: usize,\n\n selection: S,\n\n}\n\n\n\npub type UnselectedStock = Stock<()>;\n\npub type SelectedStock = Stock<Selection>;\n\n\n\nimpl<S> Stock<S> {\n\n fn id(&self) -> AreaId {\n\n AreaId::Stock\n\n }\n\n\n\n fn validate_cards(&self, held: &Held) -> Result {\n\n if held.source == self.id() || held.source == AreaId::Talon {\n\n // We'll always take back our own cards, and we'll allow cards from the talon to be\n\n // replaced on us.\n\n Ok(())\n\n } else {\n\n // But no cards from anywhere else.\n\n NotSupported {\n", "file_path": "src/model/area/stock.rs", "rank": 77, "score": 58840.178155912625 }, { "content": " cards: Vec<Card>,\n\n fanned_len: usize,\n\n selection: S,\n\n}\n\n\n\npub type UnselectedTalon = Talon<()>;\n\npub type SelectedTalon = Talon<Selection>;\n\n\n\nimpl<S> Talon<S> {\n\n fn id(&self) -> AreaId {\n\n AreaId::Talon\n\n }\n\n\n\n fn validate_cards(&self, held: &Held) -> Result {\n\n if held.source == self.id() || held.source == AreaId::Stock {\n\n // We'll always take back our own cards, and we'll allow cards from the stock to be\n\n // replaced onto us.\n\n Ok(())\n\n } else {\n\n // But no cards from anywhere else.\n", "file_path": "src/model/area/talon.rs", "rank": 78, "score": 58840.150958716295 }, { "content": " source: AreaId::Stock,\n\n cards,\n\n }\n\n }\n\n\n\n fn as_stack(&self, mode: Option<Selection>) -> Stack<'_> {\n\n Stack {\n\n cards: &self.cards,\n\n details: StackDetails {\n\n orientation: Orientation::Horizontal,\n\n len: self.cards.len(),\n\n face_up_len: 0,\n\n visible_len: 2,\n\n spread_len: 1,\n\n selection: mode.map(|_| StackSelection {\n\n len: 1,\n\n held: false,\n\n }),\n\n },\n\n }\n", "file_path": "src/model/area/stock.rs", 
"rank": 79, "score": 58839.85214733343 }, { "content": " }\n\n\n\n fn as_stack(&self) -> Stack<'_> {\n\n self.as_stack(Some(self.selection))\n\n }\n\n\n\n fn as_area(&self) -> &dyn Area {\n\n self\n\n }\n\n\n\n fn as_area_mut(&mut self) -> &mut dyn Area {\n\n self\n\n }\n\n}\n\n\n\nimpl UnselectedArea for UnselectedTalon {\n\n fn select(self: Box<Self>) -> MoveResult<Box<dyn SelectedArea>, Box<dyn UnselectedArea>> {\n\n if !self.cards.is_empty() {\n\n MoveResult::Moved(Box::new(self.with_selection(Selection { held_from: None })))\n\n } else {\n", "file_path": "src/model/area/talon.rs", "rank": 80, "score": 58839.64503466981 }, { "content": "}\n\n\n\nimpl SelectedArea for SelectedTalon {\n\n fn deselect(mut self: Box<Self>) -> (Box<dyn UnselectedArea>, Option<Held>) {\n\n let held = if let Some(source) = self.selection.held_from {\n\n Some(self.take_cards(1, source))\n\n } else {\n\n None\n\n };\n\n\n\n let unselected = Box::new(self.with_selection(()));\n\n\n\n (unselected, held)\n\n }\n\n\n\n fn activate(&mut self) -> Result<Option<Action>> {\n\n if self.selection.held_from.is_some() {\n\n self.put_down()?;\n\n } else {\n\n self.pick_up()?;\n", "file_path": "src/model/area/talon.rs", "rank": 81, "score": 58839.48515430719 }, { "content": " } else if let Some(card) = held.cards.first() {\n\n if let Some(tableaux_card) = self.cards.last() {\n\n // TODO: Check that the pile itself is legit.\n\n ensure!(\n\n self.revealed_len > 0\n\n && card.rank.is_followed_by(tableaux_card.rank)\n\n && card.color() != tableaux_card.color(),\n\n InvalidCard {\n\n message: format!(\n\n \"Card does not follow: card: {:?}, top: {:?}\",\n\n card, tableaux_card\n\n )\n\n }\n\n );\n\n Ok(())\n\n } else {\n\n ensure!(\n\n card.rank == Rank::King,\n\n InvalidCard {\n\n message: format!(\"Card does not follow: card: {:?}, top: empty\", card)\n", "file_path": "src/model/area/tableaux.rs", "rank": 82, "score": 58839.38100925839 }, { "content": " fn take_cards(&mut self, len: usize) -> Held {\n\n Stock::take_cards(self, len)\n\n }\n\n\n\n fn take_all_cards(&mut self) -> Held {\n\n Stock::take_cards(self, self.cards.len())\n\n }\n\n\n\n fn peek_top_card(&self) -> Option<&Card> {\n\n self.cards.last()\n\n }\n\n\n\n fn as_stack(&self) -> Stack<'_> {\n\n self.as_stack(Some(self.selection))\n\n }\n\n\n\n fn as_area(&self) -> &dyn Area {\n\n self\n\n }\n\n\n", "file_path": "src/model/area/stock.rs", "rank": 83, "score": 58839.05200079677 }, { "content": " spread_len: self.fanned_len,\n\n selection: mode.map(|selection| StackSelection {\n\n len: 1,\n\n held: selection.held_from.is_some(),\n\n }),\n\n },\n\n }\n\n }\n\n\n\n fn with_selection<T>(self, selection: T) -> Talon<T> {\n\n Talon {\n\n cards: self.cards,\n\n fanned_len: self.fanned_len,\n\n selection,\n\n }\n\n }\n\n}\n\n\n\nimpl UnselectedTalon {\n\n pub fn create(cards: Vec<Card>, fanned_len: usize) -> Box<dyn UnselectedArea> {\n", "file_path": "src/model/area/talon.rs", "rank": 84, "score": 58838.795372171044 }, { "content": " }\n\n\n\n fn put_down(&mut self) -> Result {\n\n ensure!(self.selection.held_from.is_some(), NothingHeld);\n\n self.selection.held_from = None;\n\n Ok(())\n\n }\n\n\n\n fn select_more(&mut self) -> Result {\n\n ensure!(self.selection.len < self.revealed_len, MaxSelection);\n\n self.selection.len += 1;\n\n Ok(())\n\n }\n\n\n\n fn select_less(&mut self) -> Result {\n\n ensure!(self.selection.len > 1, MinSelection);\n\n self.selection.len -= 1;\n\n Ok(())\n\n }\n\n\n\n fn held_from(&self) -> Option<AreaId> {\n\n self.selection.held_from\n\n 
}\n\n}\n", "file_path": "src/model/area/tableaux.rs", "rank": 85, "score": 58838.56003023873 }, { "content": " self.selection.held_from.is_some()\n\n }\n\n\n\n fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> {\n\n self.selection.held_from = None;\n\n Talon::give_cards(self, held)\n\n }\n\n\n\n fn take_cards(&mut self, len: usize) -> Held {\n\n let source = self.selection.held_from.take().unwrap_or_else(|| self.id());\n\n self.take_cards(len, source)\n\n }\n\n\n\n fn take_all_cards(&mut self) -> Held {\n\n let source = self.selection.held_from.take().unwrap_or_else(|| self.id());\n\n self.take_cards(self.cards.len(), source)\n\n }\n\n\n\n fn peek_top_card(&self) -> Option<&Card> {\n\n self.cards.last()\n", "file_path": "src/model/area/talon.rs", "rank": 86, "score": 58838.31209456035 }, { "content": " }\n\n );\n\n Ok(())\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n fn give_cards(&mut self, mut held: Held) -> MoveResult<(), Held> {\n\n match self.accepts_cards(&held) {\n\n Ok(_) => {\n\n self.revealed_len += held.cards.len();\n\n self.cards.append(&mut held.cards);\n\n MoveResult::Moved(())\n\n }\n\n Err(error) => MoveResult::Unmoved(held, error),\n\n }\n\n }\n\n\n", "file_path": "src/model/area/tableaux.rs", "rank": 87, "score": 58838.065476404096 }, { "content": "\n\n fn peek_top_card(&self) -> Option<&Card> {\n\n self.cards.last()\n\n }\n\n\n\n fn as_stack(&self) -> Stack<'_> {\n\n self.as_stack(Some(self.selection))\n\n }\n\n\n\n fn as_area(&self) -> &dyn Area {\n\n self\n\n }\n\n\n\n fn as_area_mut(&mut self) -> &mut dyn Area {\n\n self\n\n }\n\n}\n\n\n\nimpl UnselectedArea for UnselectedTableaux {\n\n fn select(self: Box<Self>) -> MoveResult<Box<dyn SelectedArea>, Box<dyn UnselectedArea>> {\n", "file_path": "src/model/area/tableaux.rs", "rank": 88, "score": 58837.90178449801 }, { "content": " fn as_area_mut(&mut self) -> &mut dyn Area {\n\n self\n\n }\n\n}\n\n\n\nimpl UnselectedArea for UnselectedStock {\n\n fn select(self: Box<Self>) -> MoveResult<Box<dyn SelectedArea>, Box<dyn UnselectedArea>> {\n\n MoveResult::Moved(Box::new(self.with_selection(Selection)))\n\n }\n\n\n\n fn select_with_held(\n\n self: Box<Self>,\n\n held: Held,\n\n ) -> MoveResult<Box<dyn SelectedArea>, (Box<dyn UnselectedArea>, Held)> {\n\n NotSupported {\n\n message: \"Cards in this area cannot be held\",\n\n }\n\n .fail_move((self, held))\n\n }\n\n}\n", "file_path": "src/model/area/stock.rs", "rank": 89, "score": 58837.28951903088 }, { "content": " fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> {\n\n self.selection.held_from = None;\n\n self.selection.len = 1;\n\n\n\n Tableaux::give_cards(self, held)\n\n }\n\n\n\n fn take_cards(&mut self, len: usize) -> Held {\n\n let source = self.selection.held_from.take().unwrap_or_else(|| self.id());\n\n self.selection.len = 1;\n\n\n\n self.take_cards(len, source)\n\n }\n\n\n\n fn take_all_cards(&mut self) -> Held {\n\n let source = self.selection.held_from.take().unwrap_or_else(|| self.id());\n\n self.selection.len = 1;\n\n\n\n self.take_cards(self.cards.len(), source)\n\n }\n", "file_path": "src/model/area/tableaux.rs", "rank": 90, "score": 58837.148704056584 }, { "content": " // The stock should always accept cards from the talon, so no need to handle putting\n\n // them back on failure; just blow up.\n\n self.get_by_area_id_mut(AreaId::Stock)?\n\n .give_cards(held)\n\n .into_result()\n\n .context(SelectionError {\n\n area_id: AreaId::Stock,\n\n })?;\n\n\n\n Ok(vec![AreaId::Stock, AreaId::Talon])\n\n }\n\n None => 
Ok(vec![selected_area.id()]),\n\n }\n\n }\n\n\n\n pub fn return_held(&mut self) -> Result<Vec<AreaId>> {\n\n if let Some(original_area_id) = self.selected().held_from() {\n\n let affected_area_ids = self.move_selection(original_area_id)?;\n\n\n\n if !affected_area_ids.is_empty() {\n", "file_path": "src/model/area_list.rs", "rank": 91, "score": 58836.705695830016 }, { "content": " }\n\n\n\n fn take_cards(&mut self, len: usize) -> Held {\n\n self.take_cards(len, self.id())\n\n }\n\n\n\n fn take_all_cards(&mut self) -> Held {\n\n self.take_cards(self.cards.len(), self.id())\n\n }\n\n\n\n fn peek_top_card(&self) -> Option<&Card> {\n\n self.cards.last()\n\n }\n\n\n\n fn as_stack(&self) -> Stack<'_> {\n\n self.as_stack(None)\n\n }\n\n\n\n fn as_area(&self) -> &dyn Area {\n\n self\n", "file_path": "src/model/area/tableaux.rs", "rank": 92, "score": 58836.5492761833 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub struct Held {\n\n pub source: AreaId,\n\n pub cards: Vec<Card>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Action {\n\n Draw(usize),\n\n Restock,\n\n}\n\n\n", "file_path": "src/model/area/mod.rs", "rank": 93, "score": 58836.096215822334 }, { "content": "\n\n fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> {\n\n Talon::give_cards(self, held)\n\n }\n\n\n\n fn take_cards(&mut self, len: usize) -> Held {\n\n self.take_cards(len, self.id())\n\n }\n\n\n\n fn take_all_cards(&mut self) -> Held {\n\n self.take_cards(self.cards.len(), self.id())\n\n }\n\n\n\n fn peek_top_card(&self) -> Option<&Card> {\n\n self.cards.last()\n\n }\n\n\n\n fn as_stack(&self) -> Stack<'_> {\n\n self.as_stack(None)\n\n }\n", "file_path": "src/model/area/talon.rs", "rank": 94, "score": 58835.881568820936 }, { "content": "use itertools::Itertools;\n\nuse snafu::{OptionExt, ResultExt};\n\nuse std::{collections::HashMap, fmt};\n\n\n\nuse crate::utils::vec::SplitOffAround;\n\n\n\nuse super::area::{\n\n self, move_selection, Action, Area, AreaId, MoveResult, SelectedArea, SelectionMove,\n\n UnselectedArea,\n\n};\n\n\n\n#[derive(Debug, Snafu)]\n\npub enum Error {\n\n #[snafu(display(\"Duplicate area ids: {:?}\", area_ids))]\n\n DuplicateAreaIds { area_ids: Vec<AreaId> },\n\n\n\n #[snafu(display(\"Unknown area id: {:?}\", area_id))]\n\n UnknownAreaId { area_id: AreaId },\n\n\n\n #[snafu(display(\"Unable to activate area {:?}: {}\", area_id, source))]\n", "file_path": "src/model/area_list.rs", "rank": 95, "score": 58835.548697217484 }, { "content": " }\n\n}\n\n\n\nimpl Area for SelectedStock {\n\n fn id(&self) -> AreaId {\n\n Stock::id(self)\n\n }\n\n\n\n fn is_selected(&self) -> bool {\n\n true\n\n }\n\n\n\n fn is_held(&self) -> bool {\n\n false\n\n }\n\n\n\n fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> {\n\n Stock::give_cards(self, held)\n\n }\n\n\n", "file_path": "src/model/area/stock.rs", "rank": 96, "score": 58835.48297167317 }, { "content": " }\n\n fn put_down(&mut self) -> Result {\n\n NotSupported {\n\n message: \"Cards in this area cannot be held\",\n\n }\n\n .fail()\n\n }\n\n fn select_more(&mut self) -> Result {\n\n NotSupported {\n\n message: \"Cards in this area cannot be held\",\n\n }\n\n .fail()\n\n }\n\n fn select_less(&mut self) -> Result {\n\n NotSupported {\n\n message: \"Cards in this area cannot be held\",\n\n }\n\n .fail()\n\n }\n\n\n\n fn held_from(&self) -> Option<AreaId> {\n\n None\n\n }\n\n}\n", "file_path": "src/model/area/stock.rs", "rank": 97, "score": 58835.3960029786 }, { "content": " selection: (),\n\n })\n\n }\n\n}\n\n\n\nimpl Area for UnselectedTableaux {\n\n fn 
id(&self) -> AreaId {\n\n Tableaux::id(self)\n\n }\n\n\n\n fn is_selected(&self) -> bool {\n\n false\n\n }\n\n\n\n fn is_held(&self) -> bool {\n\n false\n\n }\n\n\n\n fn give_cards(&mut self, held: Held) -> MoveResult<(), Held> {\n\n Tableaux::give_cards(self, held)\n", "file_path": "src/model/area/tableaux.rs", "rank": 98, "score": 58835.29115011035 }, { "content": " // pile.\n\n let mut held = self.get_by_area_id_mut(AreaId::Stock)?.take_cards(len);\n\n held.cards.reverse();\n\n\n\n // The talon should always accept cards from the stock, so no need to handle putting\n\n // them back on failure; just blow up.\n\n self.get_by_area_id_mut(AreaId::Talon)?\n\n .give_cards(held)\n\n .into_result()\n\n .context(SelectionError {\n\n area_id: AreaId::Talon,\n\n })?;\n\n\n\n Ok(vec![AreaId::Stock, AreaId::Talon])\n\n }\n\n Some(Action::Restock) => {\n\n // Flip the talon onto the stock.\n\n let mut held = self.get_by_area_id_mut(AreaId::Talon)?.take_all_cards();\n\n held.cards.reverse();\n\n\n", "file_path": "src/model/area_list.rs", "rank": 99, "score": 58835.27327076755 } ]
Rust
benches/predict.rs
moisesmcardona/rav1e
5e9d1b0754879eba2f59192af9c0d016ef380eb5
use criterion::*;
use rand::{ChaChaRng, Rng, SeedableRng};
use rav1e::partition::BlockSize;
use rav1e::predict::{Block4x4, Intra};

pub const MAX_ITER: usize = 50000;
pub const BLOCK_SIZE: BlockSize = BlockSize::BLOCK_32X32;

pub fn generate_block(rng: &mut ChaChaRng) -> (Vec<u16>, Vec<u16>, Vec<u16>) {
  let block = vec![0u16; BLOCK_SIZE.width() * BLOCK_SIZE.height()];
  let above_context: Vec<u16> = (0..BLOCK_SIZE.height()).map(|_| rng.gen()).collect();
  let left_context: Vec<u16> = (0..BLOCK_SIZE.width()).map(|_| rng.gen()).collect();
  (block, above_context, left_context)
}

pub fn generate_block_u8(rng: &mut ChaChaRng) -> (Vec<u8>, Vec<u8>, Vec<u8>) {
  let block = vec![0u8; BLOCK_SIZE.width() * BLOCK_SIZE.height()];
  let above_context: Vec<u8> = (0..BLOCK_SIZE.height()).map(|_| rng.gen()).collect();
  let left_context: Vec<u8> = (0..BLOCK_SIZE.width()).map(|_| rng.gen()).collect();
  (block, above_context, left_context)
}

pub fn pred_bench(c: &mut Criterion) {
  c.bench_function("intra_dc_4x4", |b| intra_dc_4x4(b));
  c.bench_function("intra_dc_128_4x4_u8", |b| intra_dc_128_4x4_u8(b));
  c.bench_function("intra_dc_left_4x4", |b| intra_dc_left_4x4(b));
  c.bench_function("intra_dc_top_4x4", |b| intra_dc_top_4x4(b));
  c.bench_function("intra_h_4x4", |b| intra_h_4x4(b));
  c.bench_function("intra_v_4x4", |b| intra_v_4x4(b));
  c.bench_function("intra_paeth_4x4", |b| intra_paeth_4x4(b));
  c.bench_function("intra_smooth_4x4", |b| intra_smooth_4x4(b));
  c.bench_function("intra_smooth_h_4x4", |b| intra_smooth_h_4x4(b));
  c.bench_function("intra_smooth_v_4x4", |b| intra_smooth_v_4x4(b));
  c.bench_function("intra_cfl_4x4", |b| intra_cfl_4x4(b));
}

pub fn intra_dc_4x4(b: &mut Bencher) {
  let mut ra = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut ra);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_dc(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_dc_128_4x4_u8(b: &mut Bencher) {
  let mut ra = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block_u8(&mut ra);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_dc_128(&mut block, BLOCK_SIZE.width(), 8);
    }
  })
}

pub fn intra_dc_left_4x4(b: &mut Bencher) {
  let mut ra = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut ra);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_dc_left(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_dc_top_4x4(b: &mut Bencher) {
  let mut ra = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut ra);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_dc_top(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_h_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, _above, left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_h(&mut block, BLOCK_SIZE.width(), &left[..4]);
    }
  })
}

pub fn intra_v_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, _left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_v(&mut block, BLOCK_SIZE.width(), &above[..4]);
    }
  })
}

pub fn intra_paeth_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut rng);
  let above_left = unsafe { *above.as_ptr().offset(-1) };

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_paeth(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4], above_left);
    }
  })
}

pub fn intra_smooth_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_smooth(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_smooth_h_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_smooth_h(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_smooth_v_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_smooth_v(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_cfl_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, _above, _left) = generate_block(&mut rng);
  let ac: Vec<i16> = (0..(32 * 32)).map(|_| rng.gen()).collect();
  let alpha = -1 as i16;

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_cfl(&mut block, BLOCK_SIZE.width(), &ac, alpha, 8);
    }
  })
}
use criterion::*;
use rand::{ChaChaRng, Rng, SeedableRng};
use rav1e::partition::BlockSize;
use rav1e::predict::{Block4x4, Intra};

pub const MAX_ITER: usize = 50000;
pub const BLOCK_SIZE: BlockSize = BlockSize::BLOCK_32X32;

pub fn generate_block(rng: &mut ChaChaRng) -> (Vec<u16>, Vec<u16>, Vec<u16>) {
  let block = vec![0u16; BLOCK_SIZE.width() * BLOCK_SIZE.height()];
  let above_context: Vec<u16> = (0..BLOCK_SIZE.height()).map(|_| rng.gen()).collect();
  let left_context: Vec<u16> = (0..BLOCK_SIZE.width()).map(|_| rng.gen()).collect();
  (block, above_context, left_context)
}

pub fn generate_block_u8(rng: &mut ChaChaRng) -> (Vec<u8>, Vec<u8>, Vec<u8>) {
  let block = vec![0u8; BLOCK_SIZE.width() * BLOCK_SIZE.height()];
  let above_context: Vec<u8> = (0..BLOCK_SIZE.height()).map(|_| rng.gen()).collect();
  let left_context: Vec<u8> = (0..BLOCK_SIZE.width()).map(|_| rng.gen()).collect();
  (block, above_context, left_context)
}
pub fn intra_dc_4x4(b: &mut Bencher) {
  let mut ra = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut ra);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_dc(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_dc_128_4x4_u8(b: &mut Bencher) {
  let mut ra = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block_u8(&mut ra);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_dc_128(&mut block, BLOCK_SIZE.width(), 8);
    }
  })
}

pub fn intra_dc_left_4x4(b: &mut Bencher) {
  let mut ra = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut ra);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_dc_left(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_dc_top_4x4(b: &mut Bencher) {
  let mut ra = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut ra);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_dc_top(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_h_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, _above, left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_h(&mut block, BLOCK_SIZE.width(), &left[..4]);
    }
  })
}

pub fn intra_v_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, _left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_v(&mut block, BLOCK_SIZE.width(), &above[..4]);
    }
  })
}

pub fn intra_paeth_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut rng);
  let above_left = unsafe { *above.as_ptr().offset(-1) };

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_paeth(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4], above_left);
    }
  })
}

pub fn intra_smooth_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_smooth(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_smooth_h_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_smooth_h(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_smooth_v_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, above, left) = generate_block(&mut rng);

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_smooth_v(&mut block, BLOCK_SIZE.width(), &above[..4], &left[..4]);
    }
  })
}

pub fn intra_cfl_4x4(b: &mut Bencher) {
  let mut rng = ChaChaRng::from_seed([0; 32]);
  let (mut block, _above, _left) = generate_block(&mut rng);
  let ac: Vec<i16> = (0..(32 * 32)).map(|_| rng.gen()).collect();
  let alpha = -1 as i16;

  b.iter(|| {
    for _ in 0..MAX_ITER {
      Block4x4::pred_cfl(&mut block, BLOCK_SIZE.width(), &ac, alpha, 8);
    }
  })
}
pub fn pred_bench(c: &mut Criterion) {
  c.bench_function("intra_dc_4x4", |b| intra_dc_4x4(b));
  c.bench_function("intra_dc_128_4x4_u8", |b| intra_dc_128_4x4_u8(b));
  c.bench_function("intra_dc_left_4x4", |b| intra_dc_left_4x4(b));
  c.bench_function("intra_dc_top_4x4", |b| intra_dc_top_4x4(b));
  c.bench_function("intra_h_4x4", |b| intra_h_4x4(b));
  c.bench_function("intra_v_4x4", |b| intra_v_4x4(b));
  c.bench_function("intra_paeth_4x4", |b| intra_paeth_4x4(b));
  c.bench_function("intra_smooth_4x4", |b| intra_smooth_4x4(b));
  c.bench_function("intra_smooth_h_4x4", |b| intra_smooth_h_4x4(b));
  c.bench_function("intra_smooth_v_4x4", |b| intra_smooth_v_4x4(b));
  c.bench_function("intra_cfl_4x4", |b| intra_cfl_4x4(b));
}
function_block-full_function
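For context, the `pred_bench` entry point above is not run directly; it is registered with a Criterion harness in a separate file, as the benches/bench.rs snippet among the retrieved items below shows. A minimal sketch of that wiring, assuming benches/predict.rs is included as a `predict` module of a bench target built with `harness = false` (the group name `intra_prediction` follows that snippet; everything else here is illustrative, not part of this record):

use criterion::*;

mod predict; // benches/predict.rs as reconstructed above

// Register the whole group of intra-prediction benchmarks and generate main().
criterion_group!(intra_prediction, predict::pred_bench);
criterion_main!(intra_prediction);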
[ { "content": "pub fn intra_bench(c: &mut Criterion) {\n\n c.bench_functions(\n\n \"intra_dc_4x4\",\n\n vec![\n\n Fun::new(\"native\", |b, _: &Option<usize>| {\n\n predict_native::intra_dc_4x4(b)\n\n }),\n\n Fun::new(\"aom\", |b, _: &Option<usize>| {\n\n predict_intra_4x4_aom(b, highbd_dc_predictor)\n\n }),\n\n ],\n\n None\n\n );\n\n c.bench_functions(\n\n \"intra_h_4x4\",\n\n vec![\n\n Fun::new(\"native\", |b, _: &Option<usize>| {\n\n predict_native::intra_h_4x4(b)\n\n }),\n\n Fun::new(\"aom\", |b, _: &Option<usize>| {\n", "file_path": "benches/comparative/predict.rs", "rank": 0, "score": 276243.0373895044 }, { "content": "pub fn get_sad(c: &mut Criterion) {\n\n use partition::BlockSize::*;\n\n let blocks = vec![\n\n BLOCK_4X4,\n\n BLOCK_4X8,\n\n BLOCK_8X4,\n\n BLOCK_8X8,\n\n BLOCK_8X16,\n\n BLOCK_16X8,\n\n BLOCK_16X16,\n\n BLOCK_16X32,\n\n BLOCK_32X16,\n\n BLOCK_32X32,\n\n BLOCK_32X64,\n\n BLOCK_64X32,\n\n BLOCK_64X64,\n\n BLOCK_64X128,\n\n BLOCK_128X64,\n\n BLOCK_128X128,\n\n BLOCK_4X16,\n\n BLOCK_16X4,\n\n BLOCK_8X32,\n\n BLOCK_32X8,\n\n BLOCK_16X64,\n\n BLOCK_64X16,\n\n ];\n\n\n\n c.bench_function_over_inputs(\"get_sad\", bench_get_sad, blocks);\n\n}\n", "file_path": "benches/me.rs", "rank": 1, "score": 250863.11279062115 }, { "content": "pub fn av1_idct4(c: &mut Criterion) {\n\n let plain = Fun::new(\"plain\", bench_idct4);\n\n let funcs = vec![plain];\n\n\n\n c.bench_functions(\"av1_idct4_8\", funcs, 8);\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 2, "score": 245276.99752448508 }, { "content": "pub fn av1_idct8(c: &mut Criterion) {\n\n let plain = Fun::new(\"plain\", bench_idct8);\n\n let funcs = vec![plain];\n\n\n\n c.bench_functions(\"av1_idct8_8\", funcs, 8);\n\n}\n\n\n\ncriterion_group!(transform, av1_idct4, av1_idct8);\n", "file_path": "benches/transform.rs", "rank": 4, "score": 245276.9975244851 }, { "content": "pub fn intra_cfl_4x4_aom(b: &mut Bencher) {\n\n let mut rng = ChaChaRng::from_seed([0; 32]);\n\n let (mut block, _above_context, _left_context) = generate_block(&mut rng);\n\n let ac: Vec<i16> = (0..(32 * 32)).map(|_| rng.gen()).collect();\n\n let alpha = -1 as i16;\n\n\n\n b.iter(|| {\n\n for _ in 0..MAX_ITER {\n\n unsafe {\n\n cfl_predict_hbd_c(\n\n ac.as_ptr(),\n\n block.as_mut_ptr(),\n\n BLOCK_SIZE.width() as libc::ptrdiff_t,\n\n alpha as libc::c_int,\n\n 8,\n\n 4,\n\n 4\n\n );\n\n }\n\n }\n\n })\n\n}\n", "file_path": "benches/comparative/predict.rs", "rank": 18, "score": 205539.49972079936 }, { "content": "fn write_b(c: &mut Criterion) {\n\n for &tx_size in &[TxSize::TX_4X4, TxSize::TX_8X8] {\n\n for &qi in &[20, 55] {\n\n let n = format!(\"write_b_bench({:?}, {})\", tx_size, qi);\n\n c.bench_function(&n, move |b| write_b_bench(b, tx_size, qi));\n\n }\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 19, "score": 198501.11568777205 }, { "content": "// Input to this process is the array CurrFrame of reconstructed samples.\n\n// Output from this process is the array CdefFrame containing deringed samples.\n\n// The purpose of CDEF is to perform deringing based on the detected direction of blocks.\n\n// CDEF parameters are stored for each 64 by 64 block of pixels.\n\n// The CDEF filter is applied on each 8 by 8 block of pixels.\n\n// Reference: http://av1-spec.argondesign.com/av1-spec/av1-spec.html#cdef-process\n\npub fn cdef_filter_frame(fi: &FrameInvariants, rec: &mut Frame, bc: &mut BlockContext, bit_depth: usize) {\n\n\n\n // Each filter block is 64x64, except right and/or bottom for non-multiple-of-64 sizes.\n\n // FIXME: 128x128 SB 
support will break this, we need FilterBlockOffset etc.\n\n let fb_height = (fi.padded_h + 63) / 64;\n\n let fb_width = (fi.padded_w + 63) / 64;\n\n\n\n // Construct a padded copy of the reconstructed frame.\n\n let mut padded_px: [[usize; 2]; 3] = [[0; 2]; 3];\n\n for p in 0..3 {\n\n padded_px[p][0] = (fb_width*64 >> rec.planes[p].cfg.xdec) + 4;\n\n padded_px[p][1] = (fb_height*64 >> rec.planes[p].cfg.ydec) + 4;\n\n }\n\n let mut cdef_frame = Frame {\n\n planes: [\n\n Plane::new(padded_px[0][0], padded_px[0][1], rec.planes[0].cfg.xdec, rec.planes[0].cfg.ydec, 0, 0),\n\n Plane::new(padded_px[1][0], padded_px[1][1], rec.planes[1].cfg.xdec, rec.planes[1].cfg.ydec, 0, 0),\n\n Plane::new(padded_px[2][0], padded_px[2][1], rec.planes[2].cfg.xdec, rec.planes[2].cfg.ydec, 0, 0)\n\n ]\n\n };\n", "file_path": "src/cdef.rs", "rank": 20, "score": 197530.5271996713 }, { "content": "fn cfl_rdo(c: &mut Criterion) {\n\n for &bsize in &[\n\n BlockSize::BLOCK_4X4,\n\n BlockSize::BLOCK_8X8,\n\n BlockSize::BLOCK_16X16,\n\n BlockSize::BLOCK_32X32\n\n ] {\n\n let n = format!(\"cfl_rdo({:?})\", bsize);\n\n c.bench_function(&n, move |b| cfl_rdo_bench(b, bsize));\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 21, "score": 193985.36735776596 }, { "content": "fn cdef_frame(c: &mut Criterion) {\n\n let w = 128;\n\n let h = 128;\n\n let n = format!(\"cdef_frame({}, {})\", w, h);\n\n c.bench_function(&n, move |b| cdef_frame_bench(b, w, h));\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 22, "score": 193985.36735776596 }, { "content": "fn new_plane(ra: &mut ChaChaRng, width: usize, height: usize) -> Plane {\n\n let mut p = Plane::new(width, height, 0, 0, 128 + 8, 128 + 8);\n\n\n\n fill_plane(ra, &mut p);\n\n\n\n p\n\n}\n\n\n", "file_path": "benches/me.rs", "rank": 23, "score": 189983.99972465937 }, { "content": "/// Check alignment.\n\npub fn is_aligned<T>(ptr: *const T, n: usize) -> bool {\n\n return ((ptr as usize) & ((1 << n) - 1)) == 0;\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 24, "score": 189403.73772653018 }, { "content": "#[inline(always)]\n\npub fn get_mv_class(z: u32, offset: &mut u32) -> usize {\n\n let c =\n\n if z >= CLASS0_SIZE as u32 * 4096 { MV_CLASS_10 }\n\n else { log_in_base_2(z >> 3) as usize };\n\n\n\n *offset = z - mv_class_base(c);\n\n c\n\n}\n\n\n", "file_path": "src/context.rs", "rank": 25, "score": 187825.58537984864 }, { "content": "pub fn write_tx_blocks(fi: &FrameInvariants, fs: &mut FrameState,\n\n cw: &mut ContextWriter, w: &mut dyn Writer,\n\n luma_mode: PredictionMode, chroma_mode: PredictionMode, bo: &BlockOffset,\n\n bsize: BlockSize, tx_size: TxSize, tx_type: TxType, skip: bool, bit_depth: usize,\n\n cfl: CFLParams, luma_only: bool) {\n\n let bw = bsize.width_mi() / tx_size.width_mi();\n\n let bh = bsize.height_mi() / tx_size.height_mi();\n\n\n\n let PlaneConfig { xdec, ydec, .. 
} = fs.input.planes[1].cfg;\n\n let ac = &mut [0i16; 32 * 32];\n\n\n\n fs.qc.update(fi.base_q_idx, tx_size, luma_mode.is_intra(), bit_depth);\n\n\n\n for by in 0..bh {\n\n for bx in 0..bw {\n\n let tx_bo = BlockOffset {\n\n x: bo.x + bx * tx_size.width_mi(),\n\n y: bo.y + by * tx_size.height_mi()\n\n };\n\n\n", "file_path": "src/encoder.rs", "rank": 26, "score": 184095.25368717843 }, { "content": "fn fill_plane(ra: &mut ChaChaRng, plane: &mut Plane) {\n\n let stride = plane.cfg.stride;\n\n for row in plane.data.chunks_mut(stride) {\n\n for mut pixel in row {\n\n let v: u8 = ra.gen();\n\n *pixel = v as u16;\n\n }\n\n }\n\n}\n\n\n", "file_path": "benches/me.rs", "rank": 27, "score": 182375.051024195 }, { "content": "pub fn get_plane_block_size(bsize: BlockSize, subsampling_x: usize, subsampling_y: usize)\n\n -> BlockSize {\n\n ss_size_lookup[bsize as usize][subsampling_x][subsampling_y]\n\n}\n\n\n\n// Generates 4 bit field in which each bit set to 1 represents\n\n// a blocksize partition 1111 means we split 64x64, 32x32, 16x16\n\n// and 8x8. 1000 means we just split the 64x64 to 32x32\n\nstatic partition_context_lookup: [[u8; 2]; BlockSize::BLOCK_SIZES_ALL] = [\n\n [ 31, 31 ], // 4X4 - {0b11111, 0b11111}\n\n [ 31, 30 ], // 4X8 - {0b11111, 0b11110}\n\n [ 30, 31 ], // 8X4 - {0b11110, 0b11111}\n\n [ 30, 30 ], // 8X8 - {0b11110, 0b11110}\n\n [ 30, 28 ], // 8X16 - {0b11110, 0b11100}\n\n [ 28, 30 ], // 16X8 - {0b11100, 0b11110}\n\n [ 28, 28 ], // 16X16 - {0b11100, 0b11100}\n\n [ 28, 24 ], // 16X32 - {0b11100, 0b11000}\n\n [ 24, 28 ], // 32X16 - {0b11000, 0b11100}\n\n [ 24, 24 ], // 32X32 - {0b11000, 0b11000}\n\n [ 24, 16 ], // 32X64 - {0b11000, 0b10000}\n", "file_path": "src/context.rs", "rank": 28, "score": 181573.93802798985 }, { "content": "pub fn av1_idct4(input: &[i32], output: &mut [i32], range: usize) {\n\n // stage 1\n\n let stg1 = [input[0], input[2], input[1], input[3]];\n\n\n\n // stage 2\n\n let stg2 = [\n\n half_btf(COSPI_INV[32], stg1[0], COSPI_INV[32], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[32], stg1[0], -COSPI_INV[32], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[48], stg1[2], -COSPI_INV[16], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[16], stg1[2], COSPI_INV[48], stg1[3], INV_COS_BIT)\n\n ];\n\n\n\n // stage 3\n\n output[0] = clamp_value(stg2[0] + stg2[3], range);\n\n output[1] = clamp_value(stg2[1] + stg2[2], range);\n\n output[2] = clamp_value(stg2[1] - stg2[2], range);\n\n output[3] = clamp_value(stg2[0] - stg2[3], range);\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 29, "score": 176700.79874777645 }, { "content": "pub fn av1_idct8(input: &[i32], output: &mut [i32], range: usize) {\n\n // call idct4\n\n let temp_in = [input[0], input[2], input[4], input[6]];\n\n let mut temp_out: [i32; 4] = [0; 4];\n\n av1_idct4(&temp_in, &mut temp_out, range);\n\n\n\n // stage 0\n\n\n\n // stage 1\n\n let stg1 = [input[1], input[5], input[3], input[7]];\n\n\n\n // stage 2\n\n let stg2 = [\n\n half_btf(COSPI_INV[56], stg1[0], -COSPI_INV[8], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[24], stg1[1], -COSPI_INV[40], stg1[2], INV_COS_BIT),\n\n half_btf(COSPI_INV[40], stg1[1], COSPI_INV[24], stg1[2], INV_COS_BIT),\n\n half_btf(COSPI_INV[8], stg1[0], COSPI_INV[56], stg1[3], INV_COS_BIT)\n\n ];\n\n\n\n // stage 3\n", "file_path": "src/transform/inverse.rs", "rank": 30, "score": 176700.79874777645 }, { "content": "// For a transform block,\n\n// predict, transform, quantize, write coefficients to a bitstream,\n\n// dequantize, inverse-transform.\n\npub fn 
encode_tx_block(\n\n fi: &FrameInvariants, fs: &mut FrameState, cw: &mut ContextWriter,\n\n w: &mut dyn Writer, p: usize, bo: &BlockOffset, mode: PredictionMode,\n\n tx_size: TxSize, tx_type: TxType, plane_bsize: BlockSize, po: &PlaneOffset,\n\n skip: bool, bit_depth: usize, ac: &[i16], alpha: i16\n\n) -> bool {\n\n let rec = &mut fs.rec.planes[p];\n\n let PlaneConfig { stride, xdec, ydec, .. } = fs.input.planes[p].cfg;\n\n\n\n if mode.is_intra() {\n\n mode.predict_intra(&mut rec.mut_slice(po), tx_size, bit_depth, &ac, alpha);\n\n }\n\n\n\n if skip { return false; }\n\n\n\n let mut residual: AlignedArray<[i16; 64 * 64]> = UninitializedAlignedArray();\n\n let mut coeffs_storage: AlignedArray<[i32; 64 * 64]> = UninitializedAlignedArray();\n\n let mut rcoeffs: AlignedArray<[i32; 64 * 64]> = UninitializedAlignedArray();\n\n let coeffs = &mut coeffs_storage.array[..tx_size.area()];\n\n\n", "file_path": "src/encoder.rs", "rank": 31, "score": 176527.38975882166 }, { "content": "pub fn cdef_analyze_superblock(in_frame: &mut Frame,\n\n bc_global: &mut BlockContext,\n\n sbo: &SuperBlockOffset,\n\n sbo_global: &SuperBlockOffset,\n\n bit_depth: usize) -> CdefDirections {\n\n let coeff_shift = bit_depth as i32 - 8;\n\n let mut dir: CdefDirections = CdefDirections {dir: [[0; 8]; 8], var: [[0; 8]; 8]};\n\n // Each direction block is 8x8 in y, and direction computation only looks at y\n\n for by in 0..8 {\n\n for bx in 0..8 {\n\n // The bc and global SBO are only to determine frame\n\n // boundaries and skips in the event we're passing in a\n\n // single-SB copy 'frame' that represents some superblock\n\n // in the main frame.\n\n let global_block_offset = sbo_global.block_offset(bx<<1, by<<1);\n\n if global_block_offset.x < bc_global.cols && global_block_offset.y < bc_global.rows {\n\n let skip = bc_global.at(&global_block_offset).skip\n\n & bc_global.at(&sbo_global.block_offset(2*bx+1, 2*by)).skip\n\n & bc_global.at(&sbo_global.block_offset(2*bx, 2*by+1)).skip\n\n & bc_global.at(&sbo_global.block_offset(2*bx+1, 2*by+1)).skip;\n", "file_path": "src/cdef.rs", "rank": 32, "score": 175125.9604180174 }, { "content": "fn fill_frame(ra: &mut ChaChaRng, frame: &mut Frame) {\n\n for plane in frame.planes.iter_mut() {\n\n let stride = plane.cfg.stride;\n\n for row in plane.data.chunks_mut(stride) {\n\n for mut pixel in row {\n\n let v: u8 = ra.gen();\n\n *pixel = v as u16;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test_encode_decode.rs", "rank": 33, "score": 174008.1947299145 }, { "content": "fn sse_optimize(fs: &mut FrameState, bc: &mut BlockContext, bit_depth: usize) {\n\n assert!(MAX_LOOP_FILTER < 999);\n\n // i64 allows us to accumulate a total of ~ 35 bits worth of pixels\n\n assert!(\n\n fs.input.planes[0].cfg.width.ilog() + fs.input.planes[0].cfg.height.ilog()\n\n < 35\n\n );\n\n\n\n for pli in 0..PLANES {\n\n let mut v_tally: [i64; MAX_LOOP_FILTER + 2] = [0; MAX_LOOP_FILTER + 2];\n\n let mut h_tally: [i64; MAX_LOOP_FILTER + 2] = [0; MAX_LOOP_FILTER + 2];\n\n\n\n sse_plane(\n\n &fs.rec.planes[pli],\n\n &fs.input.planes[pli],\n\n &mut v_tally,\n\n &mut h_tally,\n\n pli,\n\n bc,\n\n bit_depth\n", "file_path": "src/deblock.rs", "rank": 34, "score": 173525.09099379252 }, { "content": "pub fn encode_block_b(seq: &Sequence, fi: &FrameInvariants, fs: &mut FrameState,\n\n cw: &mut ContextWriter, w: &mut dyn Writer,\n\n luma_mode: PredictionMode, chroma_mode: PredictionMode,\n\n ref_frames: &[usize; 2], mvs: &[MotionVector; 2],\n\n bsize: BlockSize, bo: &BlockOffset, skip: bool, bit_depth: usize,\n\n cfl: 
CFLParams, tx_size: TxSize, tx_type: TxType,\n\n mode_context: usize, mv_stack: &Vec<CandidateMV>) {\n\n let is_inter = !luma_mode.is_intra();\n\n if is_inter { assert!(luma_mode == chroma_mode); };\n\n let sb_size = if seq.use_128x128_superblock {\n\n BlockSize::BLOCK_128X128\n\n } else {\n\n BlockSize::BLOCK_64X64\n\n };\n\n let PlaneConfig { xdec, ydec, .. } = fs.input.planes[1].cfg;\n\n if skip {\n\n cw.bc.reset_skip_context(bo, bsize, xdec, ydec);\n\n }\n\n cw.bc.set_block_size(bo, bsize);\n\n cw.bc.set_mode(bo, bsize, luma_mode);\n", "file_path": "src/encoder.rs", "rank": 35, "score": 173301.02437152385 }, { "content": "pub fn av1_round_shift_array(arr: &mut [i32], size: usize, bit: i8) {\n\n // FIXME\n\n // #[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n\n // {\n\n // if is_x86_feature_detected!(\"sse4.1\") {\n\n // return unsafe {\n\n // x86_asm::av1_round_shift_array_sse4_1(arr, size, bit)\n\n // };\n\n // }\n\n // }\n\n av1_round_shift_array_rs(arr, size, bit)\n\n}\n\n\n", "file_path": "src/transform/mod.rs", "rank": 36, "score": 169849.20874898005 }, { "content": "pub fn write_ivf_header(output_file: &mut dyn io::Write, width: usize, height: usize, num: usize, den: usize) {\n\n let mut bw = BitWriter::endian(output_file, LittleEndian);\n\n bw.write_bytes(b\"DKIF\").unwrap();\n\n bw.write(16, 0).unwrap(); // version\n\n bw.write(16, 32).unwrap(); // version\n\n bw.write_bytes(b\"AV01\").unwrap();\n\n bw.write(16, width as u16).unwrap();\n\n bw.write(16, height as u16).unwrap();\n\n bw.write(32, num as u32).unwrap();\n\n bw.write(32, den as u32).unwrap();\n\n bw.write(32, 0).unwrap();\n\n bw.write(32, 0).unwrap();\n\n}\n\n\n", "file_path": "src/encoder.rs", "rank": 37, "score": 165397.88420822987 }, { "content": "pub fn encode_mv_component(w: &mut Writer, comp: i32, \n\n mvcomp: &mut NMVComponent, precision: MvSubpelPrecision) {\n\n assert!(comp != 0);\n\n let mut offset: u32 = 0;\n\n let sign: u32 = if comp < 0 { 1 } else { 0 };\n\n let mag: u32 = if sign == 1 { -comp as u32 } else { comp as u32 };\n\n let mv_class = get_mv_class(mag - 1, &mut offset);\n\n let d = offset >> 3; // int mv data\n\n let fr = (offset >> 1) & 3; // fractional mv data\n\n let hp = offset & 1; // high precision mv data\n\n\n\n // Sign\n\n w.symbol_with_update(sign, &mut mvcomp.sign_cdf);\n\n\n\n // Class\n\n w.symbol_with_update(mv_class as u32, &mut mvcomp.classes_cdf);\n\n\n\n // Integer bits\n\n if mv_class == MV_CLASS_0 {\n\n w.symbol_with_update(d, &mut mvcomp.class0_cdf);\n", "file_path": "src/context.rs", "rank": 38, "score": 162246.33603528736 }, { "content": "pub fn encode_block_a(seq: &Sequence,\n\n cw: &mut ContextWriter, w: &mut dyn Writer,\n\n bsize: BlockSize, bo: &BlockOffset, skip: bool) -> bool {\n\n cw.bc.set_skip(bo, bsize, skip);\n\n cw.write_skip(w, bo, skip);\n\n if !skip && seq.enable_cdef {\n\n cw.bc.cdef_coded = true;\n\n }\n\n cw.bc.cdef_coded\n\n}\n\n\n", "file_path": "src/encoder.rs", "rank": 39, "score": 162224.91750736203 }, { "content": "pub fn encode_frame(sequence: &mut Sequence, fi: &mut FrameInvariants, fs: &mut FrameState) -> Vec<u8> {\n\n let mut packet = Vec::new();\n\n if fi.show_existing_frame {\n\n //write_uncompressed_header(&mut packet, sequence, fi).unwrap();\n\n write_obus(&mut packet, sequence, fi, fs).unwrap();\n\n match fi.rec_buffer.frames[fi.frame_to_show_map_idx as usize] {\n\n Some(ref rec) => for p in 0..3 {\n\n fs.rec.planes[p].data.copy_from_slice(rec.frame.planes[p].data.as_slice());\n\n },\n\n None => (),\n\n }\n\n } else {\n\n if 
!fi.intra_only {\n\n for i in 0..INTER_REFS_PER_FRAME {\n\n fi.ref_frame_sign_bias[i] =\n\n if !sequence.enable_order_hint {\n\n false\n\n } else if let Some(ref rec) = fi.rec_buffer.frames[fi.ref_frames[i] as usize] {\n\n let hint = rec.order_hint;\n\n sequence.get_relative_dist(hint, fi.order_hint) > 0\n", "file_path": "src/encoder.rs", "rank": 40, "score": 157965.17596389714 }, { "content": "pub fn motion_compensate(fi: &FrameInvariants, fs: &mut FrameState, cw: &mut ContextWriter,\n\n luma_mode: PredictionMode, ref_frames: &[usize; 2], mvs: &[MotionVector; 2],\n\n bsize: BlockSize, bo: &BlockOffset, bit_depth: usize,\n\n luma_only: bool) {\n\n if luma_mode.is_intra() { return; }\n\n\n\n let PlaneConfig { xdec, ydec, .. } = fs.input.planes[1].cfg;\n\n\n\n // Inter mode prediction can take place once for a whole partition,\n\n // instead of each tx-block.\n\n let num_planes = 1 + if !luma_only && has_chroma(bo, bsize, xdec, ydec) { 2 } else { 0 };\n\n\n\n for p in 0..num_planes {\n\n let plane_bsize = if p == 0 { bsize }\n\n else { get_plane_block_size(bsize, xdec, ydec) };\n\n\n\n let po = bo.plane_offset(&fs.input.planes[p].cfg);\n\n let rec = &mut fs.rec.planes[p];\n\n // TODO: make more generic to handle 2xN and Nx2 MC\n\n if p > 0 && bsize == BlockSize::BLOCK_4X4 {\n", "file_path": "src/encoder.rs", "rank": 41, "score": 157357.39589185177 }, { "content": "fn cdef_frame_bench(b: &mut Bencher, w: usize, h: usize) {\n\n let config =\n\n EncoderConfig { quantizer: 100, speed: 10, ..Default::default() };\n\n let fi = FrameInvariants::new(w, h, config);\n\n let mut bc = BlockContext::new(fi.sb_width * 16, fi.sb_height * 16);\n\n let mut fs = FrameState::new(&fi);\n\n\n\n b.iter(|| cdef_filter_frame(&fi, &mut fs.rec, &mut bc, 8));\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 42, "score": 154274.57531397432 }, { "content": "pub fn update_rec_buffer(fi: &mut FrameInvariants, fs: FrameState) {\n\n let rfs = Rc::new(ReferenceFrame { order_hint: fi.order_hint, frame: fs.rec, cdfs: fs.cdfs } );\n\n for i in 0..(REF_FRAMES as usize) {\n\n if (fi.refresh_frame_flags & (1 << i)) != 0 {\n\n fi.rec_buffer.frames[i] = Some(Rc::clone(&rfs));\n\n fi.rec_buffer.deblock[i] = fs.deblock.clone();\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n fn frame_state_window() {\n\n use super::*;\n\n let config = EncoderConfig { ..Default::default() };\n\n let fi = FrameInvariants::new(1024, 1024, config);\n\n let mut fs = FrameState::new(&fi);\n\n for p in fs.rec.planes.iter_mut() {\n\n for (i, v) in p\n", "file_path": "src/encoder.rs", "rank": 43, "score": 153295.64348307686 }, { "content": "fn aom_uleb_encode(mut value: u64, coded_value: &mut [u8]) -> usize {\n\n let leb_size = aom_uleb_size_in_bytes(value);\n\n\n\n for i in 0..leb_size {\n\n let mut byte = (value & 0x7f) as u8;\n\n value >>= 7;\n\n if value != 0 { byte |= 0x80 }; // Signal that more bytes follow.\n\n coded_value[i] = byte;\n\n }\n\n\n\n leb_size\n\n}\n\n\n", "file_path": "src/encoder.rs", "rank": 44, "score": 151996.3573585442 }, { "content": "// FIXME: For now, assume tx_mode is LARGEST_TX, so var-tx is not implemented yet\n\n// but only one tx block exist for a inter mode partition.\n\npub fn write_tx_tree(fi: &FrameInvariants, fs: &mut FrameState, cw: &mut ContextWriter, w: &mut dyn Writer,\n\n luma_mode: PredictionMode, bo: &BlockOffset,\n\n bsize: BlockSize, tx_size: TxSize, tx_type: TxType, skip: bool, bit_depth: usize,\n\n luma_only: bool) {\n\n let bw = bsize.width_mi() / tx_size.width_mi();\n\n let bh = 
bsize.height_mi() / tx_size.height_mi();\n\n\n\n let PlaneConfig { xdec, ydec, .. } = fs.input.planes[1].cfg;\n\n let ac = &[0i16; 32 * 32];\n\n\n\n fs.qc.update(fi.base_q_idx, tx_size, luma_mode.is_intra(), bit_depth);\n\n\n\n let po = bo.plane_offset(&fs.input.planes[0].cfg);\n\n let has_coeff = encode_tx_block(\n\n fi, fs, cw, w, 0, &bo, luma_mode, tx_size, tx_type, bsize, &po, skip,\n\n bit_depth, ac, 0,\n\n );\n\n\n\n if luma_only { return };\n\n\n", "file_path": "src/encoder.rs", "rank": 45, "score": 151806.52688941642 }, { "content": "fn bench_get_sad(b: &mut Bencher, bs: &BlockSize) {\n\n let mut ra = ChaChaRng::from_seed([0; 32]);\n\n let bsw = bs.width();\n\n let bsh = bs.height();\n\n let w = 640;\n\n let h = 480;\n\n let input_plane = new_plane(&mut ra, w, h);\n\n let rec_plane = new_plane(&mut ra, w, h);\n\n let po = PlaneOffset { x: 0, y: 0 };\n\n\n\n let plane_org = input_plane.slice(&po);\n\n let plane_ref = rec_plane.slice(&po);\n\n\n\n b.iter(|| {\n\n let _ = me::get_sad(&plane_org, &plane_ref, bsw, bsh);\n\n })\n\n}\n\n\n", "file_path": "benches/me.rs", "rank": 46, "score": 147760.085481096 }, { "content": "fn bench_idct8(b: &mut Bencher, bit_depth: &usize) {\n\n let mut ra = ChaChaRng::from_seed([0; 32]);\n\n let input: [i32; 8] = ra.gen();\n\n let mut output = [0i32; 8];\n\n let range = bit_depth + 8;\n\n\n\n b.iter(|| {\n\n transform::av1_idct8(&input[..], &mut output[..], range);\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 47, "score": 147614.02199042134 }, { "content": "fn bench_idct4(b: &mut Bencher, bit_depth: &usize) {\n\n let mut ra = ChaChaRng::from_seed([0; 32]);\n\n let input: [i32; 4] = ra.gen();\n\n let mut output = [0i32; 4];\n\n let range = bit_depth + 8;\n\n\n\n b.iter(|| {\n\n transform::av1_idct4(&input[..], &mut output[..], range);\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 48, "score": 147614.02199042134 }, { "content": "// NOTE from libaom:\n\n// Disallow values larger than 32-bits to ensure consistent behavior on 32 and\n\n// 64 bit targets: value is typically used to determine buffer allocation size\n\n// when decoded.\n\nfn aom_uleb_size_in_bytes(mut value: u64) -> usize {\n\n let mut size = 0;\n\n loop {\n\n size += 1;\n\n value = value >> 7;\n\n if value == 0 { break; }\n\n }\n\n return size;\n\n}\n\n\n", "file_path": "src/encoder.rs", "rank": 49, "score": 147583.78994107994 }, { "content": "#[inline(always)]\n\npub fn mv_class_base(mv_class: usize) -> u32 {\n\n if mv_class != MV_CLASS_0 {\n\n (CLASS0_SIZE << (mv_class as usize + 2)) as u32 }\n\n else { 0 }\n\n}\n\n#[inline(always)]\n", "file_path": "src/context.rs", "rank": 50, "score": 147274.21495587507 }, { "content": "fn cfl_rdo_bench(b: &mut Bencher, bsize: BlockSize) {\n\n let config =\n\n EncoderConfig { quantizer: 100, speed: 10, ..Default::default() };\n\n let fi = FrameInvariants::new(1024, 1024, config);\n\n let mut fs = FrameState::new(&fi);\n\n let offset = BlockOffset { x: 1, y: 1 };\n\n b.iter(|| rdo_cfl_alpha(&mut fs, &offset, bsize, 8))\n\n}\n\n\n\ncriterion_group!(intra_prediction, predict::pred_bench,);\n\n\n\ncriterion_group!(cfl, cfl_rdo);\n\ncriterion_group!(cdef, cdef_frame);\n\ncriterion_group!(write_block, write_b);\n\ncriterion_group!(me, me::get_sad);\n\n\n\n#[cfg(feature = \"comparative_bench\")]\n\ncriterion_main!(comparative::intra_prediction);\n\n\n\n#[cfg(not(feature = \"comparative_bench\"))]\n\ncriterion_main!(write_block, intra_prediction, cdef, cfl, me, transform);\n", "file_path": "benches/bench.rs", "rank": 51, 
"score": 144364.79998236173 }, { "content": "pub fn motion_estimation(\n\n fi: &FrameInvariants, fs: &FrameState, bsize: BlockSize,\n\n bo: &BlockOffset, ref_frame: usize, pmv: &MotionVector\n\n) -> MotionVector {\n\n match fi.rec_buffer.frames[fi.ref_frames[ref_frame - LAST_FRAME] as usize] {\n\n Some(ref rec) => {\n\n let po = PlaneOffset {\n\n x: (bo.x as isize) << BLOCK_TO_PLANE_SHIFT,\n\n y: (bo.y as isize) << BLOCK_TO_PLANE_SHIFT\n\n };\n\n let range = 32 * fi.me_range_scale as isize;\n\n let blk_w = bsize.width();\n\n let blk_h = bsize.height();\n\n let border_w = 128 + blk_w as isize * 8;\n\n let border_h = 128 + blk_h as isize * 8;\n\n let mvx_min = -(bo.x as isize) * (8 * MI_SIZE) as isize - border_w;\n\n let mvx_max = (fi.w_in_b - bo.x - blk_w / MI_SIZE) as isize * (8 * MI_SIZE) as isize + border_w;\n\n let mvy_min = -(bo.y as isize) * (8 * MI_SIZE) as isize - border_h;\n\n let mvy_max = (fi.h_in_b - bo.y - blk_h / MI_SIZE) as isize * (8 * MI_SIZE) as isize + border_h;\n\n let x_lo = po.x + ((-range + (pmv.col / 8) as isize).max(mvx_min / 8));\n", "file_path": "src/me.rs", "rank": 52, "score": 143855.45003862947 }, { "content": "pub fn has_chroma(\n\n bo: &BlockOffset, bsize: BlockSize, subsampling_x: usize,\n\n subsampling_y: usize\n\n) -> bool {\n\n let bw = bsize.width_mi();\n\n let bh = bsize.height_mi();\n\n\n\n ((bo.x & 0x01) == 1 || (bw & 0x01) == 0 || subsampling_x == 0)\n\n && ((bo.y & 0x01) == 1 || (bh & 0x01) == 0 || subsampling_y == 0)\n\n}\n\n\n", "file_path": "src/context.rs", "rank": 53, "score": 143855.45003862947 }, { "content": "#[inline(always)]\n\npub fn get_sad(\n\n plane_org: &PlaneSlice, plane_ref: &PlaneSlice, blk_h: usize,\n\n blk_w: usize\n\n) -> u32 {\n\n let mut sum = 0 as u32;\n\n\n\n let org_iter = plane_org.iter_width(blk_w);\n\n let ref_iter = plane_ref.iter_width(blk_w);\n\n\n\n for (slice_org, slice_ref) in org_iter.take(blk_h).zip(ref_iter) {\n\n sum += slice_org\n\n .iter()\n\n .zip(slice_ref)\n\n .map(|(&a, &b)| (a as i32 - b as i32).abs() as u32)\n\n .sum::<u32>();\n\n }\n\n\n\n sum\n\n}\n", "file_path": "src/me.rs", "rank": 54, "score": 143855.45003862947 }, { "content": "pub fn dequantize(\n\n qindex: u8, coeffs: &[i32], rcoeffs: &mut [i32], tx_size: TxSize,\n\n bit_depth: usize\n\n) {\n\n let log_tx_scale = get_log_tx_scale(tx_size) as i32;\n\n let offset = (1 << log_tx_scale) - 1;\n\n\n\n let dc_quant = dc_q(qindex, bit_depth) as i32;\n\n let ac_quant = ac_q(qindex, bit_depth) as i32;\n\n\n\n for (i, (r, &c)) in rcoeffs.iter_mut().zip(coeffs.iter()).enumerate() {\n\n let quant = if i == 0 { dc_quant } else { ac_quant };\n\n *r = (c * quant + ((c >> 31) & offset)) >> log_tx_scale;\n\n }\n\n}\n\n\n\n// LUTS --------------------------------------------------------------------\n\nconst MINQ: usize = 0;\n\nconst MAXQ: usize = 255;\n\nconst QINDEX_RANGE: usize = MAXQ - MINQ + 1;\n", "file_path": "src/quantize.rs", "rank": 55, "score": 143855.45003862947 }, { "content": "pub fn ac_q(qindex: u8, bit_depth: usize) -> i16 {\n\n let &table = match bit_depth {\n\n 8 => &ac_qlookup_Q3,\n\n 10 => &ac_qlookup_10_Q3,\n\n 12 => &ac_qlookup_12_Q3,\n\n _ => unimplemented!()\n\n };\n\n\n\n table[qindex as usize]\n\n}\n\n\n\n#[derive(Debug, Default, Clone, Copy)]\n\npub struct QuantizationContext {\n\n log_tx_scale: i32,\n\n dc_quant: u32,\n\n dc_offset: i32,\n\n dc_mul_add: (u32, u32, u32),\n\n\n\n ac_quant: u32,\n\n ac_offset: i32,\n\n ac_mul_add: (u32, u32, u32)\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 56, "score": 141039.2263210267 }, { 
"content": "pub fn dc_q(qindex: u8, bit_depth: usize) -> i16 {\n\n let &table = match bit_depth {\n\n 8 => &dc_qlookup_Q3,\n\n 10 => &dc_qlookup_10_Q3,\n\n 12 => &dc_qlookup_12_Q3,\n\n _ => unimplemented!()\n\n };\n\n\n\n table[qindex as usize]\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 57, "score": 141039.2263210267 }, { "content": "pub fn fht16x16(\n\n input: &[i16], output: &mut [i32], stride: usize, tx_type: TxType,\n\n bit_depth: usize\n\n) {\n\n // SIMD code may assert for transform types beyond TxType::IDTX.\n\n if tx_type < TxType::IDTX {\n\n Block16x16::fwd_txfm2d(input, output, stride, tx_type, bit_depth);\n\n } else {\n\n Block16x16::fwd_txfm2d_rs(input, output, stride, tx_type, bit_depth);\n\n }\n\n}\n\n\n", "file_path": "src/transform/forward.rs", "rank": 58, "score": 140618.48739278983 }, { "content": "pub fn fht8x8(\n\n input: &[i16], output: &mut [i32], stride: usize, tx_type: TxType,\n\n bit_depth: usize\n\n) {\n\n // SIMD code may assert for transform types beyond TxType::IDTX.\n\n if tx_type < TxType::IDTX {\n\n Block8x8::fwd_txfm2d(input, output, stride, tx_type, bit_depth);\n\n } else {\n\n Block8x8::fwd_txfm2d_rs(input, output, stride, tx_type, bit_depth);\n\n }\n\n}\n\n\n", "file_path": "src/transform/forward.rs", "rank": 59, "score": 140618.48739278983 }, { "content": "pub fn fht32x32(\n\n input: &[i16], output: &mut [i32], stride: usize, tx_type: TxType,\n\n bit_depth: usize\n\n) {\n\n // SIMD code may assert for transform types that aren't TxType::DCT_DCT.\n\n if tx_type == TxType::DCT_DCT {\n\n Block32x32::fwd_txfm2d(input, output, stride, tx_type, bit_depth);\n\n } else {\n\n Block32x32::fwd_txfm2d_rs(input, output, stride, tx_type, bit_depth);\n\n }\n\n}\n", "file_path": "src/transform/forward.rs", "rank": 60, "score": 140618.48739278983 }, { "content": "pub fn fht4x4(\n\n input: &[i16], output: &mut [i32], stride: usize, tx_type: TxType,\n\n bit_depth: usize\n\n) {\n\n // SIMD code may assert for transform types beyond TxType::IDTX.\n\n if tx_type < TxType::IDTX {\n\n Block4x4::fwd_txfm2d(input, output, stride, tx_type, bit_depth);\n\n } else {\n\n Block4x4::fwd_txfm2d_rs(input, output, stride, tx_type, bit_depth);\n\n }\n\n}\n\n\n", "file_path": "src/transform/forward.rs", "rank": 61, "score": 140618.48739278983 }, { "content": "// Deblocks all edges, vertical and horizontal, in a single plane\n\npub fn deblock_plane(\n\n deblock: &DeblockState, p: &mut Plane, pli: usize, bc: &mut BlockContext,\n\n bd: usize\n\n) {\n\n let xdec = p.cfg.xdec;\n\n let ydec = p.cfg.ydec;\n\n\n\n match pli {\n\n 0 =>\n\n if deblock.levels[0] == 0 && deblock.levels[1] == 0 {\n\n return;\n\n },\n\n 1 =>\n\n if deblock.levels[2] == 0 {\n\n return;\n\n },\n\n 2 =>\n\n if deblock.levels[3] == 0 {\n\n return;\n\n },\n", "file_path": "src/deblock.rs", "rank": 62, "score": 140618.48739278983 }, { "content": "pub fn quantize_in_place(\n\n qindex: u8, coeffs: &mut [i32], tx_size: TxSize, bit_depth: usize\n\n) {\n\n let log_tx_scale = get_log_tx_scale(tx_size);\n\n\n\n let dc_quant = dc_q(qindex, bit_depth) as i32;\n\n let ac_quant = ac_q(qindex, bit_depth) as i32;\n\n\n\n // using 21/64=0.328125 as rounding offset. 
To be tuned\n\n let dc_offset = dc_quant * 21 / 64 as i32;\n\n let ac_offset = ac_quant * 21 / 64 as i32;\n\n\n\n coeffs[0] <<= log_tx_scale;\n\n coeffs[0] += coeffs[0].signum() * dc_offset;\n\n coeffs[0] /= dc_quant;\n\n\n\n for c in coeffs[1..].iter_mut() {\n\n *c <<= log_tx_scale;\n\n *c += c.signum() * ac_offset;\n\n *c /= ac_quant;\n\n }\n\n}\n\n\n", "file_path": "src/quantize.rs", "rank": 63, "score": 140618.48739278983 }, { "content": "pub fn luma_ac(\n\n ac: &mut [i16], fs: &mut FrameState, bo: &BlockOffset, bsize: BlockSize\n\n) {\n\n let PlaneConfig { xdec, ydec, .. } = fs.input.planes[1].cfg;\n\n let plane_bsize = get_plane_block_size(bsize, xdec, ydec);\n\n let po = if bsize.is_sub8x8() {\n\n bo.with_offset(-1, -1).plane_offset(&fs.input.planes[0].cfg)\n\n } else {\n\n bo.plane_offset(&fs.input.planes[0].cfg)\n\n };\n\n let rec = &fs.rec.planes[0];\n\n let luma = &rec.slice(&po);\n\n\n\n let mut sum: i32 = 0;\n\n for sub_y in 0..plane_bsize.height() {\n\n for sub_x in 0..plane_bsize.width() {\n\n let y = sub_y << ydec;\n\n let x = sub_x << xdec;\n\n let sample = ((luma.p(x, y)\n\n + luma.p(x + 1, y)\n", "file_path": "src/encoder.rs", "rank": 64, "score": 140618.48739278983 }, { "content": "pub fn y_intra_mode_to_tx_type_context(pred: PredictionMode) -> TxType {\n\n intra_mode_to_tx_type_context[pred as usize]\n\n}\n\n\n", "file_path": "src/context.rs", "rank": 65, "score": 138432.30698553778 }, { "content": "type TxfmFunc = Fn(&[i32], &mut [i32], usize, &[i8]);\n\n\n", "file_path": "src/transform/forward.rs", "rank": 66, "score": 138137.40198265907 }, { "content": "pub fn iht4x4_add(\n\n input: &[i32], output: &mut [u16], stride: usize, tx_type: TxType,\n\n bit_depth: usize\n\n) {\n\n // SIMD code may assert for transform types beyond TxType::IDTX.\n\n if tx_type < TxType::IDTX {\n\n Block4x4::inv_txfm2d_add(input, output, stride, tx_type, bit_depth);\n\n } else {\n\n Block4x4::inv_txfm2d_add_rs(input, output, stride, tx_type, bit_depth);\n\n }\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 67, "score": 137624.7051619121 }, { "content": "// Deblocks all edges in all planes of a frame\n\npub fn deblock_filter_frame(\n\n fs: &mut FrameState, bc: &mut BlockContext, bit_depth: usize\n\n) {\n\n for pli in 0..PLANES {\n\n deblock_plane(&fs.deblock, &mut fs.rec.planes[pli], pli, bc, bit_depth);\n\n }\n\n}\n\n\n", "file_path": "src/deblock.rs", "rank": 68, "score": 137624.7051619121 }, { "content": "pub fn get_tx_set(\n\n tx_size: TxSize, is_inter: bool, use_reduced_set: bool\n\n) -> TxSet {\n\n let tx_size_sqr_up = tx_size.sqr_up();\n\n let tx_size_sqr = tx_size.sqr();\n\n if tx_size_sqr > TxSize::TX_32X32 {\n\n TxSet::TX_SET_DCTONLY\n\n } else if tx_size_sqr_up == TxSize::TX_32X32 {\n\n if is_inter {\n\n TxSet::TX_SET_DCT_IDTX\n\n } else {\n\n TxSet::TX_SET_DCTONLY\n\n }\n\n } else if use_reduced_set {\n\n if is_inter {\n\n TxSet::TX_SET_DCT_IDTX\n\n } else {\n\n TxSet::TX_SET_DTT4_IDTX\n\n }\n\n } else if is_inter {\n", "file_path": "src/context.rs", "rank": 69, "score": 137624.7051619121 }, { "content": "// RDO-based mode decision\n\npub fn rdo_mode_decision(\n\n seq: &Sequence, fi: &FrameInvariants, fs: &mut FrameState,\n\n cw: &mut ContextWriter, bsize: BlockSize, bo: &BlockOffset,\n\n pmv: &MotionVector\n\n) -> RDOOutput {\n\n let mut best = EncodingSettings::default();\n\n\n\n // Get block luma and chroma dimensions\n\n let w = bsize.width();\n\n let h = bsize.height();\n\n\n\n let PlaneConfig { xdec, ydec, .. 
} = fs.input.planes[1].cfg;\n\n let is_chroma_block = has_chroma(bo, bsize, xdec, ydec);\n\n\n\n let cw_checkpoint = cw.checkpoint();\n\n\n\n // Exclude complex prediction modes at higher speed levels\n\n let intra_mode_set = if (fi.frame_type == FrameType::KEY\n\n && fi.config.speed <= 3)\n\n || (fi.frame_type == FrameType::INTER && fi.config.speed <= 1)\n", "file_path": "src/rdo.rs", "rank": 70, "score": 137624.7051619121 }, { "content": "pub fn iht32x32_add(\n\n input: &[i32], output: &mut [u16], stride: usize, tx_type: TxType,\n\n bit_depth: usize\n\n) {\n\n if tx_type < TxType::IDTX {\n\n // SIMD code may assert for transform types beyond TxType::IDTX.\n\n Block32x32::inv_txfm2d_add(input, output, stride, tx_type, bit_depth);\n\n } else {\n\n Block32x32::inv_txfm2d_add_rs(input, output, stride, tx_type, bit_depth);\n\n }\n\n}\n", "file_path": "src/transform/inverse.rs", "rank": 71, "score": 137624.7051619121 }, { "content": "pub fn forward_transform(\n\n input: &[i16], output: &mut [i32], stride: usize, tx_size: TxSize,\n\n tx_type: TxType, bit_depth: usize\n\n) {\n\n match tx_size {\n\n TxSize::TX_4X4 => fht4x4(input, output, stride, tx_type, bit_depth),\n\n TxSize::TX_8X8 => fht8x8(input, output, stride, tx_type, bit_depth),\n\n TxSize::TX_16X16 => fht16x16(input, output, stride, tx_type, bit_depth),\n\n TxSize::TX_32X32 => fht32x32(input, output, stride, tx_type, bit_depth),\n\n _ => panic!(\"unimplemented tx size\")\n\n }\n\n}\n\n\n", "file_path": "src/transform/mod.rs", "rank": 72, "score": 137624.7051619121 }, { "content": "pub fn iht16x16_add(\n\n input: &[i32], output: &mut [u16], stride: usize, tx_type: TxType,\n\n bit_depth: usize\n\n) {\n\n if tx_type < TxType::IDTX {\n\n // SSE C code asserts for transform types beyond TxType::IDTX.\n\n Block16x16::inv_txfm2d_add(input, output, stride, tx_type, bit_depth);\n\n } else {\n\n Block16x16::inv_txfm2d_add_rs(input, output, stride, tx_type, bit_depth);\n\n }\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 73, "score": 137624.7051619121 }, { "content": "pub fn iht8x8_add(\n\n input: &[i32], output: &mut [u16], stride: usize, tx_type: TxType,\n\n bit_depth: usize\n\n) {\n\n if tx_type < TxType::IDTX {\n\n Block8x8::inv_txfm2d_add(input, output, stride, tx_type, bit_depth);\n\n } else {\n\n Block8x8::inv_txfm2d_add_rs(input, output, stride, tx_type, bit_depth);\n\n }\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 74, "score": 137624.7051619121 }, { "content": "// RDO-based single level partitioning decision\n\npub fn rdo_partition_decision(\n\n seq: &Sequence, fi: &FrameInvariants, fs: &mut FrameState,\n\n cw: &mut ContextWriter, bsize: BlockSize, bo: &BlockOffset,\n\n cached_block: &RDOOutput\n\n) -> RDOOutput {\n\n let max_rd = std::f64::MAX;\n\n\n\n let mut best_partition = cached_block.part_type;\n\n let mut best_rd = cached_block.rd_cost;\n\n let mut best_pred_modes = cached_block.part_modes.clone();\n\n\n\n let cw_checkpoint = cw.checkpoint();\n\n\n\n for &partition in RAV1E_PARTITION_TYPES {\n\n // Do not re-encode results we already have\n\n if partition == cached_block.part_type && cached_block.rd_cost < max_rd {\n\n continue;\n\n }\n\n\n\n let mut rd: f64;\n", "file_path": "src/rdo.rs", "rank": 75, "score": 137624.7051619121 }, { "content": "pub fn deblock_filter_optimize(\n\n fi: &FrameInvariants, fs: &mut FrameState, bc: &mut BlockContext,\n\n bit_depth: usize\n\n) {\n\n if fi.config.speed > 3 {\n\n let q = ac_q(fi.base_q_idx, bit_depth) as i32;\n\n let level = clamp(\n\n match bit_depth 
{\n\n 8 =>\n\n if fi.frame_type == FrameType::KEY {\n\n q * 17563 - 421574 + (1 << 18 >> 1) >> 18\n\n } else {\n\n q * 6017 + 650707 + (1 << 18 >> 1) >> 18\n\n },\n\n 10 =>\n\n if fi.frame_type == FrameType::KEY {\n\n (q * 20723 + 4060632 + (1 << 20 >> 1) >> 20) - 4\n\n } else {\n\n q * 20723 + 4060632 + (1 << 20 >> 1) >> 20\n\n },\n", "file_path": "src/deblock.rs", "rank": 76, "score": 137624.7051619121 }, { "content": "pub fn rdo_cfl_alpha(\n\n fs: &mut FrameState, bo: &BlockOffset, bsize: BlockSize, bit_depth: usize\n\n) -> Option<CFLParams> {\n\n // TODO: these are only valid for 4:2:0\n\n let uv_tx_size = match bsize {\n\n BlockSize::BLOCK_4X4 | BlockSize::BLOCK_8X8 => TxSize::TX_4X4,\n\n BlockSize::BLOCK_16X16 => TxSize::TX_8X8,\n\n BlockSize::BLOCK_32X32 => TxSize::TX_16X16,\n\n _ => TxSize::TX_32X32\n\n };\n\n\n\n let mut ac = [0i16; 32 * 32];\n\n luma_ac(&mut ac, fs, bo, bsize);\n\n let best_alpha: Vec<i16> = (1..3)\n\n .map(|p| {\n\n let rec = &mut fs.rec.planes[p];\n\n let input = &fs.input.planes[p];\n\n let po = bo.plane_offset(&fs.input.planes[p].cfg);\n\n (-16i16..17i16)\n\n .min_by_key(|&alpha| {\n", "file_path": "src/rdo.rs", "rank": 77, "score": 137624.7051619121 }, { "content": "pub fn uv_intra_mode_to_tx_type_context(pred: PredictionMode) -> TxType {\n\n intra_mode_to_tx_type_context[uv2y[pred as usize] as usize]\n\n}\n\n\n\n#[derive(Clone,Copy)]\n\npub struct NMVComponent {\n\n classes_cdf: [u16; MV_CLASSES + 1],\n\n class0_fp_cdf: [[u16; MV_FP_SIZE + 1]; CLASS0_SIZE],\n\n fp_cdf: [u16; MV_FP_SIZE + 1],\n\n sign_cdf: [u16; 2 + 1],\n\n class0_hp_cdf: [u16; 2 + 1],\n\n hp_cdf: [u16; 2 + 1],\n\n class0_cdf: [u16; CLASS0_SIZE + 1],\n\n bits_cdf: [[u16; 2 + 1]; MV_OFFSET_BITS],\n\n}\n\n\n\n#[derive(Clone,Copy)]\n\npub struct NMVContext {\n\n joints_cdf: [u16; MV_JOINTS + 1],\n\n comps: [NMVComponent; 2],\n", "file_path": "src/context.rs", "rank": 78, "score": 135788.88186288378 }, { "content": "fn stride_copy(dst: &mut [u16], src: &[i32], pitch: usize) {\n\n for (dst, src) in dst.iter_mut().step_by(pitch).take(src.len()).zip(src) {\n\n *dst = *src as u16\n\n }\n\n}\n\n\n", "file_path": "src/deblock.rs", "rank": 79, "score": 135590.03904871977 }, { "content": "pub fn rdo_cdef_decision(sbo: &SuperBlockOffset, fi: &FrameInvariants,\n\n fs: &FrameState, cw: &mut ContextWriter, bit_depth: usize) -> u8 {\n\n // FIXME: 128x128 SB support will break this, we need FilterBlockOffset etc.\n\n // Construct a single-superblock-sized frame to test-filter into\n\n let sbo_0 = SuperBlockOffset { x: 0, y: 0 };\n\n let bc = &mut cw.bc;\n\n let mut cdef_output = Frame {\n\n planes: [\n\n Plane::new(64 >> fs.rec.planes[0].cfg.xdec, 64 >> fs.rec.planes[0].cfg.ydec,\n\n fs.rec.planes[0].cfg.xdec, fs.rec.planes[0].cfg.ydec, 0, 0),\n\n Plane::new(64 >> fs.rec.planes[1].cfg.xdec, 64 >> fs.rec.planes[1].cfg.ydec,\n\n fs.rec.planes[1].cfg.xdec, fs.rec.planes[1].cfg.ydec, 0, 0),\n\n Plane::new(64 >> fs.rec.planes[2].cfg.xdec, 64 >> fs.rec.planes[2].cfg.ydec,\n\n fs.rec.planes[2].cfg.xdec, fs.rec.planes[2].cfg.ydec, 0, 0),\n\n ]\n\n };\n\n // Construct a padded input\n\n let mut rec_input = Frame {\n\n planes: [\n\n Plane::new((64 >> fs.rec.planes[0].cfg.xdec)+4, (64 >> fs.rec.planes[0].cfg.ydec)+4,\n", "file_path": "src/rdo.rs", "rank": 80, "score": 135108.4053516077 }, { "content": "pub fn get_lambda(fi: &FrameInvariants, bit_depth: usize) -> f64 {\n\n let q = dc_q(fi.base_q_idx, bit_depth) as f64;\n\n\n\n // Convert q into Q0 precision, given that libaom quantizers are Q3\n\n let q0 = q / 
8.0_f64;\n\n\n\n // Lambda formula from doc/theoretical_results.lyx in the daala repo\n\n // Use Q0 quantizer since lambda will be applied to Q0 pixel domain\n\n q0 * q0 * std::f64::consts::LN_2 / 6.0\n\n}\n\n\n", "file_path": "src/rdo.rs", "rank": 81, "score": 134886.16306284085 }, { "content": "// RDO-based transform type decision\n\npub fn rdo_tx_type_decision(\n\n fi: &FrameInvariants, fs: &mut FrameState, cw: &mut ContextWriter,\n\n mode: PredictionMode, ref_frames: &[usize; 2], mvs: &[MotionVector; 2], bsize: BlockSize, bo: &BlockOffset, tx_size: TxSize,\n\n tx_set: TxSet, bit_depth: usize\n\n) -> TxType {\n\n let mut best_type = TxType::DCT_DCT;\n\n let mut best_rd = std::f64::MAX;\n\n\n\n // Get block luma and chroma dimensions\n\n let w = bsize.width();\n\n let h = bsize.height();\n\n\n\n let PlaneConfig { xdec, ydec, .. } = fs.input.planes[1].cfg;\n\n let is_chroma_block = has_chroma(bo, bsize, xdec, ydec);\n\n\n\n let is_inter = !mode.is_intra();\n\n\n\n let cw_checkpoint = cw.checkpoint();\n\n\n\n for &tx_type in RAV1E_TX_TYPES {\n", "file_path": "src/rdo.rs", "rank": 82, "score": 134847.69165167268 }, { "content": "pub fn rdo_tx_size_type(\n\n seq: &Sequence, fi: &FrameInvariants, fs: &mut FrameState,\n\n cw: &mut ContextWriter, bsize: BlockSize, bo: &BlockOffset,\n\n luma_mode: PredictionMode, ref_frames: &[usize; 2], mvs: &[MotionVector; 2], skip: bool\n\n) -> (TxSize, TxType) {\n\n // these rules follow TX_MODE_LARGEST\n\n let tx_size = match bsize {\n\n BlockSize::BLOCK_4X4 => TxSize::TX_4X4,\n\n BlockSize::BLOCK_8X8 => TxSize::TX_8X8,\n\n BlockSize::BLOCK_16X16 => TxSize::TX_16X16,\n\n _ => TxSize::TX_32X32\n\n };\n\n cw.bc.set_tx_size(bo, tx_size);\n\n // Were we not hardcoded to TX_MODE_LARGEST, block tx size would be written here\n\n\n\n // Luma plane transform type decision\n\n let is_inter = !luma_mode.is_intra();\n\n let tx_set = get_tx_set(tx_size, is_inter, fi.use_reduced_tx_set);\n\n\n\n let tx_type =\n", "file_path": "src/rdo.rs", "rank": 83, "score": 134847.69165167268 }, { "content": "pub fn inverse_transform_add(\n\n input: &[i32], output: &mut [u16], stride: usize, tx_size: TxSize,\n\n tx_type: TxType, bit_depth: usize\n\n) {\n\n match tx_size {\n\n TxSize::TX_4X4 => iht4x4_add(input, output, stride, tx_type, bit_depth),\n\n TxSize::TX_8X8 => iht8x8_add(input, output, stride, tx_type, bit_depth),\n\n TxSize::TX_16X16 =>\n\n iht16x16_add(input, output, stride, tx_type, bit_depth),\n\n TxSize::TX_32X32 =>\n\n iht32x32_add(input, output, stride, tx_type, bit_depth),\n\n _ => panic!(\"unimplemented tx size\")\n\n }\n\n}\n", "file_path": "src/transform/mod.rs", "rank": 84, "score": 134847.69165167268 }, { "content": "pub fn write_ivf_frame(output_file: &mut dyn io::Write, pts: u64, data: &[u8]) {\n\n let mut bw = BitWriter::endian(output_file, LittleEndian);\n\n bw.write(32, data.len() as u32).unwrap();\n\n bw.write(64, pts).unwrap();\n\n bw.write_bytes(data).unwrap();\n\n}\n\n\n", "file_path": "src/encoder.rs", "rank": 85, "score": 132805.6476119149 }, { "content": "fn av1_iidentity32(input: &[i32], output: &mut [i32], _range: usize) {\n\n for i in 0..32 {\n\n output[i] = input[i] * 4;\n\n }\n\n}\n\n\n\nstatic INV_TXFM_FNS: [[fn(&[i32], &mut [i32], usize); 4]; 4] = [\n\n [av1_idct4, av1_idct8, av1_idct16, av1_idct32],\n\n [av1_iadst4, av1_iadst8, av1_iadst16, |_, _, _| unimplemented!()],\n\n [\n\n |_, _, _| unimplemented!(),\n\n |_, _, _| unimplemented!(),\n\n |_, _, _| unimplemented!(),\n\n |_, _, _| unimplemented!()\n\n ],\n\n [av1_iidentity4, av1_iidentity8, 
av1_iidentity16, av1_iidentity32]\n\n];\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 86, "score": 132608.68479533406 }, { "content": "fn av1_iadst8(input: &[i32], output: &mut [i32], range: usize) {\n\n // stage 1\n\n let stg1 = [\n\n input[7], input[0], input[5], input[2], input[3], input[4], input[1],\n\n input[6],\n\n ];\n\n\n\n // stage 2\n\n let stg2 = [\n\n half_btf(COSPI_INV[4], stg1[0], COSPI_INV[60], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[60], stg1[0], -COSPI_INV[4], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[20], stg1[2], COSPI_INV[44], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[44], stg1[2], -COSPI_INV[20], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[36], stg1[4], COSPI_INV[28], stg1[5], INV_COS_BIT),\n\n half_btf(COSPI_INV[28], stg1[4], -COSPI_INV[36], stg1[5], INV_COS_BIT),\n\n half_btf(COSPI_INV[52], stg1[6], COSPI_INV[12], stg1[7], INV_COS_BIT),\n\n half_btf(COSPI_INV[12], stg1[6], -COSPI_INV[52], stg1[7], INV_COS_BIT)\n\n ];\n\n\n\n // stage 3\n", "file_path": "src/transform/inverse.rs", "rank": 87, "score": 132608.68479533406 }, { "content": "fn av1_idct32(input: &[i32], output: &mut [i32], range: usize) {\n\n // stage 1;\n\n let stg1 = [\n\n input[0], input[16], input[8], input[24], input[4], input[20], input[12],\n\n input[28], input[2], input[18], input[10], input[26], input[6], input[22],\n\n input[14], input[30], input[1], input[17], input[9], input[25], input[5],\n\n input[21], input[13], input[29], input[3], input[19], input[11],\n\n input[27], input[7], input[23], input[15], input[31],\n\n ];\n\n\n\n // stage 2\n\n let stg2 = [\n\n stg1[0],\n\n stg1[1],\n\n stg1[2],\n\n stg1[3],\n\n stg1[4],\n\n stg1[5],\n\n stg1[6],\n\n stg1[7],\n", "file_path": "src/transform/inverse.rs", "rank": 88, "score": 132608.68479533406 }, { "content": "fn av1_iidentity4(input: &[i32], output: &mut [i32], _range: usize) {\n\n for i in 0..4 {\n\n output[i] = round_shift(SQRT2 * input[i], 12);\n\n }\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 89, "score": 132608.68479533403 }, { "content": "fn write_b_bench(b: &mut Bencher, tx_size: TxSize, qindex: usize) {\n\n unsafe {\n\n av1_rtcd();\n\n aom_dsp_rtcd();\n\n }\n\n let config =\n\n EncoderConfig { quantizer: qindex, speed: 10, ..Default::default() };\n\n let mut fi = FrameInvariants::new(1024, 1024, config);\n\n let mut w = ec::WriterEncoder::new();\n\n let fc = CDFContext::new(fi.base_q_idx);\n\n let bc = BlockContext::new(fi.sb_width * 16, fi.sb_height * 16);\n\n let mut fs = FrameState::new(&fi);\n\n let mut cw = ContextWriter::new(fc, bc);\n\n\n\n let tx_type = TxType::DCT_DCT;\n\n\n\n let sbx = 0;\n\n let sby = 0;\n\n let ac = &[0i16; 32 * 32];\n\n\n", "file_path": "benches/bench.rs", "rank": 90, "score": 132608.68479533403 }, { "content": "fn av1_idct16(input: &[i32], output: &mut [i32], range: usize) {\n\n // call idct8\n\n let temp_in = [\n\n input[0], input[2], input[4], input[6], input[8], input[10], input[12],\n\n input[14],\n\n ];\n\n let mut temp_out: [i32; 8] = [0; 8];\n\n av1_idct8(&temp_in, &mut temp_out, range);\n\n\n\n // stage 1\n\n let stg1 = [\n\n input[1], input[9], input[5], input[13], input[3], input[11], input[7],\n\n input[15],\n\n ];\n\n\n\n // stage 2\n\n let stg2 = [\n\n half_btf(COSPI_INV[60], stg1[0], -COSPI_INV[4], stg1[7], INV_COS_BIT),\n\n half_btf(COSPI_INV[28], stg1[1], -COSPI_INV[36], stg1[6], INV_COS_BIT),\n\n half_btf(COSPI_INV[44], stg1[2], -COSPI_INV[20], stg1[5], INV_COS_BIT),\n", "file_path": "src/transform/inverse.rs", "rank": 91, "score": 
132608.68479533406 }, { "content": "fn av1_iadst4(input: &[i32], output: &mut [i32], _range: usize) {\n\n let bit = 12;\n\n\n\n let x0 = input[0];\n\n let x1 = input[1];\n\n let x2 = input[2];\n\n let x3 = input[3];\n\n\n\n // stage 1\n\n let s0 = SINPI_INV[1] * x0;\n\n let s1 = SINPI_INV[2] * x0;\n\n let s2 = SINPI_INV[3] * x1;\n\n let s3 = SINPI_INV[4] * x2;\n\n let s4 = SINPI_INV[1] * x2;\n\n let s5 = SINPI_INV[2] * x3;\n\n let s6 = SINPI_INV[4] * x3;\n\n\n\n // stage 2\n\n let s7 = (x0 - x2) + x3;\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 92, "score": 132608.68479533406 }, { "content": "fn av1_iadst16(input: &[i32], output: &mut [i32], range: usize) {\n\n // stage 1\n\n let stg1 = [\n\n input[15], input[0], input[13], input[2], input[11], input[4], input[9],\n\n input[6], input[7], input[8], input[5], input[10], input[3], input[12],\n\n input[1], input[14],\n\n ];\n\n\n\n // stage 2\n\n let stg2 = [\n\n half_btf(COSPI_INV[2], stg1[0], COSPI_INV[62], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[62], stg1[0], -COSPI_INV[2], stg1[1], INV_COS_BIT),\n\n half_btf(COSPI_INV[10], stg1[2], COSPI_INV[54], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[54], stg1[2], -COSPI_INV[10], stg1[3], INV_COS_BIT),\n\n half_btf(COSPI_INV[18], stg1[4], COSPI_INV[46], stg1[5], INV_COS_BIT),\n\n half_btf(COSPI_INV[46], stg1[4], -COSPI_INV[18], stg1[5], INV_COS_BIT),\n\n half_btf(COSPI_INV[26], stg1[6], COSPI_INV[38], stg1[7], INV_COS_BIT),\n\n half_btf(COSPI_INV[38], stg1[6], -COSPI_INV[26], stg1[7], INV_COS_BIT),\n\n half_btf(COSPI_INV[34], stg1[8], COSPI_INV[30], stg1[9], INV_COS_BIT),\n\n half_btf(COSPI_INV[30], stg1[8], -COSPI_INV[34], stg1[9], INV_COS_BIT),\n", "file_path": "src/transform/inverse.rs", "rank": 93, "score": 132608.68479533406 }, { "content": "fn av1_iidentity16(input: &[i32], output: &mut [i32], _range: usize) {\n\n for i in 0..16 {\n\n output[i] = round_shift(SQRT2 * 2 * input[i], 12);\n\n }\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 94, "score": 132608.68479533406 }, { "content": "fn av1_iidentity8(input: &[i32], output: &mut [i32], _range: usize) {\n\n for i in 0..8 {\n\n output[i] = 2 * input[i];\n\n }\n\n}\n\n\n", "file_path": "src/transform/inverse.rs", "rank": 95, "score": 132608.68479533406 }, { "content": "// TODO: rename the type bounds later\n\npub trait Intra<T>: Dim\n\nwhere\n\n T: PrimInt + Into<u32> + Into<i32> + 'static,\n\n i32: AsPrimitive<T>,\n\n u32: AsPrimitive<T>,\n\n usize: AsPrimitive<T>\n\n{\n\n #[cfg_attr(feature = \"comparative_bench\", inline(never))]\n\n fn pred_dc(output: &mut [T], stride: usize, above: &[T], left: &[T]) {\n\n let edges = left[..Self::H].iter().chain(above[..Self::W].iter());\n\n let len = (Self::W + Self::H) as u32;\n\n let avg =\n\n ((edges.fold(0u32, |acc, &v| { let v: u32 = v.into(); v + acc }) + (len >> 1)) / len).as_();\n\n\n\n for line in output.chunks_mut(stride).take(Self::H) {\n\n for v in &mut line[..Self::W] {\n\n *v = avg;\n\n }\n\n }\n\n }\n", "file_path": "src/predict.rs", "rank": 96, "score": 131071.8062449164 }, { "content": "fn encode_tile(sequence: &mut Sequence, fi: &FrameInvariants, fs: &mut FrameState, bit_depth: usize) -> Vec<u8> {\n\n let mut w = WriterEncoder::new();\n\n\n\n let fc = if fi.primary_ref_frame == PRIMARY_REF_NONE {\n\n CDFContext::new(fi.base_q_idx)\n\n } else {\n\n match fi.rec_buffer.frames[fi.ref_frames[fi.primary_ref_frame as usize] as usize] {\n\n Some(ref rec) => rec.cdfs.clone(),\n\n None => CDFContext::new(fi.base_q_idx)\n\n }\n\n };\n\n\n\n let bc = 
BlockContext::new(fi.w_in_b, fi.h_in_b);\n\n let mut cw = ContextWriter::new(fc, bc);\n\n\n\n for sby in 0..fi.sb_height {\n\n cw.bc.reset_left_contexts();\n\n\n\n for sbx in 0..fi.sb_width {\n\n let mut w_post_cdef = WriterRecorder::new();\n", "file_path": "src/encoder.rs", "rank": 97, "score": 128005.37982474129 }, { "content": "fn compare_img(img: *const aom_image_t, frame: &Frame, bit_depth: usize) {\n\n use std::slice;\n\n let img = unsafe { *img };\n\n let img_iter = img.planes.iter().zip(img.stride.iter());\n\n\n\n for (img_plane, frame_plane) in img_iter.zip(frame.planes.iter()) {\n\n let w = frame_plane.cfg.width;\n\n let h = frame_plane.cfg.height;\n\n let rec_stride = frame_plane.cfg.stride;\n\n\n\n if bit_depth > 8 {\n\n let dec_stride = *img_plane.1 as usize / 2;\n\n\n\n let dec = unsafe {\n\n let data = *img_plane.0 as *const u16;\n\n let size = dec_stride * h;\n\n\n\n slice::from_raw_parts(data, size)\n\n };\n\n\n", "file_path": "src/test_encode_decode.rs", "rank": 98, "score": 125417.4043569422 }, { "content": "fn av1_round_shift_array_rs(arr: &mut [i32], size: usize, bit: i8) {\n\n if bit == 0 {\n\n return;\n\n }\n\n if bit > 0 {\n\n let bit = bit as usize;\n\n for i in 0..size {\n\n arr[i] = round_shift(arr[i], bit);\n\n }\n\n } else {\n\n for i in 0..size {\n\n arr[i] =\n\n clamp((1 << (-bit)) * arr[i], i32::min_value(), i32::max_value());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/transform/mod.rs", "rank": 99, "score": 124668.95500315099 } ]
Rust
src/config.rs
video-audio/va-tool
e97918d0aa936ace07dfd3df84758b6a7646a4d2
use std::env; use regex::Regex; use url::Url; use crate::error::{Error, Result}; use crate::opt::{Match as OptMatch, Matcher as OptMatcher, Opt, OptKind, Opts}; #[rustfmt::skip] const OPTS: Opts = &[ &Opt(&"vv", &["verbose"], OptKind::NoArg), &Opt(&"vvv", &["very-verbose"], OptKind::NoArg), &Opt(&"help", &["h"], OptKind::NoArg), &Opt(&"version", &["v"], OptKind::NoArg), &Opt(&"print-config", &[], OptKind::NoArg), &Opt(&"config", &["c", "cfg"], OptKind::Arg), &Opt(&"input", &["i"], OptKind::Arg), &Opt(&"fifo-sz", &["udp-fifo-sz", "udp-fifo-size","fifo-size"], OptKind::Arg), &Opt(&"out", &["o", "output"], OptKind::Arg), ]; #[allow(dead_code)] pub struct ConfigOutput { url: Url, } pub struct ConfigInput { id: u64, pub url: Url, pub udp_fifo_sz: usize, } pub struct Config { pub print_help: bool, pub print_version: bool, pub print_config: bool, pub log_level: log::Level, pub inputs: Vec<ConfigInput>, } impl Config { pub(crate) fn parse() -> Result<Config> { let mut c = Config { print_help: false, print_version: false, print_config: false, log_level: log::Level::Info, inputs: Default::default(), }; let opt_matcher = OptMatcher::new(env::args().skip(1).collect(), OPTS); for (i, mtch) in opt_matcher.into_iter().enumerate() { match mtch { OptMatch::Key(key, _) => match key { "vv" => c.log_level = log::Level::Debug, "vvv" => c.log_level = log::Level::Trace, "help" => c.print_help = true, "version" => c.print_version = true, "print-config" => c.print_config = true, _ => {} }, OptMatch::KeyValue(key, value) => match key { "input" => c.push_input(value)?, "fifo-sz" => { let udp_fifo_sz = value.parse::<usize>().unwrap(); c.inputs.last_mut().and_then(|input| { input.udp_fifo_sz = udp_fifo_sz; Some(input) }); } _ => {} }, OptMatch::Positional(value) | OptMatch::ExtraPositional(value) => { if i == 0 && value == "analyze" { } else { c.push_input(value)? } } OptMatch::UnknownKey(key) => { log::warn!(r#"unrecognized option "{}""#, key); } OptMatch::UnknownKeyValue(key, value) => { log::warn!(r#"unrecognized option "{}" with argument "{}""#, key, value); } OptMatch::No(key) => { log::warn!(r#"unknown argument "{}""#, key); } _ => {} } } Ok(c) } pub(crate) fn print_help(&self) { println!("Video/Audio tool version {}", env!("CARGO_PKG_VERSION")); println!(); println!("Usage:"); println!(r#" va-tool [...] [-arg ...] [--arg[="..."]] [--] [...]"#); println!(); println!("Flags:"); println!(" -vv, --verbose | <bool> | ... "); println!(" -vvv, --very-verbose | <bool> | ... "); println!(" -i, --intput | <str/url> | Where to read from"); println!(" --fifo-sz | <size> | circular buffer size; result allocaed size"); println!(" . is $(mpeg-ts-packer-size) * $(fifo-size)"); println!(" . mpeg-ts-packer-size is 188"); println!(" -o, --output, --out | <str/url> | Where to write to"); println!(); } pub(crate) fn print_version(&self) { println!("version: {}", env!("CARGO_PKG_VERSION")); } pub(crate) fn print_config(&self) { println!("log-level: {}", self.log_level.to_string().to_lowercase()); println!("inputs:"); for input in self.inputs.iter() { println!(" - id: {}", input.id); println!(" url: {}", input.url); if input.url.scheme() == "udp" { println!(" udp-fifo-sz: {}", input.udp_fifo_sz); } } } pub(crate) fn validate(&self) -> Result<()> { Ok(()) } fn push_input(&mut self, url_raw: String) -> Result<()> { let cfg_input = ConfigInput { id: 0, url: url_parse(&url_raw)?, udp_fifo_sz: 5 * 1000, }; self.inputs.push(cfg_input); Ok(()) } } fn url_parse<UR: AsRef<str>>(url_raw: UR) -> Result<url::Url> { lazy_static! 
{ static ref RE_UDP_MCAST_GROUP: Regex = Regex::new( r#"(?x) ^ 2(?:2[4-9]|3[0-9]) (?: \. (?: 25[0-5] | 2[0-4][0-9] | 1[0-9]{2} | [1-9][0-9] | [0-9] ) ){3} "#, ) .unwrap(); } let mut url_raw = url_raw.as_ref().to_string(); if RE_UDP_MCAST_GROUP.is_match(&url_raw) { url_raw.insert_str(0, "udp://"); } else if url_raw.starts_with('.') || url_raw.starts_with('/') { url_raw.insert_str(0, "file://"); } Url::parse(&url_raw).map_err(|err| Error::url_parse(err, url_raw)) }
use std::env; use regex::Regex; use url::Url; use crate::error::{Error, Result}; use crate::opt::{Match as OptMatch, Matcher as OptMatcher, Opt, OptKind, Opts}; #[rustfmt::skip] const OPTS: Opts = &[ &Opt(&"vv", &["verbose"], OptKind::NoArg), &Opt(&"vvv", &["very-verbose"], OptKind::NoArg), &Opt(&"help", &["h"], OptKind::NoArg), &Opt(&"version", &["v"], OptKind::NoArg), &Opt(&"print-config", &[], OptKind::NoArg), &Opt(&"config", &["c", "cfg"], OptKind::Arg), &Opt(&"input", &["i"], OptKind::Arg), &Opt(&"fifo-sz", &["udp-fifo-sz", "udp-fifo-size","fifo-size"], OptKind::Arg), &Opt(&"out", &["o", "output"], OptKind::Arg), ]; #[allow(dead_code)] pub struct ConfigOutput { url: Url, } pub struct ConfigInput { id: u64, pub url: Url, pub udp_fifo_sz: usize, } pub struct Config { pub print_help: bool, pub print_version: bool, pub print_config: bool, pub log_level: log::Level, pub inputs: Vec<ConfigInput>, } impl Config { pub(crate) fn parse() -> Result<Config> { let mut c = Config { print_help: false, print_version: false, print_config: false, log_level: log::Level::Info, inputs: Default::default(), }; let opt_matcher = OptMatcher::new(env::args().skip(1).collect(), OPTS); for (i, mtch) in opt_matcher.into_iter().enumerate() { match mtch { OptMatch::Key(key, _) => match key { "vv" => c.log_level = log::Level::Debug, "vvv" => c.log_level = log::Level::Trace, "help" => c.print_help = true, "version" => c.print_version = true, "print-config" => c.print_config = true, _ => {} }, OptMatch::KeyValue(key, value) => match key { "input" => c.push_input(value)?, "fifo-sz" => { let udp_fifo_sz = value.parse::<usize>().unwrap(); c.inputs.last_mut().and_then(|input| { input.udp_fifo_sz = udp_fifo_sz; Some(input) }); } _ => {} }, OptMatch::Positional(value) | OptMatch::ExtraPositional(value) => { if i == 0 && value == "analyze" { } else { c.push_input(value)? } } OptMatch::UnknownKey(key) => { log::warn!(r#"unrecognized option "{}""#, key); } OptMatch::UnknownKeyValue(key, value) => { log::warn!(r#"unrecognized option "{}" with argument "{}""#, key, value); } OptMatch::No(key) => { log::warn!(r#"unknown argument "{}""#, key); } _ => {} } } Ok(c) } pub(crate) fn print_help(&self) { println!("Video/Audio tool version {}", env!("CARGO_PKG_VERSION")); println!(); println!("Usage:"); println!(r#" va-tool [...] [-arg ...] [--arg[="..."]] [--] [...]"#); println!(); println!("Flags:"); println!(" -vv, --verbose | <bool> | ... "); println!(" -vvv, --very-verbose | <bool> | ... "); println!(" -i, --intput | <str/url> | Where to read from"); println!(" --fifo-sz | <size> | circular buffer size; result allocaed size"); println!("
| <str/url> | Where to write to"); println!(); } pub(crate) fn print_version(&self) { println!("version: {}", env!("CARGO_PKG_VERSION")); } pub(crate) fn print_config(&self) { println!("log-level: {}", self.log_level.to_string().to_lowercase()); println!("inputs:"); for input in self.inputs.iter() { println!(" - id: {}", input.id); println!(" url: {}", input.url); if input.url.scheme() == "udp" { println!(" udp-fifo-sz: {}", input.udp_fifo_sz); } } } pub(crate) fn validate(&self) -> Result<()> { Ok(()) } fn push_input(&mut self, url_raw: String) -> Result<()> { let cfg_input = ConfigInput { id: 0, url: url_parse(&url_raw)?, udp_fifo_sz: 5 * 1000, }; self.inputs.push(cfg_input); Ok(()) } } fn url_parse<UR: AsRef<str>>(url_raw: UR) -> Result<url::Url> { lazy_static! { static ref RE_UDP_MCAST_GROUP: Regex = Regex::new( r#"(?x) ^ 2(?:2[4-9]|3[0-9]) (?: \. (?: 25[0-5] | 2[0-4][0-9] | 1[0-9]{2} | [1-9][0-9] | [0-9] ) ){3} "#, ) .unwrap(); } let mut url_raw = url_raw.as_ref().to_string(); if RE_UDP_MCAST_GROUP.is_match(&url_raw) { url_raw.insert_str(0, "udp://"); } else if url_raw.starts_with('.') || url_raw.starts_with('/') { url_raw.insert_str(0, "file://"); } Url::parse(&url_raw).map_err(|err| Error::url_parse(err, url_raw)) }
. is $(mpeg-ts-packer-size) * $(fifo-size)"); println!(" . mpeg-ts-packer-size is 188"); println!(" -o, --output, --out
random
[ { "content": "fn opts_get<'opt, 's>(opts: Opts<'opt>, key: &'s str) -> Option<&'opt Opt<'opt>> {\n\n for opt in opts {\n\n if opt.0 == key {\n\n return Some(opt);\n\n }\n\n\n\n for k in opt.1 {\n\n if *k == key {\n\n return Some(opt);\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n\npub struct Matcher<'a> {\n\n args: Vec<String>,\n\n opts: Opts<'a>,\n\n}\n", "file_path": "src/opt.rs", "rank": 0, "score": 108775.05376378083 }, { "content": "struct Arg {\n\n val: String,\n\n}\n\n\n\nimpl Arg {\n\n fn new(val: String) -> Arg {\n\n Arg { val }\n\n }\n\n\n\n #[inline(always)]\n\n fn is_option(&self) -> bool {\n\n self.val.starts_with('-')\n\n }\n\n\n\n #[inline(always)]\n\n fn is_end(&self) -> bool {\n\n self.val == \"--\"\n\n }\n\n\n\n /// extract\n", "file_path": "src/opt.rs", "rank": 1, "score": 98258.67068572927 }, { "content": "pub fn init() -> Result<()> {\n\n Logger::init()?;\n\n log::set_max_level(log::LevelFilter::Info);\n\n Ok(())\n\n}\n\n\n\n/// simple basic logger\n", "file_path": "src/logger.rs", "rank": 3, "score": 94185.80333814767 }, { "content": "pub trait Input {\n\n fn open(&mut self) -> Result<()>;\n\n fn read(&self) -> Result<()>;\n\n fn close(&self) -> Result<()>;\n\n}\n\n\n", "file_path": "src/input.rs", "rank": 4, "score": 76127.45678000619 }, { "content": "/// main with optional Error\n\nfn try_main() -> Result<()> {\n\n logger::init()?;\n\n\n\n let config = Config::parse().map_err(Error::config)?;\n\n\n\n log::set_max_level(config.log_level.to_level_filter());\n\n\n\n if config.print_help || config.print_version || config.print_config {\n\n if config.print_help {\n\n config.print_help()\n\n }\n\n\n\n if config.print_version {\n\n config.print_version()\n\n }\n\n\n\n if config.print_config {\n\n config.print_config();\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 64634.68633431768 }, { "content": "fn signal_chan() -> Result<Receiver<()>> {\n\n let (sender, receiver) = bounded(16);\n\n\n\n ctrlc::set_handler(move || {\n\n let _ = sender.send(());\n\n })\n\n .map_err(Error::signal)?;\n\n\n\n Ok(receiver)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 60211.35340417897 }, { "content": "struct App {\n\n config: Config,\n\n}\n\n\n\nimpl App {\n\n fn new(config: Config) -> App {\n\n App { config }\n\n }\n\n\n\n fn start(&self) -> Result<()> {\n\n for input in self.config.inputs.iter() {\n\n match input.url.scheme() {\n\n \"udp\" => {\n\n let mut udp = InputUdp::new(input.url.clone());\n\n udp.fifo_sz(input.udp_fifo_sz);\n\n\n\n let mut source = Source::new(udp);\n\n source.start()?;\n\n\n\n let mc = Mediacontainer::from(&input.url);\n", "file_path": "src/main.rs", "rank": 7, "score": 37737.971874905736 }, { "content": "#[derive(Debug)]\n\nstruct Logger(());\n\n\n\nconst LOGGER: &Logger = &Logger(());\n\n\n\nimpl Logger {\n\n fn init() -> Result<()> {\n\n log::set_logger(LOGGER).map_err(Error::logger)\n\n }\n\n}\n\n\n\nimpl Log for Logger {\n\n /// always enabled\n\n fn enabled(&self, _: &log::Metadata) -> bool {\n\n true\n\n }\n\n\n\n fn log(&self, record: &log::Record) {\n\n eprintln!(\"[{}] {}\", level_char(record.level()), record.args());\n\n }\n\n\n\n /// no need to flush stderr/stdout\n\n fn flush(&self) {}\n\n}\n\n\n", "file_path": "src/logger.rs", "rank": 8, "score": 37737.971874905736 }, { "content": "fn main() {\n\n if let Err(err) = try_main() {\n\n eprintln!(\"{}\", err);\n\n\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 9, "score": 35234.44626141818 }, { "content": "pub trait Producer {\n\n fn consumers(&self) 
-> &Consumers;\n\n fn consumers_mut(&mut self) -> &mut Consumers;\n\n\n\n fn add_consumer(&mut self, consumer: Box<dyn Consumer>) {\n\n self.consumers_mut().0.push(consumer)\n\n }\n\n\n\n fn produce_trk(&self) {\n\n for consumer in self.consumers().0.iter() {\n\n consumer.consume_trk()\n\n }\n\n }\n\n\n\n fn produce_pkt_raw(&self, pkt_raw: &[u8]) {\n\n for consumer in self.consumers().0.iter() {\n\n consumer.consume_pkt_raw(pkt_raw)\n\n }\n\n }\n\n\n", "file_path": "src/filter.rs", "rank": 10, "score": 34331.39620852081 }, { "content": "pub trait Consumer {\n\n fn consume_trk(&self) {}\n\n fn consume_pkt_raw(&self, _: &[u8]) {}\n\n fn consume_pkt(&self) {}\n\n fn consume_frm(&self) {}\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 11, "score": 34331.39620852081 }, { "content": "#[inline(always)]\n\nfn level_char(level: log::Level) -> char {\n\n match level {\n\n log::Level::Error => 'e',\n\n log::Level::Warn => 'w',\n\n log::Level::Info => 'i',\n\n log::Level::Debug => 'd',\n\n log::Level::Trace => 't',\n\n }\n\n}\n", "file_path": "src/logger.rs", "rank": 12, "score": 25877.582576776742 }, { "content": " Ok(())\n\n }\n\n fn close(&self) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n\npub struct InputFile {\n\n url: Url,\n\n}\n\n\n\nimpl InputFile {\n\n pub fn new(url: Url) -> InputFile {\n\n InputFile { url }\n\n }\n\n}\n\n\n\nimpl Input for InputFile {\n\n fn open(&mut self) -> Result<()> {\n\n Ok(())\n", "file_path": "src/input.rs", "rank": 22, "score": 23415.613990049656 }, { "content": " }\n\n\n\n pub fn fifo_sz(&mut self, fifo_sz: usize) -> &InputUdp {\n\n self.fifo_sz = fifo_sz;\n\n self\n\n }\n\n}\n\n\n\nimpl Input for InputUdp {\n\n fn open(&mut self) -> Result<()> {\n\n let fifo = Arc::new((\n\n Mutex::new(VecDeque::with_capacity(self.fifo_sz)),\n\n Condvar::new(),\n\n ));\n\n self.fifo = Some(fifo.clone());\n\n\n\n let host = self.url.host().ok_or_else(Error::udp_url_missing_host)?;\n\n let host_str = host.to_owned().to_string();\n\n\n\n let port = self.url.port().unwrap_or(5500);\n", "file_path": "src/input.rs", "rank": 23, "score": 23412.60109503417 }, { "content": " }\n\n fn read(&self) -> Result<()> {\n\n info!(\"[<] {}\", self.url);\n\n Ok(())\n\n }\n\n fn close(&self) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/input.rs", "rank": 24, "score": 23408.42644726527 }, { "content": " }\n\n\n\n let url = self.url.clone();\n\n thread::spawn(move || {\n\n let mut pkt_raw = [0; ts::Packet::SZ];\n\n\n\n // MTU (maximum transmission unit) == 1500 for Ethertnet\n\n // 7*ts::Packet::SZ = 7*188 = 1316 < 1500 => OK\n\n let mut buf7 = [0; 7 * ts::Packet::SZ];\n\n\n\n loop {\n\n let (_, _) = socket.recv_from(&mut buf7).unwrap();\n\n\n\n let &(ref lock, ref cvar) = &*fifo;\n\n let mut fifo = match lock.lock() {\n\n Err(e) => {\n\n error!(\"({}) lock and get buffer failed: {}\", url, e);\n\n // will retry after timeout;\n\n thread::sleep(Duration::from_secs(1));\n\n continue;\n", "file_path": "src/input.rs", "rank": 25, "score": 23408.00914532622 }, { "content": "use std::collections::VecDeque;\n\nuse std::net::Ipv4Addr;\n\nuse std::net::UdpSocket;\n\nuse std::sync::{Arc, Condvar, Mutex};\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse log::{debug, error, info, trace};\n\nuse url::{Host, Url};\n\n\n\nuse crate::error::{Error, Result};\n\n\n", "file_path": "src/input.rs", "rank": 26, "score": 23407.449823262672 }, { "content": " }\n\n Ok(buf) => buf,\n\n };\n\n\n\n for i in 0..7 {\n\n let f = i * ts::Packet::SZ;\n\n let t = (i + 1) * ts::Packet::SZ;\n\n let buf1 = 
&buf7[f..t];\n\n\n\n pkt_raw.copy_from_slice(buf1);\n\n fifo.push_back(pkt_raw);\n\n }\n\n\n\n cvar.notify_all();\n\n }\n\n });\n\n\n\n Ok(())\n\n }\n\n fn read(&self) -> Result<()> {\n", "file_path": "src/input.rs", "rank": 27, "score": 23404.27008007081 }, { "content": "\n\n let socket = UdpSocket::bind((&*host_str, port))\n\n .map_err(|err| Error::udp_socket_bind(err, &host_str, port))?;\n\n\n\n debug!(\"({}) [+] OK bind udp socket\", self.url);\n\n\n\n {\n\n match host {\n\n Host::Ipv4(v4) => {\n\n let iface = Ipv4Addr::new(0, 0, 0, 0);\n\n socket.join_multicast_v4(&v4, &iface).map_err(|err| {\n\n Error::udp_join_multicast_v4(err, host_str, port, iface.to_string())\n\n })?;\n\n\n\n debug!(\"({}) [+] OK join multicast v4\", self.url);\n\n debug!(\"({}) [+] OK ({}:{}@{})\", self.url, v4, port, iface);\n\n }\n\n Host::Ipv6(v6) => {\n\n // 0 to indicate any interface\n\n let iface = 0;\n", "file_path": "src/input.rs", "rank": 28, "score": 23403.70143197589 }, { "content": " let fifo = self\n\n .fifo\n\n .as_ref()\n\n .ok_or_else(Error::udp_fifo_not_initialized)?\n\n .clone();\n\n\n\n let &(ref lock, ref cvar) = &*fifo;\n\n let mut fifo = lock\n\n .lock()\n\n .map_err(|err| Error::udp_fifo_lock(err.to_string()))?;\n\n\n\n fifo = cvar\n\n .wait(fifo)\n\n .map_err(|err| Error::udp_fifo_cvar_wait(err.to_string()))?;\n\n\n\n while !fifo.is_empty() {\n\n let ts_pkt_raw = fifo.pop_front().ok_or_else(Error::udp_fifo_pop_empty)?;\n\n trace!(\"({}) [<] {}\", self.url, ts_pkt_raw.len())\n\n }\n\n\n", "file_path": "src/input.rs", "rank": 29, "score": 23402.87033392474 }, { "content": " socket\n\n .join_multicast_v6(&v6, iface)\n\n .map_err(|err| Error::udp_join_multicast_v6(err, host_str, port, iface))?;\n\n\n\n debug!(\"({}) [+] OK join multicast v6\", self.url);\n\n }\n\n Host::Domain(domain) => {\n\n let v4 = domain\n\n .parse()\n\n .map_err(|err| Error::udp_domain_to_ipv4(err, domain))?;\n\n\n\n let iface = Ipv4Addr::new(0, 0, 0, 0);\n\n socket.join_multicast_v4(&v4, &iface).map_err(|err| {\n\n Error::udp_join_multicast_v4(err, host_str, port, iface.to_string())\n\n })?;\n\n\n\n debug!(\"({}) [+] OK join multicast v4/domain\", self.url);\n\n debug!(\"({}) [+] OK ({}:{}@{})\", self.url, domain, port, iface);\n\n }\n\n }\n", "file_path": "src/input.rs", "rank": 30, "score": 23402.4430094839 }, { "content": " /// unknown option\n\n UnknownKey(String),\n\n /// unknown option with value\n\n UnknownKeyValue(String, String),\n\n /// extra positional argument inside optional\n\n ExtraPositional(String),\n\n\n\n /// wtf?!! no match - should never happen\n\n No(String),\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum OptKind {\n\n /// required-argument\n\n Arg,\n\n /// no argument required (bool flag) e.g. 
--help, --v, --vv, --vvv\n\n NoArg,\n\n}\n\n\n\n/// (name, alternatives, kind)\n\n#[derive(Debug)]\n\npub struct Opt<'a>(pub &'a str, pub &'a [&'a str], pub OptKind);\n\n\n\npub type Opts<'opt> = &'opt [&'opt Opt<'opt>];\n\n\n", "file_path": "src/opt.rs", "rank": 31, "score": 23129.695257034666 }, { "content": "pub struct MatcherIter<'a> {\n\n iter: std::iter::Peekable<std::vec::IntoIter<String>>,\n\n state: State,\n\n #[allow(unused)]\n\n opts: Opts<'a>,\n\n}\n\n\n\nimpl<'a> MatcherIter<'a> {\n\n /// check result of extracion value from (--key=value, --key:value etc)\n\n /// if no value provided - try to peek next value from iterator\n\n #[inline(always)]\n\n fn check_value_or_try_peek_next(&mut self, value: Option<String>) -> Option<String> {\n\n match value {\n\n Some(value) => Some(value),\n\n None => {\n\n // try to get option value from next argument\n\n match self.iter.peek() {\n\n Some(arg_raw) => {\n\n let arg = Arg::new(arg_raw.to_string());\n\n\n", "file_path": "src/opt.rs", "rank": 32, "score": 23128.161992828973 }, { "content": " Some(Match::Positional(arg.val))\n\n } else if self.state.is_opt() && arg.is_option() {\n\n let (key, value) = arg.extract_key_and_value();\n\n\n\n match (key, &value) {\n\n // should never hapen\n\n (None, _) => Some(Match::No(arg.val)),\n\n\n\n // check if opting defined in options array\n\n (Some(key), _) => match opts_get(self.opts, &key) {\n\n // known option\n\n Some(opt) => match opt.2 {\n\n // no argument required but try to consume next positional if exists\n\n // e.g. --debug=true or --debug true\n\n OptKind::NoArg => match self.check_value_or_try_peek_next(value) {\n\n Some(value) => Some(Match::Key(opt.0, Some(value))),\n\n None => Some(Match::Key(opt.0, None)),\n\n },\n\n\n\n // must be with argument\n", "file_path": "src/opt.rs", "rank": 33, "score": 23127.385035233845 }, { "content": " }\n\n\n\n #[inline(always)]\n\n pub fn is_end(self) -> bool {\n\n (self.bits & Self::END.bits) != 0\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Match<'opt> {\n\n /// positional parameter\n\n Positional(String),\n\n /// --key | -key\n\n Key(&'opt str, Option<String>),\n\n /// --key=value | -key value | --key:value | --key value\n\n KeyValue(&'opt str, String),\n\n\n\n /// got option but no argument provided\n\n NoArg(&'opt str),\n\n\n", "file_path": "src/opt.rs", "rank": 34, "score": 23126.215919766215 }, { "content": " OptKind::Arg => match self.check_value_or_try_peek_next(value) {\n\n Some(value) => Some(Match::KeyValue(opt.0, value)),\n\n None => Some(Match::NoArg(opt.0)),\n\n },\n\n },\n\n // unknown option\n\n None => match self.check_value_or_try_peek_next(value) {\n\n Some(value) => Some(Match::UnknownKeyValue(key, value)),\n\n None => Some(Match::UnknownKey(key)),\n\n },\n\n },\n\n }\n\n } else {\n\n Some(Match::ExtraPositional(arg.val))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/opt.rs", "rank": 35, "score": 23122.676429359857 }, { "content": " if arg.is_end() || arg.is_option() {\n\n return None;\n\n }\n\n\n\n self.iter.next(); // advance iterator\n\n\n\n Some(arg.val)\n\n }\n\n None => None,\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<'opt> Iterator for MatcherIter<'opt> {\n\n type Item = Match<'opt>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let arg_raw = self.iter.next()?;\n", "file_path": "src/opt.rs", "rank": 36, "score": 23122.073733046713 }, { "content": "\n\nimpl<'a> Matcher<'a> {\n\n pub fn new(args: Vec<String>, opts: &'a [&'a Opt<'a>]) -> Self {\n\n Matcher { args, opts }\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for 
Matcher<'a> {\n\n type Item = Match<'a>;\n\n type IntoIter = MatcherIter<'a>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n MatcherIter {\n\n iter: self.args.into_iter().peekable(),\n\n opts: self.opts,\n\n state: State::POS,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/opt.rs", "rank": 37, "score": 23121.273349165258 }, { "content": " let mut arg = Arg::new(arg_raw);\n\n\n\n // got \"-\", just skip it\n\n if arg.val == \"-\" {\n\n arg = Arg::new(self.iter.next()?);\n\n }\n\n\n\n // got \"--\"\n\n if arg.is_end() {\n\n self.state.set_end();\n\n self.state.set_pos(); // only positional arguments after --\n\n arg = Arg::new(self.iter.next()?);\n\n }\n\n\n\n // got \"-key\" or \"--key\" for the first time\n\n if arg.is_option() && !self.state.is_end() {\n\n self.state.set_opt(); // only optional arguments after first option and before --\n\n }\n\n\n\n if self.state.is_pos() {\n", "file_path": "src/opt.rs", "rank": 38, "score": 23119.663154477217 }, { "content": "use regex::Regex;\n\n\n\nbitflags! {\n\n struct State: u8 {\n\n /// parser inside positional arguments\n\n const POS = 0x01;\n\n /// parser inside keyword/option arguments (--i ..., -i ..., i: ..., i=...)\n\n const OPT = 0x02;\n\n /// got \"--\"\n\n /// all next arguments are positional\n\n const END = 0x04;\n\n }\n\n}\n\n\n\nimpl State {\n\n #[inline(always)]\n\n pub fn set_pos(&mut self) {\n\n self.bits &= !Self::OPT.bits;\n\n self.bits |= Self::POS.bits;\n\n }\n", "file_path": "src/opt.rs", "rank": 39, "score": 23119.474803951285 }, { "content": "\n\n #[inline(always)]\n\n pub fn set_opt(&mut self) {\n\n self.bits &= !Self::POS.bits;\n\n self.bits |= Self::OPT.bits;\n\n }\n\n\n\n #[inline(always)]\n\n pub fn set_end(&mut self) {\n\n self.bits |= Self::END.bits\n\n }\n\n\n\n #[inline(always)]\n\n pub fn is_pos(self) -> bool {\n\n (self.bits & Self::POS.bits) != 0\n\n }\n\n\n\n #[inline(always)]\n\n pub fn is_opt(self) -> bool {\n\n (self.bits & Self::OPT.bits) != 0\n", "file_path": "src/opt.rs", "rank": 40, "score": 23116.5770590847 }, { "content": " ///\n\n /// --key => (Some(key), None)\n\n /// --key:value => (Some(key), Some(value))\n\n #[inline(always)]\n\n fn extract_key_and_value(&self) -> (Option<String>, Option<String>) {\n\n lazy_static! {\n\n static ref RE: Regex = Regex::new(\n\n r#\"(?x)\n\n -?-? # \"--\" or \"-\"\n\n (?P<key>[a-zA-Z0-9_\\-]+) # key\n\n (?:\n\n (:?\n\n =|:\n\n )?\n\n (?P<value>[a-zA-Z0-9_\\-]+) # value\n\n )?\n\n \"#,\n\n )\n\n .unwrap();\n\n }\n", "file_path": "src/opt.rs", "rank": 41, "score": 23115.596100443574 }, { "content": "\n\n let caps = match RE.captures(&self.val) {\n\n Some(caps) => caps,\n\n None => return (None, None),\n\n };\n\n\n\n let key = caps.name(\"key\").map(|m| m.as_str().to_string());\n\n let value = caps.name(\"value\").map(|m| m.as_str().to_string());\n\n\n\n (key, value)\n\n }\n\n}\n", "file_path": "src/opt.rs", "rank": 42, "score": 23114.407622772953 }, { "content": "# video-audio tool\n\n\n\nFor analyze, dump, mux, demux, encode, decode, filter video/audio streams.\n", "file_path": "README.md", "rank": 43, "score": 13725.087848738787 }, { "content": "type UDPFifo = Arc<(Mutex<VecDeque<[u8; ts::Packet::SZ]>>, Condvar)>;\n\n\n\npub struct InputUdp {\n\n url: Url,\n\n\n\n /// a.k.a. 
circular buffer size\n\n fifo_sz: usize,\n\n\n\n /// circullar-buffer / fifo\n\n /// use two threads and buffer to read from udp\n\n fifo: Option<UDPFifo>,\n\n}\n\n\n\nimpl InputUdp {\n\n pub fn new(url: Url) -> InputUdp {\n\n InputUdp {\n\n url,\n\n fifo_sz: 1000,\n\n fifo: None,\n\n }\n", "file_path": "src/input.rs", "rank": 44, "score": 13707.343723782902 }, { "content": "use std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse log::error;\n\n\n\nuse crate::error::{Error, Result};\n\nuse crate::filter::Filter;\n\nuse crate::input::Input;\n\n\n\npub struct Source<I> {\n\n filter: Filter,\n\n\n\n input: Arc<Mutex<I>>,\n\n\n\n thread: Option<thread::JoinHandle<()>>,\n\n}\n\n\n\nimpl<I: 'static> Source<I>\n", "file_path": "src/source.rs", "rank": 45, "score": 11.73416877960979 }, { "content": "use std::fmt;\n\n\n\nuse failure::{Backtrace, Context, Fail};\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub enum ErrorKind {\n\n Logger,\n\n Config,\n\n URLParse(String),\n\n Signal,\n\n\n\n SourceSpawn,\n\n SourceInputLock(String),\n\n SourceStop,\n\n SourceJoin(String),\n\n\n\n UdpUrlMissingHost,\n\n UdpSocketBind(String, u16),\n", "file_path": "src/error.rs", "rank": 46, "score": 11.382997124271084 }, { "content": "use url::Url;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum Mediacontainer {\n\n Ts,\n\n Mp4 { fragmented: bool },\n\n WebM,\n\n Rtp,\n\n Rtsp,\n\n}\n\n\n\nimpl From<&url::Url> for Mediacontainer {\n\n fn from(u: &Url) -> Self {\n\n Mediacontainer::Ts\n\n }\n\n}\n", "file_path": "src/mediacontainer.rs", "rank": 47, "score": 10.751757413582443 }, { "content": "#[macro_use]\n\nextern crate bitflags;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\nmod config;\n\nmod error;\n\nmod filter;\n\nmod input;\n\nmod logger;\n\nmod mediacontainer;\n\nmod opt;\n\nmod source;\n\n\n\nuse std::process;\n\n\n\nuse crossbeam_channel::{bounded, select, Receiver};\n\nuse log::info;\n\n\n\nuse crate::config::Config;\n\nuse crate::error::{Error, Result};\n\nuse crate::input::{InputFile, InputUdp};\n\nuse crate::mediacontainer::Mediacontainer;\n\nuse crate::source::Source;\n\n\n", "file_path": "src/main.rs", "rank": 48, "score": 10.693982160219624 }, { "content": "}\n\n\n\nimpl fmt::Display for ErrorKind {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match &*self {\n\n ErrorKind::Logger => write!(f, \"logger error\"),\n\n ErrorKind::Config => write!(f, \"config parse error\"),\n\n ErrorKind::URLParse(url_raw) => write!(f, \"url-parse error (:url-raw {})\", url_raw),\n\n ErrorKind::Signal => write!(f, \"subscription to signals failed\"),\n\n\n\n ErrorKind::SourceSpawn => write!(f, \"source-spawn thread error\"),\n\n ErrorKind::SourceInputLock(reason) => write!(\n\n f,\n\n \"lock input inside source to read data failed (:reason {})\",\n\n reason\n\n ),\n\n ErrorKind::SourceStop => write!(f, \"source stop error\"),\n\n ErrorKind::SourceJoin(reason) => write!(f, \"source-join error (:reason {})\", reason),\n\n\n\n ErrorKind::UdpUrlMissingHost => write!(f, \"source-udp - missing url host\"),\n", "file_path": "src/error.rs", "rank": 49, "score": 10.325625645784253 }, { "content": "}\n\n\n\nimpl Fail for Error {\n\n fn cause(&self) -> Option<&dyn Fail> {\n\n self.ctx.cause()\n\n }\n\n\n\n fn backtrace(&self) -> Option<&Backtrace> {\n\n self.ctx.backtrace()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match 
self.cause() {\n\n // pretty-print nested errors\n\n Some(err) => write!(f, \"{}: ({})\", self.ctx, err),\n\n None => self.ctx.fmt(f),\n\n }\n\n }\n", "file_path": "src/error.rs", "rank": 50, "score": 9.558028886485436 }, { "content": "where\n\n I: Input + std::marker::Send,\n\n{\n\n pub fn new(input: I) -> Source<I> {\n\n Source {\n\n filter: Default::default(),\n\n\n\n input: Arc::new(Mutex::new(input)),\n\n\n\n thread: None,\n\n }\n\n }\n\n\n\n pub fn start(&mut self) -> Result<()> {\n\n let input = self.input.clone();\n\n\n\n #[inline(always)]\n\n fn fn_lock_map_err<I>(err: std::sync::PoisonError<std::sync::MutexGuard<'_, I>>) -> Error {\n\n Error::source_input_lock(err.to_string())\n\n }\n", "file_path": "src/source.rs", "rank": 51, "score": 7.945699032135291 }, { "content": " // will retry after timeout;\n\n thread::sleep(Duration::from_secs(3));\n\n } else {\n\n return;\n\n }\n\n })\n\n .map_err(Error::source_spawn)?,\n\n );\n\n\n\n Ok(())\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn stop(&mut self) -> Result<()> {\n\n let result: Result<()> = Ok(());\n\n result.map_err(Error::source_stop)\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn done(&mut self) -> Result<()> {\n", "file_path": "src/source.rs", "rank": 52, "score": 7.4491258004519105 }, { "content": " UdpJoinMulticastV4(String, u16, String),\n\n UdpJoinMulticastV6(String, u16, u32),\n\n UdpDomainToIpV4(String),\n\n UdpFifoNotInitialized,\n\n UdpFifoLock(String),\n\n UdpFifoCvarWait(String),\n\n UdpFifoPopEmpty,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n ctx: Context<ErrorKind>,\n\n}\n\n\n\nimpl Error {\n\n pub(crate) fn config<E: Fail>(err: E) -> Error {\n\n Error::from(err.context(ErrorKind::Config))\n\n }\n\n\n\n pub(crate) fn url_parse<E: Fail, S: AsRef<str>>(err: E, url_raw: S) -> Error {\n", "file_path": "src/error.rs", "rank": 53, "score": 6.731402649848976 }, { "content": " if mc == Mediacontainer::Ts {\n\n // source.add_consumer(ts-demuxer)\n\n }\n\n }\n\n \"file\" => {\n\n let input = InputFile::new(input.url.clone());\n\n\n\n let mut source = Source::new(input);\n\n source.start()?;\n\n }\n\n _ => {}\n\n };\n\n }\n\n\n\n let chan = signal_chan()?;\n\n select! 
{\n\n recv(chan) -> _ => {\n\n info!(\"(SIGINT) will shutdown!\");\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 54, "score": 6.234318867106769 }, { "content": " consumers: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl Producer for Filter {\n\n fn consumers(&self) -> &Consumers {\n\n &self.consumers\n\n }\n\n\n\n fn consumers_mut(&mut self) -> &mut Consumers {\n\n &mut self.consumers\n\n }\n\n}\n\n\n\nimpl Consumer for Filter {\n\n fn consume_trk(&self) {\n\n self.produce_trk()\n\n }\n\n}\n", "file_path": "src/filter.rs", "rank": 55, "score": 5.6520318901492255 }, { "content": "use log::{self, Log};\n\n\n\nuse crate::error::{Error, Result};\n\n\n", "file_path": "src/logger.rs", "rank": 56, "score": 5.611208789380164 }, { "content": "\n\n let fn_do = move || -> Result<()> {\n\n {\n\n input.lock().map_err(fn_lock_map_err)?.open()?;\n\n }\n\n\n\n loop {\n\n input.lock().map_err(fn_lock_map_err)?.read()?;\n\n\n\n // thread::sleep(Duration::from_secs(1));\n\n }\n\n };\n\n\n\n self.thread = Some(\n\n thread::Builder::new()\n\n .name(\"source\".to_string())\n\n .spawn(move || loop {\n\n if let Err(err) = fn_do() {\n\n error!(\"source perform error (:reason {})\", err);\n\n\n", "file_path": "src/source.rs", "rank": 57, "score": 5.439702490696634 }, { "content": " Error::from(err.context(ErrorKind::URLParse(url_raw.as_ref().to_string())))\n\n }\n\n\n\n pub(crate) fn logger<E: Fail>(err: E) -> Error {\n\n Error::from(err.context(ErrorKind::Logger))\n\n }\n\n\n\n pub(crate) fn signal<E: Fail>(err: E) -> Error {\n\n Error::from(err.context(ErrorKind::Signal))\n\n }\n\n\n\n pub(crate) fn source_spawn<E: Fail>(err: E) -> Error {\n\n Error::from(err.context(ErrorKind::SourceSpawn))\n\n }\n\n\n\n pub(crate) fn source_join<S: AsRef<str>>(reason: S) -> Error {\n\n Error::from(ErrorKind::SourceJoin(reason.as_ref().to_string()))\n\n }\n\n\n\n pub(crate) fn source_input_lock<S: AsRef<str>>(reason: S) -> Error {\n", "file_path": "src/error.rs", "rank": 58, "score": 5.041669812084825 }, { "content": " Error::from(ErrorKind::SourceInputLock(reason.as_ref().to_string()))\n\n }\n\n\n\n pub(crate) fn source_stop<E: Fail>(err: E) -> Error {\n\n Error::from(err.context(ErrorKind::SourceStop))\n\n }\n\n\n\n pub(crate) fn udp_url_missing_host() -> Error {\n\n Error::from(ErrorKind::UdpUrlMissingHost)\n\n }\n\n\n\n pub(crate) fn udp_socket_bind<E: Fail, S: AsRef<str>>(err: E, host: S, port: u16) -> Error {\n\n Error::from(err.context(ErrorKind::UdpSocketBind(host.as_ref().to_string(), port)))\n\n }\n\n\n\n pub(crate) fn udp_join_multicast_v4<E: Fail, S: AsRef<str>>(\n\n err: E,\n\n host: S,\n\n port: u16,\n\n group: S,\n", "file_path": "src/error.rs", "rank": 59, "score": 4.7788797613994305 }, { "content": " fn produce_pkt(&self) {\n\n for consumer in self.consumers().0.iter() {\n\n consumer.consume_pkt()\n\n }\n\n }\n\n\n\n fn consume_frm(&self) {\n\n for consumer in self.consumers().0.iter() {\n\n consumer.consume_frm()\n\n }\n\n }\n\n}\n\n\n\npub struct Filter {\n\n consumers: Consumers,\n\n}\n\n\n\nimpl Default for Filter {\n\n fn default() -> Self {\n\n Filter {\n", "file_path": "src/filter.rs", "rank": 60, "score": 3.701028745520431 }, { "content": "// trait EntryPoint: Producer {}\n\n// trait Filter: Producer + Consumer {}\n\n// trait EndPoint: Consumer {}\n\n\n\npub struct Consumers(Vec<Box<dyn Consumer>>);\n\n\n\nimpl Default for Consumers {\n\n fn default() -> Self {\n\n Consumers(Vec::new())\n\n }\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 61, "score": 
3.701028745520431 }, { "content": " return Ok(());\n\n }\n\n\n\n config.validate()?;\n\n\n\n let app = App::new(config);\n\n app.start()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 62, "score": 3.047277706308559 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\npub enum Video {\n\n Mpeg2,\n\n Mpeg4,\n\n H264,\n\n H265,\n\n Vp8,\n\n Vp9,\n\n AV1,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum CompressionStandard {\n\n Video,\n\n Audio,\n\n Image,\n\n Subtitle,\n\n Cc,\n\n Teletext,\n\n}\n", "file_path": "src/compression_standard.rs", "rank": 63, "score": 2.3496317806440126 }, { "content": "\n\n pub(crate) fn udp_domain_to_ipv4<E: Fail, S: AsRef<str>>(err: E, domain: S) -> Error {\n\n Error::from(err.context(ErrorKind::UdpDomainToIpV4(domain.as_ref().to_string())))\n\n }\n\n\n\n pub(crate) fn udp_fifo_not_initialized() -> Error {\n\n Error::from(ErrorKind::UdpFifoNotInitialized)\n\n }\n\n\n\n pub(crate) fn udp_fifo_lock<S: AsRef<str>>(reason: S) -> Error {\n\n Error::from(ErrorKind::UdpFifoLock(reason.as_ref().to_string()))\n\n }\n\n\n\n pub(crate) fn udp_fifo_cvar_wait<S: AsRef<str>>(reason: S) -> Error {\n\n Error::from(ErrorKind::UdpFifoCvarWait(reason.as_ref().to_string()))\n\n }\n\n\n\n pub(crate) fn udp_fifo_pop_empty() -> Error {\n\n Error::from(ErrorKind::UdpFifoPopEmpty)\n\n }\n", "file_path": "src/error.rs", "rank": 64, "score": 2.283666301779423 }, { "content": " match self.thread.take() {\n\n Some(t) => t.join().map_err(|err| {\n\n if let Some(err) = err.downcast_ref::<&'static str>() {\n\n Error::source_join(err)\n\n } else {\n\n Error::source_join(format!(\"{:?}\", err))\n\n }\n\n }),\n\n None => Ok(()),\n\n }\n\n }\n\n}\n", "file_path": "src/source.rs", "rank": 65, "score": 1.935599229950654 }, { "content": " }\n\n ErrorKind::UdpFifoLock(reason) => {\n\n write!(f, \"source-udp - fifo lock error (:reason {})\", reason)\n\n }\n\n ErrorKind::UdpFifoCvarWait(reason) => {\n\n write!(f, \"source-udp - condvar wait error (:reason {})\", reason)\n\n }\n\n ErrorKind::UdpFifoPopEmpty => write!(f, \"source-udp - no data after fifo pop\"),\n\n }\n\n }\n\n}\n\n\n\nimpl From<ErrorKind> for Error {\n\n fn from(kind: ErrorKind) -> Error {\n\n Error::from(Context::new(kind))\n\n }\n\n}\n\n\n\nimpl From<Context<ErrorKind>> for Error {\n\n fn from(ctx: Context<ErrorKind>) -> Error {\n\n Error { ctx }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 66, "score": 1.8835140445864278 }, { "content": " ) -> Error {\n\n Error::from(err.context(ErrorKind::UdpJoinMulticastV4(\n\n host.as_ref().to_string(),\n\n port,\n\n group.as_ref().to_string(),\n\n )))\n\n }\n\n\n\n pub(crate) fn udp_join_multicast_v6<E: Fail, S: AsRef<str>>(\n\n err: E,\n\n host: S,\n\n port: u16,\n\n group: u32,\n\n ) -> Error {\n\n Error::from(err.context(ErrorKind::UdpJoinMulticastV6(\n\n host.as_ref().to_string(),\n\n port,\n\n group,\n\n )))\n\n }\n", "file_path": "src/error.rs", "rank": 67, "score": 1.343640183410236 } ]