path
stringlengths
26
218
content
stringlengths
0
231k
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/intArray.java
/* ----------------------------------------------------------------------------
 * This file was automatically generated by SWIG (http://www.swig.org).
 * Version 4.0.2
 *
 * Do not make changes to this file unless you know what you are doing--modify
 * the SWIG interface file instead.
 * ----------------------------------------------------------------------------- */

package com.twitter.ann.faiss;

// Proxy for a natively-allocated C array of `int` (SWIG carrays-style helper).
// The data lives on the native heap, not in the JVM; indices are NOT bounds-checked
// on the Java side — presumably unchecked natively as well, TODO confirm.
public class intArray {
  // Raw address of the underlying native object; 0 after delete().
  private transient long swigCPtr;
  // True when this proxy owns the native memory and is responsible for freeing it.
  protected transient boolean swigCMemOwn;

  // Wrap an existing native pointer; cMemoryOwn decides whether delete() frees it.
  protected intArray(long cPtr, boolean cMemoryOwn) { swigCMemOwn = cMemoryOwn; swigCPtr = cPtr; }

  // Extract the native pointer from a proxy (0 for null), for passing across JNI.
  protected static long getCPtr(intArray obj) { return (obj == null) ? 0 : obj.swigCPtr; }

  // Finalizer-based cleanup (deprecated JVM mechanism, hence the suppression);
  // callers should invoke delete() explicitly rather than rely on GC timing.
  @SuppressWarnings("deprecation") protected void finalize() { delete(); }

  // Free the native array if owned. Idempotent: swigCPtr is zeroed so a second
  // call (e.g. explicit delete() followed by finalize()) is a no-op.
  public synchronized void delete() { if (swigCPtr != 0) { if (swigCMemOwn) { swigCMemOwn = false; swigfaissJNI.delete_intArray(swigCPtr); } swigCPtr = 0; } }

  // Allocate a new native int array of `nelements`; this proxy owns it.
  public intArray(int nelements) { this(swigfaissJNI.new_intArray(nelements), true); }

  // Read the element at `index` from native memory.
  public int getitem(int index) { return swigfaissJNI.intArray_getitem(swigCPtr, this, index); }

  // Write `value` at `index` in native memory.
  public void setitem(int index, int value) { swigfaissJNI.intArray_setitem(swigCPtr, this, index, value); }

  // View this array as a raw int* for APIs taking SWIGTYPE_p_int.
  // The returned wrapper does NOT own the memory (false), so it must not outlive this array.
  public SWIGTYPE_p_int cast() { long cPtr = swigfaissJNI.intArray_cast(swigCPtr, this); return (cPtr == 0) ? null : new SWIGTYPE_p_int(cPtr, false); }

  // Reinterpret a raw int* as an intArray; the result does not own the memory.
  public static intArray frompointer(SWIGTYPE_p_int t) { long cPtr = swigfaissJNI.intArray_frompointer(SWIGTYPE_p_int.getCPtr(t)); return (cPtr == 0) ? null : new intArray(cPtr, false); }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/int_maxheap_array_t.java
/* ----------------------------------------------------------------------------
 * This file was automatically generated by SWIG (http://www.swig.org).
 * Version 4.0.2
 *
 * Do not make changes to this file unless you know what you are doing--modify
 * the SWIG interface file instead.
 * ----------------------------------------------------------------------------- */

package com.twitter.ann.faiss;

// Proxy for faiss's heap-array-of-max-heaps with int values and 64-bit ids
// (presumably faiss::HeapArray<CMax<int, int64_t>> — confirm against the faiss
// headers). Holds `nh` heaps of capacity `k` each, used to collect k-nearest
// results: values in `val`, matching ids in `ids`.
public class int_maxheap_array_t {
  // Raw address of the underlying native object; 0 after delete().
  private transient long swigCPtr;
  // True when this proxy owns the native memory and must free it.
  protected transient boolean swigCMemOwn;

  // Wrap an existing native pointer; cMemoryOwn decides whether delete() frees it.
  protected int_maxheap_array_t(long cPtr, boolean cMemoryOwn) { swigCMemOwn = cMemoryOwn; swigCPtr = cPtr; }

  // Extract the native pointer from a proxy (0 for null), for passing across JNI.
  protected static long getCPtr(int_maxheap_array_t obj) { return (obj == null) ? 0 : obj.swigCPtr; }

  // Finalizer-based cleanup (deprecated mechanism); prefer calling delete() explicitly.
  @SuppressWarnings("deprecation") protected void finalize() { delete(); }

  // Free the native object if owned. Idempotent: swigCPtr is zeroed on first call.
  public synchronized void delete() { if (swigCPtr != 0) { if (swigCMemOwn) { swigCMemOwn = false; swigfaissJNI.delete_int_maxheap_array_t(swigCPtr); } swigCPtr = 0; } }

  // nh: number of heaps managed by this structure.
  public void setNh(long value) { swigfaissJNI.int_maxheap_array_t_nh_set(swigCPtr, this, value); }
  public long getNh() { return swigfaissJNI.int_maxheap_array_t_nh_get(swigCPtr, this); }

  // k: capacity (number of slots) of each heap.
  public void setK(long value) { swigfaissJNI.int_maxheap_array_t_k_set(swigCPtr, this, value); }
  public long getK() { return swigfaissJNI.int_maxheap_array_t_k_get(swigCPtr, this); }

  // ids: flat buffer of int64 identifiers backing all heaps. The setter passes
  // both the raw pointer and the Java wrapper so the JNI layer can keep the
  // wrapper reachable while the call runs.
  public void setIds(LongVector value) { swigfaissJNI.int_maxheap_array_t_ids_set(swigCPtr, this, SWIGTYPE_p_long_long.getCPtr(value.data()), value); }
  // Returned wrapper does not own the native buffer (false).
  public LongVector getIds() { return new LongVector(swigfaissJNI.int_maxheap_array_t_ids_get(swigCPtr, this), false); }

  // val: flat buffer of int heap values backing all heaps.
  public void setVal(SWIGTYPE_p_int value) { swigfaissJNI.int_maxheap_array_t_val_set(swigCPtr, this, SWIGTYPE_p_int.getCPtr(value)); }
  public SWIGTYPE_p_int getVal() { long cPtr = swigfaissJNI.int_maxheap_array_t_val_get(swigCPtr, this); return (cPtr == 0) ? null : new SWIGTYPE_p_int(cPtr, false); }

  // Pointer into `val` for heap number `key`; non-owning view.
  public SWIGTYPE_p_int get_val(long key) { long cPtr = swigfaissJNI.int_maxheap_array_t_get_val(swigCPtr, this, key); return (cPtr == 0) ? null : new SWIGTYPE_p_int(cPtr, false); }

  // Pointer into `ids` for heap number `key`; non-owning view.
  public LongVector get_ids(long key) { return new LongVector(swigfaissJNI.int_maxheap_array_t_get_ids(swigCPtr, this, key), false); }

  // Initialize all heaps (native-side heapify).
  public void heapify() { swigfaissJNI.int_maxheap_array_t_heapify(swigCPtr, this); }

  // addn overloads: push `nj` candidate values from `vin` into the heaps.
  // The trailing parameters (j0, i0, ni) are defaulted on the C++ side; each
  // Java overload maps to one SWIG-expanded native entry point.
  public void addn(long nj, SWIGTYPE_p_int vin, long j0, long i0, long ni) { swigfaissJNI.int_maxheap_array_t_addn__SWIG_0(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), j0, i0, ni); }
  public void addn(long nj, SWIGTYPE_p_int vin, long j0, long i0) { swigfaissJNI.int_maxheap_array_t_addn__SWIG_1(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), j0, i0); }
  public void addn(long nj, SWIGTYPE_p_int vin, long j0) { swigfaissJNI.int_maxheap_array_t_addn__SWIG_2(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), j0); }
  public void addn(long nj, SWIGTYPE_p_int vin) { swigfaissJNI.int_maxheap_array_t_addn__SWIG_3(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin)); }

  // addn_with_ids overloads: like addn, but candidates carry explicit ids from
  // `id_in` (stride `id_stride` between rows); defaults expanded as above.
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin, LongVector id_in, long id_stride, long i0, long ni) { swigfaissJNI.int_maxheap_array_t_addn_with_ids__SWIG_0(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), SWIGTYPE_p_long_long.getCPtr(id_in.data()), id_in, id_stride, i0, ni); }
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin, LongVector id_in, long id_stride, long i0) { swigfaissJNI.int_maxheap_array_t_addn_with_ids__SWIG_1(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), SWIGTYPE_p_long_long.getCPtr(id_in.data()), id_in, id_stride, i0); }
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin, LongVector id_in, long id_stride) { swigfaissJNI.int_maxheap_array_t_addn_with_ids__SWIG_2(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), SWIGTYPE_p_long_long.getCPtr(id_in.data()), id_in, id_stride); }
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin, LongVector id_in) { swigfaissJNI.int_maxheap_array_t_addn_with_ids__SWIG_3(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), SWIGTYPE_p_long_long.getCPtr(id_in.data()), id_in); }
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin) { swigfaissJNI.int_maxheap_array_t_addn_with_ids__SWIG_4(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin)); }

  // Sort each heap's contents into order (native-side reorder).
  public void reorder() { swigfaissJNI.int_maxheap_array_t_reorder(swigCPtr, this); }

  // Write each heap's extreme value/id into vals_out / idx_out.
  public void per_line_extrema(SWIGTYPE_p_int vals_out, LongVector idx_out) { swigfaissJNI.int_maxheap_array_t_per_line_extrema(swigCPtr, this, SWIGTYPE_p_int.getCPtr(vals_out), SWIGTYPE_p_long_long.getCPtr(idx_out.data()), idx_out); }

  // Allocate a fresh native heap-array; this proxy owns it.
  public int_maxheap_array_t() { this(swigfaissJNI.new_int_maxheap_array_t(), true); }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/int_minheap_array_t.java
/* ----------------------------------------------------------------------------
 * This file was automatically generated by SWIG (http://www.swig.org).
 * Version 4.0.2
 *
 * Do not make changes to this file unless you know what you are doing--modify
 * the SWIG interface file instead.
 * ----------------------------------------------------------------------------- */

package com.twitter.ann.faiss;

// Proxy for faiss's heap-array-of-min-heaps with int values and 64-bit ids
// (presumably faiss::HeapArray<CMin<int, int64_t>> — confirm against the faiss
// headers). Structurally identical to int_maxheap_array_t, differing only in
// the native comparator: `nh` heaps of capacity `k`, values in `val`, ids in `ids`.
public class int_minheap_array_t {
  // Raw address of the underlying native object; 0 after delete().
  private transient long swigCPtr;
  // True when this proxy owns the native memory and must free it.
  protected transient boolean swigCMemOwn;

  // Wrap an existing native pointer; cMemoryOwn decides whether delete() frees it.
  protected int_minheap_array_t(long cPtr, boolean cMemoryOwn) { swigCMemOwn = cMemoryOwn; swigCPtr = cPtr; }

  // Extract the native pointer from a proxy (0 for null), for passing across JNI.
  protected static long getCPtr(int_minheap_array_t obj) { return (obj == null) ? 0 : obj.swigCPtr; }

  // Finalizer-based cleanup (deprecated mechanism); prefer calling delete() explicitly.
  @SuppressWarnings("deprecation") protected void finalize() { delete(); }

  // Free the native object if owned. Idempotent: swigCPtr is zeroed on first call.
  public synchronized void delete() { if (swigCPtr != 0) { if (swigCMemOwn) { swigCMemOwn = false; swigfaissJNI.delete_int_minheap_array_t(swigCPtr); } swigCPtr = 0; } }

  // nh: number of heaps managed by this structure.
  public void setNh(long value) { swigfaissJNI.int_minheap_array_t_nh_set(swigCPtr, this, value); }
  public long getNh() { return swigfaissJNI.int_minheap_array_t_nh_get(swigCPtr, this); }

  // k: capacity (number of slots) of each heap.
  public void setK(long value) { swigfaissJNI.int_minheap_array_t_k_set(swigCPtr, this, value); }
  public long getK() { return swigfaissJNI.int_minheap_array_t_k_get(swigCPtr, this); }

  // ids: flat buffer of int64 identifiers backing all heaps. The setter passes
  // both the raw pointer and the Java wrapper so the JNI layer can keep the
  // wrapper reachable while the call runs.
  public void setIds(LongVector value) { swigfaissJNI.int_minheap_array_t_ids_set(swigCPtr, this, SWIGTYPE_p_long_long.getCPtr(value.data()), value); }
  // Returned wrapper does not own the native buffer (false).
  public LongVector getIds() { return new LongVector(swigfaissJNI.int_minheap_array_t_ids_get(swigCPtr, this), false); }

  // val: flat buffer of int heap values backing all heaps.
  public void setVal(SWIGTYPE_p_int value) { swigfaissJNI.int_minheap_array_t_val_set(swigCPtr, this, SWIGTYPE_p_int.getCPtr(value)); }
  public SWIGTYPE_p_int getVal() { long cPtr = swigfaissJNI.int_minheap_array_t_val_get(swigCPtr, this); return (cPtr == 0) ? null : new SWIGTYPE_p_int(cPtr, false); }

  // Pointer into `val` for heap number `key`; non-owning view.
  public SWIGTYPE_p_int get_val(long key) { long cPtr = swigfaissJNI.int_minheap_array_t_get_val(swigCPtr, this, key); return (cPtr == 0) ? null : new SWIGTYPE_p_int(cPtr, false); }

  // Pointer into `ids` for heap number `key`; non-owning view.
  public LongVector get_ids(long key) { return new LongVector(swigfaissJNI.int_minheap_array_t_get_ids(swigCPtr, this, key), false); }

  // Initialize all heaps (native-side heapify).
  public void heapify() { swigfaissJNI.int_minheap_array_t_heapify(swigCPtr, this); }

  // addn overloads: push `nj` candidate values from `vin` into the heaps.
  // The trailing parameters (j0, i0, ni) are defaulted on the C++ side; each
  // Java overload maps to one SWIG-expanded native entry point.
  public void addn(long nj, SWIGTYPE_p_int vin, long j0, long i0, long ni) { swigfaissJNI.int_minheap_array_t_addn__SWIG_0(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), j0, i0, ni); }
  public void addn(long nj, SWIGTYPE_p_int vin, long j0, long i0) { swigfaissJNI.int_minheap_array_t_addn__SWIG_1(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), j0, i0); }
  public void addn(long nj, SWIGTYPE_p_int vin, long j0) { swigfaissJNI.int_minheap_array_t_addn__SWIG_2(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), j0); }
  public void addn(long nj, SWIGTYPE_p_int vin) { swigfaissJNI.int_minheap_array_t_addn__SWIG_3(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin)); }

  // addn_with_ids overloads: like addn, but candidates carry explicit ids from
  // `id_in` (stride `id_stride` between rows); defaults expanded as above.
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin, LongVector id_in, long id_stride, long i0, long ni) { swigfaissJNI.int_minheap_array_t_addn_with_ids__SWIG_0(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), SWIGTYPE_p_long_long.getCPtr(id_in.data()), id_in, id_stride, i0, ni); }
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin, LongVector id_in, long id_stride, long i0) { swigfaissJNI.int_minheap_array_t_addn_with_ids__SWIG_1(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), SWIGTYPE_p_long_long.getCPtr(id_in.data()), id_in, id_stride, i0); }
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin, LongVector id_in, long id_stride) { swigfaissJNI.int_minheap_array_t_addn_with_ids__SWIG_2(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), SWIGTYPE_p_long_long.getCPtr(id_in.data()), id_in, id_stride); }
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin, LongVector id_in) { swigfaissJNI.int_minheap_array_t_addn_with_ids__SWIG_3(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin), SWIGTYPE_p_long_long.getCPtr(id_in.data()), id_in); }
  public void addn_with_ids(long nj, SWIGTYPE_p_int vin) { swigfaissJNI.int_minheap_array_t_addn_with_ids__SWIG_4(swigCPtr, this, nj, SWIGTYPE_p_int.getCPtr(vin)); }

  // Sort each heap's contents into order (native-side reorder).
  public void reorder() { swigfaissJNI.int_minheap_array_t_reorder(swigCPtr, this); }

  // Write each heap's extreme value/id into vals_out / idx_out.
  public void per_line_extrema(SWIGTYPE_p_int vals_out, LongVector idx_out) { swigfaissJNI.int_minheap_array_t_per_line_extrema(swigCPtr, this, SWIGTYPE_p_int.getCPtr(vals_out), SWIGTYPE_p_long_long.getCPtr(idx_out.data()), idx_out); }

  // Allocate a fresh native heap-array; this proxy owns it.
  public int_minheap_array_t() { this(swigfaissJNI.new_int_minheap_array_t(), true); }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/longArray.java
/* ----------------------------------------------------------------------------
 * This file was automatically generated by SWIG (http://www.swig.org).
 * Version 4.0.2
 *
 * Do not make changes to this file unless you know what you are doing--modify
 * the SWIG interface file instead.
 * ----------------------------------------------------------------------------- */

package com.twitter.ann.faiss;

// Proxy for a natively-allocated C array of 64-bit integers (SWIG carrays-style
// helper; the long-long element type is visible in the SWIGTYPE_p_long_long cast).
// Data lives on the native heap; indices are NOT bounds-checked on the Java side.
public class longArray {
  // Raw address of the underlying native object; 0 after delete().
  private transient long swigCPtr;
  // True when this proxy owns the native memory and is responsible for freeing it.
  protected transient boolean swigCMemOwn;

  // Wrap an existing native pointer; cMemoryOwn decides whether delete() frees it.
  protected longArray(long cPtr, boolean cMemoryOwn) { swigCMemOwn = cMemoryOwn; swigCPtr = cPtr; }

  // Extract the native pointer from a proxy (0 for null), for passing across JNI.
  protected static long getCPtr(longArray obj) { return (obj == null) ? 0 : obj.swigCPtr; }

  // Finalizer-based cleanup (deprecated JVM mechanism, hence the suppression);
  // callers should invoke delete() explicitly rather than rely on GC timing.
  @SuppressWarnings("deprecation") protected void finalize() { delete(); }

  // Free the native array if owned. Idempotent: swigCPtr is zeroed so a second
  // call (e.g. explicit delete() followed by finalize()) is a no-op.
  public synchronized void delete() { if (swigCPtr != 0) { if (swigCMemOwn) { swigCMemOwn = false; swigfaissJNI.delete_longArray(swigCPtr); } swigCPtr = 0; } }

  // Allocate a new native array of `nelements`; this proxy owns it.
  public longArray(int nelements) { this(swigfaissJNI.new_longArray(nelements), true); }

  // Read the element at `index` from native memory.
  public long getitem(int index) { return swigfaissJNI.longArray_getitem(swigCPtr, this, index); }

  // Write `value` at `index` in native memory.
  public void setitem(int index, long value) { swigfaissJNI.longArray_setitem(swigCPtr, this, index, value); }

  // View this array as a raw pointer for APIs taking SWIGTYPE_p_long_long.
  // The returned wrapper does NOT own the memory (false), so it must not outlive this array.
  public SWIGTYPE_p_long_long cast() { long cPtr = swigfaissJNI.longArray_cast(swigCPtr, this); return (cPtr == 0) ? null : new SWIGTYPE_p_long_long(cPtr, false); }

  // Reinterpret a raw pointer as a longArray; the result does not own the memory.
  public static longArray frompointer(SWIGTYPE_p_long_long t) { long cPtr = swigfaissJNI.longArray_frompointer(SWIGTYPE_p_long_long.getCPtr(t)); return (cPtr == 0) ? null : new longArray(cPtr, false); }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/resources/.gitignore
*.so *.so.0 *.so.1 *.so.3 *.so.5 *.so.6 *.dylib
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/resources/.gitkeep
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/resources/BUILD
resources( name = "resources", sources = [ "*.dylib", "*.so", "*.so.0", "*.so.1", "*.so.3", "*.so.5", "*.so.6", ], tags = [ "bazel-compatible", "bazel-only", "visibility://visibility:private", ], )
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/swigfaiss.java
/* ---------------------------------------------------------------------------- * This file was automatically generated by SWIG (http://www.swig.org). * Version 4.0.2 * * Do not make changes to this file unless you know what you are doing--modify * the SWIG interface file instead. * ----------------------------------------------------------------------------- */ package com.twitter.ann.faiss; public class swigfaiss implements swigfaissConstants { public static void bitvec_print(SWIGTYPE_p_unsigned_char b, long d) { swigfaissJNI.bitvec_print(SWIGTYPE_p_unsigned_char.getCPtr(b), d); } public static void fvecs2bitvecs(SWIGTYPE_p_float x, SWIGTYPE_p_unsigned_char b, long d, long n) { swigfaissJNI.fvecs2bitvecs(SWIGTYPE_p_float.getCPtr(x), SWIGTYPE_p_unsigned_char.getCPtr(b), d, n); } public static void bitvecs2fvecs(SWIGTYPE_p_unsigned_char b, SWIGTYPE_p_float x, long d, long n) { swigfaissJNI.bitvecs2fvecs(SWIGTYPE_p_unsigned_char.getCPtr(b), SWIGTYPE_p_float.getCPtr(x), d, n); } public static void fvec2bitvec(SWIGTYPE_p_float x, SWIGTYPE_p_unsigned_char b, long d) { swigfaissJNI.fvec2bitvec(SWIGTYPE_p_float.getCPtr(x), SWIGTYPE_p_unsigned_char.getCPtr(b), d); } public static void bitvec_shuffle(long n, long da, long db, SWIGTYPE_p_int order, SWIGTYPE_p_unsigned_char a, SWIGTYPE_p_unsigned_char b) { swigfaissJNI.bitvec_shuffle(n, da, db, SWIGTYPE_p_int.getCPtr(order), SWIGTYPE_p_unsigned_char.getCPtr(a), SWIGTYPE_p_unsigned_char.getCPtr(b)); } public static void setHamming_batch_size(long value) { swigfaissJNI.hamming_batch_size_set(value); } public static long getHamming_batch_size() { return swigfaissJNI.hamming_batch_size_get(); } public static int popcount64(long x) { return swigfaissJNI.popcount64(x); } public static void hammings(SWIGTYPE_p_unsigned_char a, SWIGTYPE_p_unsigned_char b, long na, long nb, long nbytespercode, SWIGTYPE_p_int dis) { swigfaissJNI.hammings(SWIGTYPE_p_unsigned_char.getCPtr(a), SWIGTYPE_p_unsigned_char.getCPtr(b), na, nb, nbytespercode, 
SWIGTYPE_p_int.getCPtr(dis)); } public static void hammings_knn_hc(SWIGTYPE_p_faiss__HeapArrayT_faiss__CMaxT_int_int64_t_t_t ha, SWIGTYPE_p_unsigned_char a, SWIGTYPE_p_unsigned_char b, long nb, long ncodes, int ordered) { swigfaissJNI.hammings_knn_hc(SWIGTYPE_p_faiss__HeapArrayT_faiss__CMaxT_int_int64_t_t_t.getCPtr(ha), SWIGTYPE_p_unsigned_char.getCPtr(a), SWIGTYPE_p_unsigned_char.getCPtr(b), nb, ncodes, ordered); } public static void hammings_knn(SWIGTYPE_p_faiss__HeapArrayT_faiss__CMaxT_int_int64_t_t_t ha, SWIGTYPE_p_unsigned_char a, SWIGTYPE_p_unsigned_char b, long nb, long ncodes, int ordered) { swigfaissJNI.hammings_knn(SWIGTYPE_p_faiss__HeapArrayT_faiss__CMaxT_int_int64_t_t_t.getCPtr(ha), SWIGTYPE_p_unsigned_char.getCPtr(a), SWIGTYPE_p_unsigned_char.getCPtr(b), nb, ncodes, ordered); } public static void hammings_knn_mc(SWIGTYPE_p_unsigned_char a, SWIGTYPE_p_unsigned_char b, long na, long nb, long k, long ncodes, SWIGTYPE_p_int distances, LongVector labels) { swigfaissJNI.hammings_knn_mc(SWIGTYPE_p_unsigned_char.getCPtr(a), SWIGTYPE_p_unsigned_char.getCPtr(b), na, nb, k, ncodes, SWIGTYPE_p_int.getCPtr(distances), SWIGTYPE_p_long_long.getCPtr(labels.data()), labels); } public static void hamming_range_search(SWIGTYPE_p_unsigned_char a, SWIGTYPE_p_unsigned_char b, long na, long nb, int radius, long ncodes, RangeSearchResult result) { swigfaissJNI.hamming_range_search(SWIGTYPE_p_unsigned_char.getCPtr(a), SWIGTYPE_p_unsigned_char.getCPtr(b), na, nb, radius, ncodes, RangeSearchResult.getCPtr(result), result); } public static void hamming_count_thres(SWIGTYPE_p_unsigned_char bs1, SWIGTYPE_p_unsigned_char bs2, long n1, long n2, int ht, long ncodes, SWIGTYPE_p_unsigned_long nptr) { swigfaissJNI.hamming_count_thres(SWIGTYPE_p_unsigned_char.getCPtr(bs1), SWIGTYPE_p_unsigned_char.getCPtr(bs2), n1, n2, ht, ncodes, SWIGTYPE_p_unsigned_long.getCPtr(nptr)); } public static long match_hamming_thres(SWIGTYPE_p_unsigned_char bs1, SWIGTYPE_p_unsigned_char bs2, long n1, long n2, 
int ht, long ncodes, LongVector idx, SWIGTYPE_p_int dis) { return swigfaissJNI.match_hamming_thres(SWIGTYPE_p_unsigned_char.getCPtr(bs1), SWIGTYPE_p_unsigned_char.getCPtr(bs2), n1, n2, ht, ncodes, SWIGTYPE_p_long_long.getCPtr(idx.data()), idx, SWIGTYPE_p_int.getCPtr(dis)); } public static void crosshamming_count_thres(SWIGTYPE_p_unsigned_char dbs, long n, int ht, long ncodes, SWIGTYPE_p_unsigned_long nptr) { swigfaissJNI.crosshamming_count_thres(SWIGTYPE_p_unsigned_char.getCPtr(dbs), n, ht, ncodes, SWIGTYPE_p_unsigned_long.getCPtr(nptr)); } public static int get_num_gpus() { return swigfaissJNI.get_num_gpus(); } public static String get_compile_options() { return swigfaissJNI.get_compile_options(); } public static double getmillisecs() { return swigfaissJNI.getmillisecs(); } public static long get_mem_usage_kb() { return swigfaissJNI.get_mem_usage_kb(); } public static long get_cycles() { return swigfaissJNI.get_cycles(); } public static void fvec_madd(long n, SWIGTYPE_p_float a, float bf, SWIGTYPE_p_float b, SWIGTYPE_p_float c) { swigfaissJNI.fvec_madd(n, SWIGTYPE_p_float.getCPtr(a), bf, SWIGTYPE_p_float.getCPtr(b), SWIGTYPE_p_float.getCPtr(c)); } public static int fvec_madd_and_argmin(long n, SWIGTYPE_p_float a, float bf, SWIGTYPE_p_float b, SWIGTYPE_p_float c) { return swigfaissJNI.fvec_madd_and_argmin(n, SWIGTYPE_p_float.getCPtr(a), bf, SWIGTYPE_p_float.getCPtr(b), SWIGTYPE_p_float.getCPtr(c)); } public static void reflection(SWIGTYPE_p_float u, SWIGTYPE_p_float x, long n, long d, long nu) { swigfaissJNI.reflection(SWIGTYPE_p_float.getCPtr(u), SWIGTYPE_p_float.getCPtr(x), n, d, nu); } public static void matrix_qr(int m, int n, SWIGTYPE_p_float a) { swigfaissJNI.matrix_qr(m, n, SWIGTYPE_p_float.getCPtr(a)); } public static void ranklist_handle_ties(int k, LongVector idx, SWIGTYPE_p_float dis) { swigfaissJNI.ranklist_handle_ties(k, SWIGTYPE_p_long_long.getCPtr(idx.data()), idx, SWIGTYPE_p_float.getCPtr(dis)); } public static long ranklist_intersection_size(long 
k1, LongVector v1, long k2, LongVector v2) { return swigfaissJNI.ranklist_intersection_size(k1, SWIGTYPE_p_long_long.getCPtr(v1.data()), v1, k2, SWIGTYPE_p_long_long.getCPtr(v2.data()), v2); } public static long merge_result_table_with(long n, long k, LongVector I0, SWIGTYPE_p_float D0, LongVector I1, SWIGTYPE_p_float D1, boolean keep_min, long translation) { return swigfaissJNI.merge_result_table_with__SWIG_0(n, k, SWIGTYPE_p_long_long.getCPtr(I0.data()), I0, SWIGTYPE_p_float.getCPtr(D0), SWIGTYPE_p_long_long.getCPtr(I1.data()), I1, SWIGTYPE_p_float.getCPtr(D1), keep_min, translation); } public static long merge_result_table_with(long n, long k, LongVector I0, SWIGTYPE_p_float D0, LongVector I1, SWIGTYPE_p_float D1, boolean keep_min) { return swigfaissJNI.merge_result_table_with__SWIG_1(n, k, SWIGTYPE_p_long_long.getCPtr(I0.data()), I0, SWIGTYPE_p_float.getCPtr(D0), SWIGTYPE_p_long_long.getCPtr(I1.data()), I1, SWIGTYPE_p_float.getCPtr(D1), keep_min); } public static long merge_result_table_with(long n, long k, LongVector I0, SWIGTYPE_p_float D0, LongVector I1, SWIGTYPE_p_float D1) { return swigfaissJNI.merge_result_table_with__SWIG_2(n, k, SWIGTYPE_p_long_long.getCPtr(I0.data()), I0, SWIGTYPE_p_float.getCPtr(D0), SWIGTYPE_p_long_long.getCPtr(I1.data()), I1, SWIGTYPE_p_float.getCPtr(D1)); } public static double imbalance_factor(int n, int k, LongVector assign) { return swigfaissJNI.imbalance_factor__SWIG_0(n, k, SWIGTYPE_p_long_long.getCPtr(assign.data()), assign); } public static double imbalance_factor(int k, SWIGTYPE_p_int hist) { return swigfaissJNI.imbalance_factor__SWIG_1(k, SWIGTYPE_p_int.getCPtr(hist)); } public static void fvec_argsort(long n, SWIGTYPE_p_float vals, SWIGTYPE_p_unsigned_long perm) { swigfaissJNI.fvec_argsort(n, SWIGTYPE_p_float.getCPtr(vals), SWIGTYPE_p_unsigned_long.getCPtr(perm)); } public static void fvec_argsort_parallel(long n, SWIGTYPE_p_float vals, SWIGTYPE_p_unsigned_long perm) { swigfaissJNI.fvec_argsort_parallel(n, 
SWIGTYPE_p_float.getCPtr(vals), SWIGTYPE_p_unsigned_long.getCPtr(perm)); } public static int ivec_hist(long n, SWIGTYPE_p_int v, int vmax, SWIGTYPE_p_int hist) { return swigfaissJNI.ivec_hist(n, SWIGTYPE_p_int.getCPtr(v), vmax, SWIGTYPE_p_int.getCPtr(hist)); } public static void bincode_hist(long n, long nbits, SWIGTYPE_p_unsigned_char codes, SWIGTYPE_p_int hist) { swigfaissJNI.bincode_hist(n, nbits, SWIGTYPE_p_unsigned_char.getCPtr(codes), SWIGTYPE_p_int.getCPtr(hist)); } public static long ivec_checksum(long n, SWIGTYPE_p_int a) { return swigfaissJNI.ivec_checksum(n, SWIGTYPE_p_int.getCPtr(a)); } public static SWIGTYPE_p_float fvecs_maybe_subsample(long d, SWIGTYPE_p_unsigned_long n, long nmax, SWIGTYPE_p_float x, boolean verbose, long seed) { long cPtr = swigfaissJNI.fvecs_maybe_subsample__SWIG_0(d, SWIGTYPE_p_unsigned_long.getCPtr(n), nmax, SWIGTYPE_p_float.getCPtr(x), verbose, seed); return (cPtr == 0) ? null : new SWIGTYPE_p_float(cPtr, false); } public static SWIGTYPE_p_float fvecs_maybe_subsample(long d, SWIGTYPE_p_unsigned_long n, long nmax, SWIGTYPE_p_float x, boolean verbose) { long cPtr = swigfaissJNI.fvecs_maybe_subsample__SWIG_1(d, SWIGTYPE_p_unsigned_long.getCPtr(n), nmax, SWIGTYPE_p_float.getCPtr(x), verbose); return (cPtr == 0) ? null : new SWIGTYPE_p_float(cPtr, false); } public static SWIGTYPE_p_float fvecs_maybe_subsample(long d, SWIGTYPE_p_unsigned_long n, long nmax, SWIGTYPE_p_float x) { long cPtr = swigfaissJNI.fvecs_maybe_subsample__SWIG_2(d, SWIGTYPE_p_unsigned_long.getCPtr(n), nmax, SWIGTYPE_p_float.getCPtr(x)); return (cPtr == 0) ? 
null : new SWIGTYPE_p_float(cPtr, false); } public static void binary_to_real(long d, SWIGTYPE_p_unsigned_char x_in, SWIGTYPE_p_float x_out) { swigfaissJNI.binary_to_real(d, SWIGTYPE_p_unsigned_char.getCPtr(x_in), SWIGTYPE_p_float.getCPtr(x_out)); } public static void real_to_binary(long d, SWIGTYPE_p_float x_in, SWIGTYPE_p_unsigned_char x_out) { swigfaissJNI.real_to_binary(d, SWIGTYPE_p_float.getCPtr(x_in), SWIGTYPE_p_unsigned_char.getCPtr(x_out)); } public static long hash_bytes(SWIGTYPE_p_unsigned_char bytes, long n) { return swigfaissJNI.hash_bytes(SWIGTYPE_p_unsigned_char.getCPtr(bytes), n); } public static boolean check_openmp() { return swigfaissJNI.check_openmp(); } public static float kmeans_clustering(long d, long n, long k, SWIGTYPE_p_float x, SWIGTYPE_p_float centroids) { return swigfaissJNI.kmeans_clustering(d, n, k, SWIGTYPE_p_float.getCPtr(x), SWIGTYPE_p_float.getCPtr(centroids)); } public static void setIndexPQ_stats(IndexPQStats value) { swigfaissJNI.indexPQ_stats_set(IndexPQStats.getCPtr(value), value); } public static IndexPQStats getIndexPQ_stats() { long cPtr = swigfaissJNI.indexPQ_stats_get(); return (cPtr == 0) ? null : new IndexPQStats(cPtr, false); } public static void setIndexIVF_stats(IndexIVFStats value) { swigfaissJNI.indexIVF_stats_set(IndexIVFStats.getCPtr(value), value); } public static IndexIVFStats getIndexIVF_stats() { long cPtr = swigfaissJNI.indexIVF_stats_get(); return (cPtr == 0) ? 
null : new IndexIVFStats(cPtr, false); } public static short[] getHamdis_tab_ham_bytes() { return swigfaissJNI.hamdis_tab_ham_bytes_get(); } public static int generalized_hamming_64(long a) { return swigfaissJNI.generalized_hamming_64(a); } public static void generalized_hammings_knn_hc(SWIGTYPE_p_faiss__HeapArrayT_faiss__CMaxT_int_int64_t_t_t ha, SWIGTYPE_p_unsigned_char a, SWIGTYPE_p_unsigned_char b, long nb, long code_size, int ordered) { swigfaissJNI.generalized_hammings_knn_hc__SWIG_0(SWIGTYPE_p_faiss__HeapArrayT_faiss__CMaxT_int_int64_t_t_t.getCPtr(ha), SWIGTYPE_p_unsigned_char.getCPtr(a), SWIGTYPE_p_unsigned_char.getCPtr(b), nb, code_size, ordered); } public static void generalized_hammings_knn_hc(SWIGTYPE_p_faiss__HeapArrayT_faiss__CMaxT_int_int64_t_t_t ha, SWIGTYPE_p_unsigned_char a, SWIGTYPE_p_unsigned_char b, long nb, long code_size) { swigfaissJNI.generalized_hammings_knn_hc__SWIG_1(SWIGTYPE_p_faiss__HeapArrayT_faiss__CMaxT_int_int64_t_t_t.getCPtr(ha), SWIGTYPE_p_unsigned_char.getCPtr(a), SWIGTYPE_p_unsigned_char.getCPtr(b), nb, code_size); } public static void check_compatible_for_merge(Index index1, Index index2) { swigfaissJNI.check_compatible_for_merge(Index.getCPtr(index1), index1, Index.getCPtr(index2), index2); } public static IndexIVF extract_index_ivf(Index index) { long cPtr = swigfaissJNI.extract_index_ivf__SWIG_0(Index.getCPtr(index), index); return (cPtr == 0) ? null : new IndexIVF(cPtr, false); } public static IndexIVF try_extract_index_ivf(Index index) { long cPtr = swigfaissJNI.try_extract_index_ivf__SWIG_0(Index.getCPtr(index), index); return (cPtr == 0) ? 
null : new IndexIVF(cPtr, false); } public static void merge_into(Index index0, Index index1, boolean shift_ids) { swigfaissJNI.merge_into(Index.getCPtr(index0), index0, Index.getCPtr(index1), index1, shift_ids); } public static void search_centroid(Index index, SWIGTYPE_p_float x, int n, LongVector centroid_ids) { swigfaissJNI.search_centroid(Index.getCPtr(index), index, SWIGTYPE_p_float.getCPtr(x), n, SWIGTYPE_p_long_long.getCPtr(centroid_ids.data()), centroid_ids); } public static void search_and_return_centroids(Index index, long n, SWIGTYPE_p_float xin, int k, SWIGTYPE_p_float distances, LongVector labels, LongVector query_centroid_ids, LongVector result_centroid_ids) { swigfaissJNI.search_and_return_centroids(Index.getCPtr(index), index, n, SWIGTYPE_p_float.getCPtr(xin), k, SWIGTYPE_p_float.getCPtr(distances), SWIGTYPE_p_long_long.getCPtr(labels.data()), labels, SWIGTYPE_p_long_long.getCPtr(query_centroid_ids.data()), query_centroid_ids, SWIGTYPE_p_long_long.getCPtr(result_centroid_ids.data()), result_centroid_ids); } public static ArrayInvertedLists get_invlist_range(Index index, int i0, int i1) { long cPtr = swigfaissJNI.get_invlist_range(Index.getCPtr(index), index, i0, i1); return (cPtr == 0) ? 
null : new ArrayInvertedLists(cPtr, false); } public static void set_invlist_range(Index index, int i0, int i1, ArrayInvertedLists src) { swigfaissJNI.set_invlist_range(Index.getCPtr(index), index, i0, i1, ArrayInvertedLists.getCPtr(src), src); } public static void search_with_parameters(Index index, long n, SWIGTYPE_p_float x, long k, SWIGTYPE_p_float distances, LongVector labels, IVFSearchParameters params, SWIGTYPE_p_unsigned_long nb_dis, SWIGTYPE_p_double ms_per_stage) { swigfaissJNI.search_with_parameters__SWIG_0(Index.getCPtr(index), index, n, SWIGTYPE_p_float.getCPtr(x), k, SWIGTYPE_p_float.getCPtr(distances), SWIGTYPE_p_long_long.getCPtr(labels.data()), labels, IVFSearchParameters.getCPtr(params), params, SWIGTYPE_p_unsigned_long.getCPtr(nb_dis), SWIGTYPE_p_double.getCPtr(ms_per_stage)); } public static void search_with_parameters(Index index, long n, SWIGTYPE_p_float x, long k, SWIGTYPE_p_float distances, LongVector labels, IVFSearchParameters params, SWIGTYPE_p_unsigned_long nb_dis) { swigfaissJNI.search_with_parameters__SWIG_1(Index.getCPtr(index), index, n, SWIGTYPE_p_float.getCPtr(x), k, SWIGTYPE_p_float.getCPtr(distances), SWIGTYPE_p_long_long.getCPtr(labels.data()), labels, IVFSearchParameters.getCPtr(params), params, SWIGTYPE_p_unsigned_long.getCPtr(nb_dis)); } public static void search_with_parameters(Index index, long n, SWIGTYPE_p_float x, long k, SWIGTYPE_p_float distances, LongVector labels, IVFSearchParameters params) { swigfaissJNI.search_with_parameters__SWIG_2(Index.getCPtr(index), index, n, SWIGTYPE_p_float.getCPtr(x), k, SWIGTYPE_p_float.getCPtr(distances), SWIGTYPE_p_long_long.getCPtr(labels.data()), labels, IVFSearchParameters.getCPtr(params), params); } public static void range_search_with_parameters(Index index, long n, SWIGTYPE_p_float x, float radius, RangeSearchResult result, IVFSearchParameters params, SWIGTYPE_p_unsigned_long nb_dis, SWIGTYPE_p_double ms_per_stage) { 
// ---------------------------------------------------------------------------
// NOTE(review): SWIG-generated JNI bridge (tail of the static `swigfaiss`
// utility class; the class header and earlier methods precede this chunk).
// Do not hand-edit: the `__SWIG_n` overload suffixes and argument shapes must
// match the JNI-mangled native symbols exactly, and any manual change is lost
// when the bindings are regenerated from the SWIG interface (.i) file.
//
// Contents of this span:
//  - range_search_with_parameters overloads (tail of the 3-arg-stats variant,
//    then the 1-stat and no-stat variants);
//  - global stats accessors (hnsw_stats, indexIVFPQ_stats, partition_stats)
//    and the IVFPQ precomputed-table setup helper;
//  - downcast/upcast helpers between Index/VectorTransform/IndexBinary
//    wrappers and their concrete native subtypes;
//  - index serialization: write_index / write_index_binary and
//    read_index / read_index_binary overloads (filename, FILE*, IOReader),
//    plus the IO_FLAG_* constants;
//  - VectorTransform / ProductQuantizer / InvertedLists (de)serialization;
//  - index_factory / index_binary_factory string-description constructors;
//  - SIMD histogram and CMin/CMax partition-fuzzy helpers;
//  - OpenMP thread control and raw-pointer cast utilities.
//
// Ownership convention (visible in this span): every `long cPtr` is a raw
// native heap address; wrappers constructed with `true` (e.g. the read_*
// results) own the native object and free it on delete()/finalize, while
// wrappers constructed with `false` (e.g. getPartition_stats) are non-owning
// views of memory managed on the C++ side.
// ---------------------------------------------------------------------------
swigfaissJNI.range_search_with_parameters__SWIG_0(Index.getCPtr(index), index, n, SWIGTYPE_p_float.getCPtr(x), radius, RangeSearchResult.getCPtr(result), result, IVFSearchParameters.getCPtr(params), params, SWIGTYPE_p_unsigned_long.getCPtr(nb_dis), SWIGTYPE_p_double.getCPtr(ms_per_stage)); } public static void range_search_with_parameters(Index index, long n, SWIGTYPE_p_float x, float radius, RangeSearchResult result, IVFSearchParameters params, SWIGTYPE_p_unsigned_long nb_dis) { swigfaissJNI.range_search_with_parameters__SWIG_1(Index.getCPtr(index), index, n, SWIGTYPE_p_float.getCPtr(x), radius, RangeSearchResult.getCPtr(result), result, IVFSearchParameters.getCPtr(params), params, SWIGTYPE_p_unsigned_long.getCPtr(nb_dis)); } public static void range_search_with_parameters(Index index, long n, SWIGTYPE_p_float x, float radius, RangeSearchResult result, IVFSearchParameters params) { swigfaissJNI.range_search_with_parameters__SWIG_2(Index.getCPtr(index), index, n, SWIGTYPE_p_float.getCPtr(x), radius, RangeSearchResult.getCPtr(result), result, IVFSearchParameters.getCPtr(params), params); } public static void setHnsw_stats(HNSWStats value) { swigfaissJNI.hnsw_stats_set(HNSWStats.getCPtr(value), value); } public static HNSWStats getHnsw_stats() { long cPtr = swigfaissJNI.hnsw_stats_get(); return (cPtr == 0) ? 
null : new HNSWStats(cPtr, false); } public static void setPrecomputed_table_max_bytes(long value) { swigfaissJNI.precomputed_table_max_bytes_set(value); } public static long getPrecomputed_table_max_bytes() { return swigfaissJNI.precomputed_table_max_bytes_get(); } public static void initialize_IVFPQ_precomputed_table(SWIGTYPE_p_int use_precomputed_table, Index quantizer, ProductQuantizer pq, SWIGTYPE_p_AlignedTableT_float_32_t precomputed_table, boolean verbose) { swigfaissJNI.initialize_IVFPQ_precomputed_table(SWIGTYPE_p_int.getCPtr(use_precomputed_table), Index.getCPtr(quantizer), quantizer, ProductQuantizer.getCPtr(pq), pq, SWIGTYPE_p_AlignedTableT_float_32_t.getCPtr(precomputed_table), verbose); } public static void setIndexIVFPQ_stats(IndexIVFPQStats value) { swigfaissJNI.indexIVFPQ_stats_set(IndexIVFPQStats.getCPtr(value), value); } public static IndexIVFPQStats getIndexIVFPQ_stats() { long cPtr = swigfaissJNI.indexIVFPQ_stats_get(); return (cPtr == 0) ? null : new IndexIVFPQStats(cPtr, false); } public static Index downcast_index(Index index) { long cPtr = swigfaissJNI.downcast_index(Index.getCPtr(index), index); return (cPtr == 0) ? null : new Index(cPtr, false); } public static VectorTransform downcast_VectorTransform(VectorTransform vt) { long cPtr = swigfaissJNI.downcast_VectorTransform(VectorTransform.getCPtr(vt), vt); return (cPtr == 0) ? null : new VectorTransform(cPtr, false); } public static IndexBinary downcast_IndexBinary(IndexBinary index) { long cPtr = swigfaissJNI.downcast_IndexBinary(IndexBinary.getCPtr(index), index); return (cPtr == 0) ? null : new IndexBinary(cPtr, false); } public static Index upcast_IndexShards(IndexShards index) { long cPtr = swigfaissJNI.upcast_IndexShards(IndexShards.getCPtr(index), index); return (cPtr == 0) ? 
null : new Index(cPtr, false); } public static void write_index(Index idx, String fname) { swigfaissJNI.write_index__SWIG_0(Index.getCPtr(idx), idx, fname); } public static void write_index(Index idx, SWIGTYPE_p_FILE f) { swigfaissJNI.write_index__SWIG_1(Index.getCPtr(idx), idx, SWIGTYPE_p_FILE.getCPtr(f)); } public static void write_index(Index idx, SWIGTYPE_p_faiss__IOWriter writer) { swigfaissJNI.write_index__SWIG_2(Index.getCPtr(idx), idx, SWIGTYPE_p_faiss__IOWriter.getCPtr(writer)); } public static void write_index_binary(IndexBinary idx, String fname) { swigfaissJNI.write_index_binary__SWIG_0(IndexBinary.getCPtr(idx), idx, fname); } public static void write_index_binary(IndexBinary idx, SWIGTYPE_p_FILE f) { swigfaissJNI.write_index_binary__SWIG_1(IndexBinary.getCPtr(idx), idx, SWIGTYPE_p_FILE.getCPtr(f)); } public static void write_index_binary(IndexBinary idx, SWIGTYPE_p_faiss__IOWriter writer) { swigfaissJNI.write_index_binary__SWIG_2(IndexBinary.getCPtr(idx), idx, SWIGTYPE_p_faiss__IOWriter.getCPtr(writer)); } public static int getIO_FLAG_READ_ONLY() { return swigfaissJNI.IO_FLAG_READ_ONLY_get(); } public static int getIO_FLAG_ONDISK_SAME_DIR() { return swigfaissJNI.IO_FLAG_ONDISK_SAME_DIR_get(); } public static int getIO_FLAG_SKIP_IVF_DATA() { return swigfaissJNI.IO_FLAG_SKIP_IVF_DATA_get(); } public static int getIO_FLAG_MMAP() { return swigfaissJNI.IO_FLAG_MMAP_get(); } public static Index read_index(String fname, int io_flags) { long cPtr = swigfaissJNI.read_index__SWIG_0(fname, io_flags); return (cPtr == 0) ? null : new Index(cPtr, true); } public static Index read_index(String fname) { long cPtr = swigfaissJNI.read_index__SWIG_1(fname); return (cPtr == 0) ? null : new Index(cPtr, true); } public static Index read_index(SWIGTYPE_p_FILE f, int io_flags) { long cPtr = swigfaissJNI.read_index__SWIG_2(SWIGTYPE_p_FILE.getCPtr(f), io_flags); return (cPtr == 0) ? 
null : new Index(cPtr, true); } public static Index read_index(SWIGTYPE_p_FILE f) { long cPtr = swigfaissJNI.read_index__SWIG_3(SWIGTYPE_p_FILE.getCPtr(f)); return (cPtr == 0) ? null : new Index(cPtr, true); } public static Index read_index(SWIGTYPE_p_faiss__IOReader reader, int io_flags) { long cPtr = swigfaissJNI.read_index__SWIG_4(SWIGTYPE_p_faiss__IOReader.getCPtr(reader), io_flags); return (cPtr == 0) ? null : new Index(cPtr, true); } public static Index read_index(SWIGTYPE_p_faiss__IOReader reader) { long cPtr = swigfaissJNI.read_index__SWIG_5(SWIGTYPE_p_faiss__IOReader.getCPtr(reader)); return (cPtr == 0) ? null : new Index(cPtr, true); } public static IndexBinary read_index_binary(String fname, int io_flags) { long cPtr = swigfaissJNI.read_index_binary__SWIG_0(fname, io_flags); return (cPtr == 0) ? null : new IndexBinary(cPtr, true); } public static IndexBinary read_index_binary(String fname) { long cPtr = swigfaissJNI.read_index_binary__SWIG_1(fname); return (cPtr == 0) ? null : new IndexBinary(cPtr, true); } public static IndexBinary read_index_binary(SWIGTYPE_p_FILE f, int io_flags) { long cPtr = swigfaissJNI.read_index_binary__SWIG_2(SWIGTYPE_p_FILE.getCPtr(f), io_flags); return (cPtr == 0) ? null : new IndexBinary(cPtr, true); } public static IndexBinary read_index_binary(SWIGTYPE_p_FILE f) { long cPtr = swigfaissJNI.read_index_binary__SWIG_3(SWIGTYPE_p_FILE.getCPtr(f)); return (cPtr == 0) ? null : new IndexBinary(cPtr, true); } public static IndexBinary read_index_binary(SWIGTYPE_p_faiss__IOReader reader, int io_flags) { long cPtr = swigfaissJNI.read_index_binary__SWIG_4(SWIGTYPE_p_faiss__IOReader.getCPtr(reader), io_flags); return (cPtr == 0) ? null : new IndexBinary(cPtr, true); } public static IndexBinary read_index_binary(SWIGTYPE_p_faiss__IOReader reader) { long cPtr = swigfaissJNI.read_index_binary__SWIG_5(SWIGTYPE_p_faiss__IOReader.getCPtr(reader)); return (cPtr == 0) ? 
null : new IndexBinary(cPtr, true); } public static void write_VectorTransform(VectorTransform vt, String fname) { swigfaissJNI.write_VectorTransform(VectorTransform.getCPtr(vt), vt, fname); } public static VectorTransform read_VectorTransform(String fname) { long cPtr = swigfaissJNI.read_VectorTransform(fname); return (cPtr == 0) ? null : new VectorTransform(cPtr, true); } public static ProductQuantizer read_ProductQuantizer(String fname) { long cPtr = swigfaissJNI.read_ProductQuantizer__SWIG_0(fname); return (cPtr == 0) ? null : new ProductQuantizer(cPtr, true); } public static ProductQuantizer read_ProductQuantizer(SWIGTYPE_p_faiss__IOReader reader) { long cPtr = swigfaissJNI.read_ProductQuantizer__SWIG_1(SWIGTYPE_p_faiss__IOReader.getCPtr(reader)); return (cPtr == 0) ? null : new ProductQuantizer(cPtr, true); } public static void write_ProductQuantizer(ProductQuantizer pq, String fname) { swigfaissJNI.write_ProductQuantizer__SWIG_0(ProductQuantizer.getCPtr(pq), pq, fname); } public static void write_ProductQuantizer(ProductQuantizer pq, SWIGTYPE_p_faiss__IOWriter f) { swigfaissJNI.write_ProductQuantizer__SWIG_1(ProductQuantizer.getCPtr(pq), pq, SWIGTYPE_p_faiss__IOWriter.getCPtr(f)); } public static void write_InvertedLists(InvertedLists ils, SWIGTYPE_p_faiss__IOWriter f) { swigfaissJNI.write_InvertedLists(InvertedLists.getCPtr(ils), ils, SWIGTYPE_p_faiss__IOWriter.getCPtr(f)); } public static InvertedLists read_InvertedLists(SWIGTYPE_p_faiss__IOReader reader, int io_flags) { long cPtr = swigfaissJNI.read_InvertedLists__SWIG_0(SWIGTYPE_p_faiss__IOReader.getCPtr(reader), io_flags); return (cPtr == 0) ? null : new InvertedLists(cPtr, false); } public static InvertedLists read_InvertedLists(SWIGTYPE_p_faiss__IOReader reader) { long cPtr = swigfaissJNI.read_InvertedLists__SWIG_1(SWIGTYPE_p_faiss__IOReader.getCPtr(reader)); return (cPtr == 0) ? 
null : new InvertedLists(cPtr, false); } public static Index index_factory(int d, String description, MetricType metric) { long cPtr = swigfaissJNI.index_factory__SWIG_0(d, description, metric.swigValue()); return (cPtr == 0) ? null : new Index(cPtr, true); } public static Index index_factory(int d, String description) { long cPtr = swigfaissJNI.index_factory__SWIG_1(d, description); return (cPtr == 0) ? null : new Index(cPtr, true); } public static void setIndex_factory_verbose(int value) { swigfaissJNI.index_factory_verbose_set(value); } public static int getIndex_factory_verbose() { return swigfaissJNI.index_factory_verbose_get(); } public static IndexBinary index_binary_factory(int d, String description) { long cPtr = swigfaissJNI.index_binary_factory(d, description); return (cPtr == 0) ? null : new IndexBinary(cPtr, true); } public static void simd_histogram_8(SWIGTYPE_p_uint16_t data, int n, SWIGTYPE_p_uint16_t min, int shift, SWIGTYPE_p_int hist) { swigfaissJNI.simd_histogram_8(SWIGTYPE_p_uint16_t.getCPtr(data), n, SWIGTYPE_p_uint16_t.getCPtr(min), shift, SWIGTYPE_p_int.getCPtr(hist)); } public static void simd_histogram_16(SWIGTYPE_p_uint16_t data, int n, SWIGTYPE_p_uint16_t min, int shift, SWIGTYPE_p_int hist) { swigfaissJNI.simd_histogram_16(SWIGTYPE_p_uint16_t.getCPtr(data), n, SWIGTYPE_p_uint16_t.getCPtr(min), shift, SWIGTYPE_p_int.getCPtr(hist)); } public static void setPartition_stats(PartitionStats value) { swigfaissJNI.partition_stats_set(PartitionStats.getCPtr(value), value); } public static PartitionStats getPartition_stats() { long cPtr = swigfaissJNI.partition_stats_get(); return (cPtr == 0) ? 
null : new PartitionStats(cPtr, false); } public static float CMin_float_partition_fuzzy(SWIGTYPE_p_float vals, LongVector ids, long n, long q_min, long q_max, SWIGTYPE_p_unsigned_long q_out) { return swigfaissJNI.CMin_float_partition_fuzzy(SWIGTYPE_p_float.getCPtr(vals), SWIGTYPE_p_long_long.getCPtr(ids.data()), ids, n, q_min, q_max, SWIGTYPE_p_unsigned_long.getCPtr(q_out)); } public static float CMax_float_partition_fuzzy(SWIGTYPE_p_float vals, LongVector ids, long n, long q_min, long q_max, SWIGTYPE_p_unsigned_long q_out) { return swigfaissJNI.CMax_float_partition_fuzzy(SWIGTYPE_p_float.getCPtr(vals), SWIGTYPE_p_long_long.getCPtr(ids.data()), ids, n, q_min, q_max, SWIGTYPE_p_unsigned_long.getCPtr(q_out)); } public static SWIGTYPE_p_uint16_t CMax_uint16_partition_fuzzy(SWIGTYPE_p_uint16_t vals, LongVector ids, long n, long q_min, long q_max, SWIGTYPE_p_unsigned_long q_out) { return new SWIGTYPE_p_uint16_t(swigfaissJNI.CMax_uint16_partition_fuzzy__SWIG_0(SWIGTYPE_p_uint16_t.getCPtr(vals), SWIGTYPE_p_long_long.getCPtr(ids.data()), ids, n, q_min, q_max, SWIGTYPE_p_unsigned_long.getCPtr(q_out)), true); } public static SWIGTYPE_p_uint16_t CMin_uint16_partition_fuzzy(SWIGTYPE_p_uint16_t vals, LongVector ids, long n, long q_min, long q_max, SWIGTYPE_p_unsigned_long q_out) { return new SWIGTYPE_p_uint16_t(swigfaissJNI.CMin_uint16_partition_fuzzy__SWIG_0(SWIGTYPE_p_uint16_t.getCPtr(vals), SWIGTYPE_p_long_long.getCPtr(ids.data()), ids, n, q_min, q_max, SWIGTYPE_p_unsigned_long.getCPtr(q_out)), true); } public static SWIGTYPE_p_uint16_t CMax_uint16_partition_fuzzy(SWIGTYPE_p_uint16_t vals, SWIGTYPE_p_int ids, long n, long q_min, long q_max, SWIGTYPE_p_unsigned_long q_out) { return new SWIGTYPE_p_uint16_t(swigfaissJNI.CMax_uint16_partition_fuzzy__SWIG_1(SWIGTYPE_p_uint16_t.getCPtr(vals), SWIGTYPE_p_int.getCPtr(ids), n, q_min, q_max, SWIGTYPE_p_unsigned_long.getCPtr(q_out)), true); } public static SWIGTYPE_p_uint16_t CMin_uint16_partition_fuzzy(SWIGTYPE_p_uint16_t vals, 
SWIGTYPE_p_int ids, long n, long q_min, long q_max, SWIGTYPE_p_unsigned_long q_out) { return new SWIGTYPE_p_uint16_t(swigfaissJNI.CMin_uint16_partition_fuzzy__SWIG_1(SWIGTYPE_p_uint16_t.getCPtr(vals), SWIGTYPE_p_int.getCPtr(ids), n, q_min, q_max, SWIGTYPE_p_unsigned_long.getCPtr(q_out)), true); } public static void omp_set_num_threads(int num_threads) { swigfaissJNI.omp_set_num_threads(num_threads); } public static int omp_get_max_threads() { return swigfaissJNI.omp_get_max_threads(); } public static SWIGTYPE_p_void memcpy(SWIGTYPE_p_void dest, SWIGTYPE_p_void src, long n) { long cPtr = swigfaissJNI.memcpy(SWIGTYPE_p_void.getCPtr(dest), SWIGTYPE_p_void.getCPtr(src), n); return (cPtr == 0) ? null : new SWIGTYPE_p_void(cPtr, false); } public static SWIGTYPE_p_float cast_integer_to_float_ptr(int x) { long cPtr = swigfaissJNI.cast_integer_to_float_ptr(x); return (cPtr == 0) ? null : new SWIGTYPE_p_float(cPtr, false); } public static SWIGTYPE_p_long cast_integer_to_long_ptr(int x) { long cPtr = swigfaissJNI.cast_integer_to_long_ptr(x); return (cPtr == 0) ? null : new SWIGTYPE_p_long(cPtr, false); } public static SWIGTYPE_p_int cast_integer_to_int_ptr(int x) { long cPtr = swigfaissJNI.cast_integer_to_int_ptr(x); return (cPtr == 0) ? null : new SWIGTYPE_p_int(cPtr, false); } public static void ignore_SIGTTIN() { swigfaissJNI.ignore_SIGTTIN(); } }
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/swigfaissConstants.java
/* ---------------------------------------------------------------------------- * This file was automatically generated by SWIG (http://www.swig.org). * Version 4.0.2 * * Do not make changes to this file unless you know what you are doing--modify * the SWIG interface file instead. * ----------------------------------------------------------------------------- */ package com.twitter.ann.faiss; public interface swigfaissConstants { public final static int FAISS_VERSION_MAJOR = swigfaissJNI.FAISS_VERSION_MAJOR_get(); public final static int FAISS_VERSION_MINOR = swigfaissJNI.FAISS_VERSION_MINOR_get(); public final static int FAISS_VERSION_PATCH = swigfaissJNI.FAISS_VERSION_PATCH_get(); }
the-algorithm-main/ann/src/main/java/com/twitter/ann/faiss/swig/swigfaissJNI.java
/* ---------------------------------------------------------------------------- * This file was automatically generated by SWIG (http://www.swig.org). * Version 4.0.2 * * Do not make changes to this file unless you know what you are doing--modify * the SWIG interface file instead. * ----------------------------------------------------------------------------- */ package com.twitter.ann.faiss; import com.twitter.ann.faiss.NativeUtils; public class swigfaissJNI { static { try { if (NativeUtils.getOperatingSystemType() == NativeUtils.OSType.MacOS) { NativeUtils.loadLibraryFromJar("/com/twitter/ann/faiss/swig/resources/swigfaiss.dylib"); } else { NativeUtils.loadLibraryFromJar("/com/twitter/ann/faiss/swig/resources/libstdc++.so.6"); NativeUtils.loadLibraryFromJar("/com/twitter/ann/faiss/swig/resources/libgcc_s.so.1"); NativeUtils.loadLibraryFromJar("/com/twitter/ann/faiss/swig/resources/libgomp.so.1"); NativeUtils.loadLibraryFromJar("/com/twitter/ann/faiss/swig/resources/libquadmath.so.0"); NativeUtils.loadLibraryFromJar("/com/twitter/ann/faiss/swig/resources/libgfortran.so.5"); NativeUtils.loadLibraryFromJar("/com/twitter/ann/faiss/swig/resources/swigfaiss.so"); } } catch (Exception e) { System.err.println("Native code library failed to load. 
\n" + e); System.exit(1); } } public final static native long new_intArray(int jarg1); public final static native void delete_intArray(long jarg1); public final static native int intArray_getitem(long jarg1, intArray jarg1_, int jarg2); public final static native void intArray_setitem(long jarg1, intArray jarg1_, int jarg2, int jarg3); public final static native long intArray_cast(long jarg1, intArray jarg1_); public final static native long intArray_frompointer(long jarg1); public final static native long new_floatArray(int jarg1); public final static native void delete_floatArray(long jarg1); public final static native float floatArray_getitem(long jarg1, floatArray jarg1_, int jarg2); public final static native void floatArray_setitem(long jarg1, floatArray jarg1_, int jarg2, float jarg3); public final static native long floatArray_cast(long jarg1, floatArray jarg1_); public final static native long floatArray_frompointer(long jarg1); public final static native long new_longArray(int jarg1); public final static native void delete_longArray(long jarg1); public final static native long longArray_getitem(long jarg1, longArray jarg1_, int jarg2); public final static native void longArray_setitem(long jarg1, longArray jarg1_, int jarg2, long jarg3); public final static native long longArray_cast(long jarg1, longArray jarg1_); public final static native long longArray_frompointer(long jarg1); public final static native long new_doubleArray(int jarg1); public final static native void delete_doubleArray(long jarg1); public final static native double doubleArray_getitem(long jarg1, doubleArray jarg1_, int jarg2); public final static native void doubleArray_setitem(long jarg1, doubleArray jarg1_, int jarg2, double jarg3); public final static native long doubleArray_cast(long jarg1, doubleArray jarg1_); public final static native long doubleArray_frompointer(long jarg1); public final static native long new_FloatVector(); public final static native void 
FloatVector_push_back(long jarg1, FloatVector jarg1_, float jarg2); public final static native void FloatVector_clear(long jarg1, FloatVector jarg1_); public final static native long FloatVector_data(long jarg1, FloatVector jarg1_); public final static native long FloatVector_size(long jarg1, FloatVector jarg1_); public final static native float FloatVector_at(long jarg1, FloatVector jarg1_, long jarg2); public final static native void FloatVector_resize(long jarg1, FloatVector jarg1_, long jarg2); public final static native void FloatVector_reserve(long jarg1, FloatVector jarg1_, long jarg2); public final static native void FloatVector_swap(long jarg1, FloatVector jarg1_, long jarg2, FloatVector jarg2_); public final static native void delete_FloatVector(long jarg1); public final static native long new_DoubleVector(); public final static native void DoubleVector_push_back(long jarg1, DoubleVector jarg1_, double jarg2); public final static native void DoubleVector_clear(long jarg1, DoubleVector jarg1_); public final static native long DoubleVector_data(long jarg1, DoubleVector jarg1_); public final static native long DoubleVector_size(long jarg1, DoubleVector jarg1_); public final static native double DoubleVector_at(long jarg1, DoubleVector jarg1_, long jarg2); public final static native void DoubleVector_resize(long jarg1, DoubleVector jarg1_, long jarg2); public final static native void DoubleVector_reserve(long jarg1, DoubleVector jarg1_, long jarg2); public final static native void DoubleVector_swap(long jarg1, DoubleVector jarg1_, long jarg2, DoubleVector jarg2_); public final static native void delete_DoubleVector(long jarg1); public final static native long new_ByteVector(); public final static native void ByteVector_push_back(long jarg1, ByteVector jarg1_, short jarg2); public final static native void ByteVector_clear(long jarg1, ByteVector jarg1_); public final static native long ByteVector_data(long jarg1, ByteVector jarg1_); public final static native 
long ByteVector_size(long jarg1, ByteVector jarg1_); public final static native short ByteVector_at(long jarg1, ByteVector jarg1_, long jarg2); public final static native void ByteVector_resize(long jarg1, ByteVector jarg1_, long jarg2); public final static native void ByteVector_reserve(long jarg1, ByteVector jarg1_, long jarg2); public final static native void ByteVector_swap(long jarg1, ByteVector jarg1_, long jarg2, ByteVector jarg2_); public final static native void delete_ByteVector(long jarg1); public final static native long new_CharVector(); public final static native void CharVector_push_back(long jarg1, CharVector jarg1_, char jarg2); public final static native void CharVector_clear(long jarg1, CharVector jarg1_); public final static native String CharVector_data(long jarg1, CharVector jarg1_); public final static native long CharVector_size(long jarg1, CharVector jarg1_); public final static native char CharVector_at(long jarg1, CharVector jarg1_, long jarg2); public final static native void CharVector_resize(long jarg1, CharVector jarg1_, long jarg2); public final static native void CharVector_reserve(long jarg1, CharVector jarg1_, long jarg2); public final static native void CharVector_swap(long jarg1, CharVector jarg1_, long jarg2, CharVector jarg2_); public final static native void delete_CharVector(long jarg1); public final static native long new_Uint64Vector(); public final static native void Uint64Vector_push_back(long jarg1, Uint64Vector jarg1_, long jarg2); public final static native void Uint64Vector_clear(long jarg1, Uint64Vector jarg1_); public final static native long Uint64Vector_data(long jarg1, Uint64Vector jarg1_); public final static native long Uint64Vector_size(long jarg1, Uint64Vector jarg1_); public final static native long Uint64Vector_at(long jarg1, Uint64Vector jarg1_, long jarg2); public final static native void Uint64Vector_resize(long jarg1, Uint64Vector jarg1_, long jarg2); public final static native void 
Uint64Vector_reserve(long jarg1, Uint64Vector jarg1_, long jarg2); public final static native void Uint64Vector_swap(long jarg1, Uint64Vector jarg1_, long jarg2, Uint64Vector jarg2_); public final static native void delete_Uint64Vector(long jarg1); public final static native long new_LongVector(); public final static native void LongVector_push_back(long jarg1, LongVector jarg1_, long jarg2); public final static native void LongVector_clear(long jarg1, LongVector jarg1_); public final static native long LongVector_data(long jarg1, LongVector jarg1_); public final static native long LongVector_size(long jarg1, LongVector jarg1_); public final static native long LongVector_at(long jarg1, LongVector jarg1_, long jarg2); public final static native void LongVector_resize(long jarg1, LongVector jarg1_, long jarg2); public final static native void LongVector_reserve(long jarg1, LongVector jarg1_, long jarg2); public final static native void LongVector_swap(long jarg1, LongVector jarg1_, long jarg2, LongVector jarg2_); public final static native void delete_LongVector(long jarg1); public final static native long new_IntVector(); public final static native void IntVector_push_back(long jarg1, IntVector jarg1_, int jarg2); public final static native void IntVector_clear(long jarg1, IntVector jarg1_); public final static native long IntVector_data(long jarg1, IntVector jarg1_); public final static native long IntVector_size(long jarg1, IntVector jarg1_); public final static native int IntVector_at(long jarg1, IntVector jarg1_, long jarg2); public final static native void IntVector_resize(long jarg1, IntVector jarg1_, long jarg2); public final static native void IntVector_reserve(long jarg1, IntVector jarg1_, long jarg2); public final static native void IntVector_swap(long jarg1, IntVector jarg1_, long jarg2, IntVector jarg2_); public final static native void delete_IntVector(long jarg1); public final static native long new_VectorTransformVector(); public final static native 
void VectorTransformVector_push_back(long jarg1, VectorTransformVector jarg1_, long jarg2, VectorTransform jarg2_); public final static native void VectorTransformVector_clear(long jarg1, VectorTransformVector jarg1_); public final static native long VectorTransformVector_data(long jarg1, VectorTransformVector jarg1_); public final static native long VectorTransformVector_size(long jarg1, VectorTransformVector jarg1_); public final static native long VectorTransformVector_at(long jarg1, VectorTransformVector jarg1_, long jarg2); public final static native void VectorTransformVector_resize(long jarg1, VectorTransformVector jarg1_, long jarg2); public final static native void VectorTransformVector_reserve(long jarg1, VectorTransformVector jarg1_, long jarg2); public final static native void VectorTransformVector_swap(long jarg1, VectorTransformVector jarg1_, long jarg2, VectorTransformVector jarg2_); public final static native void delete_VectorTransformVector(long jarg1); public final static native long new_OperatingPointVector(); public final static native void OperatingPointVector_push_back(long jarg1, OperatingPointVector jarg1_, long jarg2, OperatingPoint jarg2_); public final static native void OperatingPointVector_clear(long jarg1, OperatingPointVector jarg1_); public final static native long OperatingPointVector_data(long jarg1, OperatingPointVector jarg1_); public final static native long OperatingPointVector_size(long jarg1, OperatingPointVector jarg1_); public final static native long OperatingPointVector_at(long jarg1, OperatingPointVector jarg1_, long jarg2); public final static native void OperatingPointVector_resize(long jarg1, OperatingPointVector jarg1_, long jarg2); public final static native void OperatingPointVector_reserve(long jarg1, OperatingPointVector jarg1_, long jarg2); public final static native void OperatingPointVector_swap(long jarg1, OperatingPointVector jarg1_, long jarg2, OperatingPointVector jarg2_); public final static native void 
delete_OperatingPointVector(long jarg1); public final static native long new_InvertedListsPtrVector(); public final static native void InvertedListsPtrVector_push_back(long jarg1, InvertedListsPtrVector jarg1_, long jarg2, InvertedLists jarg2_); public final static native void InvertedListsPtrVector_clear(long jarg1, InvertedListsPtrVector jarg1_); public final static native long InvertedListsPtrVector_data(long jarg1, InvertedListsPtrVector jarg1_); public final static native long InvertedListsPtrVector_size(long jarg1, InvertedListsPtrVector jarg1_); public final static native long InvertedListsPtrVector_at(long jarg1, InvertedListsPtrVector jarg1_, long jarg2); public final static native void InvertedListsPtrVector_resize(long jarg1, InvertedListsPtrVector jarg1_, long jarg2); public final static native void InvertedListsPtrVector_reserve(long jarg1, InvertedListsPtrVector jarg1_, long jarg2); public final static native void InvertedListsPtrVector_swap(long jarg1, InvertedListsPtrVector jarg1_, long jarg2, InvertedListsPtrVector jarg2_); public final static native void delete_InvertedListsPtrVector(long jarg1); public final static native long new_FloatVectorVector(); public final static native void FloatVectorVector_push_back(long jarg1, FloatVectorVector jarg1_, long jarg2, FloatVector jarg2_); public final static native void FloatVectorVector_clear(long jarg1, FloatVectorVector jarg1_); public final static native long FloatVectorVector_data(long jarg1, FloatVectorVector jarg1_); public final static native long FloatVectorVector_size(long jarg1, FloatVectorVector jarg1_); public final static native long FloatVectorVector_at(long jarg1, FloatVectorVector jarg1_, long jarg2); public final static native void FloatVectorVector_resize(long jarg1, FloatVectorVector jarg1_, long jarg2); public final static native void FloatVectorVector_reserve(long jarg1, FloatVectorVector jarg1_, long jarg2); public final static native void FloatVectorVector_swap(long jarg1, 
FloatVectorVector jarg1_, long jarg2, FloatVectorVector jarg2_); public final static native void delete_FloatVectorVector(long jarg1); public final static native long new_ByteVectorVector(); public final static native void ByteVectorVector_push_back(long jarg1, ByteVectorVector jarg1_, long jarg2, ByteVector jarg2_); public final static native void ByteVectorVector_clear(long jarg1, ByteVectorVector jarg1_); public final static native long ByteVectorVector_data(long jarg1, ByteVectorVector jarg1_); public final static native long ByteVectorVector_size(long jarg1, ByteVectorVector jarg1_); public final static native long ByteVectorVector_at(long jarg1, ByteVectorVector jarg1_, long jarg2); public final static native void ByteVectorVector_resize(long jarg1, ByteVectorVector jarg1_, long jarg2); public final static native void ByteVectorVector_reserve(long jarg1, ByteVectorVector jarg1_, long jarg2); public final static native void ByteVectorVector_swap(long jarg1, ByteVectorVector jarg1_, long jarg2, ByteVectorVector jarg2_); public final static native void delete_ByteVectorVector(long jarg1); public final static native long new_LongVectorVector(); public final static native void LongVectorVector_push_back(long jarg1, LongVectorVector jarg1_, long jarg2); public final static native void LongVectorVector_clear(long jarg1, LongVectorVector jarg1_); public final static native long LongVectorVector_data(long jarg1, LongVectorVector jarg1_); public final static native long LongVectorVector_size(long jarg1, LongVectorVector jarg1_); public final static native long LongVectorVector_at(long jarg1, LongVectorVector jarg1_, long jarg2); public final static native void LongVectorVector_resize(long jarg1, LongVectorVector jarg1_, long jarg2); public final static native void LongVectorVector_reserve(long jarg1, LongVectorVector jarg1_, long jarg2); public final static native void LongVectorVector_swap(long jarg1, LongVectorVector jarg1_, long jarg2, LongVectorVector jarg2_); 
public final static native void delete_LongVectorVector(long jarg1); public final static native void bitvec_print(long jarg1, long jarg2); public final static native void fvecs2bitvecs(long jarg1, long jarg2, long jarg3, long jarg4); public final static native void bitvecs2fvecs(long jarg1, long jarg2, long jarg3, long jarg4); public final static native void fvec2bitvec(long jarg1, long jarg2, long jarg3); public final static native void bitvec_shuffle(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void BitstringWriter_code_set(long jarg1, BitstringWriter jarg1_, long jarg2); public final static native long BitstringWriter_code_get(long jarg1, BitstringWriter jarg1_); public final static native void BitstringWriter_code_size_set(long jarg1, BitstringWriter jarg1_, long jarg2); public final static native long BitstringWriter_code_size_get(long jarg1, BitstringWriter jarg1_); public final static native void BitstringWriter_i_set(long jarg1, BitstringWriter jarg1_, long jarg2); public final static native long BitstringWriter_i_get(long jarg1, BitstringWriter jarg1_); public final static native long new_BitstringWriter(long jarg1, long jarg2); public final static native void BitstringWriter_write(long jarg1, BitstringWriter jarg1_, long jarg2, int jarg3); public final static native void delete_BitstringWriter(long jarg1); public final static native void BitstringReader_code_set(long jarg1, BitstringReader jarg1_, long jarg2); public final static native long BitstringReader_code_get(long jarg1, BitstringReader jarg1_); public final static native void BitstringReader_code_size_set(long jarg1, BitstringReader jarg1_, long jarg2); public final static native long BitstringReader_code_size_get(long jarg1, BitstringReader jarg1_); public final static native void BitstringReader_i_set(long jarg1, BitstringReader jarg1_, long jarg2); public final static native long BitstringReader_i_get(long jarg1, BitstringReader jarg1_); 
// SWIG-generated JNI declarations (do not hand-edit; regenerate from the .i file).
// This section covers: BitstringReader constructor/read/destructor; Hamming
// distance batch routines (hammings, hammings_knn*, hamming_range_search,
// hamming_count_thres, match_hamming_thres, crosshamming_count_thres) plus
// popcount64 and the hamming_batch_size global; GPU count query; metric-type
// enum value getters (METRIC_INNER_PRODUCT/L2/Canberra); build/runtime
// introspection (get_compile_options, getmillisecs, get_mem_usage_kb,
// get_cycles); and the start of fvec_madd, whose parameter list is split
// across the line break below. The __SWIG_n suffixes seen later in this file
// are SWIG's way of exposing C++ overloads / optional trailing arguments as
// distinct JNI entry points.
public final static native long new_BitstringReader(long jarg1, long jarg2); public final static native long BitstringReader_read(long jarg1, BitstringReader jarg1_, int jarg2); public final static native void delete_BitstringReader(long jarg1); public final static native void hamming_batch_size_set(long jarg1); public final static native long hamming_batch_size_get(); public final static native int popcount64(long jarg1); public final static native void hammings(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void hammings_knn_hc(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5, int jarg6); public final static native void hammings_knn(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5, int jarg6); public final static native void hammings_knn_mc(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, long jarg7, long jarg8, LongVector jarg8_); public final static native void hamming_range_search(long jarg1, long jarg2, long jarg3, long jarg4, int jarg5, long jarg6, long jarg7, RangeSearchResult jarg7_); public final static native void hamming_count_thres(long jarg1, long jarg2, long jarg3, long jarg4, int jarg5, long jarg6, long jarg7); public final static native long match_hamming_thres(long jarg1, long jarg2, long jarg3, long jarg4, int jarg5, long jarg6, long jarg7, LongVector jarg7_, long jarg8); public final static native void crosshamming_count_thres(long jarg1, long jarg2, int jarg3, long jarg4, long jarg5); public final static native int get_num_gpus(); public final static native int METRIC_INNER_PRODUCT_get(); public final static native int METRIC_L2_get(); public final static native int METRIC_Canberra_get(); public final static native String get_compile_options(); public final static native double getmillisecs(); public final static native long get_mem_usage_kb(); public final static native long get_cycles(); public final static native void fvec_madd(long jarg1, long 
jarg2, float jarg3, long jarg4, long jarg5); public final static native int fvec_madd_and_argmin(long jarg1, long jarg2, float jarg3, long jarg4, long jarg5); public final static native void reflection(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void matrix_qr(int jarg1, int jarg2, long jarg3); public final static native void ranklist_handle_ties(int jarg1, long jarg2, LongVector jarg2_, long jarg3); public final static native long ranklist_intersection_size(long jarg1, long jarg2, LongVector jarg2_, long jarg3, long jarg4, LongVector jarg4_); public final static native long merge_result_table_with__SWIG_0(long jarg1, long jarg2, long jarg3, LongVector jarg3_, long jarg4, long jarg5, LongVector jarg5_, long jarg6, boolean jarg7, long jarg8); public final static native long merge_result_table_with__SWIG_1(long jarg1, long jarg2, long jarg3, LongVector jarg3_, long jarg4, long jarg5, LongVector jarg5_, long jarg6, boolean jarg7); public final static native long merge_result_table_with__SWIG_2(long jarg1, long jarg2, long jarg3, LongVector jarg3_, long jarg4, long jarg5, LongVector jarg5_, long jarg6); public final static native double imbalance_factor__SWIG_0(int jarg1, int jarg2, long jarg3, LongVector jarg3_); public final static native double imbalance_factor__SWIG_1(int jarg1, long jarg2); public final static native void fvec_argsort(long jarg1, long jarg2, long jarg3); public final static native void fvec_argsort_parallel(long jarg1, long jarg2, long jarg3); public final static native int ivec_hist(long jarg1, long jarg2, int jarg3, long jarg4); public final static native void bincode_hist(long jarg1, long jarg2, long jarg3, long jarg4); public final static native long ivec_checksum(long jarg1, long jarg2); public final static native long fvecs_maybe_subsample__SWIG_0(long jarg1, long jarg2, long jarg3, long jarg4, boolean jarg5, long jarg6); public final static native long fvecs_maybe_subsample__SWIG_1(long jarg1, 
// SWIG-generated JNI declarations (do not hand-edit; regenerate from the .i file).
// The first tokens below complete fvecs_maybe_subsample__SWIG_1, whose
// declaration begins on the preceding line. This section covers: the remaining
// fvecs_maybe_subsample overload, binary_to_real / real_to_binary conversion,
// hash_bytes, check_openmp, FAISS_VERSION_{MAJOR,MINOR,PATCH} constant getters,
// and the bindings for the faiss Index base class: field accessors
// (d, ntotal, verbose, is_trained, metric_type, metric_arg), destructor, and
// the core API surface — train, add, add_with_ids, search, range_search,
// assign (two overloads), reset, remove_ids, reconstruct(_n),
// search_and_reconstruct, compute_residual(_n), get_distance_computer,
// sa_code_size / sa_encode / sa_decode, and Index_toIVF. The section ends with
// the start of the ClusteringParameters accessors, split across the line break.
long jarg2, long jarg3, long jarg4, boolean jarg5); public final static native long fvecs_maybe_subsample__SWIG_2(long jarg1, long jarg2, long jarg3, long jarg4); public final static native void binary_to_real(long jarg1, long jarg2, long jarg3); public final static native void real_to_binary(long jarg1, long jarg2, long jarg3); public final static native long hash_bytes(long jarg1, long jarg2); public final static native boolean check_openmp(); public final static native int FAISS_VERSION_MAJOR_get(); public final static native int FAISS_VERSION_MINOR_get(); public final static native int FAISS_VERSION_PATCH_get(); public final static native void Index_d_set(long jarg1, Index jarg1_, int jarg2); public final static native int Index_d_get(long jarg1, Index jarg1_); public final static native void Index_ntotal_set(long jarg1, Index jarg1_, long jarg2); public final static native long Index_ntotal_get(long jarg1, Index jarg1_); public final static native void Index_verbose_set(long jarg1, Index jarg1_, boolean jarg2); public final static native boolean Index_verbose_get(long jarg1, Index jarg1_); public final static native void Index_is_trained_set(long jarg1, Index jarg1_, boolean jarg2); public final static native boolean Index_is_trained_get(long jarg1, Index jarg1_); public final static native void Index_metric_type_set(long jarg1, Index jarg1_, int jarg2); public final static native int Index_metric_type_get(long jarg1, Index jarg1_); public final static native void Index_metric_arg_set(long jarg1, Index jarg1_, float jarg2); public final static native float Index_metric_arg_get(long jarg1, Index jarg1_); public final static native void delete_Index(long jarg1); public final static native void Index_train(long jarg1, Index jarg1_, long jarg2, long jarg3); public final static native void Index_add(long jarg1, Index jarg1_, long jarg2, long jarg3); public final static native void Index_add_with_ids(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, 
LongVector jarg4_); public final static native void Index_search(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void Index_range_search(long jarg1, Index jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, RangeSearchResult jarg5_); public final static native void Index_assign__SWIG_0(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void Index_assign__SWIG_1(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void Index_reset(long jarg1, Index jarg1_); public final static native long Index_remove_ids(long jarg1, Index jarg1_, long jarg2, IDSelector jarg2_); public final static native void Index_reconstruct(long jarg1, Index jarg1_, long jarg2, long jarg3); public final static native void Index_reconstruct_n(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4); public final static native void Index_search_and_reconstruct(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_, long jarg7); public final static native void Index_compute_residual(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4); public final static native void Index_compute_residual_n(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_); public final static native long Index_get_distance_computer(long jarg1, Index jarg1_); public final static native long Index_sa_code_size(long jarg1, Index jarg1_); public final static native void Index_sa_encode(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4); public final static native void Index_sa_decode(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4); public final static native long Index_toIVF(long jarg1, Index jarg1_); public final static native void ClusteringParameters_niter_set(long jarg1, 
// SWIG-generated JNI declarations (do not hand-edit; regenerate from the .i file).
// The first tokens below complete ClusteringParameters_niter_set, whose
// declaration begins on the preceding line. This section covers the clustering
// family of bindings: ClusteringParameters getter/setter pairs (niter, nredo,
// verbose, spherical, int_centroids, update_index, frozen_centroids,
// min/max_points_per_centroid, seed, decode_block_size) plus ctor/dtor;
// ClusteringIterationStats accessors (obj, time, time_search,
// imbalance_factor, nsplit); the Clustering class (d, k, centroids,
// iteration_stats fields; two constructors; train and train_encoded overloads;
// post_process_centroids; destructor); Clustering1D (constructors,
// train_exact, destructor); ProgressiveDimClusteringParameters
// (progressive_dim_steps, apply_pca); ProgressiveDimIndexFactory ctor/dtor;
// and the ProgressiveDimClustering field accessors. The section ends
// mid-declaration (return type only) of
// ProgressiveDimClustering_iteration_stats_get, completed on the next line.
ClusteringParameters jarg1_, int jarg2); public final static native int ClusteringParameters_niter_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_nredo_set(long jarg1, ClusteringParameters jarg1_, int jarg2); public final static native int ClusteringParameters_nredo_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_verbose_set(long jarg1, ClusteringParameters jarg1_, boolean jarg2); public final static native boolean ClusteringParameters_verbose_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_spherical_set(long jarg1, ClusteringParameters jarg1_, boolean jarg2); public final static native boolean ClusteringParameters_spherical_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_int_centroids_set(long jarg1, ClusteringParameters jarg1_, boolean jarg2); public final static native boolean ClusteringParameters_int_centroids_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_update_index_set(long jarg1, ClusteringParameters jarg1_, boolean jarg2); public final static native boolean ClusteringParameters_update_index_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_frozen_centroids_set(long jarg1, ClusteringParameters jarg1_, boolean jarg2); public final static native boolean ClusteringParameters_frozen_centroids_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_min_points_per_centroid_set(long jarg1, ClusteringParameters jarg1_, int jarg2); public final static native int ClusteringParameters_min_points_per_centroid_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_max_points_per_centroid_set(long jarg1, ClusteringParameters jarg1_, int jarg2); public final static native int 
ClusteringParameters_max_points_per_centroid_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_seed_set(long jarg1, ClusteringParameters jarg1_, int jarg2); public final static native int ClusteringParameters_seed_get(long jarg1, ClusteringParameters jarg1_); public final static native void ClusteringParameters_decode_block_size_set(long jarg1, ClusteringParameters jarg1_, long jarg2); public final static native long ClusteringParameters_decode_block_size_get(long jarg1, ClusteringParameters jarg1_); public final static native long new_ClusteringParameters(); public final static native void delete_ClusteringParameters(long jarg1); public final static native void ClusteringIterationStats_obj_set(long jarg1, ClusteringIterationStats jarg1_, float jarg2); public final static native float ClusteringIterationStats_obj_get(long jarg1, ClusteringIterationStats jarg1_); public final static native void ClusteringIterationStats_time_set(long jarg1, ClusteringIterationStats jarg1_, double jarg2); public final static native double ClusteringIterationStats_time_get(long jarg1, ClusteringIterationStats jarg1_); public final static native void ClusteringIterationStats_time_search_set(long jarg1, ClusteringIterationStats jarg1_, double jarg2); public final static native double ClusteringIterationStats_time_search_get(long jarg1, ClusteringIterationStats jarg1_); public final static native void ClusteringIterationStats_imbalance_factor_set(long jarg1, ClusteringIterationStats jarg1_, double jarg2); public final static native double ClusteringIterationStats_imbalance_factor_get(long jarg1, ClusteringIterationStats jarg1_); public final static native void ClusteringIterationStats_nsplit_set(long jarg1, ClusteringIterationStats jarg1_, int jarg2); public final static native int ClusteringIterationStats_nsplit_get(long jarg1, ClusteringIterationStats jarg1_); public final static native long new_ClusteringIterationStats(); public final 
static native void delete_ClusteringIterationStats(long jarg1); public final static native void Clustering_d_set(long jarg1, Clustering jarg1_, long jarg2); public final static native long Clustering_d_get(long jarg1, Clustering jarg1_); public final static native void Clustering_k_set(long jarg1, Clustering jarg1_, long jarg2); public final static native long Clustering_k_get(long jarg1, Clustering jarg1_); public final static native void Clustering_centroids_set(long jarg1, Clustering jarg1_, long jarg2, FloatVector jarg2_); public final static native long Clustering_centroids_get(long jarg1, Clustering jarg1_); public final static native void Clustering_iteration_stats_set(long jarg1, Clustering jarg1_, long jarg2); public final static native long Clustering_iteration_stats_get(long jarg1, Clustering jarg1_); public final static native long new_Clustering__SWIG_0(int jarg1, int jarg2); public final static native long new_Clustering__SWIG_1(int jarg1, int jarg2, long jarg3, ClusteringParameters jarg3_); public final static native void Clustering_train__SWIG_0(long jarg1, Clustering jarg1_, long jarg2, long jarg3, long jarg4, Index jarg4_, long jarg5); public final static native void Clustering_train__SWIG_1(long jarg1, Clustering jarg1_, long jarg2, long jarg3, long jarg4, Index jarg4_); public final static native void Clustering_train_encoded__SWIG_0(long jarg1, Clustering jarg1_, long jarg2, long jarg3, long jarg4, Index jarg4_, long jarg5, Index jarg5_, long jarg6); public final static native void Clustering_train_encoded__SWIG_1(long jarg1, Clustering jarg1_, long jarg2, long jarg3, long jarg4, Index jarg4_, long jarg5, Index jarg5_); public final static native void Clustering_post_process_centroids(long jarg1, Clustering jarg1_); public final static native void delete_Clustering(long jarg1); public final static native long new_Clustering1D__SWIG_0(int jarg1); public final static native long new_Clustering1D__SWIG_1(int jarg1, long jarg2, ClusteringParameters 
jarg2_); public final static native void Clustering1D_train_exact(long jarg1, Clustering1D jarg1_, long jarg2, long jarg3); public final static native void delete_Clustering1D(long jarg1); public final static native void ProgressiveDimClusteringParameters_progressive_dim_steps_set(long jarg1, ProgressiveDimClusteringParameters jarg1_, int jarg2); public final static native int ProgressiveDimClusteringParameters_progressive_dim_steps_get(long jarg1, ProgressiveDimClusteringParameters jarg1_); public final static native void ProgressiveDimClusteringParameters_apply_pca_set(long jarg1, ProgressiveDimClusteringParameters jarg1_, boolean jarg2); public final static native boolean ProgressiveDimClusteringParameters_apply_pca_get(long jarg1, ProgressiveDimClusteringParameters jarg1_); public final static native long new_ProgressiveDimClusteringParameters(); public final static native void delete_ProgressiveDimClusteringParameters(long jarg1); public final static native void delete_ProgressiveDimIndexFactory(long jarg1); public final static native long new_ProgressiveDimIndexFactory(); public final static native void ProgressiveDimClustering_d_set(long jarg1, ProgressiveDimClustering jarg1_, long jarg2); public final static native long ProgressiveDimClustering_d_get(long jarg1, ProgressiveDimClustering jarg1_); public final static native void ProgressiveDimClustering_k_set(long jarg1, ProgressiveDimClustering jarg1_, long jarg2); public final static native long ProgressiveDimClustering_k_get(long jarg1, ProgressiveDimClustering jarg1_); public final static native void ProgressiveDimClustering_centroids_set(long jarg1, ProgressiveDimClustering jarg1_, long jarg2, FloatVector jarg2_); public final static native long ProgressiveDimClustering_centroids_get(long jarg1, ProgressiveDimClustering jarg1_); public final static native void ProgressiveDimClustering_iteration_stats_set(long jarg1, ProgressiveDimClustering jarg1_, long jarg2); public final static native long 
// SWIG-generated JNI declarations (do not hand-edit; regenerate from the .i file).
// The first tokens below complete ProgressiveDimClustering_iteration_stats_get,
// whose declaration begins on the preceding line. This section covers:
// remaining ProgressiveDimClustering bindings (constructors, train, dtor);
// the free function kmeans_clustering; the full ProductQuantizer surface —
// field accessors (d, M, nbits, dsub, code_size, ksub, verbose, train_type,
// cp, assign_index, centroids, sdc_table), get_centroids, train, constructors,
// set_derived_values / set_params, compute_code(s) and
// compute_codes_with_assign_index, decode overloads,
// compute_code_from_distance_table, distance/inner-product table computation,
// search / search_ip / search_sdc overload pairs (the __SWIG_0 variant carries
// an extra trailing boolean vs __SWIG_1), compute_sdc_table, destructor;
// the PQ bit-packing helpers PQEncoderGeneric / PQEncoder8 / PQEncoder16 and
// PQDecoderGeneric / PQDecoder8. Ends mid-declaration (return type only) of
// PQDecoder16_nbits_get, completed on the next line.
ProgressiveDimClustering_iteration_stats_get(long jarg1, ProgressiveDimClustering jarg1_); public final static native long new_ProgressiveDimClustering__SWIG_0(int jarg1, int jarg2); public final static native long new_ProgressiveDimClustering__SWIG_1(int jarg1, int jarg2, long jarg3, ProgressiveDimClusteringParameters jarg3_); public final static native void ProgressiveDimClustering_train(long jarg1, ProgressiveDimClustering jarg1_, long jarg2, long jarg3, long jarg4, ProgressiveDimIndexFactory jarg4_); public final static native void delete_ProgressiveDimClustering(long jarg1); public final static native float kmeans_clustering(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void ProductQuantizer_d_set(long jarg1, ProductQuantizer jarg1_, long jarg2); public final static native long ProductQuantizer_d_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_M_set(long jarg1, ProductQuantizer jarg1_, long jarg2); public final static native long ProductQuantizer_M_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_nbits_set(long jarg1, ProductQuantizer jarg1_, long jarg2); public final static native long ProductQuantizer_nbits_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_dsub_set(long jarg1, ProductQuantizer jarg1_, long jarg2); public final static native long ProductQuantizer_dsub_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_code_size_set(long jarg1, ProductQuantizer jarg1_, long jarg2); public final static native long ProductQuantizer_code_size_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_ksub_set(long jarg1, ProductQuantizer jarg1_, long jarg2); public final static native long ProductQuantizer_ksub_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_verbose_set(long jarg1, 
ProductQuantizer jarg1_, boolean jarg2); public final static native boolean ProductQuantizer_verbose_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_train_type_set(long jarg1, ProductQuantizer jarg1_, int jarg2); public final static native int ProductQuantizer_train_type_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_cp_set(long jarg1, ProductQuantizer jarg1_, long jarg2, ClusteringParameters jarg2_); public final static native long ProductQuantizer_cp_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_assign_index_set(long jarg1, ProductQuantizer jarg1_, long jarg2, Index jarg2_); public final static native long ProductQuantizer_assign_index_get(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_centroids_set(long jarg1, ProductQuantizer jarg1_, long jarg2, FloatVector jarg2_); public final static native long ProductQuantizer_centroids_get(long jarg1, ProductQuantizer jarg1_); public final static native long ProductQuantizer_get_centroids(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3); public final static native void ProductQuantizer_train(long jarg1, ProductQuantizer jarg1_, int jarg2, long jarg3); public final static native long new_ProductQuantizer__SWIG_0(long jarg1, long jarg2, long jarg3); public final static native long new_ProductQuantizer__SWIG_1(); public final static native void ProductQuantizer_set_derived_values(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_set_params(long jarg1, ProductQuantizer jarg1_, long jarg2, int jarg3); public final static native void ProductQuantizer_compute_code(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3); public final static native void ProductQuantizer_compute_codes(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void 
ProductQuantizer_compute_codes_with_assign_index(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void ProductQuantizer_decode__SWIG_0(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3); public final static native void ProductQuantizer_decode__SWIG_1(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void ProductQuantizer_compute_code_from_distance_table(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3); public final static native void ProductQuantizer_compute_distance_table(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3); public final static native void ProductQuantizer_compute_inner_prod_table(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3); public final static native void ProductQuantizer_compute_distance_tables(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void ProductQuantizer_compute_inner_prod_tables(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void ProductQuantizer_search__SWIG_0(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, boolean jarg7); public final static native void ProductQuantizer_search__SWIG_1(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void ProductQuantizer_search_ip__SWIG_0(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, boolean jarg7); public final static native void ProductQuantizer_search_ip__SWIG_1(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void ProductQuantizer_sdc_table_set(long jarg1, ProductQuantizer jarg1_, long jarg2, FloatVector jarg2_); public final static native long ProductQuantizer_sdc_table_get(long jarg1, 
ProductQuantizer jarg1_); public final static native void ProductQuantizer_compute_sdc_table(long jarg1, ProductQuantizer jarg1_); public final static native void ProductQuantizer_search_sdc__SWIG_0(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, boolean jarg7); public final static native void ProductQuantizer_search_sdc__SWIG_1(long jarg1, ProductQuantizer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void delete_ProductQuantizer(long jarg1); public final static native void PQEncoderGeneric_code_set(long jarg1, PQEncoderGeneric jarg1_, long jarg2); public final static native long PQEncoderGeneric_code_get(long jarg1, PQEncoderGeneric jarg1_); public final static native void PQEncoderGeneric_offset_set(long jarg1, PQEncoderGeneric jarg1_, short jarg2); public final static native short PQEncoderGeneric_offset_get(long jarg1, PQEncoderGeneric jarg1_); public final static native int PQEncoderGeneric_nbits_get(long jarg1, PQEncoderGeneric jarg1_); public final static native void PQEncoderGeneric_reg_set(long jarg1, PQEncoderGeneric jarg1_, short jarg2); public final static native short PQEncoderGeneric_reg_get(long jarg1, PQEncoderGeneric jarg1_); public final static native long new_PQEncoderGeneric__SWIG_0(long jarg1, int jarg2, short jarg3); public final static native long new_PQEncoderGeneric__SWIG_1(long jarg1, int jarg2); public final static native void PQEncoderGeneric_encode(long jarg1, PQEncoderGeneric jarg1_, long jarg2); public final static native void delete_PQEncoderGeneric(long jarg1); public final static native void PQEncoder8_code_set(long jarg1, PQEncoder8 jarg1_, long jarg2); public final static native long PQEncoder8_code_get(long jarg1, PQEncoder8 jarg1_); public final static native long new_PQEncoder8(long jarg1, int jarg2); public final static native void PQEncoder8_encode(long jarg1, PQEncoder8 jarg1_, long jarg2); public final static native void 
delete_PQEncoder8(long jarg1); public final static native void PQEncoder16_code_set(long jarg1, PQEncoder16 jarg1_, long jarg2); public final static native long PQEncoder16_code_get(long jarg1, PQEncoder16 jarg1_); public final static native long new_PQEncoder16(long jarg1, int jarg2); public final static native void PQEncoder16_encode(long jarg1, PQEncoder16 jarg1_, long jarg2); public final static native void delete_PQEncoder16(long jarg1); public final static native void PQDecoderGeneric_code_set(long jarg1, PQDecoderGeneric jarg1_, long jarg2); public final static native long PQDecoderGeneric_code_get(long jarg1, PQDecoderGeneric jarg1_); public final static native void PQDecoderGeneric_offset_set(long jarg1, PQDecoderGeneric jarg1_, short jarg2); public final static native short PQDecoderGeneric_offset_get(long jarg1, PQDecoderGeneric jarg1_); public final static native int PQDecoderGeneric_nbits_get(long jarg1, PQDecoderGeneric jarg1_); public final static native long PQDecoderGeneric_mask_get(long jarg1, PQDecoderGeneric jarg1_); public final static native void PQDecoderGeneric_reg_set(long jarg1, PQDecoderGeneric jarg1_, short jarg2); public final static native short PQDecoderGeneric_reg_get(long jarg1, PQDecoderGeneric jarg1_); public final static native long new_PQDecoderGeneric(long jarg1, int jarg2); public final static native long PQDecoderGeneric_decode(long jarg1, PQDecoderGeneric jarg1_); public final static native void delete_PQDecoderGeneric(long jarg1); public final static native int PQDecoder8_nbits_get(); public final static native void PQDecoder8_code_set(long jarg1, PQDecoder8 jarg1_, long jarg2); public final static native long PQDecoder8_code_get(long jarg1, PQDecoder8 jarg1_); public final static native long new_PQDecoder8(long jarg1, int jarg2); public final static native long PQDecoder8_decode(long jarg1, PQDecoder8 jarg1_); public final static native void delete_PQDecoder8(long jarg1); public final static native int 
// SWIG-generated JNI declarations (do not hand-edit; regenerate from the .i file).
// The first tokens below complete PQDecoder16_nbits_get, whose declaration
// begins on the preceding line. This section covers: the PQDecoder16 bindings;
// the VectorTransform base class (d_in/d_out/is_trained accessors, train,
// apply, apply_noalloc, reverse_transform, dtor); LinearTransform (have_bias,
// is_orthonormal, A, b fields; four constructors; apply_noalloc,
// transform_transpose, reverse_transform, set_is_orthonormal, verbose,
// print_if_verbose, dtor); RandomRotationMatrix (ctors, init, train, dtor);
// PCAMatrix (eigen_power, epsilon, random_rotation, max_points_per_d,
// balanced_bins, mean, eigenvalues, PCAMat fields; five constructors; train,
// copy_from, prepare_Ab, dtor); ITQMatrix (max_iter, seed, init_rotation,
// ctors, train, dtor); and the start of the ITQTransform bindings (mean,
// do_pca, itq fields). The final line of this chunk is cut off mid-declaration
// ("public final static"); the declaration continues on the next line of the
// file, outside this view.
PQDecoder16_nbits_get(); public final static native void PQDecoder16_code_set(long jarg1, PQDecoder16 jarg1_, long jarg2); public final static native long PQDecoder16_code_get(long jarg1, PQDecoder16 jarg1_); public final static native long new_PQDecoder16(long jarg1, int jarg2); public final static native long PQDecoder16_decode(long jarg1, PQDecoder16 jarg1_); public final static native void delete_PQDecoder16(long jarg1); public final static native void VectorTransform_d_in_set(long jarg1, VectorTransform jarg1_, int jarg2); public final static native int VectorTransform_d_in_get(long jarg1, VectorTransform jarg1_); public final static native void VectorTransform_d_out_set(long jarg1, VectorTransform jarg1_, int jarg2); public final static native int VectorTransform_d_out_get(long jarg1, VectorTransform jarg1_); public final static native void VectorTransform_is_trained_set(long jarg1, VectorTransform jarg1_, boolean jarg2); public final static native boolean VectorTransform_is_trained_get(long jarg1, VectorTransform jarg1_); public final static native void VectorTransform_train(long jarg1, VectorTransform jarg1_, long jarg2, long jarg3); public final static native long VectorTransform_apply(long jarg1, VectorTransform jarg1_, long jarg2, long jarg3); public final static native void VectorTransform_apply_noalloc(long jarg1, VectorTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void VectorTransform_reverse_transform(long jarg1, VectorTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void delete_VectorTransform(long jarg1); public final static native void LinearTransform_have_bias_set(long jarg1, LinearTransform jarg1_, boolean jarg2); public final static native boolean LinearTransform_have_bias_get(long jarg1, LinearTransform jarg1_); public final static native void LinearTransform_is_orthonormal_set(long jarg1, LinearTransform jarg1_, boolean jarg2); public final static native boolean 
LinearTransform_is_orthonormal_get(long jarg1, LinearTransform jarg1_); public final static native void LinearTransform_A_set(long jarg1, LinearTransform jarg1_, long jarg2, FloatVector jarg2_); public final static native long LinearTransform_A_get(long jarg1, LinearTransform jarg1_); public final static native void LinearTransform_b_set(long jarg1, LinearTransform jarg1_, long jarg2, FloatVector jarg2_); public final static native long LinearTransform_b_get(long jarg1, LinearTransform jarg1_); public final static native long new_LinearTransform__SWIG_0(int jarg1, int jarg2, boolean jarg3); public final static native long new_LinearTransform__SWIG_1(int jarg1, int jarg2); public final static native long new_LinearTransform__SWIG_2(int jarg1); public final static native long new_LinearTransform__SWIG_3(); public final static native void LinearTransform_apply_noalloc(long jarg1, LinearTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void LinearTransform_transform_transpose(long jarg1, LinearTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void LinearTransform_reverse_transform(long jarg1, LinearTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void LinearTransform_set_is_orthonormal(long jarg1, LinearTransform jarg1_); public final static native void LinearTransform_verbose_set(long jarg1, LinearTransform jarg1_, boolean jarg2); public final static native boolean LinearTransform_verbose_get(long jarg1, LinearTransform jarg1_); public final static native void LinearTransform_print_if_verbose(long jarg1, LinearTransform jarg1_, String jarg2, long jarg3, DoubleVector jarg3_, int jarg4, int jarg5); public final static native void delete_LinearTransform(long jarg1); public final static native long new_RandomRotationMatrix__SWIG_0(int jarg1, int jarg2); public final static native void RandomRotationMatrix_init(long jarg1, RandomRotationMatrix jarg1_, int jarg2); public final 
static native void RandomRotationMatrix_train(long jarg1, RandomRotationMatrix jarg1_, long jarg2, long jarg3); public final static native long new_RandomRotationMatrix__SWIG_1(); public final static native void delete_RandomRotationMatrix(long jarg1); public final static native void PCAMatrix_eigen_power_set(long jarg1, PCAMatrix jarg1_, float jarg2); public final static native float PCAMatrix_eigen_power_get(long jarg1, PCAMatrix jarg1_); public final static native void PCAMatrix_epsilon_set(long jarg1, PCAMatrix jarg1_, float jarg2); public final static native float PCAMatrix_epsilon_get(long jarg1, PCAMatrix jarg1_); public final static native void PCAMatrix_random_rotation_set(long jarg1, PCAMatrix jarg1_, boolean jarg2); public final static native boolean PCAMatrix_random_rotation_get(long jarg1, PCAMatrix jarg1_); public final static native void PCAMatrix_max_points_per_d_set(long jarg1, PCAMatrix jarg1_, long jarg2); public final static native long PCAMatrix_max_points_per_d_get(long jarg1, PCAMatrix jarg1_); public final static native void PCAMatrix_balanced_bins_set(long jarg1, PCAMatrix jarg1_, int jarg2); public final static native int PCAMatrix_balanced_bins_get(long jarg1, PCAMatrix jarg1_); public final static native void PCAMatrix_mean_set(long jarg1, PCAMatrix jarg1_, long jarg2, FloatVector jarg2_); public final static native long PCAMatrix_mean_get(long jarg1, PCAMatrix jarg1_); public final static native void PCAMatrix_eigenvalues_set(long jarg1, PCAMatrix jarg1_, long jarg2, FloatVector jarg2_); public final static native long PCAMatrix_eigenvalues_get(long jarg1, PCAMatrix jarg1_); public final static native void PCAMatrix_PCAMat_set(long jarg1, PCAMatrix jarg1_, long jarg2, FloatVector jarg2_); public final static native long PCAMatrix_PCAMat_get(long jarg1, PCAMatrix jarg1_); public final static native long new_PCAMatrix__SWIG_0(int jarg1, int jarg2, float jarg3, boolean jarg4); public final static native long new_PCAMatrix__SWIG_1(int 
jarg1, int jarg2, float jarg3); public final static native long new_PCAMatrix__SWIG_2(int jarg1, int jarg2); public final static native long new_PCAMatrix__SWIG_3(int jarg1); public final static native long new_PCAMatrix__SWIG_4(); public final static native void PCAMatrix_train(long jarg1, PCAMatrix jarg1_, long jarg2, long jarg3); public final static native void PCAMatrix_copy_from(long jarg1, PCAMatrix jarg1_, long jarg2, PCAMatrix jarg2_); public final static native void PCAMatrix_prepare_Ab(long jarg1, PCAMatrix jarg1_); public final static native void delete_PCAMatrix(long jarg1); public final static native void ITQMatrix_max_iter_set(long jarg1, ITQMatrix jarg1_, int jarg2); public final static native int ITQMatrix_max_iter_get(long jarg1, ITQMatrix jarg1_); public final static native void ITQMatrix_seed_set(long jarg1, ITQMatrix jarg1_, int jarg2); public final static native int ITQMatrix_seed_get(long jarg1, ITQMatrix jarg1_); public final static native void ITQMatrix_init_rotation_set(long jarg1, ITQMatrix jarg1_, long jarg2, DoubleVector jarg2_); public final static native long ITQMatrix_init_rotation_get(long jarg1, ITQMatrix jarg1_); public final static native long new_ITQMatrix__SWIG_0(int jarg1); public final static native long new_ITQMatrix__SWIG_1(); public final static native void ITQMatrix_train(long jarg1, ITQMatrix jarg1_, long jarg2, long jarg3); public final static native void delete_ITQMatrix(long jarg1); public final static native void ITQTransform_mean_set(long jarg1, ITQTransform jarg1_, long jarg2, FloatVector jarg2_); public final static native long ITQTransform_mean_get(long jarg1, ITQTransform jarg1_); public final static native void ITQTransform_do_pca_set(long jarg1, ITQTransform jarg1_, boolean jarg2); public final static native boolean ITQTransform_do_pca_get(long jarg1, ITQTransform jarg1_); public final static native void ITQTransform_itq_set(long jarg1, ITQTransform jarg1_, long jarg2, ITQMatrix jarg2_); public final static 
native long ITQTransform_itq_get(long jarg1, ITQTransform jarg1_); public final static native void ITQTransform_max_train_per_dim_set(long jarg1, ITQTransform jarg1_, int jarg2); public final static native int ITQTransform_max_train_per_dim_get(long jarg1, ITQTransform jarg1_); public final static native void ITQTransform_pca_then_itq_set(long jarg1, ITQTransform jarg1_, long jarg2, LinearTransform jarg2_); public final static native long ITQTransform_pca_then_itq_get(long jarg1, ITQTransform jarg1_); public final static native long new_ITQTransform__SWIG_0(int jarg1, int jarg2, boolean jarg3); public final static native long new_ITQTransform__SWIG_1(int jarg1, int jarg2); public final static native long new_ITQTransform__SWIG_2(int jarg1); public final static native long new_ITQTransform__SWIG_3(); public final static native void ITQTransform_train(long jarg1, ITQTransform jarg1_, long jarg2, long jarg3); public final static native void ITQTransform_apply_noalloc(long jarg1, ITQTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void delete_ITQTransform(long jarg1); public final static native void OPQMatrix_M_set(long jarg1, OPQMatrix jarg1_, int jarg2); public final static native int OPQMatrix_M_get(long jarg1, OPQMatrix jarg1_); public final static native void OPQMatrix_niter_set(long jarg1, OPQMatrix jarg1_, int jarg2); public final static native int OPQMatrix_niter_get(long jarg1, OPQMatrix jarg1_); public final static native void OPQMatrix_niter_pq_set(long jarg1, OPQMatrix jarg1_, int jarg2); public final static native int OPQMatrix_niter_pq_get(long jarg1, OPQMatrix jarg1_); public final static native void OPQMatrix_niter_pq_0_set(long jarg1, OPQMatrix jarg1_, int jarg2); public final static native int OPQMatrix_niter_pq_0_get(long jarg1, OPQMatrix jarg1_); public final static native void OPQMatrix_max_train_points_set(long jarg1, OPQMatrix jarg1_, long jarg2); public final static native long OPQMatrix_max_train_points_get(long 
jarg1, OPQMatrix jarg1_); public final static native void OPQMatrix_verbose_set(long jarg1, OPQMatrix jarg1_, boolean jarg2); public final static native boolean OPQMatrix_verbose_get(long jarg1, OPQMatrix jarg1_); public final static native void OPQMatrix_pq_set(long jarg1, OPQMatrix jarg1_, long jarg2, ProductQuantizer jarg2_); public final static native long OPQMatrix_pq_get(long jarg1, OPQMatrix jarg1_); public final static native long new_OPQMatrix__SWIG_0(int jarg1, int jarg2, int jarg3); public final static native long new_OPQMatrix__SWIG_1(int jarg1, int jarg2); public final static native long new_OPQMatrix__SWIG_2(int jarg1); public final static native long new_OPQMatrix__SWIG_3(); public final static native void OPQMatrix_train(long jarg1, OPQMatrix jarg1_, long jarg2, long jarg3); public final static native void delete_OPQMatrix(long jarg1); public final static native void RemapDimensionsTransform_map_set(long jarg1, RemapDimensionsTransform jarg1_, long jarg2, IntVector jarg2_); public final static native long RemapDimensionsTransform_map_get(long jarg1, RemapDimensionsTransform jarg1_); public final static native long new_RemapDimensionsTransform__SWIG_0(int jarg1, int jarg2, long jarg3); public final static native long new_RemapDimensionsTransform__SWIG_1(int jarg1, int jarg2, boolean jarg3); public final static native long new_RemapDimensionsTransform__SWIG_2(int jarg1, int jarg2); public final static native void RemapDimensionsTransform_apply_noalloc(long jarg1, RemapDimensionsTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void RemapDimensionsTransform_reverse_transform(long jarg1, RemapDimensionsTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native long new_RemapDimensionsTransform__SWIG_3(); public final static native void delete_RemapDimensionsTransform(long jarg1); public final static native void NormalizationTransform_norm_set(long jarg1, NormalizationTransform jarg1_, float jarg2); 
public final static native float NormalizationTransform_norm_get(long jarg1, NormalizationTransform jarg1_); public final static native long new_NormalizationTransform__SWIG_0(int jarg1, float jarg2); public final static native long new_NormalizationTransform__SWIG_1(int jarg1); public final static native long new_NormalizationTransform__SWIG_2(); public final static native void NormalizationTransform_apply_noalloc(long jarg1, NormalizationTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void NormalizationTransform_reverse_transform(long jarg1, NormalizationTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void delete_NormalizationTransform(long jarg1); public final static native void CenteringTransform_mean_set(long jarg1, CenteringTransform jarg1_, long jarg2, FloatVector jarg2_); public final static native long CenteringTransform_mean_get(long jarg1, CenteringTransform jarg1_); public final static native long new_CenteringTransform__SWIG_0(int jarg1); public final static native long new_CenteringTransform__SWIG_1(); public final static native void CenteringTransform_train(long jarg1, CenteringTransform jarg1_, long jarg2, long jarg3); public final static native void CenteringTransform_apply_noalloc(long jarg1, CenteringTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void CenteringTransform_reverse_transform(long jarg1, CenteringTransform jarg1_, long jarg2, long jarg3, long jarg4); public final static native void delete_CenteringTransform(long jarg1); public final static native void IndexFlatCodes_code_size_set(long jarg1, IndexFlatCodes jarg1_, long jarg2); public final static native long IndexFlatCodes_code_size_get(long jarg1, IndexFlatCodes jarg1_); public final static native void IndexFlatCodes_codes_set(long jarg1, IndexFlatCodes jarg1_, long jarg2, ByteVector jarg2_); public final static native long IndexFlatCodes_codes_get(long jarg1, IndexFlatCodes 
jarg1_); public final static native void IndexFlatCodes_add(long jarg1, IndexFlatCodes jarg1_, long jarg2, long jarg3); public final static native void IndexFlatCodes_reset(long jarg1, IndexFlatCodes jarg1_); public final static native void IndexFlatCodes_reconstruct_n(long jarg1, IndexFlatCodes jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexFlatCodes_reconstruct(long jarg1, IndexFlatCodes jarg1_, long jarg2, long jarg3); public final static native long IndexFlatCodes_sa_code_size(long jarg1, IndexFlatCodes jarg1_); public final static native long IndexFlatCodes_remove_ids(long jarg1, IndexFlatCodes jarg1_, long jarg2, IDSelector jarg2_); public final static native void delete_IndexFlatCodes(long jarg1); public final static native long new_IndexFlat__SWIG_0(long jarg1, int jarg2); public final static native long new_IndexFlat__SWIG_1(long jarg1); public final static native void IndexFlat_search(long jarg1, IndexFlat jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexFlat_range_search(long jarg1, IndexFlat jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, RangeSearchResult jarg5_); public final static native void IndexFlat_reconstruct(long jarg1, IndexFlat jarg1_, long jarg2, long jarg3); public final static native void IndexFlat_compute_distance_subset(long jarg1, IndexFlat jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native long IndexFlat_get_xb__SWIG_0(long jarg1, IndexFlat jarg1_); public final static native long new_IndexFlat__SWIG_2(); public final static native long IndexFlat_get_distance_computer(long jarg1, IndexFlat jarg1_); public final static native void IndexFlat_sa_encode(long jarg1, IndexFlat jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexFlat_sa_decode(long jarg1, IndexFlat jarg1_, long jarg2, long jarg3, long jarg4); public final static 
native void delete_IndexFlat(long jarg1); public final static native long new_IndexFlatIP__SWIG_0(long jarg1); public final static native long new_IndexFlatIP__SWIG_1(); public final static native void delete_IndexFlatIP(long jarg1); public final static native long new_IndexFlatL2__SWIG_0(long jarg1); public final static native long new_IndexFlatL2__SWIG_1(); public final static native void delete_IndexFlatL2(long jarg1); public final static native void IndexFlat1D_continuous_update_set(long jarg1, IndexFlat1D jarg1_, boolean jarg2); public final static native boolean IndexFlat1D_continuous_update_get(long jarg1, IndexFlat1D jarg1_); public final static native void IndexFlat1D_perm_set(long jarg1, IndexFlat1D jarg1_, long jarg2); public final static native long IndexFlat1D_perm_get(long jarg1, IndexFlat1D jarg1_); public final static native long new_IndexFlat1D__SWIG_0(boolean jarg1); public final static native long new_IndexFlat1D__SWIG_1(); public final static native void IndexFlat1D_update_permutation(long jarg1, IndexFlat1D jarg1_); public final static native void IndexFlat1D_add(long jarg1, IndexFlat1D jarg1_, long jarg2, long jarg3); public final static native void IndexFlat1D_reset(long jarg1, IndexFlat1D jarg1_); public final static native void IndexFlat1D_search(long jarg1, IndexFlat1D jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void delete_IndexFlat1D(long jarg1); public final static native void IndexLSH_nbits_set(long jarg1, IndexLSH jarg1_, int jarg2); public final static native int IndexLSH_nbits_get(long jarg1, IndexLSH jarg1_); public final static native void IndexLSH_rotate_data_set(long jarg1, IndexLSH jarg1_, boolean jarg2); public final static native boolean IndexLSH_rotate_data_get(long jarg1, IndexLSH jarg1_); public final static native void IndexLSH_train_thresholds_set(long jarg1, IndexLSH jarg1_, boolean jarg2); public final static native boolean 
IndexLSH_train_thresholds_get(long jarg1, IndexLSH jarg1_); public final static native void IndexLSH_rrot_set(long jarg1, IndexLSH jarg1_, long jarg2, RandomRotationMatrix jarg2_); public final static native long IndexLSH_rrot_get(long jarg1, IndexLSH jarg1_); public final static native void IndexLSH_thresholds_set(long jarg1, IndexLSH jarg1_, long jarg2, FloatVector jarg2_); public final static native long IndexLSH_thresholds_get(long jarg1, IndexLSH jarg1_); public final static native long new_IndexLSH__SWIG_0(long jarg1, int jarg2, boolean jarg3, boolean jarg4); public final static native long new_IndexLSH__SWIG_1(long jarg1, int jarg2, boolean jarg3); public final static native long new_IndexLSH__SWIG_2(long jarg1, int jarg2); public final static native long IndexLSH_apply_preprocess(long jarg1, IndexLSH jarg1_, long jarg2, long jarg3); public final static native void IndexLSH_train(long jarg1, IndexLSH jarg1_, long jarg2, long jarg3); public final static native void IndexLSH_search(long jarg1, IndexLSH jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexLSH_transfer_thresholds(long jarg1, IndexLSH jarg1_, long jarg2, LinearTransform jarg2_); public final static native void delete_IndexLSH(long jarg1); public final static native long new_IndexLSH__SWIG_3(); public final static native void IndexLSH_sa_encode(long jarg1, IndexLSH jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexLSH_sa_decode(long jarg1, IndexLSH jarg1_, long jarg2, long jarg3, long jarg4); public final static native void SimulatedAnnealingParameters_init_temperature_set(long jarg1, SimulatedAnnealingParameters jarg1_, double jarg2); public final static native double SimulatedAnnealingParameters_init_temperature_get(long jarg1, SimulatedAnnealingParameters jarg1_); public final static native void SimulatedAnnealingParameters_temperature_decay_set(long jarg1, SimulatedAnnealingParameters 
jarg1_, double jarg2); public final static native double SimulatedAnnealingParameters_temperature_decay_get(long jarg1, SimulatedAnnealingParameters jarg1_); public final static native void SimulatedAnnealingParameters_n_iter_set(long jarg1, SimulatedAnnealingParameters jarg1_, int jarg2); public final static native int SimulatedAnnealingParameters_n_iter_get(long jarg1, SimulatedAnnealingParameters jarg1_); public final static native void SimulatedAnnealingParameters_n_redo_set(long jarg1, SimulatedAnnealingParameters jarg1_, int jarg2); public final static native int SimulatedAnnealingParameters_n_redo_get(long jarg1, SimulatedAnnealingParameters jarg1_); public final static native void SimulatedAnnealingParameters_seed_set(long jarg1, SimulatedAnnealingParameters jarg1_, int jarg2); public final static native int SimulatedAnnealingParameters_seed_get(long jarg1, SimulatedAnnealingParameters jarg1_); public final static native void SimulatedAnnealingParameters_verbose_set(long jarg1, SimulatedAnnealingParameters jarg1_, int jarg2); public final static native int SimulatedAnnealingParameters_verbose_get(long jarg1, SimulatedAnnealingParameters jarg1_); public final static native void SimulatedAnnealingParameters_only_bit_flips_set(long jarg1, SimulatedAnnealingParameters jarg1_, boolean jarg2); public final static native boolean SimulatedAnnealingParameters_only_bit_flips_get(long jarg1, SimulatedAnnealingParameters jarg1_); public final static native void SimulatedAnnealingParameters_init_random_set(long jarg1, SimulatedAnnealingParameters jarg1_, boolean jarg2); public final static native boolean SimulatedAnnealingParameters_init_random_get(long jarg1, SimulatedAnnealingParameters jarg1_); public final static native long new_SimulatedAnnealingParameters(); public final static native void delete_SimulatedAnnealingParameters(long jarg1); public final static native void PermutationObjective_n_set(long jarg1, PermutationObjective jarg1_, int jarg2); public final 
static native int PermutationObjective_n_get(long jarg1, PermutationObjective jarg1_); public final static native double PermutationObjective_compute_cost(long jarg1, PermutationObjective jarg1_, long jarg2); public final static native double PermutationObjective_cost_update(long jarg1, PermutationObjective jarg1_, long jarg2, int jarg3, int jarg4); public final static native void delete_PermutationObjective(long jarg1); public final static native void ReproduceDistancesObjective_dis_weight_factor_set(long jarg1, ReproduceDistancesObjective jarg1_, double jarg2); public final static native double ReproduceDistancesObjective_dis_weight_factor_get(long jarg1, ReproduceDistancesObjective jarg1_); public final static native double ReproduceDistancesObjective_sqr(double jarg1); public final static native double ReproduceDistancesObjective_dis_weight(long jarg1, ReproduceDistancesObjective jarg1_, double jarg2); public final static native void ReproduceDistancesObjective_source_dis_set(long jarg1, ReproduceDistancesObjective jarg1_, long jarg2, DoubleVector jarg2_); public final static native long ReproduceDistancesObjective_source_dis_get(long jarg1, ReproduceDistancesObjective jarg1_); public final static native void ReproduceDistancesObjective_target_dis_set(long jarg1, ReproduceDistancesObjective jarg1_, long jarg2); public final static native long ReproduceDistancesObjective_target_dis_get(long jarg1, ReproduceDistancesObjective jarg1_); public final static native void ReproduceDistancesObjective_weights_set(long jarg1, ReproduceDistancesObjective jarg1_, long jarg2, DoubleVector jarg2_); public final static native long ReproduceDistancesObjective_weights_get(long jarg1, ReproduceDistancesObjective jarg1_); public final static native double ReproduceDistancesObjective_get_source_dis(long jarg1, ReproduceDistancesObjective jarg1_, int jarg2, int jarg3); public final static native double ReproduceDistancesObjective_compute_cost(long jarg1, ReproduceDistancesObjective 
jarg1_, long jarg2); public final static native double ReproduceDistancesObjective_cost_update(long jarg1, ReproduceDistancesObjective jarg1_, long jarg2, int jarg3, int jarg4); public final static native long new_ReproduceDistancesObjective(int jarg1, long jarg2, long jarg3, double jarg4); public final static native void ReproduceDistancesObjective_compute_mean_stdev(long jarg1, long jarg2, long jarg3, long jarg4); public final static native void ReproduceDistancesObjective_set_affine_target_dis(long jarg1, ReproduceDistancesObjective jarg1_, long jarg2); public final static native void delete_ReproduceDistancesObjective(long jarg1); public final static native void SimulatedAnnealingOptimizer_obj_set(long jarg1, SimulatedAnnealingOptimizer jarg1_, long jarg2, PermutationObjective jarg2_); public final static native long SimulatedAnnealingOptimizer_obj_get(long jarg1, SimulatedAnnealingOptimizer jarg1_); public final static native void SimulatedAnnealingOptimizer_n_set(long jarg1, SimulatedAnnealingOptimizer jarg1_, int jarg2); public final static native int SimulatedAnnealingOptimizer_n_get(long jarg1, SimulatedAnnealingOptimizer jarg1_); public final static native void SimulatedAnnealingOptimizer_logfile_set(long jarg1, SimulatedAnnealingOptimizer jarg1_, long jarg2); public final static native long SimulatedAnnealingOptimizer_logfile_get(long jarg1, SimulatedAnnealingOptimizer jarg1_); public final static native long new_SimulatedAnnealingOptimizer(long jarg1, PermutationObjective jarg1_, long jarg2, SimulatedAnnealingParameters jarg2_); public final static native void SimulatedAnnealingOptimizer_rnd_set(long jarg1, SimulatedAnnealingOptimizer jarg1_, long jarg2); public final static native long SimulatedAnnealingOptimizer_rnd_get(long jarg1, SimulatedAnnealingOptimizer jarg1_); public final static native void SimulatedAnnealingOptimizer_init_cost_set(long jarg1, SimulatedAnnealingOptimizer jarg1_, double jarg2); public final static native double 
SimulatedAnnealingOptimizer_init_cost_get(long jarg1, SimulatedAnnealingOptimizer jarg1_); public final static native double SimulatedAnnealingOptimizer_optimize(long jarg1, SimulatedAnnealingOptimizer jarg1_, long jarg2); public final static native double SimulatedAnnealingOptimizer_run_optimization(long jarg1, SimulatedAnnealingOptimizer jarg1_, long jarg2); public final static native void delete_SimulatedAnnealingOptimizer(long jarg1); public final static native void PolysemousTraining_optimization_type_set(long jarg1, PolysemousTraining jarg1_, int jarg2); public final static native int PolysemousTraining_optimization_type_get(long jarg1, PolysemousTraining jarg1_); public final static native void PolysemousTraining_ntrain_permutation_set(long jarg1, PolysemousTraining jarg1_, int jarg2); public final static native int PolysemousTraining_ntrain_permutation_get(long jarg1, PolysemousTraining jarg1_); public final static native void PolysemousTraining_dis_weight_factor_set(long jarg1, PolysemousTraining jarg1_, double jarg2); public final static native double PolysemousTraining_dis_weight_factor_get(long jarg1, PolysemousTraining jarg1_); public final static native void PolysemousTraining_max_memory_set(long jarg1, PolysemousTraining jarg1_, long jarg2); public final static native long PolysemousTraining_max_memory_get(long jarg1, PolysemousTraining jarg1_); public final static native void PolysemousTraining_log_pattern_set(long jarg1, PolysemousTraining jarg1_, String jarg2); public final static native String PolysemousTraining_log_pattern_get(long jarg1, PolysemousTraining jarg1_); public final static native long new_PolysemousTraining(); public final static native void PolysemousTraining_optimize_pq_for_hamming(long jarg1, PolysemousTraining jarg1_, long jarg2, ProductQuantizer jarg2_, long jarg3, long jarg4); public final static native void PolysemousTraining_optimize_ranking(long jarg1, PolysemousTraining jarg1_, long jarg2, ProductQuantizer jarg2_, long 
jarg3, long jarg4); public final static native void PolysemousTraining_optimize_reproduce_distances(long jarg1, PolysemousTraining jarg1_, long jarg2, ProductQuantizer jarg2_); public final static native long PolysemousTraining_memory_usage_per_thread(long jarg1, PolysemousTraining jarg1_, long jarg2, ProductQuantizer jarg2_); public final static native void delete_PolysemousTraining(long jarg1); public final static native void IndexPQ_pq_set(long jarg1, IndexPQ jarg1_, long jarg2, ProductQuantizer jarg2_); public final static native long IndexPQ_pq_get(long jarg1, IndexPQ jarg1_); public final static native long new_IndexPQ__SWIG_0(int jarg1, long jarg2, long jarg3, int jarg4); public final static native long new_IndexPQ__SWIG_1(int jarg1, long jarg2, long jarg3); public final static native long new_IndexPQ__SWIG_2(); public final static native void IndexPQ_train(long jarg1, IndexPQ jarg1_, long jarg2, long jarg3); public final static native void IndexPQ_search(long jarg1, IndexPQ jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexPQ_sa_encode(long jarg1, IndexPQ jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexPQ_sa_decode(long jarg1, IndexPQ jarg1_, long jarg2, long jarg3, long jarg4); public final static native long IndexPQ_get_distance_computer(long jarg1, IndexPQ jarg1_); public final static native void IndexPQ_do_polysemous_training_set(long jarg1, IndexPQ jarg1_, boolean jarg2); public final static native boolean IndexPQ_do_polysemous_training_get(long jarg1, IndexPQ jarg1_); public final static native void IndexPQ_polysemous_training_set(long jarg1, IndexPQ jarg1_, long jarg2, PolysemousTraining jarg2_); public final static native long IndexPQ_polysemous_training_get(long jarg1, IndexPQ jarg1_); public final static native void IndexPQ_search_type_set(long jarg1, IndexPQ jarg1_, int jarg2); public final static native int IndexPQ_search_type_get(long 
jarg1, IndexPQ jarg1_); public final static native void IndexPQ_encode_signs_set(long jarg1, IndexPQ jarg1_, boolean jarg2); public final static native boolean IndexPQ_encode_signs_get(long jarg1, IndexPQ jarg1_); public final static native void IndexPQ_polysemous_ht_set(long jarg1, IndexPQ jarg1_, int jarg2); public final static native int IndexPQ_polysemous_ht_get(long jarg1, IndexPQ jarg1_); public final static native void IndexPQ_search_core_polysemous(long jarg1, IndexPQ jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexPQ_hamming_distance_histogram(long jarg1, IndexPQ jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexPQ_hamming_distance_table(long jarg1, IndexPQ jarg1_, long jarg2, long jarg3, long jarg4); public final static native void delete_IndexPQ(long jarg1); public final static native void IndexPQStats_nq_set(long jarg1, IndexPQStats jarg1_, long jarg2); public final static native long IndexPQStats_nq_get(long jarg1, IndexPQStats jarg1_); public final static native void IndexPQStats_ncode_set(long jarg1, IndexPQStats jarg1_, long jarg2); public final static native long IndexPQStats_ncode_get(long jarg1, IndexPQStats jarg1_); public final static native void IndexPQStats_n_hamming_pass_set(long jarg1, IndexPQStats jarg1_, long jarg2); public final static native long IndexPQStats_n_hamming_pass_get(long jarg1, IndexPQStats jarg1_); public final static native long new_IndexPQStats(); public final static native void IndexPQStats_reset(long jarg1, IndexPQStats jarg1_); public final static native void delete_IndexPQStats(long jarg1); public final static native void indexPQ_stats_set(long jarg1, IndexPQStats jarg1_); public final static native long indexPQ_stats_get(); public final static native void MultiIndexQuantizer_pq_set(long jarg1, MultiIndexQuantizer jarg1_, long jarg2, ProductQuantizer jarg2_); public 
final static native long MultiIndexQuantizer_pq_get(long jarg1, MultiIndexQuantizer jarg1_); public final static native long new_MultiIndexQuantizer__SWIG_0(int jarg1, long jarg2, long jarg3); public final static native void MultiIndexQuantizer_train(long jarg1, MultiIndexQuantizer jarg1_, long jarg2, long jarg3); public final static native void MultiIndexQuantizer_search(long jarg1, MultiIndexQuantizer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void MultiIndexQuantizer_add(long jarg1, MultiIndexQuantizer jarg1_, long jarg2, long jarg3); public final static native void MultiIndexQuantizer_reset(long jarg1, MultiIndexQuantizer jarg1_); public final static native long new_MultiIndexQuantizer__SWIG_1(); public final static native void MultiIndexQuantizer_reconstruct(long jarg1, MultiIndexQuantizer jarg1_, long jarg2, long jarg3); public final static native void delete_MultiIndexQuantizer(long jarg1); public final static native void MultiIndexQuantizer2_assign_indexes_set(long jarg1, MultiIndexQuantizer2 jarg1_, long jarg2); public final static native long MultiIndexQuantizer2_assign_indexes_get(long jarg1, MultiIndexQuantizer2 jarg1_); public final static native void MultiIndexQuantizer2_own_fields_set(long jarg1, MultiIndexQuantizer2 jarg1_, boolean jarg2); public final static native boolean MultiIndexQuantizer2_own_fields_get(long jarg1, MultiIndexQuantizer2 jarg1_); public final static native long new_MultiIndexQuantizer2__SWIG_0(int jarg1, long jarg2, long jarg3, long jarg4); public final static native long new_MultiIndexQuantizer2__SWIG_1(int jarg1, long jarg2, long jarg3, Index jarg3_, long jarg4, Index jarg4_); public final static native void MultiIndexQuantizer2_train(long jarg1, MultiIndexQuantizer2 jarg1_, long jarg2, long jarg3); public final static native void MultiIndexQuantizer2_search(long jarg1, MultiIndexQuantizer2 jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long 
jarg6, LongVector jarg6_); public final static native void delete_MultiIndexQuantizer2(long jarg1); public final static native void InvertedLists_nlist_set(long jarg1, InvertedLists jarg1_, long jarg2); public final static native long InvertedLists_nlist_get(long jarg1, InvertedLists jarg1_); public final static native void InvertedLists_code_size_set(long jarg1, InvertedLists jarg1_, long jarg2); public final static native long InvertedLists_code_size_get(long jarg1, InvertedLists jarg1_); public final static native long InvertedLists_INVALID_CODE_SIZE_get(); public final static native long InvertedLists_list_size(long jarg1, InvertedLists jarg1_, long jarg2); public final static native long InvertedLists_get_codes(long jarg1, InvertedLists jarg1_, long jarg2); public final static native long InvertedLists_get_ids(long jarg1, InvertedLists jarg1_, long jarg2); public final static native void InvertedLists_release_codes(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3); public final static native void InvertedLists_release_ids(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long InvertedLists_get_single_id(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3); public final static native long InvertedLists_get_single_code(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3); public final static native void InvertedLists_prefetch_lists(long jarg1, InvertedLists jarg1_, long jarg2, LongVector jarg2_, int jarg3); public final static native long InvertedLists_add_entry(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3, long jarg4); public final static native long InvertedLists_add_entries(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void InvertedLists_update_entry(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void 
InvertedLists_update_entries(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6); public final static native void InvertedLists_resize(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3); public final static native void InvertedLists_reset(long jarg1, InvertedLists jarg1_); public final static native void InvertedLists_merge_from(long jarg1, InvertedLists jarg1_, long jarg2, InvertedLists jarg2_, long jarg3); public final static native void delete_InvertedLists(long jarg1); public final static native double InvertedLists_imbalance_factor(long jarg1, InvertedLists jarg1_); public final static native void InvertedLists_print_stats(long jarg1, InvertedLists jarg1_); public final static native long InvertedLists_compute_ntotal(long jarg1, InvertedLists jarg1_); public final static native void InvertedLists_ScopedIds_il_set(long jarg1, InvertedLists.ScopedIds jarg1_, long jarg2, InvertedLists jarg2_); public final static native long InvertedLists_ScopedIds_il_get(long jarg1, InvertedLists.ScopedIds jarg1_); public final static native void InvertedLists_ScopedIds_ids_set(long jarg1, InvertedLists.ScopedIds jarg1_, long jarg2, LongVector jarg2_); public final static native long InvertedLists_ScopedIds_ids_get(long jarg1, InvertedLists.ScopedIds jarg1_); public final static native void InvertedLists_ScopedIds_list_no_set(long jarg1, InvertedLists.ScopedIds jarg1_, long jarg2); public final static native long InvertedLists_ScopedIds_list_no_get(long jarg1, InvertedLists.ScopedIds jarg1_); public final static native long new_InvertedLists_ScopedIds(long jarg1, InvertedLists jarg1_, long jarg2); public final static native long InvertedLists_ScopedIds_get(long jarg1, InvertedLists.ScopedIds jarg1_); public final static native void delete_InvertedLists_ScopedIds(long jarg1); public final static native void InvertedLists_ScopedCodes_il_set(long jarg1, InvertedLists.ScopedCodes jarg1_, long jarg2, InvertedLists 
jarg2_); public final static native long InvertedLists_ScopedCodes_il_get(long jarg1, InvertedLists.ScopedCodes jarg1_); public final static native void InvertedLists_ScopedCodes_codes_set(long jarg1, InvertedLists.ScopedCodes jarg1_, long jarg2); public final static native long InvertedLists_ScopedCodes_codes_get(long jarg1, InvertedLists.ScopedCodes jarg1_); public final static native void InvertedLists_ScopedCodes_list_no_set(long jarg1, InvertedLists.ScopedCodes jarg1_, long jarg2); public final static native long InvertedLists_ScopedCodes_list_no_get(long jarg1, InvertedLists.ScopedCodes jarg1_); public final static native long new_InvertedLists_ScopedCodes__SWIG_0(long jarg1, InvertedLists jarg1_, long jarg2); public final static native long new_InvertedLists_ScopedCodes__SWIG_1(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3); public final static native long InvertedLists_ScopedCodes_get(long jarg1, InvertedLists.ScopedCodes jarg1_); public final static native void delete_InvertedLists_ScopedCodes(long jarg1); public final static native void ArrayInvertedLists_codes_set(long jarg1, ArrayInvertedLists jarg1_, long jarg2, ByteVectorVector jarg2_); public final static native long ArrayInvertedLists_codes_get(long jarg1, ArrayInvertedLists jarg1_); public final static native void ArrayInvertedLists_ids_set(long jarg1, ArrayInvertedLists jarg1_, long jarg2); public final static native long ArrayInvertedLists_ids_get(long jarg1, ArrayInvertedLists jarg1_); public final static native long new_ArrayInvertedLists(long jarg1, long jarg2); public final static native long ArrayInvertedLists_list_size(long jarg1, ArrayInvertedLists jarg1_, long jarg2); public final static native long ArrayInvertedLists_get_codes(long jarg1, ArrayInvertedLists jarg1_, long jarg2); public final static native long ArrayInvertedLists_get_ids(long jarg1, ArrayInvertedLists jarg1_, long jarg2); public final static native long ArrayInvertedLists_add_entries(long jarg1, 
ArrayInvertedLists jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void ArrayInvertedLists_update_entries(long jarg1, ArrayInvertedLists jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6); public final static native void ArrayInvertedLists_resize(long jarg1, ArrayInvertedLists jarg1_, long jarg2, long jarg3); public final static native void delete_ArrayInvertedLists(long jarg1); public final static native long ReadOnlyInvertedLists_add_entries(long jarg1, ReadOnlyInvertedLists jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void ReadOnlyInvertedLists_update_entries(long jarg1, ReadOnlyInvertedLists jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6); public final static native void ReadOnlyInvertedLists_resize(long jarg1, ReadOnlyInvertedLists jarg1_, long jarg2, long jarg3); public final static native void delete_ReadOnlyInvertedLists(long jarg1); public final static native void HStackInvertedLists_ils_set(long jarg1, HStackInvertedLists jarg1_, long jarg2); public final static native long HStackInvertedLists_ils_get(long jarg1, HStackInvertedLists jarg1_); public final static native long new_HStackInvertedLists(int jarg1, long jarg2); public final static native long HStackInvertedLists_list_size(long jarg1, HStackInvertedLists jarg1_, long jarg2); public final static native long HStackInvertedLists_get_codes(long jarg1, HStackInvertedLists jarg1_, long jarg2); public final static native long HStackInvertedLists_get_ids(long jarg1, HStackInvertedLists jarg1_, long jarg2); public final static native void HStackInvertedLists_prefetch_lists(long jarg1, HStackInvertedLists jarg1_, long jarg2, LongVector jarg2_, int jarg3); public final static native void HStackInvertedLists_release_codes(long jarg1, HStackInvertedLists jarg1_, long jarg2, long jarg3); public final static native void 
HStackInvertedLists_release_ids(long jarg1, HStackInvertedLists jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long HStackInvertedLists_get_single_id(long jarg1, HStackInvertedLists jarg1_, long jarg2, long jarg3); public final static native long HStackInvertedLists_get_single_code(long jarg1, HStackInvertedLists jarg1_, long jarg2, long jarg3); public final static native void delete_HStackInvertedLists(long jarg1); public final static native void SliceInvertedLists_il_set(long jarg1, SliceInvertedLists jarg1_, long jarg2, InvertedLists jarg2_); public final static native long SliceInvertedLists_il_get(long jarg1, SliceInvertedLists jarg1_); public final static native void SliceInvertedLists_i0_set(long jarg1, SliceInvertedLists jarg1_, long jarg2); public final static native long SliceInvertedLists_i0_get(long jarg1, SliceInvertedLists jarg1_); public final static native void SliceInvertedLists_i1_set(long jarg1, SliceInvertedLists jarg1_, long jarg2); public final static native long SliceInvertedLists_i1_get(long jarg1, SliceInvertedLists jarg1_); public final static native long new_SliceInvertedLists(long jarg1, InvertedLists jarg1_, long jarg2, long jarg3); public final static native long SliceInvertedLists_list_size(long jarg1, SliceInvertedLists jarg1_, long jarg2); public final static native long SliceInvertedLists_get_codes(long jarg1, SliceInvertedLists jarg1_, long jarg2); public final static native long SliceInvertedLists_get_ids(long jarg1, SliceInvertedLists jarg1_, long jarg2); public final static native void SliceInvertedLists_release_codes(long jarg1, SliceInvertedLists jarg1_, long jarg2, long jarg3); public final static native void SliceInvertedLists_release_ids(long jarg1, SliceInvertedLists jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long SliceInvertedLists_get_single_id(long jarg1, SliceInvertedLists jarg1_, long jarg2, long jarg3); public final static native long 
SliceInvertedLists_get_single_code(long jarg1, SliceInvertedLists jarg1_, long jarg2, long jarg3); public final static native void SliceInvertedLists_prefetch_lists(long jarg1, SliceInvertedLists jarg1_, long jarg2, LongVector jarg2_, int jarg3); public final static native void delete_SliceInvertedLists(long jarg1); public final static native void VStackInvertedLists_ils_set(long jarg1, VStackInvertedLists jarg1_, long jarg2); public final static native long VStackInvertedLists_ils_get(long jarg1, VStackInvertedLists jarg1_); public final static native void VStackInvertedLists_cumsz_set(long jarg1, VStackInvertedLists jarg1_, long jarg2); public final static native long VStackInvertedLists_cumsz_get(long jarg1, VStackInvertedLists jarg1_); public final static native long new_VStackInvertedLists(int jarg1, long jarg2); public final static native long VStackInvertedLists_list_size(long jarg1, VStackInvertedLists jarg1_, long jarg2); public final static native long VStackInvertedLists_get_codes(long jarg1, VStackInvertedLists jarg1_, long jarg2); public final static native long VStackInvertedLists_get_ids(long jarg1, VStackInvertedLists jarg1_, long jarg2); public final static native void VStackInvertedLists_release_codes(long jarg1, VStackInvertedLists jarg1_, long jarg2, long jarg3); public final static native void VStackInvertedLists_release_ids(long jarg1, VStackInvertedLists jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long VStackInvertedLists_get_single_id(long jarg1, VStackInvertedLists jarg1_, long jarg2, long jarg3); public final static native long VStackInvertedLists_get_single_code(long jarg1, VStackInvertedLists jarg1_, long jarg2, long jarg3); public final static native void VStackInvertedLists_prefetch_lists(long jarg1, VStackInvertedLists jarg1_, long jarg2, LongVector jarg2_, int jarg3); public final static native void delete_VStackInvertedLists(long jarg1); public final static native void 
MaskedInvertedLists_il0_set(long jarg1, MaskedInvertedLists jarg1_, long jarg2, InvertedLists jarg2_); public final static native long MaskedInvertedLists_il0_get(long jarg1, MaskedInvertedLists jarg1_); public final static native void MaskedInvertedLists_il1_set(long jarg1, MaskedInvertedLists jarg1_, long jarg2, InvertedLists jarg2_); public final static native long MaskedInvertedLists_il1_get(long jarg1, MaskedInvertedLists jarg1_); public final static native long new_MaskedInvertedLists(long jarg1, InvertedLists jarg1_, long jarg2, InvertedLists jarg2_); public final static native long MaskedInvertedLists_list_size(long jarg1, MaskedInvertedLists jarg1_, long jarg2); public final static native long MaskedInvertedLists_get_codes(long jarg1, MaskedInvertedLists jarg1_, long jarg2); public final static native long MaskedInvertedLists_get_ids(long jarg1, MaskedInvertedLists jarg1_, long jarg2); public final static native void MaskedInvertedLists_release_codes(long jarg1, MaskedInvertedLists jarg1_, long jarg2, long jarg3); public final static native void MaskedInvertedLists_release_ids(long jarg1, MaskedInvertedLists jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long MaskedInvertedLists_get_single_id(long jarg1, MaskedInvertedLists jarg1_, long jarg2, long jarg3); public final static native long MaskedInvertedLists_get_single_code(long jarg1, MaskedInvertedLists jarg1_, long jarg2, long jarg3); public final static native void MaskedInvertedLists_prefetch_lists(long jarg1, MaskedInvertedLists jarg1_, long jarg2, LongVector jarg2_, int jarg3); public final static native void delete_MaskedInvertedLists(long jarg1); public final static native void StopWordsInvertedLists_il0_set(long jarg1, StopWordsInvertedLists jarg1_, long jarg2, InvertedLists jarg2_); public final static native long StopWordsInvertedLists_il0_get(long jarg1, StopWordsInvertedLists jarg1_); public final static native void StopWordsInvertedLists_maxsize_set(long jarg1, 
StopWordsInvertedLists jarg1_, long jarg2); public final static native long StopWordsInvertedLists_maxsize_get(long jarg1, StopWordsInvertedLists jarg1_); public final static native long new_StopWordsInvertedLists(long jarg1, InvertedLists jarg1_, long jarg2); public final static native long StopWordsInvertedLists_list_size(long jarg1, StopWordsInvertedLists jarg1_, long jarg2); public final static native long StopWordsInvertedLists_get_codes(long jarg1, StopWordsInvertedLists jarg1_, long jarg2); public final static native long StopWordsInvertedLists_get_ids(long jarg1, StopWordsInvertedLists jarg1_, long jarg2); public final static native void StopWordsInvertedLists_release_codes(long jarg1, StopWordsInvertedLists jarg1_, long jarg2, long jarg3); public final static native void StopWordsInvertedLists_release_ids(long jarg1, StopWordsInvertedLists jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long StopWordsInvertedLists_get_single_id(long jarg1, StopWordsInvertedLists jarg1_, long jarg2, long jarg3); public final static native long StopWordsInvertedLists_get_single_code(long jarg1, StopWordsInvertedLists jarg1_, long jarg2, long jarg3); public final static native void StopWordsInvertedLists_prefetch_lists(long jarg1, StopWordsInvertedLists jarg1_, long jarg2, LongVector jarg2_, int jarg3); public final static native void delete_StopWordsInvertedLists(long jarg1); public final static native void Level1Quantizer_quantizer_set(long jarg1, Level1Quantizer jarg1_, long jarg2, Index jarg2_); public final static native long Level1Quantizer_quantizer_get(long jarg1, Level1Quantizer jarg1_); public final static native void Level1Quantizer_nlist_set(long jarg1, Level1Quantizer jarg1_, long jarg2); public final static native long Level1Quantizer_nlist_get(long jarg1, Level1Quantizer jarg1_); public final static native void Level1Quantizer_quantizer_trains_alone_set(long jarg1, Level1Quantizer jarg1_, char jarg2); public final static native 
char Level1Quantizer_quantizer_trains_alone_get(long jarg1, Level1Quantizer jarg1_); public final static native void Level1Quantizer_own_fields_set(long jarg1, Level1Quantizer jarg1_, boolean jarg2); public final static native boolean Level1Quantizer_own_fields_get(long jarg1, Level1Quantizer jarg1_); public final static native void Level1Quantizer_cp_set(long jarg1, Level1Quantizer jarg1_, long jarg2, ClusteringParameters jarg2_); public final static native long Level1Quantizer_cp_get(long jarg1, Level1Quantizer jarg1_); public final static native void Level1Quantizer_clustering_index_set(long jarg1, Level1Quantizer jarg1_, long jarg2, Index jarg2_); public final static native long Level1Quantizer_clustering_index_get(long jarg1, Level1Quantizer jarg1_); public final static native void Level1Quantizer_train_q1(long jarg1, Level1Quantizer jarg1_, long jarg2, long jarg3, boolean jarg4, int jarg5); public final static native long Level1Quantizer_coarse_code_size(long jarg1, Level1Quantizer jarg1_); public final static native void Level1Quantizer_encode_listno(long jarg1, Level1Quantizer jarg1_, long jarg2, long jarg3); public final static native long Level1Quantizer_decode_listno(long jarg1, Level1Quantizer jarg1_, long jarg2); public final static native long new_Level1Quantizer__SWIG_0(long jarg1, Index jarg1_, long jarg2); public final static native long new_Level1Quantizer__SWIG_1(); public final static native void delete_Level1Quantizer(long jarg1); public final static native void IVFSearchParameters_nprobe_set(long jarg1, IVFSearchParameters jarg1_, long jarg2); public final static native long IVFSearchParameters_nprobe_get(long jarg1, IVFSearchParameters jarg1_); public final static native void IVFSearchParameters_max_codes_set(long jarg1, IVFSearchParameters jarg1_, long jarg2); public final static native long IVFSearchParameters_max_codes_get(long jarg1, IVFSearchParameters jarg1_); public final static native long new_IVFSearchParameters(); public final 
static native void delete_IVFSearchParameters(long jarg1); public final static native void IndexIVF_invlists_set(long jarg1, IndexIVF jarg1_, long jarg2, InvertedLists jarg2_); public final static native long IndexIVF_invlists_get(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_own_invlists_set(long jarg1, IndexIVF jarg1_, boolean jarg2); public final static native boolean IndexIVF_own_invlists_get(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_code_size_set(long jarg1, IndexIVF jarg1_, long jarg2); public final static native long IndexIVF_code_size_get(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_nprobe_set(long jarg1, IndexIVF jarg1_, long jarg2); public final static native long IndexIVF_nprobe_get(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_max_codes_set(long jarg1, IndexIVF jarg1_, long jarg2); public final static native long IndexIVF_max_codes_get(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_parallel_mode_set(long jarg1, IndexIVF jarg1_, int jarg2); public final static native int IndexIVF_parallel_mode_get(long jarg1, IndexIVF jarg1_); public final static native int IndexIVF_PARALLEL_MODE_NO_HEAP_INIT_get(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_direct_map_set(long jarg1, IndexIVF jarg1_, long jarg2); public final static native long IndexIVF_direct_map_get(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_reset(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_train(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3); public final static native void IndexIVF_add(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3); public final static native void IndexIVF_add_with_ids(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexIVF_add_core(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, 
LongVector jarg4_, long jarg5, LongVector jarg5_); public final static native void IndexIVF_encode_vectors__SWIG_0(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, boolean jarg6); public final static native void IndexIVF_encode_vectors__SWIG_1(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void IndexIVF_add_sa_codes(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexIVF_train_residual(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3); public final static native void IndexIVF_search_preassigned__SWIG_0(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, long jarg8, LongVector jarg8_, boolean jarg9, long jarg10, IVFSearchParameters jarg10_, long jarg11, IndexIVFStats jarg11_); public final static native void IndexIVF_search_preassigned__SWIG_1(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, long jarg8, LongVector jarg8_, boolean jarg9, long jarg10, IVFSearchParameters jarg10_); public final static native void IndexIVF_search_preassigned__SWIG_2(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, long jarg8, LongVector jarg8_, boolean jarg9); public final static native void IndexIVF_search(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexIVF_range_search(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, RangeSearchResult jarg5_); public final static native void IndexIVF_range_search_preassigned__SWIG_0(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, RangeSearchResult jarg7_, 
boolean jarg8, long jarg9, IVFSearchParameters jarg9_, long jarg10, IndexIVFStats jarg10_); public final static native void IndexIVF_range_search_preassigned__SWIG_1(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, RangeSearchResult jarg7_, boolean jarg8, long jarg9, IVFSearchParameters jarg9_); public final static native void IndexIVF_range_search_preassigned__SWIG_2(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, RangeSearchResult jarg7_, boolean jarg8); public final static native void IndexIVF_range_search_preassigned__SWIG_3(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, RangeSearchResult jarg7_); public final static native long IndexIVF_get_InvertedListScanner__SWIG_0(long jarg1, IndexIVF jarg1_, boolean jarg2); public final static native long IndexIVF_get_InvertedListScanner__SWIG_1(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_reconstruct(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3); public final static native void IndexIVF_update_vectors(long jarg1, IndexIVF jarg1_, int jarg2, long jarg3, LongVector jarg3_, long jarg4); public final static native void IndexIVF_reconstruct_n(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexIVF_search_and_reconstruct(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_, long jarg7); public final static native void IndexIVF_reconstruct_from_offset(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4); public final static native long IndexIVF_remove_ids(long jarg1, IndexIVF jarg1_, long jarg2, IDSelector jarg2_); public final static native void IndexIVF_check_compatible_for_merge(long jarg1, IndexIVF jarg1_, long jarg2, IndexIVF jarg2_); public final static native 
void IndexIVF_merge_from(long jarg1, IndexIVF jarg1_, long jarg2, IndexIVF jarg2_, long jarg3); public final static native void IndexIVF_copy_subset_to(long jarg1, IndexIVF jarg1_, long jarg2, IndexIVF jarg2_, int jarg3, long jarg4, long jarg5); public final static native void delete_IndexIVF(long jarg1); public final static native long IndexIVF_get_list_size(long jarg1, IndexIVF jarg1_, long jarg2); public final static native void IndexIVF_make_direct_map__SWIG_0(long jarg1, IndexIVF jarg1_, boolean jarg2); public final static native void IndexIVF_make_direct_map__SWIG_1(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_set_direct_map_type(long jarg1, IndexIVF jarg1_, long jarg2); public final static native void IndexIVF_replace_invlists__SWIG_0(long jarg1, IndexIVF jarg1_, long jarg2, InvertedLists jarg2_, boolean jarg3); public final static native void IndexIVF_replace_invlists__SWIG_1(long jarg1, IndexIVF jarg1_, long jarg2, InvertedLists jarg2_); public final static native long IndexIVF_sa_code_size(long jarg1, IndexIVF jarg1_); public final static native void IndexIVF_sa_encode(long jarg1, IndexIVF jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexIVFStats_nq_set(long jarg1, IndexIVFStats jarg1_, long jarg2); public final static native long IndexIVFStats_nq_get(long jarg1, IndexIVFStats jarg1_); public final static native void IndexIVFStats_nlist_set(long jarg1, IndexIVFStats jarg1_, long jarg2); public final static native long IndexIVFStats_nlist_get(long jarg1, IndexIVFStats jarg1_); public final static native void IndexIVFStats_ndis_set(long jarg1, IndexIVFStats jarg1_, long jarg2); public final static native long IndexIVFStats_ndis_get(long jarg1, IndexIVFStats jarg1_); public final static native void IndexIVFStats_nheap_updates_set(long jarg1, IndexIVFStats jarg1_, long jarg2); public final static native long IndexIVFStats_nheap_updates_get(long jarg1, IndexIVFStats jarg1_); public final static native 
void IndexIVFStats_quantization_time_set(long jarg1, IndexIVFStats jarg1_, double jarg2); public final static native double IndexIVFStats_quantization_time_get(long jarg1, IndexIVFStats jarg1_); public final static native void IndexIVFStats_search_time_set(long jarg1, IndexIVFStats jarg1_, double jarg2); public final static native double IndexIVFStats_search_time_get(long jarg1, IndexIVFStats jarg1_); public final static native long new_IndexIVFStats(); public final static native void IndexIVFStats_reset(long jarg1, IndexIVFStats jarg1_); public final static native void IndexIVFStats_add(long jarg1, IndexIVFStats jarg1_, long jarg2, IndexIVFStats jarg2_); public final static native void delete_IndexIVFStats(long jarg1); public final static native void indexIVF_stats_set(long jarg1, IndexIVFStats jarg1_); public final static native long indexIVF_stats_get(); public final static native short[] hamdis_tab_ham_bytes_get(); public final static native void HammingComputer4_a0_set(long jarg1, HammingComputer4 jarg1_, long jarg2); public final static native long HammingComputer4_a0_get(long jarg1, HammingComputer4 jarg1_); public final static native long new_HammingComputer4__SWIG_0(); public final static native long new_HammingComputer4__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputer4_set(long jarg1, HammingComputer4 jarg1_, long jarg2, int jarg3); public final static native int HammingComputer4_hamming(long jarg1, HammingComputer4 jarg1_, long jarg2); public final static native void delete_HammingComputer4(long jarg1); public final static native void HammingComputer8_a0_set(long jarg1, HammingComputer8 jarg1_, long jarg2); public final static native long HammingComputer8_a0_get(long jarg1, HammingComputer8 jarg1_); public final static native long new_HammingComputer8__SWIG_0(); public final static native long new_HammingComputer8__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputer8_set(long jarg1, 
HammingComputer8 jarg1_, long jarg2, int jarg3); public final static native int HammingComputer8_hamming(long jarg1, HammingComputer8 jarg1_, long jarg2); public final static native void delete_HammingComputer8(long jarg1); public final static native void HammingComputer16_a0_set(long jarg1, HammingComputer16 jarg1_, long jarg2); public final static native long HammingComputer16_a0_get(long jarg1, HammingComputer16 jarg1_); public final static native void HammingComputer16_a1_set(long jarg1, HammingComputer16 jarg1_, long jarg2); public final static native long HammingComputer16_a1_get(long jarg1, HammingComputer16 jarg1_); public final static native long new_HammingComputer16__SWIG_0(); public final static native long new_HammingComputer16__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputer16_set(long jarg1, HammingComputer16 jarg1_, long jarg2, int jarg3); public final static native int HammingComputer16_hamming(long jarg1, HammingComputer16 jarg1_, long jarg2); public final static native void delete_HammingComputer16(long jarg1); public final static native void HammingComputer20_a0_set(long jarg1, HammingComputer20 jarg1_, long jarg2); public final static native long HammingComputer20_a0_get(long jarg1, HammingComputer20 jarg1_); public final static native void HammingComputer20_a1_set(long jarg1, HammingComputer20 jarg1_, long jarg2); public final static native long HammingComputer20_a1_get(long jarg1, HammingComputer20 jarg1_); public final static native void HammingComputer20_a2_set(long jarg1, HammingComputer20 jarg1_, long jarg2); public final static native long HammingComputer20_a2_get(long jarg1, HammingComputer20 jarg1_); public final static native long new_HammingComputer20__SWIG_0(); public final static native long new_HammingComputer20__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputer20_set(long jarg1, HammingComputer20 jarg1_, long jarg2, int jarg3); public final static native int 
HammingComputer20_hamming(long jarg1, HammingComputer20 jarg1_, long jarg2); public final static native void delete_HammingComputer20(long jarg1); public final static native void HammingComputer32_a0_set(long jarg1, HammingComputer32 jarg1_, long jarg2); public final static native long HammingComputer32_a0_get(long jarg1, HammingComputer32 jarg1_); public final static native void HammingComputer32_a1_set(long jarg1, HammingComputer32 jarg1_, long jarg2); public final static native long HammingComputer32_a1_get(long jarg1, HammingComputer32 jarg1_); public final static native void HammingComputer32_a2_set(long jarg1, HammingComputer32 jarg1_, long jarg2); public final static native long HammingComputer32_a2_get(long jarg1, HammingComputer32 jarg1_); public final static native void HammingComputer32_a3_set(long jarg1, HammingComputer32 jarg1_, long jarg2); public final static native long HammingComputer32_a3_get(long jarg1, HammingComputer32 jarg1_); public final static native long new_HammingComputer32__SWIG_0(); public final static native long new_HammingComputer32__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputer32_set(long jarg1, HammingComputer32 jarg1_, long jarg2, int jarg3); public final static native int HammingComputer32_hamming(long jarg1, HammingComputer32 jarg1_, long jarg2); public final static native void delete_HammingComputer32(long jarg1); public final static native void HammingComputer64_a0_set(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native long HammingComputer64_a0_get(long jarg1, HammingComputer64 jarg1_); public final static native void HammingComputer64_a1_set(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native long HammingComputer64_a1_get(long jarg1, HammingComputer64 jarg1_); public final static native void HammingComputer64_a2_set(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native long HammingComputer64_a2_get(long jarg1, 
HammingComputer64 jarg1_); public final static native void HammingComputer64_a3_set(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native long HammingComputer64_a3_get(long jarg1, HammingComputer64 jarg1_); public final static native void HammingComputer64_a4_set(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native long HammingComputer64_a4_get(long jarg1, HammingComputer64 jarg1_); public final static native void HammingComputer64_a5_set(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native long HammingComputer64_a5_get(long jarg1, HammingComputer64 jarg1_); public final static native void HammingComputer64_a6_set(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native long HammingComputer64_a6_get(long jarg1, HammingComputer64 jarg1_); public final static native void HammingComputer64_a7_set(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native long HammingComputer64_a7_get(long jarg1, HammingComputer64 jarg1_); public final static native long new_HammingComputer64__SWIG_0(); public final static native long new_HammingComputer64__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputer64_set(long jarg1, HammingComputer64 jarg1_, long jarg2, int jarg3); public final static native int HammingComputer64_hamming(long jarg1, HammingComputer64 jarg1_, long jarg2); public final static native void delete_HammingComputer64(long jarg1); public final static native void HammingComputerDefault_a8_set(long jarg1, HammingComputerDefault jarg1_, long jarg2); public final static native long HammingComputerDefault_a8_get(long jarg1, HammingComputerDefault jarg1_); public final static native void HammingComputerDefault_quotient8_set(long jarg1, HammingComputerDefault jarg1_, int jarg2); public final static native int HammingComputerDefault_quotient8_get(long jarg1, HammingComputerDefault jarg1_); public final static native void 
HammingComputerDefault_remainder8_set(long jarg1, HammingComputerDefault jarg1_, int jarg2); public final static native int HammingComputerDefault_remainder8_get(long jarg1, HammingComputerDefault jarg1_); public final static native long new_HammingComputerDefault__SWIG_0(); public final static native long new_HammingComputerDefault__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputerDefault_set(long jarg1, HammingComputerDefault jarg1_, long jarg2, int jarg3); public final static native int HammingComputerDefault_hamming(long jarg1, HammingComputerDefault jarg1_, long jarg2); public final static native void delete_HammingComputerDefault(long jarg1); public final static native void HammingComputerM8_a_set(long jarg1, HammingComputerM8 jarg1_, long jarg2); public final static native long HammingComputerM8_a_get(long jarg1, HammingComputerM8 jarg1_); public final static native void HammingComputerM8_n_set(long jarg1, HammingComputerM8 jarg1_, int jarg2); public final static native int HammingComputerM8_n_get(long jarg1, HammingComputerM8 jarg1_); public final static native long new_HammingComputerM8__SWIG_0(); public final static native long new_HammingComputerM8__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputerM8_set(long jarg1, HammingComputerM8 jarg1_, long jarg2, int jarg3); public final static native int HammingComputerM8_hamming(long jarg1, HammingComputerM8 jarg1_, long jarg2); public final static native void delete_HammingComputerM8(long jarg1); public final static native void HammingComputerM4_a_set(long jarg1, HammingComputerM4 jarg1_, long jarg2); public final static native long HammingComputerM4_a_get(long jarg1, HammingComputerM4 jarg1_); public final static native void HammingComputerM4_n_set(long jarg1, HammingComputerM4 jarg1_, int jarg2); public final static native int HammingComputerM4_n_get(long jarg1, HammingComputerM4 jarg1_); public final static native long new_HammingComputerM4__SWIG_0(); 
// ------------------------------------------------------------------------------------
// SWIG/JNI bridge declarations (auto-generated by SWIG 4.0.2 -- do not hand-edit;
// regenerate from the SWIG interface file). This section covers:
//   * HammingComputerM4 completion and GenHammingComputer{8, 16, 32, M8} (generalized
//     Hamming distance) accessors/constructors/destructors;
//   * generalized_hammings_knn_hc overloads and IVF index utilities
//     (check_compatible_for_merge, extract_index_ivf, merge_into, search_centroid,
//     search_and_return_centroids);
//   * SlidingIndexWindow field accessors plus step()/destructor;
//   * get/set_invlist_range and the search_with_parameters /
//     range_search_with_parameters overload families (__SWIG_0..__SWIG_2 correspond
//     to the C++ function's trailing default arguments being supplied or omitted);
//   * IndexScalarQuantizer and IndexIVFScalarQuantizer wrappers;
//   * HNSW.MinimaxHeap field accessors (start of the HNSW section).
//
// As elsewhere in this class, each `long jargN` is a native C++ pointer and the paired
// typed `jargN_` argument pins the owning Java wrapper for the duration of the call.
// ------------------------------------------------------------------------------------
public final static native long new_HammingComputerM4__SWIG_1(long jarg1, int jarg2); public final static native void HammingComputerM4_set(long jarg1, HammingComputerM4 jarg1_, long jarg2, int jarg3); public final static native int HammingComputerM4_hamming(long jarg1, HammingComputerM4 jarg1_, long jarg2); public final static native void delete_HammingComputerM4(long jarg1); public final static native int generalized_hamming_64(long jarg1); public final static native void GenHammingComputer8_a0_set(long jarg1, GenHammingComputer8 jarg1_, long jarg2); public final static native long GenHammingComputer8_a0_get(long jarg1, GenHammingComputer8 jarg1_); public final static native long new_GenHammingComputer8(long jarg1, int jarg2); public final static native int GenHammingComputer8_hamming(long jarg1, GenHammingComputer8 jarg1_, long jarg2); public final static native void delete_GenHammingComputer8(long jarg1); public final static native void GenHammingComputer16_a0_set(long jarg1, GenHammingComputer16 jarg1_, long jarg2); public final static native long GenHammingComputer16_a0_get(long jarg1, GenHammingComputer16 jarg1_); public final static native void GenHammingComputer16_a1_set(long jarg1, GenHammingComputer16 jarg1_, long jarg2); public final static native long GenHammingComputer16_a1_get(long jarg1, GenHammingComputer16 jarg1_); public final static native long new_GenHammingComputer16(long jarg1, int jarg2); public final static native int GenHammingComputer16_hamming(long jarg1, GenHammingComputer16 jarg1_, long jarg2); public final static native void delete_GenHammingComputer16(long jarg1); public final static native void GenHammingComputer32_a0_set(long jarg1, GenHammingComputer32 jarg1_, long jarg2); public final static native long GenHammingComputer32_a0_get(long jarg1, GenHammingComputer32 jarg1_); public final static native void GenHammingComputer32_a1_set(long jarg1, GenHammingComputer32 jarg1_, long jarg2); public final static native long 
GenHammingComputer32_a1_get(long jarg1, GenHammingComputer32 jarg1_); public final static native void GenHammingComputer32_a2_set(long jarg1, GenHammingComputer32 jarg1_, long jarg2); public final static native long GenHammingComputer32_a2_get(long jarg1, GenHammingComputer32 jarg1_); public final static native void GenHammingComputer32_a3_set(long jarg1, GenHammingComputer32 jarg1_, long jarg2); public final static native long GenHammingComputer32_a3_get(long jarg1, GenHammingComputer32 jarg1_); public final static native long new_GenHammingComputer32(long jarg1, int jarg2); public final static native int GenHammingComputer32_hamming(long jarg1, GenHammingComputer32 jarg1_, long jarg2); public final static native void delete_GenHammingComputer32(long jarg1); public final static native void GenHammingComputerM8_a_set(long jarg1, GenHammingComputerM8 jarg1_, long jarg2); public final static native long GenHammingComputerM8_a_get(long jarg1, GenHammingComputerM8 jarg1_); public final static native void GenHammingComputerM8_n_set(long jarg1, GenHammingComputerM8 jarg1_, int jarg2); public final static native int GenHammingComputerM8_n_get(long jarg1, GenHammingComputerM8 jarg1_); public final static native long new_GenHammingComputerM8(long jarg1, int jarg2); public final static native int GenHammingComputerM8_hamming(long jarg1, GenHammingComputerM8 jarg1_, long jarg2); public final static native void delete_GenHammingComputerM8(long jarg1); public final static native void generalized_hammings_knn_hc__SWIG_0(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5, int jarg6); public final static native void generalized_hammings_knn_hc__SWIG_1(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void check_compatible_for_merge(long jarg1, Index jarg1_, long jarg2, Index jarg2_); public final static native long extract_index_ivf__SWIG_0(long jarg1, Index jarg1_); public final static native long try_extract_index_ivf__SWIG_0(long 
jarg1, Index jarg1_); public final static native void merge_into(long jarg1, Index jarg1_, long jarg2, Index jarg2_, boolean jarg3); public final static native void search_centroid(long jarg1, Index jarg1_, long jarg2, int jarg3, long jarg4, LongVector jarg4_); public final static native void search_and_return_centroids(long jarg1, Index jarg1_, long jarg2, long jarg3, int jarg4, long jarg5, long jarg6, LongVector jarg6_, long jarg7, LongVector jarg7_, long jarg8, LongVector jarg8_); public final static native void SlidingIndexWindow_index_set(long jarg1, SlidingIndexWindow jarg1_, long jarg2, Index jarg2_); public final static native long SlidingIndexWindow_index_get(long jarg1, SlidingIndexWindow jarg1_); public final static native void SlidingIndexWindow_ils_set(long jarg1, SlidingIndexWindow jarg1_, long jarg2, ArrayInvertedLists jarg2_); public final static native long SlidingIndexWindow_ils_get(long jarg1, SlidingIndexWindow jarg1_); public final static native void SlidingIndexWindow_n_slice_set(long jarg1, SlidingIndexWindow jarg1_, int jarg2); public final static native int SlidingIndexWindow_n_slice_get(long jarg1, SlidingIndexWindow jarg1_); public final static native void SlidingIndexWindow_nlist_set(long jarg1, SlidingIndexWindow jarg1_, long jarg2); public final static native long SlidingIndexWindow_nlist_get(long jarg1, SlidingIndexWindow jarg1_); public final static native void SlidingIndexWindow_sizes_set(long jarg1, SlidingIndexWindow jarg1_, long jarg2); public final static native long SlidingIndexWindow_sizes_get(long jarg1, SlidingIndexWindow jarg1_); public final static native long new_SlidingIndexWindow(long jarg1, Index jarg1_); public final static native void SlidingIndexWindow_step(long jarg1, SlidingIndexWindow jarg1_, long jarg2, Index jarg2_, boolean jarg3); public final static native void delete_SlidingIndexWindow(long jarg1); public final static native long get_invlist_range(long jarg1, Index jarg1_, int jarg2, int jarg3); public final 
static native void set_invlist_range(long jarg1, Index jarg1_, int jarg2, int jarg3, long jarg4, ArrayInvertedLists jarg4_); public final static native void search_with_parameters__SWIG_0(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_, long jarg7, IVFSearchParameters jarg7_, long jarg8, long jarg9); public final static native void search_with_parameters__SWIG_1(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_, long jarg7, IVFSearchParameters jarg7_, long jarg8); public final static native void search_with_parameters__SWIG_2(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_, long jarg7, IVFSearchParameters jarg7_); public final static native void range_search_with_parameters__SWIG_0(long jarg1, Index jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, RangeSearchResult jarg5_, long jarg6, IVFSearchParameters jarg6_, long jarg7, long jarg8); public final static native void range_search_with_parameters__SWIG_1(long jarg1, Index jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, RangeSearchResult jarg5_, long jarg6, IVFSearchParameters jarg6_, long jarg7); public final static native void range_search_with_parameters__SWIG_2(long jarg1, Index jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, RangeSearchResult jarg5_, long jarg6, IVFSearchParameters jarg6_); public final static native void IndexScalarQuantizer_sq_set(long jarg1, IndexScalarQuantizer jarg1_, long jarg2); public final static native long IndexScalarQuantizer_sq_get(long jarg1, IndexScalarQuantizer jarg1_); public final static native long new_IndexScalarQuantizer__SWIG_0(int jarg1, long jarg2, int jarg3); public final static native long new_IndexScalarQuantizer__SWIG_1(int jarg1, long jarg2); public final static native long new_IndexScalarQuantizer__SWIG_2(); public final static native void IndexScalarQuantizer_train(long 
jarg1, IndexScalarQuantizer jarg1_, long jarg2, long jarg3); public final static native void IndexScalarQuantizer_search(long jarg1, IndexScalarQuantizer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native long IndexScalarQuantizer_get_distance_computer(long jarg1, IndexScalarQuantizer jarg1_); public final static native void IndexScalarQuantizer_sa_encode(long jarg1, IndexScalarQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexScalarQuantizer_sa_decode(long jarg1, IndexScalarQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void delete_IndexScalarQuantizer(long jarg1); public final static native void IndexIVFScalarQuantizer_sq_set(long jarg1, IndexIVFScalarQuantizer jarg1_, long jarg2); public final static native long IndexIVFScalarQuantizer_sq_get(long jarg1, IndexIVFScalarQuantizer jarg1_); public final static native void IndexIVFScalarQuantizer_by_residual_set(long jarg1, IndexIVFScalarQuantizer jarg1_, boolean jarg2); public final static native boolean IndexIVFScalarQuantizer_by_residual_get(long jarg1, IndexIVFScalarQuantizer jarg1_); public final static native long new_IndexIVFScalarQuantizer__SWIG_0(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, int jarg5, boolean jarg6); public final static native long new_IndexIVFScalarQuantizer__SWIG_1(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, int jarg5); public final static native long new_IndexIVFScalarQuantizer__SWIG_2(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4); public final static native long new_IndexIVFScalarQuantizer__SWIG_3(); public final static native void IndexIVFScalarQuantizer_train_residual(long jarg1, IndexIVFScalarQuantizer jarg1_, long jarg2, long jarg3); public final static native void IndexIVFScalarQuantizer_encode_vectors__SWIG_0(long jarg1, IndexIVFScalarQuantizer jarg1_, long jarg2, long jarg3, long jarg4, 
LongVector jarg4_, long jarg5, boolean jarg6); public final static native void IndexIVFScalarQuantizer_encode_vectors__SWIG_1(long jarg1, IndexIVFScalarQuantizer jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void IndexIVFScalarQuantizer_add_core(long jarg1, IndexIVFScalarQuantizer jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, LongVector jarg5_); public final static native long IndexIVFScalarQuantizer_get_InvertedListScanner(long jarg1, IndexIVFScalarQuantizer jarg1_, boolean jarg2); public final static native void IndexIVFScalarQuantizer_reconstruct_from_offset(long jarg1, IndexIVFScalarQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexIVFScalarQuantizer_sa_decode(long jarg1, IndexIVFScalarQuantizer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void delete_IndexIVFScalarQuantizer(long jarg1); public final static native void HNSW_MinimaxHeap_n_set(long jarg1, HNSW.MinimaxHeap jarg1_, int jarg2); public final static native int HNSW_MinimaxHeap_n_get(long jarg1, HNSW.MinimaxHeap jarg1_); public final static native void HNSW_MinimaxHeap_k_set(long jarg1, HNSW.MinimaxHeap jarg1_, int jarg2); public final static native int HNSW_MinimaxHeap_k_get(long jarg1, HNSW.MinimaxHeap jarg1_); public final static native void HNSW_MinimaxHeap_nvalid_set(long jarg1, HNSW.MinimaxHeap jarg1_, int jarg2); public final static native int HNSW_MinimaxHeap_nvalid_get(long jarg1, HNSW.MinimaxHeap jarg1_); public final static native void HNSW_MinimaxHeap_ids_set(long jarg1, HNSW.MinimaxHeap jarg1_, long jarg2, IntVector jarg2_); public final static native long HNSW_MinimaxHeap_ids_get(long jarg1, HNSW.MinimaxHeap jarg1_); public final static native void HNSW_MinimaxHeap_dis_set(long jarg1, HNSW.MinimaxHeap jarg1_, long jarg2, FloatVector jarg2_); public final static native long HNSW_MinimaxHeap_dis_get(long jarg1, HNSW.MinimaxHeap jarg1_); 
// ------------------------------------------------------------------------------------
// SWIG/JNI bridge declarations (auto-generated by SWIG 4.0.2 -- do not hand-edit;
// regenerate from the SWIG interface file). This section covers:
//   * HNSW.MinimaxHeap operations (push/max/size/clear/pop_min/count_below) and the
//     HNSW.NodeDistCloser / HNSW.NodeDistFarther helper structs;
//   * HNSW graph field accessors (assign_probas, cum_nneighbor_per_level, levels,
//     offsets, neighbors, entry_point, rng, max_level, efConstruction, efSearch,
//     check_relative_distance, upper_beam, search_bounded_queue) and graph methods
//     (add_with_locks, search_from_candidates, search, reset, prepare_level_tab,
//     shrink_neighbor_list, ...);
//   * HNSWStats counters, constructor overloads, reset/combine, and the global
//     hnsw_stats accessor;
//   * ReconstructFromNeighbors accessors and codec methods;
//   * the IndexHNSW family (IndexHNSW, IndexHNSWFlat, IndexHNSWPQ, IndexHNSWSQ,
//     IndexHNSW2Level) and the beginning of IndexIVFFlat.
//
// __SWIG_N suffixes are SWIG's encoding of C++ overloads / defaulted trailing
// arguments. As elsewhere in this class, `long jargN` carries a native C++ pointer
// and the paired typed `jargN_` argument pins the owning Java wrapper object.
// The final IndexIVFFlat_encode_vectors__SWIG_0 declaration continues beyond this
// section.
// ------------------------------------------------------------------------------------
public final static native long new_HNSW_MinimaxHeap(int jarg1); public final static native void HNSW_MinimaxHeap_push(long jarg1, HNSW.MinimaxHeap jarg1_, int jarg2, float jarg3); public final static native float HNSW_MinimaxHeap_max(long jarg1, HNSW.MinimaxHeap jarg1_); public final static native int HNSW_MinimaxHeap_size(long jarg1, HNSW.MinimaxHeap jarg1_); public final static native void HNSW_MinimaxHeap_clear(long jarg1, HNSW.MinimaxHeap jarg1_); public final static native int HNSW_MinimaxHeap_pop_min__SWIG_0(long jarg1, HNSW.MinimaxHeap jarg1_, long jarg2); public final static native int HNSW_MinimaxHeap_pop_min__SWIG_1(long jarg1, HNSW.MinimaxHeap jarg1_); public final static native int HNSW_MinimaxHeap_count_below(long jarg1, HNSW.MinimaxHeap jarg1_, float jarg2); public final static native void delete_HNSW_MinimaxHeap(long jarg1); public final static native void HNSW_NodeDistCloser_d_set(long jarg1, HNSW.NodeDistCloser jarg1_, float jarg2); public final static native float HNSW_NodeDistCloser_d_get(long jarg1, HNSW.NodeDistCloser jarg1_); public final static native void HNSW_NodeDistCloser_id_set(long jarg1, HNSW.NodeDistCloser jarg1_, int jarg2); public final static native int HNSW_NodeDistCloser_id_get(long jarg1, HNSW.NodeDistCloser jarg1_); public final static native long new_HNSW_NodeDistCloser(float jarg1, int jarg2); public final static native void delete_HNSW_NodeDistCloser(long jarg1); public final static native void HNSW_NodeDistFarther_d_set(long jarg1, HNSW.NodeDistFarther jarg1_, float jarg2); public final static native float HNSW_NodeDistFarther_d_get(long jarg1, HNSW.NodeDistFarther jarg1_); public final static native void HNSW_NodeDistFarther_id_set(long jarg1, HNSW.NodeDistFarther jarg1_, int jarg2); public final static native int HNSW_NodeDistFarther_id_get(long jarg1, HNSW.NodeDistFarther jarg1_); public final static native long new_HNSW_NodeDistFarther(float jarg1, int jarg2); public final static native void 
delete_HNSW_NodeDistFarther(long jarg1); public final static native void HNSW_assign_probas_set(long jarg1, HNSW jarg1_, long jarg2, DoubleVector jarg2_); public final static native long HNSW_assign_probas_get(long jarg1, HNSW jarg1_); public final static native void HNSW_cum_nneighbor_per_level_set(long jarg1, HNSW jarg1_, long jarg2, IntVector jarg2_); public final static native long HNSW_cum_nneighbor_per_level_get(long jarg1, HNSW jarg1_); public final static native void HNSW_levels_set(long jarg1, HNSW jarg1_, long jarg2, IntVector jarg2_); public final static native long HNSW_levels_get(long jarg1, HNSW jarg1_); public final static native void HNSW_offsets_set(long jarg1, HNSW jarg1_, long jarg2, Uint64Vector jarg2_); public final static native long HNSW_offsets_get(long jarg1, HNSW jarg1_); public final static native void HNSW_neighbors_set(long jarg1, HNSW jarg1_, long jarg2, IntVector jarg2_); public final static native long HNSW_neighbors_get(long jarg1, HNSW jarg1_); public final static native void HNSW_entry_point_set(long jarg1, HNSW jarg1_, int jarg2); public final static native int HNSW_entry_point_get(long jarg1, HNSW jarg1_); public final static native void HNSW_rng_set(long jarg1, HNSW jarg1_, long jarg2); public final static native long HNSW_rng_get(long jarg1, HNSW jarg1_); public final static native void HNSW_max_level_set(long jarg1, HNSW jarg1_, int jarg2); public final static native int HNSW_max_level_get(long jarg1, HNSW jarg1_); public final static native void HNSW_efConstruction_set(long jarg1, HNSW jarg1_, int jarg2); public final static native int HNSW_efConstruction_get(long jarg1, HNSW jarg1_); public final static native void HNSW_efSearch_set(long jarg1, HNSW jarg1_, int jarg2); public final static native int HNSW_efSearch_get(long jarg1, HNSW jarg1_); public final static native void HNSW_check_relative_distance_set(long jarg1, HNSW jarg1_, boolean jarg2); public final static native boolean HNSW_check_relative_distance_get(long 
jarg1, HNSW jarg1_); public final static native void HNSW_upper_beam_set(long jarg1, HNSW jarg1_, int jarg2); public final static native int HNSW_upper_beam_get(long jarg1, HNSW jarg1_); public final static native void HNSW_search_bounded_queue_set(long jarg1, HNSW jarg1_, boolean jarg2); public final static native boolean HNSW_search_bounded_queue_get(long jarg1, HNSW jarg1_); public final static native void HNSW_set_default_probas(long jarg1, HNSW jarg1_, int jarg2, float jarg3); public final static native void HNSW_set_nb_neighbors(long jarg1, HNSW jarg1_, int jarg2, int jarg3); public final static native int HNSW_nb_neighbors(long jarg1, HNSW jarg1_, int jarg2); public final static native int HNSW_cum_nb_neighbors(long jarg1, HNSW jarg1_, int jarg2); public final static native void HNSW_neighbor_range(long jarg1, HNSW jarg1_, long jarg2, int jarg3, long jarg4, long jarg5); public final static native long new_HNSW__SWIG_0(int jarg1); public final static native long new_HNSW__SWIG_1(); public final static native int HNSW_random_level(long jarg1, HNSW jarg1_); public final static native void HNSW_fill_with_random_links(long jarg1, HNSW jarg1_, long jarg2); public final static native void HNSW_add_links_starting_from(long jarg1, HNSW jarg1_, long jarg2, DistanceComputer jarg2_, int jarg3, int jarg4, float jarg5, int jarg6, long jarg7, long jarg8, VisitedTable jarg8_); public final static native void HNSW_add_with_locks(long jarg1, HNSW jarg1_, long jarg2, DistanceComputer jarg2_, int jarg3, int jarg4, long jarg5, long jarg6, VisitedTable jarg6_); public final static native int HNSW_search_from_candidates__SWIG_0(long jarg1, HNSW jarg1_, long jarg2, DistanceComputer jarg2_, int jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6, HNSW.MinimaxHeap jarg6_, long jarg7, VisitedTable jarg7_, long jarg8, HNSWStats jarg8_, int jarg9, int jarg10); public final static native int HNSW_search_from_candidates__SWIG_1(long jarg1, HNSW jarg1_, long jarg2, 
DistanceComputer jarg2_, int jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6, HNSW.MinimaxHeap jarg6_, long jarg7, VisitedTable jarg7_, long jarg8, HNSWStats jarg8_, int jarg9); public final static native long HNSW_search_from_candidate_unbounded(long jarg1, HNSW jarg1_, long jarg2, long jarg3, DistanceComputer jarg3_, int jarg4, long jarg5, VisitedTable jarg5_, long jarg6, HNSWStats jarg6_); public final static native long HNSW_search(long jarg1, HNSW jarg1_, long jarg2, DistanceComputer jarg2_, int jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6, VisitedTable jarg6_); public final static native void HNSW_reset(long jarg1, HNSW jarg1_); public final static native void HNSW_clear_neighbor_tables(long jarg1, HNSW jarg1_, int jarg2); public final static native void HNSW_print_neighbor_stats(long jarg1, HNSW jarg1_, int jarg2); public final static native int HNSW_prepare_level_tab__SWIG_0(long jarg1, HNSW jarg1_, long jarg2, boolean jarg3); public final static native int HNSW_prepare_level_tab__SWIG_1(long jarg1, HNSW jarg1_, long jarg2); public final static native void HNSW_shrink_neighbor_list(long jarg1, DistanceComputer jarg1_, long jarg2, long jarg3, int jarg4); public final static native void delete_HNSW(long jarg1); public final static native void HNSWStats_n1_set(long jarg1, HNSWStats jarg1_, long jarg2); public final static native long HNSWStats_n1_get(long jarg1, HNSWStats jarg1_); public final static native void HNSWStats_n2_set(long jarg1, HNSWStats jarg1_, long jarg2); public final static native long HNSWStats_n2_get(long jarg1, HNSWStats jarg1_); public final static native void HNSWStats_n3_set(long jarg1, HNSWStats jarg1_, long jarg2); public final static native long HNSWStats_n3_get(long jarg1, HNSWStats jarg1_); public final static native void HNSWStats_ndis_set(long jarg1, HNSWStats jarg1_, long jarg2); public final static native long HNSWStats_ndis_get(long jarg1, HNSWStats jarg1_); public final static native void 
HNSWStats_nreorder_set(long jarg1, HNSWStats jarg1_, long jarg2); public final static native long HNSWStats_nreorder_get(long jarg1, HNSWStats jarg1_); public final static native long new_HNSWStats__SWIG_0(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5); public final static native long new_HNSWStats__SWIG_1(long jarg1, long jarg2, long jarg3, long jarg4); public final static native long new_HNSWStats__SWIG_2(long jarg1, long jarg2, long jarg3); public final static native long new_HNSWStats__SWIG_3(long jarg1, long jarg2); public final static native long new_HNSWStats__SWIG_4(long jarg1); public final static native long new_HNSWStats__SWIG_5(); public final static native void HNSWStats_reset(long jarg1, HNSWStats jarg1_); public final static native void HNSWStats_combine(long jarg1, HNSWStats jarg1_, long jarg2, HNSWStats jarg2_); public final static native void delete_HNSWStats(long jarg1); public final static native void hnsw_stats_set(long jarg1, HNSWStats jarg1_); public final static native long hnsw_stats_get(); public final static native long ReconstructFromNeighbors_index_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_M_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2); public final static native long ReconstructFromNeighbors_M_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_k_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2); public final static native long ReconstructFromNeighbors_k_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_nsq_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2); public final static native long ReconstructFromNeighbors_nsq_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_code_size_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2); public final static 
native long ReconstructFromNeighbors_code_size_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_k_reorder_set(long jarg1, ReconstructFromNeighbors jarg1_, int jarg2); public final static native int ReconstructFromNeighbors_k_reorder_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_codebook_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2, FloatVector jarg2_); public final static native long ReconstructFromNeighbors_codebook_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_codes_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2, ByteVector jarg2_); public final static native long ReconstructFromNeighbors_codes_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_ntotal_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2); public final static native long ReconstructFromNeighbors_ntotal_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_d_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2); public final static native long ReconstructFromNeighbors_d_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native void ReconstructFromNeighbors_dsub_set(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2); public final static native long ReconstructFromNeighbors_dsub_get(long jarg1, ReconstructFromNeighbors jarg1_); public final static native long new_ReconstructFromNeighbors__SWIG_0(long jarg1, IndexHNSW jarg1_, long jarg2, long jarg3); public final static native long new_ReconstructFromNeighbors__SWIG_1(long jarg1, IndexHNSW jarg1_, long jarg2); public final static native long new_ReconstructFromNeighbors__SWIG_2(long jarg1, IndexHNSW jarg1_); public final static native void ReconstructFromNeighbors_add_codes(long jarg1, 
ReconstructFromNeighbors jarg1_, long jarg2, long jarg3); public final static native long ReconstructFromNeighbors_compute_distances(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2, long jarg3, LongVector jarg3_, long jarg4, long jarg5); public final static native void ReconstructFromNeighbors_estimate_code(long jarg1, ReconstructFromNeighbors jarg1_, long jarg2, int jarg3, long jarg4); public final static native void ReconstructFromNeighbors_reconstruct(long jarg1, ReconstructFromNeighbors jarg1_, int jarg2, long jarg3, long jarg4); public final static native void ReconstructFromNeighbors_reconstruct_n(long jarg1, ReconstructFromNeighbors jarg1_, int jarg2, int jarg3, long jarg4); public final static native void ReconstructFromNeighbors_get_neighbor_table(long jarg1, ReconstructFromNeighbors jarg1_, int jarg2, long jarg3); public final static native void delete_ReconstructFromNeighbors(long jarg1); public final static native void IndexHNSW_hnsw_set(long jarg1, IndexHNSW jarg1_, long jarg2, HNSW jarg2_); public final static native long IndexHNSW_hnsw_get(long jarg1, IndexHNSW jarg1_); public final static native void IndexHNSW_own_fields_set(long jarg1, IndexHNSW jarg1_, boolean jarg2); public final static native boolean IndexHNSW_own_fields_get(long jarg1, IndexHNSW jarg1_); public final static native void IndexHNSW_storage_set(long jarg1, IndexHNSW jarg1_, long jarg2, Index jarg2_); public final static native long IndexHNSW_storage_get(long jarg1, IndexHNSW jarg1_); public final static native void IndexHNSW_reconstruct_from_neighbors_set(long jarg1, IndexHNSW jarg1_, long jarg2, ReconstructFromNeighbors jarg2_); public final static native long IndexHNSW_reconstruct_from_neighbors_get(long jarg1, IndexHNSW jarg1_); public final static native long new_IndexHNSW__SWIG_0(int jarg1, int jarg2, int jarg3); public final static native long new_IndexHNSW__SWIG_1(int jarg1, int jarg2); public final static native long new_IndexHNSW__SWIG_2(int jarg1); public final 
static native long new_IndexHNSW__SWIG_3(); public final static native long new_IndexHNSW__SWIG_4(long jarg1, Index jarg1_, int jarg2); public final static native long new_IndexHNSW__SWIG_5(long jarg1, Index jarg1_); public final static native void delete_IndexHNSW(long jarg1); public final static native void IndexHNSW_add(long jarg1, IndexHNSW jarg1_, long jarg2, long jarg3); public final static native void IndexHNSW_train(long jarg1, IndexHNSW jarg1_, long jarg2, long jarg3); public final static native void IndexHNSW_search(long jarg1, IndexHNSW jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexHNSW_reconstruct(long jarg1, IndexHNSW jarg1_, long jarg2, long jarg3); public final static native void IndexHNSW_reset(long jarg1, IndexHNSW jarg1_); public final static native void IndexHNSW_shrink_level_0_neighbors(long jarg1, IndexHNSW jarg1_, int jarg2); public final static native void IndexHNSW_search_level_0__SWIG_0(long jarg1, IndexHNSW jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, long jarg7, long jarg8, LongVector jarg8_, int jarg9, int jarg10); public final static native void IndexHNSW_search_level_0__SWIG_1(long jarg1, IndexHNSW jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, long jarg7, long jarg8, LongVector jarg8_, int jarg9); public final static native void IndexHNSW_search_level_0__SWIG_2(long jarg1, IndexHNSW jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, long jarg7, long jarg8, LongVector jarg8_); public final static native void IndexHNSW_init_level_0_from_knngraph(long jarg1, IndexHNSW jarg1_, int jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexHNSW_init_level_0_from_entry_points(long jarg1, IndexHNSW jarg1_, int jarg2, long jarg3, long jarg4); public final static native void IndexHNSW_reorder_links(long jarg1, IndexHNSW jarg1_); public final static native void 
IndexHNSW_link_singletons(long jarg1, IndexHNSW jarg1_); public final static native long new_IndexHNSWFlat__SWIG_0(); public final static native long new_IndexHNSWFlat__SWIG_1(int jarg1, int jarg2, int jarg3); public final static native long new_IndexHNSWFlat__SWIG_2(int jarg1, int jarg2); public final static native void delete_IndexHNSWFlat(long jarg1); public final static native long new_IndexHNSWPQ__SWIG_0(); public final static native long new_IndexHNSWPQ__SWIG_1(int jarg1, int jarg2, int jarg3); public final static native void IndexHNSWPQ_train(long jarg1, IndexHNSWPQ jarg1_, long jarg2, long jarg3); public final static native void delete_IndexHNSWPQ(long jarg1); public final static native long new_IndexHNSWSQ__SWIG_0(); public final static native long new_IndexHNSWSQ__SWIG_1(int jarg1, long jarg2, int jarg3, int jarg4); public final static native long new_IndexHNSWSQ__SWIG_2(int jarg1, long jarg2, int jarg3); public final static native void delete_IndexHNSWSQ(long jarg1); public final static native long new_IndexHNSW2Level__SWIG_0(); public final static native long new_IndexHNSW2Level__SWIG_1(long jarg1, Index jarg1_, long jarg2, int jarg3, int jarg4); public final static native void IndexHNSW2Level_flip_to_ivf(long jarg1, IndexHNSW2Level jarg1_); public final static native void IndexHNSW2Level_search(long jarg1, IndexHNSW2Level jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void delete_IndexHNSW2Level(long jarg1); public final static native long new_IndexIVFFlat__SWIG_0(long jarg1, Index jarg1_, long jarg2, long jarg3, int jarg4); public final static native long new_IndexIVFFlat__SWIG_1(long jarg1, Index jarg1_, long jarg2, long jarg3); public final static native void IndexIVFFlat_add_core(long jarg1, IndexIVFFlat jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, LongVector jarg5_); public final static native void IndexIVFFlat_encode_vectors__SWIG_0(long jarg1, 
IndexIVFFlat jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, boolean jarg6); public final static native void IndexIVFFlat_encode_vectors__SWIG_1(long jarg1, IndexIVFFlat jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native long IndexIVFFlat_get_InvertedListScanner(long jarg1, IndexIVFFlat jarg1_, boolean jarg2); public final static native void IndexIVFFlat_reconstruct_from_offset(long jarg1, IndexIVFFlat jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexIVFFlat_sa_decode(long jarg1, IndexIVFFlat jarg1_, long jarg2, long jarg3, long jarg4); public final static native long new_IndexIVFFlat__SWIG_2(); public final static native void delete_IndexIVFFlat(long jarg1); public final static native void IndexIVFFlatDedup_instances_set(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2); public final static native long IndexIVFFlatDedup_instances_get(long jarg1, IndexIVFFlatDedup jarg1_); public final static native long new_IndexIVFFlatDedup__SWIG_0(long jarg1, Index jarg1_, long jarg2, long jarg3, int jarg4); public final static native long new_IndexIVFFlatDedup__SWIG_1(long jarg1, Index jarg1_, long jarg2, long jarg3); public final static native void IndexIVFFlatDedup_train(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2, long jarg3); public final static native void IndexIVFFlatDedup_add_with_ids(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexIVFFlatDedup_search_preassigned__SWIG_0(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, long jarg8, LongVector jarg8_, boolean jarg9, long jarg10, IVFSearchParameters jarg10_, long jarg11, IndexIVFStats jarg11_); public final static native void IndexIVFFlatDedup_search_preassigned__SWIG_1(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2, long jarg3, long jarg4, long 
jarg5, LongVector jarg5_, long jarg6, long jarg7, long jarg8, LongVector jarg8_, boolean jarg9, long jarg10, IVFSearchParameters jarg10_); public final static native void IndexIVFFlatDedup_search_preassigned__SWIG_2(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, long jarg8, LongVector jarg8_, boolean jarg9); public final static native long IndexIVFFlatDedup_remove_ids(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2, IDSelector jarg2_); public final static native void IndexIVFFlatDedup_range_search(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, RangeSearchResult jarg5_); public final static native void IndexIVFFlatDedup_update_vectors(long jarg1, IndexIVFFlatDedup jarg1_, int jarg2, long jarg3, LongVector jarg3_, long jarg4); public final static native void IndexIVFFlatDedup_reconstruct_from_offset(long jarg1, IndexIVFFlatDedup jarg1_, long jarg2, long jarg3, long jarg4); public final static native long new_IndexIVFFlatDedup__SWIG_2(); public final static native void delete_IndexIVFFlatDedup(long jarg1); public final static native void OnDiskOneList_size_set(long jarg1, OnDiskOneList jarg1_, long jarg2); public final static native long OnDiskOneList_size_get(long jarg1, OnDiskOneList jarg1_); public final static native void OnDiskOneList_capacity_set(long jarg1, OnDiskOneList jarg1_, long jarg2); public final static native long OnDiskOneList_capacity_get(long jarg1, OnDiskOneList jarg1_); public final static native void OnDiskOneList_offset_set(long jarg1, OnDiskOneList jarg1_, long jarg2); public final static native long OnDiskOneList_offset_get(long jarg1, OnDiskOneList jarg1_); public final static native long new_OnDiskOneList(); public final static native void delete_OnDiskOneList(long jarg1); public final static native void OnDiskInvertedLists_lists_set(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long 
OnDiskInvertedLists_lists_get(long jarg1, OnDiskInvertedLists jarg1_); public final static native void OnDiskInvertedLists_Slot_offset_set(long jarg1, OnDiskInvertedLists.Slot jarg1_, long jarg2); public final static native long OnDiskInvertedLists_Slot_offset_get(long jarg1, OnDiskInvertedLists.Slot jarg1_); public final static native void OnDiskInvertedLists_Slot_capacity_set(long jarg1, OnDiskInvertedLists.Slot jarg1_, long jarg2); public final static native long OnDiskInvertedLists_Slot_capacity_get(long jarg1, OnDiskInvertedLists.Slot jarg1_); public final static native long new_OnDiskInvertedLists_Slot__SWIG_0(long jarg1, long jarg2); public final static native long new_OnDiskInvertedLists_Slot__SWIG_1(); public final static native void delete_OnDiskInvertedLists_Slot(long jarg1); public final static native void OnDiskInvertedLists_slots_set(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long OnDiskInvertedLists_slots_get(long jarg1, OnDiskInvertedLists jarg1_); public final static native void OnDiskInvertedLists_filename_set(long jarg1, OnDiskInvertedLists jarg1_, String jarg2); public final static native String OnDiskInvertedLists_filename_get(long jarg1, OnDiskInvertedLists jarg1_); public final static native void OnDiskInvertedLists_totsize_set(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long OnDiskInvertedLists_totsize_get(long jarg1, OnDiskInvertedLists jarg1_); public final static native void OnDiskInvertedLists_ptr_set(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long OnDiskInvertedLists_ptr_get(long jarg1, OnDiskInvertedLists jarg1_); public final static native void OnDiskInvertedLists_read_only_set(long jarg1, OnDiskInvertedLists jarg1_, boolean jarg2); public final static native boolean OnDiskInvertedLists_read_only_get(long jarg1, OnDiskInvertedLists jarg1_); public final static native long new_OnDiskInvertedLists__SWIG_0(long jarg1, long jarg2, 
String jarg3); public final static native long OnDiskInvertedLists_list_size(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long OnDiskInvertedLists_get_codes(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long OnDiskInvertedLists_get_ids(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long OnDiskInvertedLists_add_entries(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void OnDiskInvertedLists_update_entries(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6); public final static native void OnDiskInvertedLists_resize(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, long jarg3); public final static native long OnDiskInvertedLists_merge_from__SWIG_0(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, int jarg3, boolean jarg4); public final static native long OnDiskInvertedLists_merge_from__SWIG_1(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, int jarg3); public final static native long OnDiskInvertedLists_merge_from_1__SWIG_0(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, InvertedLists jarg2_, boolean jarg3); public final static native long OnDiskInvertedLists_merge_from_1__SWIG_1(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, InvertedLists jarg2_); public final static native void OnDiskInvertedLists_crop_invlists(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, long jarg3); public final static native void OnDiskInvertedLists_prefetch_lists(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, LongVector jarg2_, int jarg3); public final static native void delete_OnDiskInvertedLists(long jarg1); public final static native void OnDiskInvertedLists_locks_set(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long OnDiskInvertedLists_locks_get(long jarg1, OnDiskInvertedLists 
jarg1_); public final static native void OnDiskInvertedLists_pf_set(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long OnDiskInvertedLists_pf_get(long jarg1, OnDiskInvertedLists jarg1_); public final static native void OnDiskInvertedLists_prefetch_nthread_set(long jarg1, OnDiskInvertedLists jarg1_, int jarg2); public final static native int OnDiskInvertedLists_prefetch_nthread_get(long jarg1, OnDiskInvertedLists jarg1_); public final static native void OnDiskInvertedLists_do_mmap(long jarg1, OnDiskInvertedLists jarg1_); public final static native void OnDiskInvertedLists_update_totsize(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native void OnDiskInvertedLists_resize_locked(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, long jarg3); public final static native long OnDiskInvertedLists_allocate_slot(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native void OnDiskInvertedLists_free_slot(long jarg1, OnDiskInvertedLists jarg1_, long jarg2, long jarg3); public final static native void OnDiskInvertedLists_set_all_lists_sizes(long jarg1, OnDiskInvertedLists jarg1_, long jarg2); public final static native long new_OnDiskInvertedLists__SWIG_1(); public final static native long new_OnDiskInvertedListsIOHook(); public final static native void OnDiskInvertedListsIOHook_write(long jarg1, OnDiskInvertedListsIOHook jarg1_, long jarg2, InvertedLists jarg2_, long jarg3); public final static native long OnDiskInvertedListsIOHook_read(long jarg1, OnDiskInvertedListsIOHook jarg1_, long jarg2, int jarg3); public final static native long OnDiskInvertedListsIOHook_read_ArrayInvertedLists(long jarg1, OnDiskInvertedListsIOHook jarg1_, long jarg2, int jarg3, long jarg4, long jarg5, long jarg6, Uint64Vector jarg6_); public final static native void delete_OnDiskInvertedListsIOHook(long jarg1); public final static native void IVFPQSearchParameters_scan_table_threshold_set(long jarg1, 
IVFPQSearchParameters jarg1_, long jarg2); public final static native long IVFPQSearchParameters_scan_table_threshold_get(long jarg1, IVFPQSearchParameters jarg1_); public final static native void IVFPQSearchParameters_polysemous_ht_set(long jarg1, IVFPQSearchParameters jarg1_, int jarg2); public final static native int IVFPQSearchParameters_polysemous_ht_get(long jarg1, IVFPQSearchParameters jarg1_); public final static native long new_IVFPQSearchParameters(); public final static native void delete_IVFPQSearchParameters(long jarg1); public final static native void precomputed_table_max_bytes_set(long jarg1); public final static native long precomputed_table_max_bytes_get(); public final static native void IndexIVFPQ_by_residual_set(long jarg1, IndexIVFPQ jarg1_, boolean jarg2); public final static native boolean IndexIVFPQ_by_residual_get(long jarg1, IndexIVFPQ jarg1_); public final static native void IndexIVFPQ_pq_set(long jarg1, IndexIVFPQ jarg1_, long jarg2, ProductQuantizer jarg2_); public final static native long IndexIVFPQ_pq_get(long jarg1, IndexIVFPQ jarg1_); public final static native void IndexIVFPQ_do_polysemous_training_set(long jarg1, IndexIVFPQ jarg1_, boolean jarg2); public final static native boolean IndexIVFPQ_do_polysemous_training_get(long jarg1, IndexIVFPQ jarg1_); public final static native void IndexIVFPQ_polysemous_training_set(long jarg1, IndexIVFPQ jarg1_, long jarg2, PolysemousTraining jarg2_); public final static native long IndexIVFPQ_polysemous_training_get(long jarg1, IndexIVFPQ jarg1_); public final static native void IndexIVFPQ_scan_table_threshold_set(long jarg1, IndexIVFPQ jarg1_, long jarg2); public final static native long IndexIVFPQ_scan_table_threshold_get(long jarg1, IndexIVFPQ jarg1_); public final static native void IndexIVFPQ_polysemous_ht_set(long jarg1, IndexIVFPQ jarg1_, int jarg2); public final static native int IndexIVFPQ_polysemous_ht_get(long jarg1, IndexIVFPQ jarg1_); public final static native void 
IndexIVFPQ_use_precomputed_table_set(long jarg1, IndexIVFPQ jarg1_, int jarg2); public final static native int IndexIVFPQ_use_precomputed_table_get(long jarg1, IndexIVFPQ jarg1_); public final static native void IndexIVFPQ_precomputed_table_set(long jarg1, IndexIVFPQ jarg1_, long jarg2); public final static native long IndexIVFPQ_precomputed_table_get(long jarg1, IndexIVFPQ jarg1_); public final static native long new_IndexIVFPQ__SWIG_0(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, int jarg6); public final static native long new_IndexIVFPQ__SWIG_1(long jarg1, Index jarg1_, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void IndexIVFPQ_encode_vectors__SWIG_0(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, boolean jarg6); public final static native void IndexIVFPQ_encode_vectors__SWIG_1(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void IndexIVFPQ_sa_decode(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexIVFPQ_add_core(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, LongVector jarg5_); public final static native void IndexIVFPQ_add_core_o__SWIG_0(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexIVFPQ_add_core_o__SWIG_1(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void IndexIVFPQ_train_residual(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3); public final static native void IndexIVFPQ_train_residual_o(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexIVFPQ_reconstruct_from_offset(long jarg1, IndexIVFPQ jarg1_, long jarg2, 
long jarg3, long jarg4); public final static native long IndexIVFPQ_find_duplicates(long jarg1, IndexIVFPQ jarg1_, long jarg2, LongVector jarg2_, long jarg3); public final static native void IndexIVFPQ_encode(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexIVFPQ_encode_multiple__SWIG_0(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, LongVector jarg3_, long jarg4, long jarg5, boolean jarg6); public final static native void IndexIVFPQ_encode_multiple__SWIG_1(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, LongVector jarg3_, long jarg4, long jarg5); public final static native void IndexIVFPQ_decode_multiple(long jarg1, IndexIVFPQ jarg1_, long jarg2, long jarg3, LongVector jarg3_, long jarg4, long jarg5); public final static native long IndexIVFPQ_get_InvertedListScanner(long jarg1, IndexIVFPQ jarg1_, boolean jarg2); public final static native void IndexIVFPQ_precompute_table(long jarg1, IndexIVFPQ jarg1_); public final static native long new_IndexIVFPQ__SWIG_2(); public final static native void delete_IndexIVFPQ(long jarg1); public final static native void initialize_IVFPQ_precomputed_table(long jarg1, long jarg2, Index jarg2_, long jarg3, ProductQuantizer jarg3_, long jarg4, boolean jarg5); public final static native void IndexIVFPQStats_nrefine_set(long jarg1, IndexIVFPQStats jarg1_, long jarg2); public final static native long IndexIVFPQStats_nrefine_get(long jarg1, IndexIVFPQStats jarg1_); public final static native void IndexIVFPQStats_n_hamming_pass_set(long jarg1, IndexIVFPQStats jarg1_, long jarg2); public final static native long IndexIVFPQStats_n_hamming_pass_get(long jarg1, IndexIVFPQStats jarg1_); public final static native void IndexIVFPQStats_search_cycles_set(long jarg1, IndexIVFPQStats jarg1_, long jarg2); public final static native long IndexIVFPQStats_search_cycles_get(long jarg1, IndexIVFPQStats jarg1_); public final static native void IndexIVFPQStats_refine_cycles_set(long 
jarg1, IndexIVFPQStats jarg1_, long jarg2); public final static native long IndexIVFPQStats_refine_cycles_get(long jarg1, IndexIVFPQStats jarg1_); public final static native long new_IndexIVFPQStats(); public final static native void IndexIVFPQStats_reset(long jarg1, IndexIVFPQStats jarg1_); public final static native void delete_IndexIVFPQStats(long jarg1); public final static native void indexIVFPQ_stats_set(long jarg1, IndexIVFPQStats jarg1_); public final static native long indexIVFPQ_stats_get(); public final static native void IndexBinary_d_set(long jarg1, IndexBinary jarg1_, int jarg2); public final static native int IndexBinary_d_get(long jarg1, IndexBinary jarg1_); public final static native void IndexBinary_code_size_set(long jarg1, IndexBinary jarg1_, int jarg2); public final static native int IndexBinary_code_size_get(long jarg1, IndexBinary jarg1_); public final static native void IndexBinary_ntotal_set(long jarg1, IndexBinary jarg1_, long jarg2); public final static native long IndexBinary_ntotal_get(long jarg1, IndexBinary jarg1_); public final static native void IndexBinary_verbose_set(long jarg1, IndexBinary jarg1_, boolean jarg2); public final static native boolean IndexBinary_verbose_get(long jarg1, IndexBinary jarg1_); public final static native void IndexBinary_is_trained_set(long jarg1, IndexBinary jarg1_, boolean jarg2); public final static native boolean IndexBinary_is_trained_get(long jarg1, IndexBinary jarg1_); public final static native void IndexBinary_metric_type_set(long jarg1, IndexBinary jarg1_, int jarg2); public final static native int IndexBinary_metric_type_get(long jarg1, IndexBinary jarg1_); public final static native void delete_IndexBinary(long jarg1); public final static native void IndexBinary_train(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3); public final static native void IndexBinary_add(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3); public final static native void IndexBinary_add_with_ids(long 
jarg1, IndexBinary jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexBinary_search(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexBinary_range_search(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3, int jarg4, long jarg5, RangeSearchResult jarg5_); public final static native void IndexBinary_assign__SWIG_0(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void IndexBinary_assign__SWIG_1(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexBinary_reset(long jarg1, IndexBinary jarg1_); public final static native long IndexBinary_remove_ids(long jarg1, IndexBinary jarg1_, long jarg2, IDSelector jarg2_); public final static native void IndexBinary_reconstruct(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3); public final static native void IndexBinary_reconstruct_n(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexBinary_search_and_reconstruct(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_, long jarg7); public final static native void IndexBinary_display(long jarg1, IndexBinary jarg1_); public final static native void Index2Layer_q1_set(long jarg1, Index2Layer jarg1_, long jarg2, Level1Quantizer jarg2_); public final static native long Index2Layer_q1_get(long jarg1, Index2Layer jarg1_); public final static native void Index2Layer_pq_set(long jarg1, Index2Layer jarg1_, long jarg2, ProductQuantizer jarg2_); public final static native long Index2Layer_pq_get(long jarg1, Index2Layer jarg1_); public final static native void Index2Layer_code_size_1_set(long jarg1, Index2Layer jarg1_, long jarg2); public final static native long 
Index2Layer_code_size_1_get(long jarg1, Index2Layer jarg1_); public final static native void Index2Layer_code_size_2_set(long jarg1, Index2Layer jarg1_, long jarg2); public final static native long Index2Layer_code_size_2_get(long jarg1, Index2Layer jarg1_); public final static native long new_Index2Layer__SWIG_0(long jarg1, Index jarg1_, long jarg2, int jarg3, int jarg4, int jarg5); public final static native long new_Index2Layer__SWIG_1(long jarg1, Index jarg1_, long jarg2, int jarg3, int jarg4); public final static native long new_Index2Layer__SWIG_2(long jarg1, Index jarg1_, long jarg2, int jarg3); public final static native long new_Index2Layer__SWIG_3(); public final static native void delete_Index2Layer(long jarg1); public final static native void Index2Layer_train(long jarg1, Index2Layer jarg1_, long jarg2, long jarg3); public final static native void Index2Layer_search(long jarg1, Index2Layer jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native long Index2Layer_get_distance_computer(long jarg1, Index2Layer jarg1_); public final static native void Index2Layer_transfer_to_IVFPQ(long jarg1, Index2Layer jarg1_, long jarg2, IndexIVFPQ jarg2_); public final static native void Index2Layer_sa_encode(long jarg1, Index2Layer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void Index2Layer_sa_decode(long jarg1, Index2Layer jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexBinaryFlat_xb_set(long jarg1, IndexBinaryFlat jarg1_, long jarg2, ByteVector jarg2_); public final static native long IndexBinaryFlat_xb_get(long jarg1, IndexBinaryFlat jarg1_); public final static native void IndexBinaryFlat_use_heap_set(long jarg1, IndexBinaryFlat jarg1_, boolean jarg2); public final static native boolean IndexBinaryFlat_use_heap_get(long jarg1, IndexBinaryFlat jarg1_); public final static native void IndexBinaryFlat_query_batch_size_set(long jarg1, IndexBinaryFlat 
jarg1_, long jarg2); public final static native long IndexBinaryFlat_query_batch_size_get(long jarg1, IndexBinaryFlat jarg1_); public final static native long new_IndexBinaryFlat__SWIG_0(long jarg1); public final static native void IndexBinaryFlat_add(long jarg1, IndexBinaryFlat jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryFlat_reset(long jarg1, IndexBinaryFlat jarg1_); public final static native void IndexBinaryFlat_search(long jarg1, IndexBinaryFlat jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexBinaryFlat_range_search(long jarg1, IndexBinaryFlat jarg1_, long jarg2, long jarg3, int jarg4, long jarg5, RangeSearchResult jarg5_); public final static native void IndexBinaryFlat_reconstruct(long jarg1, IndexBinaryFlat jarg1_, long jarg2, long jarg3); public final static native long IndexBinaryFlat_remove_ids(long jarg1, IndexBinaryFlat jarg1_, long jarg2, IDSelector jarg2_); public final static native long new_IndexBinaryFlat__SWIG_1(); public final static native void delete_IndexBinaryFlat(long jarg1); public final static native void IndexBinaryIVF_invlists_set(long jarg1, IndexBinaryIVF jarg1_, long jarg2, InvertedLists jarg2_); public final static native long IndexBinaryIVF_invlists_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_own_invlists_set(long jarg1, IndexBinaryIVF jarg1_, boolean jarg2); public final static native boolean IndexBinaryIVF_own_invlists_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_nprobe_set(long jarg1, IndexBinaryIVF jarg1_, long jarg2); public final static native long IndexBinaryIVF_nprobe_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_max_codes_set(long jarg1, IndexBinaryIVF jarg1_, long jarg2); public final static native long IndexBinaryIVF_max_codes_get(long jarg1, IndexBinaryIVF jarg1_); public final static 
native void IndexBinaryIVF_use_heap_set(long jarg1, IndexBinaryIVF jarg1_, boolean jarg2); public final static native boolean IndexBinaryIVF_use_heap_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_direct_map_set(long jarg1, IndexBinaryIVF jarg1_, long jarg2); public final static native long IndexBinaryIVF_direct_map_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_quantizer_set(long jarg1, IndexBinaryIVF jarg1_, long jarg2, IndexBinary jarg2_); public final static native long IndexBinaryIVF_quantizer_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_nlist_set(long jarg1, IndexBinaryIVF jarg1_, long jarg2); public final static native long IndexBinaryIVF_nlist_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_own_fields_set(long jarg1, IndexBinaryIVF jarg1_, boolean jarg2); public final static native boolean IndexBinaryIVF_own_fields_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_cp_set(long jarg1, IndexBinaryIVF jarg1_, long jarg2, ClusteringParameters jarg2_); public final static native long IndexBinaryIVF_cp_get(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_clustering_index_set(long jarg1, IndexBinaryIVF jarg1_, long jarg2, Index jarg2_); public final static native long IndexBinaryIVF_clustering_index_get(long jarg1, IndexBinaryIVF jarg1_); public final static native long new_IndexBinaryIVF__SWIG_0(long jarg1, IndexBinary jarg1_, long jarg2, long jarg3); public final static native long new_IndexBinaryIVF__SWIG_1(); public final static native void delete_IndexBinaryIVF(long jarg1); public final static native void IndexBinaryIVF_reset(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_train(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryIVF_add(long 
jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryIVF_add_with_ids(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexBinaryIVF_add_core(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, LongVector jarg5_); public final static native void IndexBinaryIVF_search_preassigned__SWIG_0(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, long jarg8, LongVector jarg8_, boolean jarg9, long jarg10, IVFSearchParameters jarg10_); public final static native void IndexBinaryIVF_search_preassigned__SWIG_1(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, long jarg8, LongVector jarg8_, boolean jarg9); public final static native long IndexBinaryIVF_get_InvertedListScanner__SWIG_0(long jarg1, IndexBinaryIVF jarg1_, boolean jarg2); public final static native long IndexBinaryIVF_get_InvertedListScanner__SWIG_1(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_search(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexBinaryIVF_range_search(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, int jarg4, long jarg5, RangeSearchResult jarg5_); public final static native void IndexBinaryIVF_range_search_preassigned(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, int jarg4, long jarg5, LongVector jarg5_, long jarg6, long jarg7, RangeSearchResult jarg7_); public final static native void IndexBinaryIVF_reconstruct(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryIVF_reconstruct_n(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, long jarg4); public final static 
native void IndexBinaryIVF_search_and_reconstruct(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_, long jarg7); public final static native void IndexBinaryIVF_reconstruct_from_offset(long jarg1, IndexBinaryIVF jarg1_, long jarg2, long jarg3, long jarg4); public final static native long IndexBinaryIVF_remove_ids(long jarg1, IndexBinaryIVF jarg1_, long jarg2, IDSelector jarg2_); public final static native void IndexBinaryIVF_merge_from(long jarg1, IndexBinaryIVF jarg1_, long jarg2, IndexBinaryIVF jarg2_, long jarg3); public final static native long IndexBinaryIVF_get_list_size(long jarg1, IndexBinaryIVF jarg1_, long jarg2); public final static native void IndexBinaryIVF_make_direct_map__SWIG_0(long jarg1, IndexBinaryIVF jarg1_, boolean jarg2); public final static native void IndexBinaryIVF_make_direct_map__SWIG_1(long jarg1, IndexBinaryIVF jarg1_); public final static native void IndexBinaryIVF_set_direct_map_type(long jarg1, IndexBinaryIVF jarg1_, long jarg2); public final static native void IndexBinaryIVF_replace_invlists__SWIG_0(long jarg1, IndexBinaryIVF jarg1_, long jarg2, InvertedLists jarg2_, boolean jarg3); public final static native void IndexBinaryIVF_replace_invlists__SWIG_1(long jarg1, IndexBinaryIVF jarg1_, long jarg2, InvertedLists jarg2_); public final static native void IndexBinaryFromFloat_index_set(long jarg1, IndexBinaryFromFloat jarg1_, long jarg2, Index jarg2_); public final static native long IndexBinaryFromFloat_index_get(long jarg1, IndexBinaryFromFloat jarg1_); public final static native void IndexBinaryFromFloat_own_fields_set(long jarg1, IndexBinaryFromFloat jarg1_, boolean jarg2); public final static native boolean IndexBinaryFromFloat_own_fields_get(long jarg1, IndexBinaryFromFloat jarg1_); public final static native long new_IndexBinaryFromFloat__SWIG_0(); public final static native long new_IndexBinaryFromFloat__SWIG_1(long jarg1, Index jarg1_); public final static native 
void delete_IndexBinaryFromFloat(long jarg1); public final static native void IndexBinaryFromFloat_add(long jarg1, IndexBinaryFromFloat jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryFromFloat_reset(long jarg1, IndexBinaryFromFloat jarg1_); public final static native void IndexBinaryFromFloat_search(long jarg1, IndexBinaryFromFloat jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexBinaryFromFloat_train(long jarg1, IndexBinaryFromFloat jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryHNSW_hnsw_set(long jarg1, IndexBinaryHNSW jarg1_, long jarg2, HNSW jarg2_); public final static native long IndexBinaryHNSW_hnsw_get(long jarg1, IndexBinaryHNSW jarg1_); public final static native void IndexBinaryHNSW_own_fields_set(long jarg1, IndexBinaryHNSW jarg1_, boolean jarg2); public final static native boolean IndexBinaryHNSW_own_fields_get(long jarg1, IndexBinaryHNSW jarg1_); public final static native void IndexBinaryHNSW_storage_set(long jarg1, IndexBinaryHNSW jarg1_, long jarg2, IndexBinary jarg2_); public final static native long IndexBinaryHNSW_storage_get(long jarg1, IndexBinaryHNSW jarg1_); public final static native long new_IndexBinaryHNSW__SWIG_0(); public final static native long new_IndexBinaryHNSW__SWIG_1(int jarg1, int jarg2); public final static native long new_IndexBinaryHNSW__SWIG_2(int jarg1); public final static native long new_IndexBinaryHNSW__SWIG_3(long jarg1, IndexBinary jarg1_, int jarg2); public final static native long new_IndexBinaryHNSW__SWIG_4(long jarg1, IndexBinary jarg1_); public final static native void delete_IndexBinaryHNSW(long jarg1); public final static native long IndexBinaryHNSW_get_distance_computer(long jarg1, IndexBinaryHNSW jarg1_); public final static native void IndexBinaryHNSW_add(long jarg1, IndexBinaryHNSW jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryHNSW_train(long 
jarg1, IndexBinaryHNSW jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryHNSW_search(long jarg1, IndexBinaryHNSW jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexBinaryHNSW_reconstruct(long jarg1, IndexBinaryHNSW jarg1_, long jarg2, long jarg3); public final static native void IndexBinaryHNSW_reset(long jarg1, IndexBinaryHNSW jarg1_); public final static native void IndexRefine_base_index_set(long jarg1, IndexRefine jarg1_, long jarg2, Index jarg2_); public final static native long IndexRefine_base_index_get(long jarg1, IndexRefine jarg1_); public final static native void IndexRefine_refine_index_set(long jarg1, IndexRefine jarg1_, long jarg2, Index jarg2_); public final static native long IndexRefine_refine_index_get(long jarg1, IndexRefine jarg1_); public final static native void IndexRefine_own_fields_set(long jarg1, IndexRefine jarg1_, boolean jarg2); public final static native boolean IndexRefine_own_fields_get(long jarg1, IndexRefine jarg1_); public final static native void IndexRefine_own_refine_index_set(long jarg1, IndexRefine jarg1_, boolean jarg2); public final static native boolean IndexRefine_own_refine_index_get(long jarg1, IndexRefine jarg1_); public final static native void IndexRefine_k_factor_set(long jarg1, IndexRefine jarg1_, float jarg2); public final static native float IndexRefine_k_factor_get(long jarg1, IndexRefine jarg1_); public final static native long new_IndexRefine__SWIG_0(long jarg1, Index jarg1_, long jarg2, Index jarg2_); public final static native long new_IndexRefine__SWIG_1(); public final static native void IndexRefine_train(long jarg1, IndexRefine jarg1_, long jarg2, long jarg3); public final static native void IndexRefine_add(long jarg1, IndexRefine jarg1_, long jarg2, long jarg3); public final static native void IndexRefine_reset(long jarg1, IndexRefine jarg1_); public final static native void IndexRefine_search(long jarg1, 
IndexRefine jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexRefine_reconstruct(long jarg1, IndexRefine jarg1_, long jarg2, long jarg3); public final static native long IndexRefine_sa_code_size(long jarg1, IndexRefine jarg1_); public final static native void IndexRefine_sa_encode(long jarg1, IndexRefine jarg1_, long jarg2, long jarg3, long jarg4); public final static native void IndexRefine_sa_decode(long jarg1, IndexRefine jarg1_, long jarg2, long jarg3, long jarg4); public final static native void delete_IndexRefine(long jarg1); public final static native long new_IndexRefineFlat__SWIG_0(long jarg1, Index jarg1_); public final static native long new_IndexRefineFlat__SWIG_1(long jarg1, Index jarg1_, long jarg2); public final static native long new_IndexRefineFlat__SWIG_2(); public final static native void IndexRefineFlat_search(long jarg1, IndexRefineFlat jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void delete_IndexRefineFlat(long jarg1); public final static native void IndexSplitVectors_own_fields_set(long jarg1, IndexSplitVectors jarg1_, boolean jarg2); public final static native boolean IndexSplitVectors_own_fields_get(long jarg1, IndexSplitVectors jarg1_); public final static native void IndexSplitVectors_threaded_set(long jarg1, IndexSplitVectors jarg1_, boolean jarg2); public final static native boolean IndexSplitVectors_threaded_get(long jarg1, IndexSplitVectors jarg1_); public final static native void IndexSplitVectors_sub_indexes_set(long jarg1, IndexSplitVectors jarg1_, long jarg2); public final static native long IndexSplitVectors_sub_indexes_get(long jarg1, IndexSplitVectors jarg1_); public final static native void IndexSplitVectors_sum_d_set(long jarg1, IndexSplitVectors jarg1_, long jarg2); public final static native long IndexSplitVectors_sum_d_get(long jarg1, IndexSplitVectors jarg1_); public final 
static native void IndexSplitVectors_add_sub_index(long jarg1, IndexSplitVectors jarg1_, long jarg2, Index jarg2_); public final static native void IndexSplitVectors_sync_with_sub_indexes(long jarg1, IndexSplitVectors jarg1_); public final static native void IndexSplitVectors_add(long jarg1, IndexSplitVectors jarg1_, long jarg2, long jarg3); public final static native void IndexSplitVectors_search(long jarg1, IndexSplitVectors jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexSplitVectors_train(long jarg1, IndexSplitVectors jarg1_, long jarg2, long jarg3); public final static native void IndexSplitVectors_reset(long jarg1, IndexSplitVectors jarg1_); public final static native void delete_IndexSplitVectors(long jarg1); public final static native void IndexIDMap_index_set(long jarg1, IndexIDMap jarg1_, long jarg2, Index jarg2_); public final static native long IndexIDMap_index_get(long jarg1, IndexIDMap jarg1_); public final static native void IndexIDMap_own_fields_set(long jarg1, IndexIDMap jarg1_, boolean jarg2); public final static native boolean IndexIDMap_own_fields_get(long jarg1, IndexIDMap jarg1_); public final static native void IndexIDMap_id_map_set(long jarg1, IndexIDMap jarg1_, long jarg2); public final static native long IndexIDMap_id_map_get(long jarg1, IndexIDMap jarg1_); public final static native long new_IndexIDMap__SWIG_0(long jarg1, Index jarg1_); public final static native void IndexIDMap_add_with_ids(long jarg1, IndexIDMap jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexIDMap_add(long jarg1, IndexIDMap jarg1_, long jarg2, long jarg3); public final static native void IndexIDMap_search(long jarg1, IndexIDMap jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexIDMap_train(long jarg1, IndexIDMap jarg1_, long jarg2, long jarg3); public final static 
native void IndexIDMap_reset(long jarg1, IndexIDMap jarg1_); public final static native long IndexIDMap_remove_ids(long jarg1, IndexIDMap jarg1_, long jarg2, IDSelector jarg2_); public final static native void IndexIDMap_range_search(long jarg1, IndexIDMap jarg1_, long jarg2, long jarg3, float jarg4, long jarg5, RangeSearchResult jarg5_); public final static native void delete_IndexIDMap(long jarg1); public final static native long new_IndexIDMap__SWIG_1(); public final static native long new_IndexShards__SWIG_0(boolean jarg1, boolean jarg2); public final static native long new_IndexShards__SWIG_1(boolean jarg1); public final static native long new_IndexShards__SWIG_2(); public final static native long new_IndexShards__SWIG_3(int jarg1, boolean jarg2, boolean jarg3); public final static native long new_IndexShards__SWIG_4(int jarg1, boolean jarg2); public final static native long new_IndexShards__SWIG_5(int jarg1); public final static native void IndexShards_add_shard(long jarg1, IndexShards jarg1_, long jarg2, Index jarg2_); public final static native void IndexShards_remove_shard(long jarg1, IndexShards jarg1_, long jarg2, Index jarg2_); public final static native void IndexShards_add(long jarg1, IndexShards jarg1_, long jarg2, long jarg3); public final static native void IndexShards_add_with_ids(long jarg1, IndexShards jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void IndexShards_search(long jarg1, IndexShards jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6, LongVector jarg6_); public final static native void IndexShards_train(long jarg1, IndexShards jarg1_, long jarg2, long jarg3); public final static native void IndexShards_successive_ids_set(long jarg1, IndexShards jarg1_, boolean jarg2); public final static native boolean IndexShards_successive_ids_get(long jarg1, IndexShards jarg1_); public final static native void IndexShards_syncWithSubIndexes(long jarg1, IndexShards jarg1_); public final 
static native void delete_IndexShards(long jarg1); public final static native long downcast_index(long jarg1, Index jarg1_); public final static native long downcast_VectorTransform(long jarg1, VectorTransform jarg1_); public final static native long downcast_IndexBinary(long jarg1, IndexBinary jarg1_); public final static native long upcast_IndexShards(long jarg1, IndexShards jarg1_); public final static native void write_index__SWIG_0(long jarg1, Index jarg1_, String jarg2); public final static native void write_index__SWIG_1(long jarg1, Index jarg1_, long jarg2); public final static native void write_index__SWIG_2(long jarg1, Index jarg1_, long jarg2); public final static native void write_index_binary__SWIG_0(long jarg1, IndexBinary jarg1_, String jarg2); public final static native void write_index_binary__SWIG_1(long jarg1, IndexBinary jarg1_, long jarg2); public final static native void write_index_binary__SWIG_2(long jarg1, IndexBinary jarg1_, long jarg2); public final static native int IO_FLAG_READ_ONLY_get(); public final static native int IO_FLAG_ONDISK_SAME_DIR_get(); public final static native int IO_FLAG_SKIP_IVF_DATA_get(); public final static native int IO_FLAG_MMAP_get(); public final static native long read_index__SWIG_0(String jarg1, int jarg2); public final static native long read_index__SWIG_1(String jarg1); public final static native long read_index__SWIG_2(long jarg1, int jarg2); public final static native long read_index__SWIG_3(long jarg1); public final static native long read_index__SWIG_4(long jarg1, int jarg2); public final static native long read_index__SWIG_5(long jarg1); public final static native long read_index_binary__SWIG_0(String jarg1, int jarg2); public final static native long read_index_binary__SWIG_1(String jarg1); public final static native long read_index_binary__SWIG_2(long jarg1, int jarg2); public final static native long read_index_binary__SWIG_3(long jarg1); public final static native long 
read_index_binary__SWIG_4(long jarg1, int jarg2); public final static native long read_index_binary__SWIG_5(long jarg1); public final static native void write_VectorTransform(long jarg1, VectorTransform jarg1_, String jarg2); public final static native long read_VectorTransform(String jarg1); public final static native long read_ProductQuantizer__SWIG_0(String jarg1); public final static native long read_ProductQuantizer__SWIG_1(long jarg1); public final static native void write_ProductQuantizer__SWIG_0(long jarg1, ProductQuantizer jarg1_, String jarg2); public final static native void write_ProductQuantizer__SWIG_1(long jarg1, ProductQuantizer jarg1_, long jarg2); public final static native void write_InvertedLists(long jarg1, InvertedLists jarg1_, long jarg2); public final static native long read_InvertedLists__SWIG_0(long jarg1, int jarg2); public final static native long read_InvertedLists__SWIG_1(long jarg1); public final static native void AutoTuneCriterion_nq_set(long jarg1, AutoTuneCriterion jarg1_, long jarg2); public final static native long AutoTuneCriterion_nq_get(long jarg1, AutoTuneCriterion jarg1_); public final static native void AutoTuneCriterion_nnn_set(long jarg1, AutoTuneCriterion jarg1_, long jarg2); public final static native long AutoTuneCriterion_nnn_get(long jarg1, AutoTuneCriterion jarg1_); public final static native void AutoTuneCriterion_gt_nnn_set(long jarg1, AutoTuneCriterion jarg1_, long jarg2); public final static native long AutoTuneCriterion_gt_nnn_get(long jarg1, AutoTuneCriterion jarg1_); public final static native void AutoTuneCriterion_gt_D_set(long jarg1, AutoTuneCriterion jarg1_, long jarg2, FloatVector jarg2_); public final static native long AutoTuneCriterion_gt_D_get(long jarg1, AutoTuneCriterion jarg1_); public final static native void AutoTuneCriterion_gt_I_set(long jarg1, AutoTuneCriterion jarg1_, long jarg2); public final static native long AutoTuneCriterion_gt_I_get(long jarg1, AutoTuneCriterion jarg1_); public final 
static native void AutoTuneCriterion_set_groundtruth(long jarg1, AutoTuneCriterion jarg1_, int jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native double AutoTuneCriterion_evaluate(long jarg1, AutoTuneCriterion jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native void delete_AutoTuneCriterion(long jarg1); public final static native void OneRecallAtRCriterion_R_set(long jarg1, OneRecallAtRCriterion jarg1_, long jarg2); public final static native long OneRecallAtRCriterion_R_get(long jarg1, OneRecallAtRCriterion jarg1_); public final static native long new_OneRecallAtRCriterion(long jarg1, long jarg2); public final static native double OneRecallAtRCriterion_evaluate(long jarg1, OneRecallAtRCriterion jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native void delete_OneRecallAtRCriterion(long jarg1); public final static native void IntersectionCriterion_R_set(long jarg1, IntersectionCriterion jarg1_, long jarg2); public final static native long IntersectionCriterion_R_get(long jarg1, IntersectionCriterion jarg1_); public final static native long new_IntersectionCriterion(long jarg1, long jarg2); public final static native double IntersectionCriterion_evaluate(long jarg1, IntersectionCriterion jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native void delete_IntersectionCriterion(long jarg1); public final static native void OperatingPoint_perf_set(long jarg1, OperatingPoint jarg1_, double jarg2); public final static native double OperatingPoint_perf_get(long jarg1, OperatingPoint jarg1_); public final static native void OperatingPoint_t_set(long jarg1, OperatingPoint jarg1_, double jarg2); public final static native double OperatingPoint_t_get(long jarg1, OperatingPoint jarg1_); public final static native void OperatingPoint_key_set(long jarg1, OperatingPoint jarg1_, String jarg2); public final static native String OperatingPoint_key_get(long jarg1, OperatingPoint 
jarg1_); public final static native void OperatingPoint_cno_set(long jarg1, OperatingPoint jarg1_, long jarg2); public final static native long OperatingPoint_cno_get(long jarg1, OperatingPoint jarg1_); public final static native long new_OperatingPoint(); public final static native void delete_OperatingPoint(long jarg1); public final static native void OperatingPoints_all_pts_set(long jarg1, OperatingPoints jarg1_, long jarg2, OperatingPointVector jarg2_); public final static native long OperatingPoints_all_pts_get(long jarg1, OperatingPoints jarg1_); public final static native void OperatingPoints_optimal_pts_set(long jarg1, OperatingPoints jarg1_, long jarg2, OperatingPointVector jarg2_); public final static native long OperatingPoints_optimal_pts_get(long jarg1, OperatingPoints jarg1_); public final static native long new_OperatingPoints(); public final static native int OperatingPoints_merge_with__SWIG_0(long jarg1, OperatingPoints jarg1_, long jarg2, OperatingPoints jarg2_, String jarg3); public final static native int OperatingPoints_merge_with__SWIG_1(long jarg1, OperatingPoints jarg1_, long jarg2, OperatingPoints jarg2_); public final static native void OperatingPoints_clear(long jarg1, OperatingPoints jarg1_); public final static native boolean OperatingPoints_add__SWIG_0(long jarg1, OperatingPoints jarg1_, double jarg2, double jarg3, String jarg4, long jarg5); public final static native boolean OperatingPoints_add__SWIG_1(long jarg1, OperatingPoints jarg1_, double jarg2, double jarg3, String jarg4); public final static native double OperatingPoints_t_for_perf(long jarg1, OperatingPoints jarg1_, double jarg2); public final static native void OperatingPoints_display__SWIG_0(long jarg1, OperatingPoints jarg1_, boolean jarg2); public final static native void OperatingPoints_display__SWIG_1(long jarg1, OperatingPoints jarg1_); public final static native void OperatingPoints_all_to_gnuplot(long jarg1, OperatingPoints jarg1_, String jarg2); public final static 
native void OperatingPoints_optimal_to_gnuplot(long jarg1, OperatingPoints jarg1_, String jarg2); public final static native void delete_OperatingPoints(long jarg1); public final static native void ParameterRange_name_set(long jarg1, ParameterRange jarg1_, String jarg2); public final static native String ParameterRange_name_get(long jarg1, ParameterRange jarg1_); public final static native void ParameterRange_values_set(long jarg1, ParameterRange jarg1_, long jarg2, DoubleVector jarg2_); public final static native long ParameterRange_values_get(long jarg1, ParameterRange jarg1_); public final static native long new_ParameterRange(); public final static native void delete_ParameterRange(long jarg1); public final static native void ParameterSpace_parameter_ranges_set(long jarg1, ParameterSpace jarg1_, long jarg2); public final static native long ParameterSpace_parameter_ranges_get(long jarg1, ParameterSpace jarg1_); public final static native void ParameterSpace_verbose_set(long jarg1, ParameterSpace jarg1_, int jarg2); public final static native int ParameterSpace_verbose_get(long jarg1, ParameterSpace jarg1_); public final static native void ParameterSpace_n_experiments_set(long jarg1, ParameterSpace jarg1_, int jarg2); public final static native int ParameterSpace_n_experiments_get(long jarg1, ParameterSpace jarg1_); public final static native void ParameterSpace_batchsize_set(long jarg1, ParameterSpace jarg1_, long jarg2); public final static native long ParameterSpace_batchsize_get(long jarg1, ParameterSpace jarg1_); public final static native void ParameterSpace_thread_over_batches_set(long jarg1, ParameterSpace jarg1_, boolean jarg2); public final static native boolean ParameterSpace_thread_over_batches_get(long jarg1, ParameterSpace jarg1_); public final static native void ParameterSpace_min_test_duration_set(long jarg1, ParameterSpace jarg1_, double jarg2); public final static native double ParameterSpace_min_test_duration_get(long jarg1, ParameterSpace 
jarg1_); public final static native long new_ParameterSpace(); public final static native long ParameterSpace_n_combinations(long jarg1, ParameterSpace jarg1_); public final static native boolean ParameterSpace_combination_ge(long jarg1, ParameterSpace jarg1_, long jarg2, long jarg3); public final static native String ParameterSpace_combination_name(long jarg1, ParameterSpace jarg1_, long jarg2); public final static native void ParameterSpace_display(long jarg1, ParameterSpace jarg1_); public final static native long ParameterSpace_add_range(long jarg1, ParameterSpace jarg1_, String jarg2); public final static native void ParameterSpace_initialize(long jarg1, ParameterSpace jarg1_, long jarg2, Index jarg2_); public final static native void ParameterSpace_set_index_parameters__SWIG_0(long jarg1, ParameterSpace jarg1_, long jarg2, Index jarg2_, long jarg3); public final static native void ParameterSpace_set_index_parameters__SWIG_1(long jarg1, ParameterSpace jarg1_, long jarg2, Index jarg2_, String jarg3); public final static native void ParameterSpace_set_index_parameter(long jarg1, ParameterSpace jarg1_, long jarg2, Index jarg2_, String jarg3, double jarg4); public final static native void ParameterSpace_update_bounds(long jarg1, ParameterSpace jarg1_, long jarg2, long jarg3, OperatingPoint jarg3_, long jarg4, long jarg5); public final static native void ParameterSpace_explore(long jarg1, ParameterSpace jarg1_, long jarg2, Index jarg2_, long jarg3, long jarg4, long jarg5, AutoTuneCriterion jarg5_, long jarg6, OperatingPoints jarg6_); public final static native void delete_ParameterSpace(long jarg1); public final static native long index_factory__SWIG_0(int jarg1, String jarg2, int jarg3); public final static native long index_factory__SWIG_1(int jarg1, String jarg2); public final static native void index_factory_verbose_set(int jarg1); public final static native int index_factory_verbose_get(); public final static native long index_binary_factory(int jarg1, String 
jarg2); public final static native void simd_histogram_8(long jarg1, int jarg2, long jarg3, int jarg4, long jarg5); public final static native void simd_histogram_16(long jarg1, int jarg2, long jarg3, int jarg4, long jarg5); public final static native void PartitionStats_bissect_cycles_set(long jarg1, PartitionStats jarg1_, long jarg2); public final static native long PartitionStats_bissect_cycles_get(long jarg1, PartitionStats jarg1_); public final static native void PartitionStats_compress_cycles_set(long jarg1, PartitionStats jarg1_, long jarg2); public final static native long PartitionStats_compress_cycles_get(long jarg1, PartitionStats jarg1_); public final static native long new_PartitionStats(); public final static native void PartitionStats_reset(long jarg1, PartitionStats jarg1_); public final static native void delete_PartitionStats(long jarg1); public final static native void partition_stats_set(long jarg1, PartitionStats jarg1_); public final static native long partition_stats_get(); public final static native void float_minheap_array_t_nh_set(long jarg1, float_minheap_array_t jarg1_, long jarg2); public final static native long float_minheap_array_t_nh_get(long jarg1, float_minheap_array_t jarg1_); public final static native void float_minheap_array_t_k_set(long jarg1, float_minheap_array_t jarg1_, long jarg2); public final static native long float_minheap_array_t_k_get(long jarg1, float_minheap_array_t jarg1_); public final static native void float_minheap_array_t_ids_set(long jarg1, float_minheap_array_t jarg1_, long jarg2, LongVector jarg2_); public final static native long float_minheap_array_t_ids_get(long jarg1, float_minheap_array_t jarg1_); public final static native void float_minheap_array_t_val_set(long jarg1, float_minheap_array_t jarg1_, long jarg2); public final static native long float_minheap_array_t_val_get(long jarg1, float_minheap_array_t jarg1_); public final static native long float_minheap_array_t_get_val(long jarg1, 
float_minheap_array_t jarg1_, long jarg2); public final static native long float_minheap_array_t_get_ids(long jarg1, float_minheap_array_t jarg1_, long jarg2); public final static native void float_minheap_array_t_heapify(long jarg1, float_minheap_array_t jarg1_); public final static native void float_minheap_array_t_addn__SWIG_0(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void float_minheap_array_t_addn__SWIG_1(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void float_minheap_array_t_addn__SWIG_2(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4); public final static native void float_minheap_array_t_addn__SWIG_3(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3); public final static native void float_minheap_array_t_addn_with_ids__SWIG_0(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6, long jarg7); public final static native void float_minheap_array_t_addn_with_ids__SWIG_1(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6); public final static native void float_minheap_array_t_addn_with_ids__SWIG_2(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void float_minheap_array_t_addn_with_ids__SWIG_3(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void float_minheap_array_t_addn_with_ids__SWIG_4(long jarg1, float_minheap_array_t jarg1_, long jarg2, long jarg3); public final static native void float_minheap_array_t_reorder(long jarg1, float_minheap_array_t jarg1_); public final static native void float_minheap_array_t_per_line_extrema(long jarg1, float_minheap_array_t 
jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long new_float_minheap_array_t(); public final static native void delete_float_minheap_array_t(long jarg1); public final static native void int_minheap_array_t_nh_set(long jarg1, int_minheap_array_t jarg1_, long jarg2); public final static native long int_minheap_array_t_nh_get(long jarg1, int_minheap_array_t jarg1_); public final static native void int_minheap_array_t_k_set(long jarg1, int_minheap_array_t jarg1_, long jarg2); public final static native long int_minheap_array_t_k_get(long jarg1, int_minheap_array_t jarg1_); public final static native void int_minheap_array_t_ids_set(long jarg1, int_minheap_array_t jarg1_, long jarg2, LongVector jarg2_); public final static native long int_minheap_array_t_ids_get(long jarg1, int_minheap_array_t jarg1_); public final static native void int_minheap_array_t_val_set(long jarg1, int_minheap_array_t jarg1_, long jarg2); public final static native long int_minheap_array_t_val_get(long jarg1, int_minheap_array_t jarg1_); public final static native long int_minheap_array_t_get_val(long jarg1, int_minheap_array_t jarg1_, long jarg2); public final static native long int_minheap_array_t_get_ids(long jarg1, int_minheap_array_t jarg1_, long jarg2); public final static native void int_minheap_array_t_heapify(long jarg1, int_minheap_array_t jarg1_); public final static native void int_minheap_array_t_addn__SWIG_0(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void int_minheap_array_t_addn__SWIG_1(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void int_minheap_array_t_addn__SWIG_2(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4); public final static native void int_minheap_array_t_addn__SWIG_3(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3); public final static 
native void int_minheap_array_t_addn_with_ids__SWIG_0(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6, long jarg7); public final static native void int_minheap_array_t_addn_with_ids__SWIG_1(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6); public final static native void int_minheap_array_t_addn_with_ids__SWIG_2(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void int_minheap_array_t_addn_with_ids__SWIG_3(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void int_minheap_array_t_addn_with_ids__SWIG_4(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3); public final static native void int_minheap_array_t_reorder(long jarg1, int_minheap_array_t jarg1_); public final static native void int_minheap_array_t_per_line_extrema(long jarg1, int_minheap_array_t jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long new_int_minheap_array_t(); public final static native void delete_int_minheap_array_t(long jarg1); public final static native void float_maxheap_array_t_nh_set(long jarg1, float_maxheap_array_t jarg1_, long jarg2); public final static native long float_maxheap_array_t_nh_get(long jarg1, float_maxheap_array_t jarg1_); public final static native void float_maxheap_array_t_k_set(long jarg1, float_maxheap_array_t jarg1_, long jarg2); public final static native long float_maxheap_array_t_k_get(long jarg1, float_maxheap_array_t jarg1_); public final static native void float_maxheap_array_t_ids_set(long jarg1, float_maxheap_array_t jarg1_, long jarg2, LongVector jarg2_); public final static native long float_maxheap_array_t_ids_get(long jarg1, float_maxheap_array_t jarg1_); public final static native void 
float_maxheap_array_t_val_set(long jarg1, float_maxheap_array_t jarg1_, long jarg2); public final static native long float_maxheap_array_t_val_get(long jarg1, float_maxheap_array_t jarg1_); public final static native long float_maxheap_array_t_get_val(long jarg1, float_maxheap_array_t jarg1_, long jarg2); public final static native long float_maxheap_array_t_get_ids(long jarg1, float_maxheap_array_t jarg1_, long jarg2); public final static native void float_maxheap_array_t_heapify(long jarg1, float_maxheap_array_t jarg1_); public final static native void float_maxheap_array_t_addn__SWIG_0(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void float_maxheap_array_t_addn__SWIG_1(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void float_maxheap_array_t_addn__SWIG_2(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4); public final static native void float_maxheap_array_t_addn__SWIG_3(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3); public final static native void float_maxheap_array_t_addn_with_ids__SWIG_0(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6, long jarg7); public final static native void float_maxheap_array_t_addn_with_ids__SWIG_1(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6); public final static native void float_maxheap_array_t_addn_with_ids__SWIG_2(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void float_maxheap_array_t_addn_with_ids__SWIG_3(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void float_maxheap_array_t_addn_with_ids__SWIG_4(long jarg1, 
float_maxheap_array_t jarg1_, long jarg2, long jarg3); public final static native void float_maxheap_array_t_reorder(long jarg1, float_maxheap_array_t jarg1_); public final static native void float_maxheap_array_t_per_line_extrema(long jarg1, float_maxheap_array_t jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long new_float_maxheap_array_t(); public final static native void delete_float_maxheap_array_t(long jarg1); public final static native void int_maxheap_array_t_nh_set(long jarg1, int_maxheap_array_t jarg1_, long jarg2); public final static native long int_maxheap_array_t_nh_get(long jarg1, int_maxheap_array_t jarg1_); public final static native void int_maxheap_array_t_k_set(long jarg1, int_maxheap_array_t jarg1_, long jarg2); public final static native long int_maxheap_array_t_k_get(long jarg1, int_maxheap_array_t jarg1_); public final static native void int_maxheap_array_t_ids_set(long jarg1, int_maxheap_array_t jarg1_, long jarg2, LongVector jarg2_); public final static native long int_maxheap_array_t_ids_get(long jarg1, int_maxheap_array_t jarg1_); public final static native void int_maxheap_array_t_val_set(long jarg1, int_maxheap_array_t jarg1_, long jarg2); public final static native long int_maxheap_array_t_val_get(long jarg1, int_maxheap_array_t jarg1_); public final static native long int_maxheap_array_t_get_val(long jarg1, int_maxheap_array_t jarg1_, long jarg2); public final static native long int_maxheap_array_t_get_ids(long jarg1, int_maxheap_array_t jarg1_, long jarg2); public final static native void int_maxheap_array_t_heapify(long jarg1, int_maxheap_array_t jarg1_); public final static native void int_maxheap_array_t_addn__SWIG_0(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void int_maxheap_array_t_addn__SWIG_1(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, long jarg5); public final static native void 
int_maxheap_array_t_addn__SWIG_2(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4); public final static native void int_maxheap_array_t_addn__SWIG_3(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3); public final static native void int_maxheap_array_t_addn_with_ids__SWIG_0(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6, long jarg7); public final static native void int_maxheap_array_t_addn_with_ids__SWIG_1(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5, long jarg6); public final static native void int_maxheap_array_t_addn_with_ids__SWIG_2(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void int_maxheap_array_t_addn_with_ids__SWIG_3(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_); public final static native void int_maxheap_array_t_addn_with_ids__SWIG_4(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3); public final static native void int_maxheap_array_t_reorder(long jarg1, int_maxheap_array_t jarg1_); public final static native void int_maxheap_array_t_per_line_extrema(long jarg1, int_maxheap_array_t jarg1_, long jarg2, long jarg3, LongVector jarg3_); public final static native long new_int_maxheap_array_t(); public final static native void delete_int_maxheap_array_t(long jarg1); public final static native float CMin_float_partition_fuzzy(long jarg1, long jarg2, LongVector jarg2_, long jarg3, long jarg4, long jarg5, long jarg6); public final static native float CMax_float_partition_fuzzy(long jarg1, long jarg2, LongVector jarg2_, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void AlignedTableUint8_tab_set(long jarg1, AlignedTableUint8 jarg1_, long jarg2); public final static native long AlignedTableUint8_tab_get(long jarg1, 
AlignedTableUint8 jarg1_); public final static native void AlignedTableUint8_numel_set(long jarg1, AlignedTableUint8 jarg1_, long jarg2); public final static native long AlignedTableUint8_numel_get(long jarg1, AlignedTableUint8 jarg1_); public final static native long AlignedTableUint8_round_capacity(long jarg1); public final static native long new_AlignedTableUint8__SWIG_0(); public final static native long new_AlignedTableUint8__SWIG_1(long jarg1); public final static native long AlignedTableUint8_itemsize(long jarg1, AlignedTableUint8 jarg1_); public final static native void AlignedTableUint8_resize(long jarg1, AlignedTableUint8 jarg1_, long jarg2); public final static native void AlignedTableUint8_clear(long jarg1, AlignedTableUint8 jarg1_); public final static native long AlignedTableUint8_size(long jarg1, AlignedTableUint8 jarg1_); public final static native long AlignedTableUint8_nbytes(long jarg1, AlignedTableUint8 jarg1_); public final static native long AlignedTableUint8_get__SWIG_0(long jarg1, AlignedTableUint8 jarg1_); public final static native long AlignedTableUint8_data__SWIG_0(long jarg1, AlignedTableUint8 jarg1_); public final static native void delete_AlignedTableUint8(long jarg1); public final static native void AlignedTableUint16_tab_set(long jarg1, AlignedTableUint16 jarg1_, long jarg2); public final static native long AlignedTableUint16_tab_get(long jarg1, AlignedTableUint16 jarg1_); public final static native void AlignedTableUint16_numel_set(long jarg1, AlignedTableUint16 jarg1_, long jarg2); public final static native long AlignedTableUint16_numel_get(long jarg1, AlignedTableUint16 jarg1_); public final static native long AlignedTableUint16_round_capacity(long jarg1); public final static native long new_AlignedTableUint16__SWIG_0(); public final static native long new_AlignedTableUint16__SWIG_1(long jarg1); public final static native long AlignedTableUint16_itemsize(long jarg1, AlignedTableUint16 jarg1_); public final static native void 
AlignedTableUint16_resize(long jarg1, AlignedTableUint16 jarg1_, long jarg2); public final static native void AlignedTableUint16_clear(long jarg1, AlignedTableUint16 jarg1_); public final static native long AlignedTableUint16_size(long jarg1, AlignedTableUint16 jarg1_); public final static native long AlignedTableUint16_nbytes(long jarg1, AlignedTableUint16 jarg1_); public final static native long AlignedTableUint16_get__SWIG_0(long jarg1, AlignedTableUint16 jarg1_); public final static native long AlignedTableUint16_data__SWIG_0(long jarg1, AlignedTableUint16 jarg1_); public final static native void delete_AlignedTableUint16(long jarg1); public final static native void AlignedTableFloat32_tab_set(long jarg1, AlignedTableFloat32 jarg1_, long jarg2); public final static native long AlignedTableFloat32_tab_get(long jarg1, AlignedTableFloat32 jarg1_); public final static native void AlignedTableFloat32_numel_set(long jarg1, AlignedTableFloat32 jarg1_, long jarg2); public final static native long AlignedTableFloat32_numel_get(long jarg1, AlignedTableFloat32 jarg1_); public final static native long AlignedTableFloat32_round_capacity(long jarg1); public final static native long new_AlignedTableFloat32__SWIG_0(); public final static native long new_AlignedTableFloat32__SWIG_1(long jarg1); public final static native long AlignedTableFloat32_itemsize(long jarg1, AlignedTableFloat32 jarg1_); public final static native void AlignedTableFloat32_resize(long jarg1, AlignedTableFloat32 jarg1_, long jarg2); public final static native void AlignedTableFloat32_clear(long jarg1, AlignedTableFloat32 jarg1_); public final static native long AlignedTableFloat32_size(long jarg1, AlignedTableFloat32 jarg1_); public final static native long AlignedTableFloat32_nbytes(long jarg1, AlignedTableFloat32 jarg1_); public final static native long AlignedTableFloat32_get__SWIG_0(long jarg1, AlignedTableFloat32 jarg1_); public final static native long AlignedTableFloat32_data__SWIG_0(long jarg1, 
AlignedTableFloat32 jarg1_); public final static native void delete_AlignedTableFloat32(long jarg1); public final static native long CMax_uint16_partition_fuzzy__SWIG_0(long jarg1, long jarg2, LongVector jarg2_, long jarg3, long jarg4, long jarg5, long jarg6); public final static native long CMin_uint16_partition_fuzzy__SWIG_0(long jarg1, long jarg2, LongVector jarg2_, long jarg3, long jarg4, long jarg5, long jarg6); public final static native long CMax_uint16_partition_fuzzy__SWIG_1(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native long CMin_uint16_partition_fuzzy__SWIG_1(long jarg1, long jarg2, long jarg3, long jarg4, long jarg5, long jarg6); public final static native void omp_set_num_threads(int jarg1); public final static native int omp_get_max_threads(); public final static native long memcpy(long jarg1, long jarg2, long jarg3); public final static native long cast_integer_to_float_ptr(int jarg1); public final static native long cast_integer_to_long_ptr(int jarg1); public final static native long cast_integer_to_int_ptr(int jarg1); public final static native void RangeSearchResult_nq_set(long jarg1, RangeSearchResult jarg1_, long jarg2); public final static native long RangeSearchResult_nq_get(long jarg1, RangeSearchResult jarg1_); public final static native void RangeSearchResult_lims_set(long jarg1, RangeSearchResult jarg1_, long jarg2); public final static native long RangeSearchResult_lims_get(long jarg1, RangeSearchResult jarg1_); public final static native void RangeSearchResult_labels_set(long jarg1, RangeSearchResult jarg1_, long jarg2, LongVector jarg2_); public final static native long RangeSearchResult_labels_get(long jarg1, RangeSearchResult jarg1_); public final static native void RangeSearchResult_distances_set(long jarg1, RangeSearchResult jarg1_, long jarg2); public final static native long RangeSearchResult_distances_get(long jarg1, RangeSearchResult jarg1_); public final static native void 
RangeSearchResult_buffer_size_set(long jarg1, RangeSearchResult jarg1_, long jarg2); public final static native long RangeSearchResult_buffer_size_get(long jarg1, RangeSearchResult jarg1_); public final static native void RangeSearchResult_do_allocation(long jarg1, RangeSearchResult jarg1_); public final static native void delete_RangeSearchResult(long jarg1); public final static native boolean IDSelector_is_member(long jarg1, IDSelector jarg1_, long jarg2); public final static native void delete_IDSelector(long jarg1); public final static native void IDSelectorRange_imin_set(long jarg1, IDSelectorRange jarg1_, long jarg2); public final static native long IDSelectorRange_imin_get(long jarg1, IDSelectorRange jarg1_); public final static native void IDSelectorRange_imax_set(long jarg1, IDSelectorRange jarg1_, long jarg2); public final static native long IDSelectorRange_imax_get(long jarg1, IDSelectorRange jarg1_); public final static native long new_IDSelectorRange(long jarg1, long jarg2); public final static native boolean IDSelectorRange_is_member(long jarg1, IDSelectorRange jarg1_, long jarg2); public final static native void delete_IDSelectorRange(long jarg1); public final static native void IDSelectorArray_n_set(long jarg1, IDSelectorArray jarg1_, long jarg2); public final static native long IDSelectorArray_n_get(long jarg1, IDSelectorArray jarg1_); public final static native void IDSelectorArray_ids_set(long jarg1, IDSelectorArray jarg1_, long jarg2, LongVector jarg2_); public final static native long IDSelectorArray_ids_get(long jarg1, IDSelectorArray jarg1_); public final static native long new_IDSelectorArray(long jarg1, long jarg2, LongVector jarg2_); public final static native boolean IDSelectorArray_is_member(long jarg1, IDSelectorArray jarg1_, long jarg2); public final static native void delete_IDSelectorArray(long jarg1); public final static native void IDSelectorBatch_nbits_set(long jarg1, IDSelectorBatch jarg1_, int jarg2); public final static native 
int IDSelectorBatch_nbits_get(long jarg1, IDSelectorBatch jarg1_); public final static native void IDSelectorBatch_mask_set(long jarg1, IDSelectorBatch jarg1_, long jarg2); public final static native long IDSelectorBatch_mask_get(long jarg1, IDSelectorBatch jarg1_); public final static native long new_IDSelectorBatch(long jarg1, long jarg2, LongVector jarg2_); public final static native boolean IDSelectorBatch_is_member(long jarg1, IDSelectorBatch jarg1_, long jarg2); public final static native void delete_IDSelectorBatch(long jarg1); public final static native void BufferList_buffer_size_set(long jarg1, BufferList jarg1_, long jarg2); public final static native long BufferList_buffer_size_get(long jarg1, BufferList jarg1_); public final static native void BufferList_buffers_set(long jarg1, BufferList jarg1_, long jarg2); public final static native long BufferList_buffers_get(long jarg1, BufferList jarg1_); public final static native void BufferList_wp_set(long jarg1, BufferList jarg1_, long jarg2); public final static native long BufferList_wp_get(long jarg1, BufferList jarg1_); public final static native long new_BufferList(long jarg1); public final static native void delete_BufferList(long jarg1); public final static native void BufferList_append_buffer(long jarg1, BufferList jarg1_); public final static native void BufferList_add(long jarg1, BufferList jarg1_, long jarg2, float jarg3); public final static native void BufferList_copy_range(long jarg1, BufferList jarg1_, long jarg2, long jarg3, long jarg4, LongVector jarg4_, long jarg5); public final static native void RangeQueryResult_qno_set(long jarg1, RangeQueryResult jarg1_, long jarg2); public final static native long RangeQueryResult_qno_get(long jarg1, RangeQueryResult jarg1_); public final static native void RangeQueryResult_nres_set(long jarg1, RangeQueryResult jarg1_, long jarg2); public final static native long RangeQueryResult_nres_get(long jarg1, RangeQueryResult jarg1_); public final static native 
void RangeQueryResult_pres_set(long jarg1, RangeQueryResult jarg1_, long jarg2, RangeSearchPartialResult jarg2_); public final static native long RangeQueryResult_pres_get(long jarg1, RangeQueryResult jarg1_); public final static native void RangeQueryResult_add(long jarg1, RangeQueryResult jarg1_, float jarg2, long jarg3); public final static native long new_RangeQueryResult(); public final static native void delete_RangeQueryResult(long jarg1); public final static native void RangeSearchPartialResult_res_set(long jarg1, RangeSearchPartialResult jarg1_, long jarg2, RangeSearchResult jarg2_); public final static native long RangeSearchPartialResult_res_get(long jarg1, RangeSearchPartialResult jarg1_); public final static native void RangeSearchPartialResult_queries_set(long jarg1, RangeSearchPartialResult jarg1_, long jarg2); public final static native long RangeSearchPartialResult_queries_get(long jarg1, RangeSearchPartialResult jarg1_); public final static native long RangeSearchPartialResult_new_result(long jarg1, RangeSearchPartialResult jarg1_, long jarg2); public final static native void RangeSearchPartialResult_set_lims(long jarg1, RangeSearchPartialResult jarg1_); public final static native void RangeSearchPartialResult_copy_result__SWIG_0(long jarg1, RangeSearchPartialResult jarg1_, boolean jarg2); public final static native void RangeSearchPartialResult_copy_result__SWIG_1(long jarg1, RangeSearchPartialResult jarg1_); public final static native void RangeSearchPartialResult_merge__SWIG_0(long jarg1, boolean jarg2); public final static native void RangeSearchPartialResult_merge__SWIG_1(long jarg1); public final static native void delete_RangeSearchPartialResult(long jarg1); public final static native void DistanceComputer_set_query(long jarg1, DistanceComputer jarg1_, long jarg2); public final static native float DistanceComputer_symmetric_dis(long jarg1, DistanceComputer jarg1_, long jarg2, long jarg3); public final static native void 
delete_DistanceComputer(long jarg1); public final static native boolean InterruptCallback_want_interrupt(long jarg1, InterruptCallback jarg1_); public final static native void delete_InterruptCallback(long jarg1); public final static native void InterruptCallback_clear_instance(); public final static native void InterruptCallback_check(); public final static native boolean InterruptCallback_is_interrupted(); public final static native long InterruptCallback_get_period_hint(long jarg1); public final static native void VisitedTable_visited_set(long jarg1, VisitedTable jarg1_, long jarg2, ByteVector jarg2_); public final static native long VisitedTable_visited_get(long jarg1, VisitedTable jarg1_); public final static native void VisitedTable_visno_set(long jarg1, VisitedTable jarg1_, int jarg2); public final static native int VisitedTable_visno_get(long jarg1, VisitedTable jarg1_); public final static native long new_VisitedTable(int jarg1); public final static native void VisitedTable_set(long jarg1, VisitedTable jarg1_, int jarg2); public final static native boolean VisitedTable_get(long jarg1, VisitedTable jarg1_, int jarg2); public final static native void VisitedTable_advance(long jarg1, VisitedTable jarg1_); public final static native void delete_VisitedTable(long jarg1); public final static native void ignore_SIGTTIN(); public final static native void MapLong2Long_map_set(long jarg1, MapLong2Long jarg1_, long jarg2); public final static native long MapLong2Long_map_get(long jarg1, MapLong2Long jarg1_); public final static native void MapLong2Long_add(long jarg1, MapLong2Long jarg1_, long jarg2, long jarg3, long jarg4); public final static native int MapLong2Long_search(long jarg1, MapLong2Long jarg1_, int jarg2); public final static native void MapLong2Long_search_multiple(long jarg1, MapLong2Long jarg1_, long jarg2, long jarg3, long jarg4); public final static native long new_MapLong2Long(); public final static native void delete_MapLong2Long(long jarg1); 
public final static native long Clustering_SWIGUpcast(long jarg1); public final static native long Clustering1D_SWIGUpcast(long jarg1); public final static native long ProgressiveDimClusteringParameters_SWIGUpcast(long jarg1); public final static native long ProgressiveDimClustering_SWIGUpcast(long jarg1); public final static native long LinearTransform_SWIGUpcast(long jarg1); public final static native long RandomRotationMatrix_SWIGUpcast(long jarg1); public final static native long PCAMatrix_SWIGUpcast(long jarg1); public final static native long ITQMatrix_SWIGUpcast(long jarg1); public final static native long ITQTransform_SWIGUpcast(long jarg1); public final static native long OPQMatrix_SWIGUpcast(long jarg1); public final static native long RemapDimensionsTransform_SWIGUpcast(long jarg1); public final static native long NormalizationTransform_SWIGUpcast(long jarg1); public final static native long CenteringTransform_SWIGUpcast(long jarg1); public final static native long IndexFlatCodes_SWIGUpcast(long jarg1); public final static native long IndexFlat_SWIGUpcast(long jarg1); public final static native long IndexFlatIP_SWIGUpcast(long jarg1); public final static native long IndexFlatL2_SWIGUpcast(long jarg1); public final static native long IndexFlat1D_SWIGUpcast(long jarg1); public final static native long IndexLSH_SWIGUpcast(long jarg1); public final static native long ReproduceDistancesObjective_SWIGUpcast(long jarg1); public final static native long SimulatedAnnealingOptimizer_SWIGUpcast(long jarg1); public final static native long PolysemousTraining_SWIGUpcast(long jarg1); public final static native long IndexPQ_SWIGUpcast(long jarg1); public final static native long MultiIndexQuantizer_SWIGUpcast(long jarg1); public final static native long MultiIndexQuantizer2_SWIGUpcast(long jarg1); public final static native long ArrayInvertedLists_SWIGUpcast(long jarg1); public final static native long ReadOnlyInvertedLists_SWIGUpcast(long jarg1); public final static 
native long HStackInvertedLists_SWIGUpcast(long jarg1); public final static native long SliceInvertedLists_SWIGUpcast(long jarg1); public final static native long VStackInvertedLists_SWIGUpcast(long jarg1); public final static native long MaskedInvertedLists_SWIGUpcast(long jarg1); public final static native long StopWordsInvertedLists_SWIGUpcast(long jarg1); public final static native long IndexIVF_SWIGUpcast(long jarg1); public final static native long IndexScalarQuantizer_SWIGUpcast(long jarg1); public final static native long IndexIVFScalarQuantizer_SWIGUpcast(long jarg1); public final static native long IndexHNSW_SWIGUpcast(long jarg1); public final static native long IndexHNSWFlat_SWIGUpcast(long jarg1); public final static native long IndexHNSWPQ_SWIGUpcast(long jarg1); public final static native long IndexHNSWSQ_SWIGUpcast(long jarg1); public final static native long IndexHNSW2Level_SWIGUpcast(long jarg1); public final static native long IndexIVFFlat_SWIGUpcast(long jarg1); public final static native long IndexIVFFlatDedup_SWIGUpcast(long jarg1); public final static native long OnDiskInvertedLists_SWIGUpcast(long jarg1); public final static native long IVFPQSearchParameters_SWIGUpcast(long jarg1); public final static native long IndexIVFPQ_SWIGUpcast(long jarg1); public final static native long Index2Layer_SWIGUpcast(long jarg1); public final static native long IndexBinaryFlat_SWIGUpcast(long jarg1); public final static native long IndexBinaryIVF_SWIGUpcast(long jarg1); public final static native long IndexBinaryFromFloat_SWIGUpcast(long jarg1); public final static native long IndexBinaryHNSW_SWIGUpcast(long jarg1); public final static native long IndexRefine_SWIGUpcast(long jarg1); public final static native long IndexRefineFlat_SWIGUpcast(long jarg1); public final static native long IndexSplitVectors_SWIGUpcast(long jarg1); public final static native long IndexIDMap_SWIGUpcast(long jarg1); public final static native long 
OneRecallAtRCriterion_SWIGUpcast(long jarg1); public final static native long IntersectionCriterion_SWIGUpcast(long jarg1); public final static native long IDSelectorRange_SWIGUpcast(long jarg1); public final static native long IDSelectorArray_SWIGUpcast(long jarg1); public final static native long IDSelectorBatch_SWIGUpcast(long jarg1); public final static native long RangeSearchPartialResult_SWIGUpcast(long jarg1); }
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/BUILD
java_library(
    sources = ["*.java"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/com/google/guava",
        "3rdparty/jvm/com/google/inject:guice",
        "3rdparty/jvm/com/twitter/bijection:core",
        "3rdparty/jvm/commons-lang",
        "3rdparty/jvm/org/apache/thrift",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/thrift/com/twitter/ann/common:ann-common-java",
        "mediaservices/commons/src/main/scala:futuretracker",
        "scrooge/scrooge-core",
        "src/java/com/twitter/search/common/file",
    ],
)
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/DistanceFunction.java
package com.twitter.ann.hnsw;

/**
 * A (possibly asymmetric) distance between two values of potentially different types,
 * e.g. an indexed item and a query.
 *
 * @param <T> Type of the first argument (e.g. an indexed item).
 * @param <Q> Type of the second argument (e.g. a query).
 */
public interface DistanceFunction<T, Q> {
  /**
   * Distance between two items. Smaller values indicate greater similarity.
   */
  float distance(T t, Q q);
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/DistancedItem.java
package com.twitter.ann.hnsw; /** * An item associated with a float distance * @param <T> The type of the item. */ public class DistancedItem<T> { private final T item; private final float distance; public DistancedItem(T item, float distance) { this.item = item; this.distance = distance; } public T getItem() { return item; } public float getDistance() { return distance; } }
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/DistancedItemQueue.java
package com.twitter.ann.hnsw;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.PriorityQueue;

/**
 * Container for items with their distance.
 *
 * @param <U> Type of origin/reference element.
 * @param <T> Type of element that the queue will hold
 */
public class DistancedItemQueue<U, T> implements Iterable<DistancedItem<T>> {
  private final U origin;
  private final DistanceFunction<U, T> distFn;
  private final PriorityQueue<DistancedItem<T>> queue;
  private final boolean minQueue;

  /**
   * Creates a container for items with their distances.
   *
   * @param origin Origin (reference) point
   * @param initial Initial list of elements to add in the structure
   * @param minQueue True for min queue, False for max queue
   * @param distFn Distance function
   */
  public DistancedItemQueue(
      U origin,
      List<T> initial,
      boolean minQueue,
      DistanceFunction<U, T> distFn
  ) {
    this.origin = origin;
    this.distFn = distFn;
    this.minQueue = minQueue;
    final Comparator<DistancedItem<T>> cmp;
    if (minQueue) {
      cmp = (o1, o2) -> Float.compare(o1.getDistance(), o2.getDistance());
    } else {
      cmp = (o1, o2) -> Float.compare(o2.getDistance(), o1.getDistance());
    }
    this.queue = new PriorityQueue<>(cmp);
    enqueueAll(initial);
    // Fix: the original constructor also allocated a second DistancedItemQueue here
    // and immediately discarded it; that dead statement has been removed.
  }

  // Internal constructor that adopts an already-built priority queue (used by reverse()).
  private DistancedItemQueue(
      U origin,
      DistanceFunction<U, T> distFn,
      PriorityQueue<DistancedItem<T>> queue,
      boolean minQueue
  ) {
    this.origin = origin;
    this.distFn = distFn;
    this.queue = queue;
    this.minQueue = minQueue;
  }

  /**
   * Enqueues all the items into the queue, computing each one's distance from the origin.
   */
  public void enqueueAll(List<T> list) {
    for (T t : list) {
      enqueue(t);
    }
  }

  /**
   * Return if queue is non empty or not
   *
   * @return true if queue is not empty else false
   */
  public boolean nonEmpty() {
    return !queue.isEmpty();
  }

  /**
   * Return root of the queue without removing it.
   *
   * @return root of the queue i.e min/max element depending upon min-max queue,
   *     or null if the queue is empty
   */
  public DistancedItem<T> peek() {
    return queue.peek();
  }

  /**
   * Dequeue root of the queue.
   *
   * @return remove and return root of the queue i.e min/max element depending upon
   *     min-max queue, or null if the queue is empty
   */
  public DistancedItem<T> dequeue() {
    return queue.poll();
  }

  /**
   * Dequeue all the elements from queue with ordering maintained
   *
   * @return remove all the elements in the order of the queue i.e min/max queue.
   */
  public List<DistancedItem<T>> dequeueAll() {
    final List<DistancedItem<T>> list = new ArrayList<>(queue.size());
    while (!queue.isEmpty()) {
      list.add(queue.poll());
    }
    return list;
  }

  /**
   * Convert queue to list
   *
   * @return list of elements of queue with distance and without any specific ordering
   */
  public List<DistancedItem<T>> toList() {
    return new ArrayList<>(queue);
  }

  /**
   * Convert queue to a list of the raw items (distances dropped).
   *
   * @return list of elements of queue without any specific ordering
   */
  List<T> toListWithItem() {
    final List<T> list = new ArrayList<>(queue.size());
    for (DistancedItem<T> item : this) {
      list.add(item.getItem());
    }
    return list;
  }

  /**
   * Enqueue an item into the queue; its distance from the origin is computed via distFn.
   */
  public void enqueue(T item) {
    queue.add(new DistancedItem<>(item, distFn.distance(origin, item)));
  }

  /**
   * Enqueue an item into the queue with a precomputed distance (skips distFn).
   */
  public void enqueue(T item, float distance) {
    queue.add(new DistancedItem<>(item, distance));
  }

  /**
   * Size
   *
   * @return size of the queue
   */
  public int size() {
    return queue.size();
  }

  /**
   * Is Min queue
   *
   * @return true if min queue else false
   */
  public boolean isMinQueue() {
    return minQueue;
  }

  /**
   * Returns origin (base element) of the queue
   *
   * @return origin of the queue
   */
  public U getOrigin() {
    return origin;
  }

  /**
   * Return a new queue with ordering reversed. The receiver is left unmodified.
   */
  public DistancedItemQueue<U, T> reverse() {
    final PriorityQueue<DistancedItem<T>> rqueue =
        new PriorityQueue<>(queue.comparator().reversed());
    // addAll replaces the original manual iterator copy loop; a bulk add of the
    // backing queue's elements is equivalent since heap order is re-established
    // by the reversed comparator on insertion.
    rqueue.addAll(queue);
    return new DistancedItemQueue<>(origin, distFn, rqueue, !isMinQueue());
  }

  @Override
  public Iterator<DistancedItem<T>> iterator() {
    return queue.iterator();
  }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/HnswIndex.java
package com.twitter.ann.hnsw;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.thrift.TException;

import com.twitter.ann.common.IndexOutputFile;
import com.twitter.ann.common.thriftjava.HnswInternalIndexMetadata;
import com.twitter.bijection.Injection;
import com.twitter.logging.Logger;
import com.twitter.mediaservices.commons.codec.ArrayByteBufferCodec;
import com.twitter.search.common.file.AbstractFile;

/**
 * Typed multithreaded HNSW implementation supporting creation/querying of approximate nearest neighbour
 * Paper: https://arxiv.org/pdf/1603.09320.pdf
 * Multithreading impl based on NMSLIB version : https://github.com/nmslib/hnsw/blob/master/hnswlib/hnswalg.h
 *
 * @param <T> The type of items inserted / searched in the HNSW index.
 * @param <Q> The type of KNN query.
 */
public class HnswIndex<T, Q> {
  private static final Logger LOG = Logger.get(HnswIndex.class);
  private static final String METADATA_FILE_NAME = "hnsw_internal_metadata";
  private static final String GRAPH_FILE_NAME = "hnsw_internal_graph";
  // Multiplier applied to expectedElements when sizing the graph/locks maps,
  // leaving headroom for one map entry per (item, layer) pair.
  private static final int MAP_SIZE_FACTOR = 5;

  // Distance used while building the graph (item vs item).
  private final DistanceFunction<T, T> distFnIndex;
  // Distance used while querying (query vs item).
  private final DistanceFunction<Q, T> distFnQuery;
  private final int efConstruction;
  private final int maxM;
  // Connection cap for layer 0 (always 2 * maxM).
  private final int maxM0;
  // Controls the exponential distribution of randomly drawn levels (1 / ln(maxM)).
  private final double levelMultiplier;
  // Atomically-swapped (maxLevel, entryPoint) pair; readers take a consistent snapshot.
  private final AtomicReference<HnswMeta<T>> graphMeta = new AtomicReference<>();
  // Adjacency lists keyed by (layer, item).
  private final Map<HnswNode<T>, ImmutableList<T>> graph;
  // To take lock on vertex level
  private final ConcurrentHashMap<T, ReadWriteLock> locks;
  // To take lock on whole graph only if vertex addition is on layer above the current maxLevel
  private final ReentrantLock globalLock;
  private final Function<T, ReadWriteLock> lockProvider;
  private final RandomProvider randomProvider;
  // Probability of reevaluating connections of an element in the neighborhood during an update
  // Can be used as a knob to adjust update_speed/search_speed tradeoff.
  private final float updateNeighborProbability;

  /**
   * Creates instance of hnsw index.
   *
   * @param distFnIndex Any distance metric/non metric that specifies similarity between two items for indexing.
   * @param distFnQuery Any distance metric/non metric that specifies similarity between item for which nearest neighbours queried for and already indexed item.
   * @param efConstruction Provide speed vs index quality tradeoff, higher the value better the quality and higher the time to create index.
   * Valid range of efConstruction can be anywhere between 1 and tens of thousand. Typically, it should be set so that a search of M
   * neighbors with ef=efConstruction should end in recall>0.95.
   * @param maxM Maximum connections per layer except 0th level.
   * Optimal values between 5-48.
   * Smaller M generally produces better result for lower recalls and/ or lower dimensional data,
   * while bigger M is better for high recall and/ or high dimensional, data on the expense of more memory/disk usage
   * @param expectedElements Approximate number of elements to be indexed
   * @param randomProvider Source of randomness for drawing item levels
   */
  protected HnswIndex(
      DistanceFunction<T, T> distFnIndex,
      DistanceFunction<Q, T> distFnQuery,
      int efConstruction,
      int maxM,
      int expectedElements,
      RandomProvider randomProvider
  ) {
    this(distFnIndex, distFnQuery, efConstruction, maxM, expectedElements,
        new HnswMeta<>(-1, Optional.empty()),
        new ConcurrentHashMap<>(MAP_SIZE_FACTOR * expectedElements),
        randomProvider
    );
  }

  // Full constructor, also used when re-hydrating a persisted index (loadHnswIndex).
  private HnswIndex(
      DistanceFunction<T, T> distFnIndex,
      DistanceFunction<Q, T> distFnQuery,
      int efConstruction,
      int maxM,
      int expectedElements,
      HnswMeta<T> graphMeta,
      Map<HnswNode<T>, ImmutableList<T>> graph,
      RandomProvider randomProvider
  ) {
    this.distFnIndex = distFnIndex;
    this.distFnQuery = distFnQuery;
    this.efConstruction = efConstruction;
    this.maxM = maxM;
    this.maxM0 = 2 * maxM;
    this.levelMultiplier = 1.0 / Math.log(1.0 * maxM);
    this.graphMeta.set(graphMeta);
    this.graph = graph;
    this.locks = new ConcurrentHashMap<>(MAP_SIZE_FACTOR * expectedElements);
    this.globalLock = new ReentrantLock();
    this.lockProvider = key -> new ReentrantReadWriteLock();
    this.randomProvider = randomProvider;
    this.updateNeighborProbability = 1.0f;
  }

  /**
   * wireConnectionForAllLayers finds connections for a new element and creates bi-direction links.
   * The method assumes using a reentrant lock to link list reads.
   *
   * @param entryPoint the global entry point
   * @param item the item for which the connections are found
   * @param itemLevel the level of the added item (maximum layer in which we wire the connections)
   * @param maxLayer the level of the entry point
   * @param isUpdate true when re-wiring an existing element (reInsert) rather than inserting a new one
   */
  private void wireConnectionForAllLayers(final T entryPoint, final T item, final int itemLevel,
      final int maxLayer, final boolean isUpdate) {
    T curObj = entryPoint;
    if (itemLevel < maxLayer) {
      // Greedily descend from the top layer to itemLevel + 1 to find a good local entry point.
      curObj = bestEntryPointUntilLayer(curObj, item, maxLayer, itemLevel, distFnIndex);
    }
    for (int level = Math.min(itemLevel, maxLayer); level >= 0; level--) {
      final DistancedItemQueue<T, T> candidates =
          searchLayerForCandidates(item, curObj, efConstruction, level, distFnIndex, isUpdate);
      curObj = mutuallyConnectNewElement(item, candidates, level, isUpdate);
    }
  }

  /**
   * Insert the item into HNSW index.
   *
   * @throws IllegalDuplicateInsertException if the item is already present in the index
   */
  public void insert(final T item) throws IllegalDuplicateInsertException {
    final Lock itemLock = locks.computeIfAbsent(item, lockProvider).writeLock();
    itemLock.lock();
    try {
      final HnswMeta<T> metadata = graphMeta.get();
      // If the graph already has the item, we should not re-insert it again.
      // Need to check the entry point in case we reinsert the first item, where there is no
      // graph yet but only an entry point.
      if (graph.containsKey(HnswNode.from(0, item))
          || (metadata.getEntryPoint().isPresent()
          && Objects.equals(metadata.getEntryPoint().get(), item))) {
        throw new IllegalDuplicateInsertException(
            "Duplicate insertion is not supported: " + item);
      }
      final int curLevel = getRandomLevel();
      Optional<T> entryPoint = metadata.getEntryPoint();
      // The global lock prevents two threads from making changes to the entry point. This lock
      // should get taken very infrequently. Something like log-base-levelMultiplier(num items)
      // For a full explanation of locking see this document: http://go/hnsw-locking
      int maxLevelCopy = metadata.getMaxLevel();
      if (curLevel > maxLevelCopy) {
        globalLock.lock();
        // Re initialize the entryPoint and maxLevel in case these are changed by any other thread
        // No need to check the condition again since,
        // it is already checked at the end before updating entry point struct
        // No need to unlock for optimization and keeping as is if condition fails since threads
        // will not be entering this section a lot.
        final HnswMeta<T> temp = graphMeta.get();
        entryPoint = temp.getEntryPoint();
        maxLevelCopy = temp.getMaxLevel();
      }
      if (entryPoint.isPresent()) {
        wireConnectionForAllLayers(entryPoint.get(), item, curLevel, maxLevelCopy, false);
      }
      if (curLevel > maxLevelCopy) {
        Preconditions.checkState(globalLock.isHeldByCurrentThread(),
            "Global lock not held before updating entry point");
        graphMeta.set(new HnswMeta<>(curLevel, Optional.of(item)));
      }
    } finally {
      if (globalLock.isHeldByCurrentThread()) {
        globalLock.unlock();
      }
      itemLock.unlock();
    }
  }

  /**
   * set connections of an element with synchronization
   * The only other place that should have the lock for writing is during
   * the element insertion
   */
  private void setConnectionList(final T item, int layer, List<T> connections) {
    final Lock candidateLock = locks.computeIfAbsent(item, lockProvider).writeLock();
    candidateLock.lock();
    try {
      graph.put(
          HnswNode.from(layer, item),
          ImmutableList.copyOf(connections)
      );
    } finally {
      candidateLock.unlock();
    }
  }

  /**
   * Reinsert the item into HNSW index.
   * This method updates the links of an element assuming
   * the element's distance function is changed externally (e.g. by updating the features)
   */
  public void reInsert(final T item) {
    final HnswMeta<T> metadata = graphMeta.get();
    Optional<T> entryPoint = metadata.getEntryPoint();
    Preconditions.checkState(entryPoint.isPresent(),
        "Update cannot be performed if entry point is not present");
    // This is a check for the single element case
    if (entryPoint.get().equals(item) && graph.isEmpty()) {
      return;
    }
    Preconditions.checkState(graph.containsKey(HnswNode.from(0, item)),
        "Graph does not contain the item to be updated at level 0");
    // Find the highest layer on which the item currently appears.
    int curLevel = 0;
    int maxLevelCopy = metadata.getMaxLevel();
    for (int layer = maxLevelCopy; layer >= 0; layer--) {
      if (graph.containsKey(HnswNode.from(layer, item))) {
        curLevel = layer;
        break;
      }
    }
    // Updating the links of the elements from the 1-hop radius of the updated element
    for (int layer = 0; layer <= curLevel; layer++) {
      // Filling the element sets for candidates and updated elements
      final HashSet<T> setCand = new HashSet<T>();
      final HashSet<T> setNeigh = new HashSet<T>();
      final List<T> listOneHop = getConnectionListForRead(item, layer);
      if (listOneHop.isEmpty()) {
        LOG.debug("No links for the updated element. Empty dataset?");
        continue;
      }
      setCand.add(item);
      for (T elOneHop : listOneHop) {
        setCand.add(elOneHop);
        // Only a fraction (updateNeighborProbability) of neighbors get their links recomputed.
        if (randomProvider.get().nextFloat() > updateNeighborProbability) {
          continue;
        }
        setNeigh.add(elOneHop);
        final List<T> listTwoHop = getConnectionListForRead(elOneHop, layer);
        if (listTwoHop.isEmpty()) {
          LOG.debug("No links for the updated element. Empty dataset?");
        }
        for (T oneHopEl : listTwoHop) {
          setCand.add(oneHopEl);
        }
      }
      // No need to update the item itself, so remove it
      setNeigh.remove(item);
      // Updating the link lists of elements from setNeigh:
      for (T neigh : setNeigh) {
        final HashSet<T> setCopy = new HashSet<T>(setCand);
        setCopy.remove(neigh);
        int keepElementsNum = Math.min(efConstruction, setCopy.size());
        // Max queue of the keepElementsNum closest candidates to neigh.
        final DistancedItemQueue<T, T> candidates = new DistancedItemQueue<>(
            neigh,
            ImmutableList.of(),
            false,
            distFnIndex
        );
        for (T cand : setCopy) {
          final float distance = distFnIndex.distance(neigh, cand);
          if (candidates.size() < keepElementsNum) {
            candidates.enqueue(cand, distance);
          } else {
            // Replace the current farthest candidate if this one is closer.
            if (distance < candidates.peek().getDistance()) {
              candidates.dequeue();
              candidates.enqueue(cand, distance);
            }
          }
        }
        final ImmutableList<T> neighbours = selectNearestNeighboursByHeuristic(
            candidates,
            layer == 0 ? maxM0 : maxM
        );
        final List<T> temp = getConnectionListForRead(neigh, layer);
        if (temp.isEmpty()) {
          LOG.debug("existing linkslist is empty. Corrupt index");
        }
        if (neighbours.isEmpty()) {
          LOG.debug("predicted linkslist is empty. Corrupt index");
        }
        setConnectionList(neigh, layer, neighbours);
      }
    }
    wireConnectionForAllLayers(metadata.getEntryPoint().get(), item, curLevel, maxLevelCopy, true);
  }

  /**
   * This method can be used to get the graph statistics, specifically
   * it prints the histogram of inbound connections for each element.
   */
  private String getStats() {
    int histogramMaxBins = 50;
    int[] histogram = new int[histogramMaxBins];
    HashMap<T, Integer> mmap = new HashMap<T, Integer>();
    // Count inbound links at layer 0 per element.
    for (HnswNode<T> key : graph.keySet()) {
      if (key.level == 0) {
        List<T> linkList = getConnectionListForRead(key.item, key.level);
        for (T node : linkList) {
          int a = mmap.computeIfAbsent(node, k -> 0);
          mmap.put(node, a + 1);
        }
      }
    }
    // Bucket counts; everything >= histogramMaxBins - 1 goes into the last bin.
    for (T key : mmap.keySet()) {
      int ind = mmap.get(key) < histogramMaxBins - 1 ? mmap.get(key) : histogramMaxBins - 1;
      histogram[ind]++;
    }
    int minNonZeroIndex;
    for (minNonZeroIndex = histogramMaxBins - 1; minNonZeroIndex >= 0; minNonZeroIndex--) {
      if (histogram[minNonZeroIndex] > 0) {
        break;
      }
    }
    // One "bin<TAB>percentage" row per line.
    String output = "";
    for (int i = 0; i <= minNonZeroIndex; i++) {
      output += "" + i + "\t" + histogram[i] / (0.01f * mmap.keySet().size()) + "\n";
    }
    return output;
  }

  // Draws a layer from an exponential distribution; higher layers are exponentially rarer.
  private int getRandomLevel() {
    return (int) (-Math.log(randomProvider.get().nextDouble()) * levelMultiplier);
  }

  /**
   * Note that to avoid deadlocks it is important that this method is called after all the searches
   * of the graph have completed. If you take a lock on any items discovered in the graph after
   * this, you may get stuck waiting on a thread that is waiting for item to be fully inserted.
   * <p>
   * Note: when using concurrent writers we can miss connections that we would otherwise get.
   * This will reduce the recall.
   * <p>
   * For a full explanation of locking see this document: http://go/hnsw-locking
   * The method returns the closest nearest neighbor (can be used as an enter point)
   */
  private T mutuallyConnectNewElement(
      final T item,
      final DistancedItemQueue<T, T> candidates, // Max queue
      final int level,
      final boolean isUpdate
  ) {
    // Using maxM here. Its implementation is ambiguous in HNSW paper,
    // so using the way it is getting used in Hnsw lib.
    final ImmutableList<T> neighbours = selectNearestNeighboursByHeuristic(candidates, maxM);
    setConnectionList(item, level, neighbours);
    final int M = level == 0 ? maxM0 : maxM;
    for (T nn : neighbours) {
      if (nn.equals(item)) {
        continue;
      }
      final Lock curLock = locks.computeIfAbsent(nn, lockProvider).writeLock();
      curLock.lock();
      try {
        final HnswNode<T> key = HnswNode.from(level, nn);
        final ImmutableList<T> connections = graph.getOrDefault(key, ImmutableList.of());
        final boolean isItemAlreadyPresent =
            isUpdate && connections.indexOf(item) != -1 ? true : false;
        // If `item` is already present in the neighboring connections,
        // then no need to modify any connections or run the search heuristics.
        if (isItemAlreadyPresent) {
          continue;
        }
        final ImmutableList<T> updatedConnections;
        if (connections.size() < M) {
          // Room left under the connection cap: just append the back-link.
          final List<T> temp = new ArrayList<>(connections);
          temp.add(item);
          updatedConnections = ImmutableList.copyOf(temp.iterator());
        } else {
          // Max Queue
          final DistancedItemQueue<T, T> queue = new DistancedItemQueue<>(
              nn,
              connections,
              false,
              distFnIndex
          );
          queue.enqueue(item);
          updatedConnections = selectNearestNeighboursByHeuristic(queue, M);
        }
        if (updatedConnections.isEmpty()) {
          LOG.debug("Internal error: predicted linkslist is empty");
        }
        graph.put(key, updatedConnections);
      } finally {
        curLock.unlock();
      }
    }
    return neighbours.get(0);
  }

  /*
   * bestEntryPointUntilLayer starts the graph search for item from the entry point
   * until the searches reaches the selectedLayer layer.
   * @return a point from selectedLayer layer, was the closest on the (selectedLayer+1) layer
   */
  private <K> T bestEntryPointUntilLayer(
      final T entryPoint,
      final K item,
      int maxLayer,
      int selectedLayer,
      DistanceFunction<K, T> distFn
  ) {
    T curObj = entryPoint;
    if (selectedLayer < maxLayer) {
      float curDist = distFn.distance(item, curObj);
      for (int level = maxLayer; level > selectedLayer; level--) {
        // Greedy hill-descent: keep moving to the closest neighbour until no improvement.
        boolean changed = true;
        while (changed) {
          changed = false;
          final List<T> list = getConnectionListForRead(curObj, level);
          for (T nn : list) {
            final float tempDist = distFn.distance(item, nn);
            if (tempDist < curDist) {
              curDist = tempDist;
              curObj = nn;
              changed = true;
            }
          }
        }
      }
    }
    return curObj;
  }

  @VisibleForTesting
  protected ImmutableList<T> selectNearestNeighboursByHeuristic(
      final DistancedItemQueue<T, T> candidates, // Max queue
      final int maxConnections
  ) {
    Preconditions.checkState(!candidates.isMinQueue(),
        "candidates in selectNearestNeighboursByHeuristic should be a max queue");
    final T baseElement = candidates.getOrigin();
    // If the number of items is less than the number of connections needed,
    // just return all of them (minus the base element itself).
    if (candidates.size() <= maxConnections) {
      List<T> list = candidates.toListWithItem();
      list.remove(baseElement);
      return ImmutableList.copyOf(list);
    } else {
      final List<T> resSet = new ArrayList<>(maxConnections);
      // Min queue for closest elements first
      final DistancedItemQueue<T, T> minQueue = candidates.reverse();
      while (minQueue.nonEmpty()) {
        if (resSet.size() >= maxConnections) {
          break;
        }
        final DistancedItem<T> candidate = minQueue.dequeue();
        // We do not want to create loops:
        // While heuristic is used only for creating the links
        if (candidate.getItem().equals(baseElement)) {
          continue;
        }
        boolean toInclude = true;
        for (T e : resSet) {
          // Do not include candidate if the distance from candidate to any of existing item in
          // resSet is closer to the distance from the candidate to the item. By doing this, the
          // connection of graph will be more diverse, and in case of highly clustered data set,
          // connections will be made between clusters instead of all being in the same cluster.
          final float dist = distFnIndex.distance(e, candidate.getItem());
          if (dist < candidate.getDistance()) {
            toInclude = false;
            break;
          }
        }
        if (toInclude) {
          resSet.add(candidate.getItem());
        }
      }
      return ImmutableList.copyOf(resSet);
    }
  }

  /**
   * Search the index for the neighbours.
   *
   * @param query Query
   * @param numOfNeighbours Number of neighbours to search for.
   * @param ef This param controls the accuracy of the search.
   * Bigger the ef better the accuracy on the expense of latency.
   * Keep it atleast number of neighbours to find.
   * @return Neighbours sorted closest-first
   */
  public List<DistancedItem<T>> searchKnn(final Q query, final int numOfNeighbours, final int ef) {
    final HnswMeta<T> metadata = graphMeta.get();
    if (metadata.getEntryPoint().isPresent()) {
      T entryPoint = bestEntryPointUntilLayer(metadata.getEntryPoint().get(),
          query, metadata.getMaxLevel(), 0, distFnQuery);
      // Get the actual neighbours from 0th layer
      final List<DistancedItem<T>> neighbours =
          searchLayerForCandidates(query, entryPoint, Math.max(ef, numOfNeighbours),
              0, distFnQuery, false).dequeueAll();
      // dequeueAll drains the max queue farthest-first; reverse to get closest-first.
      Collections.reverse(neighbours);
      return neighbours.size() > numOfNeighbours
          ? neighbours.subList(0, numOfNeighbours) : neighbours;
    } else {
      return Collections.emptyList();
    }
  }

  // This method is currently not used
  // It is needed for debugging purposes only
  private void checkIntegrity(String message) {
    final HnswMeta<T> metadata = graphMeta.get();
    for (HnswNode<T> node : graph.keySet()) {
      List<T> linkList = graph.get(node);
      for (T el : linkList) {
        // A self-link means the graph was corrupted.
        if (el.equals(node.item)) {
          LOG.debug(message);
          throw new RuntimeException("integrity check failed");
        }
      }
    }
  }

  /**
   * Single-layer beam search (Algorithm 2 of the HNSW paper).
   * Returns a max queue of up to ef candidates; the queue's root is the farthest result.
   */
  private <K> DistancedItemQueue<K, T> searchLayerForCandidates(
      final K item,
      final T entryPoint,
      final int ef,
      final int level,
      final DistanceFunction<K, T> distFn,
      boolean isUpdate
  ) {
    // Min queue
    final DistancedItemQueue<K, T> cQueue = new DistancedItemQueue<>(
        item,
        Collections.singletonList(entryPoint),
        true,
        distFn
    );
    // Max Queue
    final DistancedItemQueue<K, T> wQueue = cQueue.reverse();
    final Set<T> visited = new HashSet<>();
    float lowerBoundDistance = wQueue.peek().getDistance();
    visited.add(entryPoint);
    while (cQueue.nonEmpty()) {
      final DistancedItem<T> candidate = cQueue.peek();
      // Stop once the closest unexpanded candidate is farther than the worst result.
      if (candidate.getDistance() > lowerBoundDistance) {
        break;
      }
      cQueue.dequeue();
      final List<T> list = getConnectionListForRead(candidate.getItem(), level);
      for (T nn : list) {
        if (!visited.contains(nn)) {
          visited.add(nn);
          final float distance = distFn.distance(item, nn);
          if (wQueue.size() < ef || distance < wQueue.peek().getDistance()) {
            cQueue.enqueue(nn, distance);
            // During an update, the item itself may still be linked in the graph;
            // expand through it but keep it out of the result set.
            if (isUpdate && item.equals(nn)) {
              continue;
            }
            wQueue.enqueue(nn, distance);
            if (wQueue.size() > ef) {
              wQueue.dequeue();
            }
            lowerBoundDistance = wQueue.peek().getDistance();
          }
        }
      }
    }
    return wQueue;
  }

  /**
   * Serialize hnsw index: writes the graph entries and the metadata
   * (entry point, maxLevel, build parameters) as two files in the output directory.
   */
  public void toDirectory(IndexOutputFile indexOutputFile, Injection<T, byte[]> injection)
      throws IOException, TException {
    final int totalGraphEntries = HnswIndexIOUtil.saveHnswGraphEntries(
        graph,
        indexOutputFile.createFile(GRAPH_FILE_NAME).getOutputStream(),
        injection);
    HnswIndexIOUtil.saveMetadata(
        graphMeta.get(),
        efConstruction,
        maxM,
        totalGraphEntries,
        injection,
        indexOutputFile.createFile(METADATA_FILE_NAME).getOutputStream());
  }

  /**
   * Load hnsw index previously persisted via {@link #toDirectory}.
   */
  public static <T, Q> HnswIndex<T, Q> loadHnswIndex(
      DistanceFunction<T, T> distFnIndex,
      DistanceFunction<Q, T> distFnQuery,
      AbstractFile directory,
      Injection<T, byte[]> injection,
      RandomProvider randomProvider) throws IOException, TException {
    final AbstractFile graphFile = directory.getChild(GRAPH_FILE_NAME);
    final AbstractFile metadataFile = directory.getChild(METADATA_FILE_NAME);
    final HnswInternalIndexMetadata metadata = HnswIndexIOUtil.loadMetadata(metadataFile);
    final Map<HnswNode<T>, ImmutableList<T>> graph =
        HnswIndexIOUtil.loadHnswGraph(graphFile, injection, metadata.numElements);
    final ByteBuffer entryPointBB = metadata.entryPoint;
    final HnswMeta<T> graphMeta = new HnswMeta<>(
        metadata.maxLevel,
        entryPointBB == null ? Optional.empty()
            : Optional.of(injection.invert(ArrayByteBufferCodec.decode(entryPointBB)).get())
    );
    return new HnswIndex<>(
        distFnIndex,
        distFnQuery,
        metadata.efConstruction,
        metadata.maxM,
        metadata.numElements,
        graphMeta,
        graph,
        randomProvider
    );
  }

  // Reads the adjacency list of (node, level) under the node's read lock;
  // returns an empty list when the node has no links at that level.
  private List<T> getConnectionListForRead(T node, int level) {
    final Lock curLock = locks.computeIfAbsent(node, lockProvider).readLock();
    curLock.lock();
    final List<T> list;
    try {
      list = graph
          .getOrDefault(HnswNode.from(level, node), ImmutableList.of());
    } finally {
      curLock.unlock();
    }
    return list;
  }

  @VisibleForTesting
  AtomicReference<HnswMeta<T>> getGraphMeta() {
    return graphMeta;
  }

  @VisibleForTesting
  Map<T, ReadWriteLock> getLocks() {
    return locks;
  }

  @VisibleForTesting
  Map<HnswNode<T>, ImmutableList<T>> getGraph() {
    return graph;
  }

  public interface RandomProvider {
    /**
     * RandomProvider interface made public for scala 2.12 compat
     */
    Random get();
  }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/HnswIndexIOUtil.java
package com.twitter.ann.hnsw;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import com.google.common.collect.ImmutableList;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TIOStreamTransport;
import org.apache.thrift.transport.TTransportException;

import com.twitter.ann.common.thriftjava.HnswGraphEntry;
import com.twitter.ann.common.thriftjava.HnswInternalIndexMetadata;
import com.twitter.bijection.Injection;
import com.twitter.mediaservices.commons.codec.ArrayByteBufferCodec;
import com.twitter.search.common.file.AbstractFile;

/**
 * Serialization helpers for the internal HNSW index: thrift-encodes the
 * adjacency map ({@code HnswGraphEntry} records) and the index metadata
 * ({@code HnswInternalIndexMetadata}) to/from streams and files.
 *
 * Note: the save methods close the provided {@code OutputStream} (callers rely
 * on this), and they now do so even when serialization fails, fixing a
 * previous resource leak on the error paths.
 */
public final class HnswIndexIOUtil {
  private HnswIndexIOUtil() {
  }

  /**
   * Serialize the index metadata (max level, build params, element count and
   * optional entry point) as a single thrift blob into {@code outputStream}.
   * The stream is always closed, even if serialization fails.
   */
  public static <T> void saveMetadata(
      HnswMeta<T> graphMeta,
      int efConstruction,
      int maxM,
      int numElements,
      Injection<T, byte[]> injection,
      OutputStream outputStream
  ) throws IOException, TException {
    final int maxLevel = graphMeta.getMaxLevel();
    final HnswInternalIndexMetadata metadata = new HnswInternalIndexMetadata(
        maxLevel,
        efConstruction,
        maxM,
        numElements
    );
    // Entry point is optional: absent for an empty index.
    if (graphMeta.getEntryPoint().isPresent()) {
      metadata.setEntryPoint(injection.apply(graphMeta.getEntryPoint().get()));
    }
    final TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
    try {
      outputStream.write(serializer.serialize(metadata));
    } finally {
      // Previously the stream leaked if serialize()/write() threw.
      outputStream.close();
    }
  }

  /**
   * Load Hnsw index metadata from {@code file}.
   */
  public static HnswInternalIndexMetadata loadMetadata(AbstractFile file)
      throws IOException, TException {
    final HnswInternalIndexMetadata obj = new HnswInternalIndexMetadata();
    final TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    deserializer.deserialize(obj, file.getByteSource().read());
    return obj;
  }

  /**
   * Load Hnsw graph entries from {@code file}.
   * Reads thrift records until end-of-file; each record becomes one
   * (level, item) -> neighbour-list entry of the returned map.
   * The input stream is closed on every exit path (try-with-resources), fixing
   * a leak when decoding threw a non-thrift exception.
   *
   * @param numElements sizing hint for the returned map
   */
  public static <T> Map<HnswNode<T>, ImmutableList<T>> loadHnswGraph(
      AbstractFile file,
      Injection<T, byte[]> injection,
      int numElements
  ) throws IOException, TException {
    try (InputStream stream = file.getByteSource().openBufferedStream()) {
      final TProtocol protocol = new TBinaryProtocol(new TIOStreamTransport(stream));
      final Map<HnswNode<T>, ImmutableList<T>> graph = new HashMap<>(numElements);
      while (true) {
        final HnswGraphEntry entry = new HnswGraphEntry();
        try {
          entry.read(protocol);
        } catch (TException e) {
          // END_OF_FILE is the normal termination signal of the record stream.
          if (e instanceof TTransportException
              && TTransportException.class.cast(e).getType()
              == TTransportException.END_OF_FILE) {
            break;
          }
          throw e;
        }
        final HnswNode<T> node = HnswNode.from(entry.level,
            injection.invert(ArrayByteBufferCodec.decode(entry.key)).get());
        final List<T> list = entry.getNeighbours().stream()
            .map(bb -> injection.invert(ArrayByteBufferCodec.decode(bb)).get())
            .collect(Collectors.toList());
        graph.put(node, ImmutableList.copyOf(list.iterator()));
      }
      return graph;
    }
  }

  /**
   * Save hnsw graph in file: one thrift record per (level, item) key.
   * The stream is always closed, even if serialization fails.
   *
   * @return number of keys in the graph
   */
  public static <T> int saveHnswGraphEntries(
      Map<HnswNode<T>, ImmutableList<T>> graph,
      OutputStream outputStream,
      Injection<T, byte[]> injection
  ) throws IOException, TException {
    final TProtocol protocol = new TBinaryProtocol(new TIOStreamTransport(outputStream));
    final Set<HnswNode<T>> nodes = graph.keySet();
    try {
      for (HnswNode<T> node : nodes) {
        final HnswGraphEntry entry = new HnswGraphEntry();
        entry.setLevel(node.level);
        entry.setKey(injection.apply(node.item));
        final List<ByteBuffer> nn = graph.getOrDefault(node, ImmutableList.of()).stream()
            .map(t -> ByteBuffer.wrap(injection.apply(t)))
            .collect(Collectors.toList());
        entry.setNeighbours(nn);
        entry.write(protocol);
      }
    } finally {
      // Previously the stream leaked if any entry failed to serialize.
      outputStream.close();
    }
    return nodes.size();
  }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/HnswMeta.java
package com.twitter.ann.hnsw;

import java.util.Objects;
import java.util.Optional;

/**
 * Immutable snapshot of the HNSW graph-level state: the highest layer currently
 * in use and the optional entry point item (empty for an index with no elements).
 * Instances are swapped atomically by the index, so the fields are final.
 */
class HnswMeta<T> {
  private final int maxLevel;
  private final Optional<T> entryPoint;

  HnswMeta(int maxLevel, Optional<T> entryPoint) {
    this.maxLevel = maxLevel;
    this.entryPoint = entryPoint;
  }

  public int getMaxLevel() {
    return maxLevel;
  }

  public Optional<T> getEntryPoint() {
    return entryPoint;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    final HnswMeta<?> that = (HnswMeta<?>) o;
    return this.maxLevel == that.maxLevel
        && Objects.equals(this.entryPoint, that.entryPoint);
  }

  @Override
  public int hashCode() {
    return Objects.hash(maxLevel, entryPoint);
  }

  @Override
  public String toString() {
    // Same rendering as the previous concatenation-based implementation.
    return String.format("HnswMeta{maxLevel=%d, entryPoint=%s}", maxLevel, entryPoint);
  }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/HnswNode.java
package com.twitter.ann.hnsw;

import java.util.Objects;

/**
 * Immutable key identifying an item at a specific layer (level) of the HNSW
 * graph; used as the lookup key of the adjacency map in {@code HnswIndex}.
 */
public class HnswNode<T> {
  public final int level;
  public final T item;

  public HnswNode(int level, T item) {
    this.level = level;
    this.item = item;
  }

  /**
   * Create a hnsw node.
   */
  public static <T> HnswNode<T> from(int level, T item) {
    return new HnswNode<>(level, item);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof HnswNode)) {
      return false;
    }
    final HnswNode<?> that = (HnswNode<?>) o;
    // Same semantics as the previous commons-lang EqualsBuilder version:
    // nodes are equal iff both the item and the level match.
    return this.level == that.level && Objects.equals(this.item, that.item);
  }

  @Override
  public int hashCode() {
    // java.util.Objects replaces commons-lang HashCodeBuilder; hash values
    // differ from the old implementation, but the equals/hashCode contract
    // holds and hashes are never persisted (serialization stores level + bytes).
    return Objects.hash(item, level);
  }
}
the-algorithm-main/ann/src/main/java/com/twitter/ann/hnsw/IllegalDuplicateInsertException.java
package com.twitter.ann.hnsw;

/**
 * Checked exception thrown by {@code HnswIndex#insert} when the item being
 * inserted is already present in the index (duplicate insertion is not
 * supported; use {@code reInsert} to update an existing element's links).
 */
public class IllegalDuplicateInsertException extends Exception {
  public IllegalDuplicateInsertException(String message) {
    super(message);
  }
}
the-algorithm-main/ann/src/main/python/dataflow/BUILD.bazel
# SQL query shipped as a resource so the indexing job can load it at runtime
# (see faiss_index_bq_dataset.py: pkgutil.get_data(__name__, "bq.sql")).
resources(
    name = "sql",
    sources = ["bq.sql"],
)

# Library with the Dataflow/Beam pipeline code that builds a FAISS index from a
# BigQuery dataset.
python3_library(
    name = "faiss_indexing",
    sources = ["**/*.py"],
    tags = ["bazel-compatible"],
    dependencies = [
        ":sql",
        "3rdparty/python/apache-beam:default",
        "3rdparty/python/faiss-gpu:default",
        "3rdparty/python/gcsfs:default",
        "3rdparty/python/google-cloud-bigquery:default",
        "3rdparty/python/google-cloud-storage",
        "3rdparty/python/numpy:default",
        "3rdparty/python/pandas:default",
        "3rdparty/python/pandas-gbq:default",
        "3rdparty/python/pyarrow:default",
        "src/python/twitter/ml/common/apache_beam",
    ],
)

# Deployable binary entry point for the indexing pipeline.
python37_binary(
    name = "faiss_indexing_bin",
    sources = ["faiss_index_bq_dataset.py"],
    platforms = [
        "current",
        "linux_x86_64",
    ],
    tags = ["no-mypy"],
    zip_safe = False,
    dependencies = [
        ":faiss_indexing",
        "3rdparty/python/_closures/ann/src/main/python/dataflow:faiss_indexing_bin",
    ],
)
the-algorithm-main/ann/src/main/python/dataflow/bq.sql
-- Selects the latest snapshot of tweet average embeddings for FAISS indexing.
-- maxts: the most recent ingestion timestamp present in the table.
-- NOTE(review): rows are then restricted to that snapshot (ts >= maxts) and to
-- tweets created within 1 day of it; assumes `ts` is comparable with
-- DATE(TIMESTAMP_MILLIS(createdAt)) -- i.e. a DATE-typed partition stamp --
-- TODO confirm against the table schema.
WITH maxts as (SELECT as value MAX(ts) as ts FROM `twttr-recos-ml-prod.ssedhain.twhin_tweet_avg_embedding`)
SELECT entityId, embedding
FROM `twttr-recos-ml-prod.ssedhain.twhin_tweet_avg_embedding`
WHERE ts >= (select max(maxts) from maxts)
AND DATE(TIMESTAMP_MILLIS(createdAt)) <= (select max(maxts) from maxts)
AND DATE(TIMESTAMP_MILLIS(createdAt)) >= DATE_SUB((select max(maxts) from maxts), INTERVAL 1 DAY)
the-algorithm-main/ann/src/main/python/dataflow/faiss_index_bq_dataset.py
import argparse
import logging
import os
import pkgutil
import sys
from urllib.parse import urlsplit

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
import faiss


def parse_d6w_config(argv=None):
    """Parse d6w config.

    :param argv: d6w config
    :return: dictionary containing d6w config
    """
    parser = argparse.ArgumentParser(
        description="See https://docbird.twitter.biz/d6w/model.html for any parameters inherited from d6w job config"
    )
    parser.add_argument("--job_name", dest="job_name", required=True, help="d6w attribute")
    parser.add_argument("--project", dest="project", required=True, help="d6w attribute")
    parser.add_argument(
        "--staging_location", dest="staging_location", required=True, help="d6w attribute"
    )
    parser.add_argument("--temp_location", dest="temp_location", required=True, help="d6w attribute")
    parser.add_argument(
        "--output_location",
        dest="output_location",
        required=True,
        help="GCS bucket and path where resulting artifacts are uploaded",
    )
    parser.add_argument(
        "--service_account_email", dest="service_account_email", required=True, help="d6w attribute"
    )
    parser.add_argument(
        "--factory_string",
        dest="factory_string",
        required=False,
        help="FAISS factory string describing index to build. See https://github.com/facebookresearch/faiss/wiki/The-index-factory",
    )
    parser.add_argument(
        "--metric",
        dest="metric",
        required=True,
        help="Metric used to compute distance between embeddings. Valid values are 'l2', 'ip', 'l1', 'linf'",
    )
    parser.add_argument(
        "--use_gpu",
        dest="gpu",
        required=True,
        help="--use_gpu=yes if you want to use GPU during index building",
    )

    known_args, unknown_args = parser.parse_known_args(argv)
    d6w_config = vars(known_args)
    # Normalize the user-facing string flags into the types the pipeline needs.
    d6w_config["gpu"] = d6w_config["gpu"].lower() == "yes"
    d6w_config["metric"] = parse_metric(d6w_config)

    """
    WARNING: Currently, d6w (a Twitter tool used to deploy Dataflow jobs to GCP) and
    PipelineOptions.for_dataflow_runner (a helper method in twitter.ml.common.apache_beam)
    do not play nicely together. The helper method will overwrite some of the config
    specified in the d6w file using the defaults in
    https://sourcegraph.twitter.biz/git.twitter.biz/source/-/blob/src/python/twitter/ml/common/apache_beam/__init__.py?L24.'
    However, the d6w output message will still report that the config specified in the
    d6w file was used.
    """
    logging.warning(
        f"The following d6w config parameters will be overwritten by the defaults in "
        f"https://sourcegraph.twitter.biz/git.twitter.biz/source/-/blob/src/python/twitter/ml/common/apache_beam/__init__.py?L24\n"
        f"{str(unknown_args)}"
    )
    return d6w_config


def get_bq_query():
    """
    Query is expected to return rows with unique entityId
    """
    return pkgutil.get_data(__name__, "bq.sql").decode("utf-8")


def parse_metric(config):
    """Map the d6w metric string onto the corresponding FAISS metric constant.

    :param config: dict with a "metric" key ('l2', 'ip', 'l1' or 'linf', case-insensitive)
    :raises Exception: if the metric string is not one of the supported values
    """
    metric_str = config["metric"].lower()
    if metric_str == "l2":
        return faiss.METRIC_L2
    elif metric_str == "ip":
        return faiss.METRIC_INNER_PRODUCT
    elif metric_str == "l1":
        return faiss.METRIC_L1
    elif metric_str == "linf":
        return faiss.METRIC_Linf
    else:
        raise Exception(f"Unknown metric: {metric_str}")


def run_pipeline(argv=None):
    """Parse config, assemble pipeline options and run the Beam job.

    :param argv: command-line arguments for the d6w config; defaults to an empty list.

    Note: previously the default was a mutable list (`argv=[]`) that was also
    mutated via `extend`, so GPU-related flags accumulated across calls and the
    caller's list was modified. We now default to None and copy before extending.
    """
    if argv is None:
        argv = []
    config = parse_d6w_config(argv)
    # Copy so that the extra experiment flags never leak back into the caller's list.
    argv_with_extras = list(argv)
    if config["gpu"]:
        argv_with_extras.extend(["--experiments", "use_runner_v2"])
        argv_with_extras.extend(
            ["--experiments", "worker_accelerator=type:nvidia-tesla-t4;count:1;install-nvidia-driver"]
        )
        argv_with_extras.extend(
            [
                "--worker_harness_container_image",
                "gcr.io/twttr-recos-ml-prod/dataflow-gpu/beam2_39_0_py3_7",
            ]
        )
    options = PipelineOptions(argv_with_extras)
    output_bucket_name = urlsplit(config["output_location"]).netloc

    with beam.Pipeline(options=options) as p:
        input_data = p | "Read from BigQuery" >> beam.io.ReadFromBigQuery(
            method=beam.io.ReadFromBigQuery.Method.DIRECT_READ,
            query=get_bq_query(),
            use_standard_sql=True,
        )

        index_built = input_data | "Build and upload index" >> beam.CombineGlobally(
            MergeAndBuildIndex(
                output_bucket_name,
                config["output_location"],
                config["factory_string"],
                config["metric"],
                config["gpu"],
            )
        )

        # Make linter happy
        index_built


class MergeAndBuildIndex(beam.CombineFn):
    """CombineFn that gathers all rows, builds one FAISS index and uploads it to GCS."""

    def __init__(self, bucket_name, gcs_output_path, factory_string, metric, gpu):
        self.bucket_name = bucket_name
        self.gcs_output_path = gcs_output_path
        self.factory_string = factory_string
        self.metric = metric
        self.gpu = gpu

    def create_accumulator(self):
        # Accumulator is simply the list of rows seen so far.
        return []

    def add_input(self, accumulator, element):
        accumulator.append(element)
        return accumulator

    def merge_accumulators(self, accumulators):
        merged = []
        for accum in accumulators:
            merged.extend(accum)
        return merged

    def extract_output(self, rows):
        """Build the FAISS index from all accumulated rows and upload it to GCS."""
        # Reimports are needed on workers
        import glob

        import faiss
        from google.cloud import storage
        import numpy as np

        client = storage.Client()
        bucket = client.get_bucket(self.bucket_name)

        logging.info("Building FAISS index")
        logging.info(f"There are {len(rows)} rows")

        ids = np.array([x["entityId"] for x in rows]).astype("long")
        embeds = np.array([x["embedding"] for x in rows]).astype("float32")
        dimensions = len(embeds[0])
        N = ids.shape[0]
        logging.info(f"There are {dimensions} dimensions")

        if self.factory_string is None:
            # Derive a default OPQ + IVF + PQ factory string from the data shape.
            M = 48

            divideable_dimensions = (dimensions // M) * M
            if divideable_dimensions != dimensions:
                opq_prefix = f"OPQ{M}_{divideable_dimensions}"
            else:
                opq_prefix = f"OPQ{M}"

            clusters = N // 20
            self.factory_string = f"{opq_prefix},IVF{clusters},PQ{M}"

        logging.info(f"Factory string is {self.factory_string}, metric={self.metric}")

        if self.gpu:
            logging.info("Using GPU")

            res = faiss.StandardGpuResources()
            cpu_index = faiss.index_factory(dimensions, self.factory_string, self.metric)
            cpu_index = faiss.IndexIDMap(cpu_index)
            gpu_index = faiss.index_cpu_to_gpu(res, 0, cpu_index)
            gpu_index.train(embeds)
            gpu_index.add_with_ids(embeds, ids)
            cpu_index = faiss.index_gpu_to_cpu(gpu_index)
        else:
            logging.info("Using CPU")

            cpu_index = faiss.index_factory(dimensions, self.factory_string, self.metric)
            cpu_index = faiss.IndexIDMap(cpu_index)
            cpu_index.train(embeds)
            cpu_index.add_with_ids(embeds, ids)

        logging.info("Built faiss index")

        local_path = "/indices"
        logging.info(f"Writing indices to local {local_path}")
        # os.makedirs replaces the previous `subprocess.run("mkdir -p ...")`: same
        # effect, no shell process, and failures surface as Python exceptions.
        os.makedirs(local_path, exist_ok=True)
        local_index_path = os.path.join(local_path, "result.index")

        faiss.write_index(cpu_index, local_index_path)
        logging.info(f"Done writing indices to local {local_path}")

        logging.info(f"Uploading to GCS with path {self.gcs_output_path}")
        assert os.path.isdir(local_path)
        for local_file in glob.glob(local_path + "/*"):
            # NOTE(review): only the last path segment of gcs_output_path is used as
            # the remote prefix — verify this matches the expected bucket layout.
            remote_path = os.path.join(
                self.gcs_output_path.split("/")[-1], local_file[1 + len(local_path) :]
            )
            blob = bucket.blob(remote_path)
            blob.upload_from_filename(local_file)


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.INFO)
    run_pipeline(sys.argv)
the-algorithm-main/ann/src/main/python/dataflow/worker_harness/Dockerfile
# GPU worker-harness image for Dataflow: CUDA 11.2 runtime + Python 3.7 + Beam 2.39.0.
FROM --platform=linux/amd64 nvidia/cuda:11.2.2-cudnn8-runtime-ubuntu20.04

RUN \
    # Add Deadsnakes repository that has a variety of Python packages for Ubuntu.
    # See: https://launchpad.net/~deadsnakes/+archive/ubuntu/ppa
    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys F23C5A6CF475977595C89F51BA6932366A755776 \
    && echo "deb http://ppa.launchpad.net/deadsnakes/ppa/ubuntu focal main" >> /etc/apt/sources.list.d/custom.list \
    && echo "deb-src http://ppa.launchpad.net/deadsnakes/ppa/ubuntu focal main" >> /etc/apt/sources.list.d/custom.list \
    && apt-get update \
    && apt-get install -y curl \
        python3.7 \
        # With python3.8 package, distutils need to be installed separately.
        python3.7-distutils \
        python3-dev \
        python3.7-dev \
        libpython3.7-dev \
        python3-apt \
        gcc \
        g++ \
    && rm -rf /var/lib/apt/lists/*

# Make both `python` and `python3` resolve to python3.7.
RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.7 10
RUN rm -f /usr/bin/python3 && ln -s /usr/bin/python3.7 /usr/bin/python3

# Bootstrap pip, pin its version, and install the Beam SDK with GCP extras.
RUN \
    curl https://bootstrap.pypa.io/get-pip.py | python \
    && pip3 install pip==22.0.3 \
    && python3 -m pip install --no-cache-dir apache-beam[gcp]==2.39.0

# Verify that there are no conflicting dependencies.
RUN pip3 check

# Copy the Apache Beam worker dependencies from the Beam Python 3.7 SDK image.
COPY --from=apache/beam_python3.7_sdk:2.39.0 /opt/apache/beam /opt/apache/beam

# Set the entrypoint to Apache Beam SDK worker launcher.
ENTRYPOINT [ "/opt/apache/beam/boot" ]
the-algorithm-main/ann/src/main/python/dataflow/worker_harness/cloudbuild.yml
# Cloud Build config: builds the Dataflow GPU worker-harness image from the local
# Dockerfile and pushes it to the project's container registry.
steps:
- name: 'gcr.io/cloud-builders/docker'
  args: ['build', '-t', 'gcr.io/twttr-recos-ml-prod/dataflow-gpu/beam2_39_0_py3_7', '.']
- name: 'gcr.io/cloud-builders/docker'
  args: ['push', 'gcr.io/twttr-recos-ml-prod/dataflow-gpu/beam2_39_0_py3_7']
# Registering the image here makes Cloud Build record it as a build artifact.
images: ['gcr.io/twttr-recos-ml-prod/dataflow-gpu/beam2_39_0_py3_7']
the-algorithm-main/ann/src/main/scala/com/twitter/ann/annoy/AnnoyCommon.scala
package com.twitter.ann.annoy

import com.twitter.ann.common.RuntimeParams
import com.twitter.ann.common.thriftscala.AnnoyIndexMetadata
import com.twitter.bijection.Injection
import com.twitter.mediaservices.commons.codec.ThriftByteBufferCodec
import com.twitter.ann.common.thriftscala.{AnnoyRuntimeParam, RuntimeParams => ServiceRuntimeParams}
import scala.util.{Failure, Success, Try}

/** Constants and thrift conversions shared by the Annoy index implementation. */
object AnnoyCommon {
  private[annoy] lazy val MetadataCodec = new ThriftByteBufferCodec(AnnoyIndexMetadata)
  private[annoy] val IndexFileName = "annoy_index"
  private[annoy] val MetaDataFileName = "annoy_index_metadata"
  private[annoy] val IndexIdMappingFileName = "annoy_index_id_mapping"

  /** Converts Annoy runtime params to/from their thrift service representation. */
  val RuntimeParamsInjection: Injection[AnnoyRuntimeParams, ServiceRuntimeParams] =
    new Injection[AnnoyRuntimeParams, ServiceRuntimeParams] {
      override def apply(params: AnnoyRuntimeParams): ServiceRuntimeParams =
        ServiceRuntimeParams.AnnoyParam(AnnoyRuntimeParam(params.nodesToExplore))

      override def invert(thriftParams: ServiceRuntimeParams): Try[AnnoyRuntimeParams] =
        thriftParams match {
          case ServiceRuntimeParams.AnnoyParam(annoyParam) =>
            Success(AnnoyRuntimeParams(annoyParam.numOfNodesToExplore))
          case other =>
            Failure(new IllegalArgumentException(s"Expected AnnoyRuntimeParams got $other"))
        }
    }
}

case class AnnoyRuntimeParams(
  /* Number of vectors to evaluate while searching. A larger value will give more accurate results, but will take longer time to return.
   * Default value would be numberOfTrees*numberOfNeigboursRequested */
  nodesToExplore: Option[Int])
    extends RuntimeParams {
  override def toString: String = s"AnnoyRuntimeParams( nodesToExplore = $nodesToExplore)"
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/annoy/BUILD
# Annoy-based ANN index implementation (JNI bindings to spotify/annoy).
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/com/spotify:annoy-java",
        "3rdparty/jvm/com/spotify:annoy-snapshot",
        "3rdparty/jvm/com/twitter/storehaus:core",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/scala/com/twitter/ann/file_store",
        "ann/src/main/thrift/com/twitter/ann/common:ann-common-scala",
        "mediaservices/commons",
        "src/java/com/twitter/search/common/file",
        "src/scala/com/twitter/ml/api/embedding",
    ],
    # Re-exported so that downstream users of the annoy target get the common
    # ANN interfaces and file utilities without declaring them directly.
    exports = [
        "ann/src/main/scala/com/twitter/ann/common",
        "src/java/com/twitter/common_internal/hadoop",
        "src/java/com/twitter/search/common/file",
        "src/scala/com/twitter/ml/api/embedding",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/annoy/RawAnnoyIndexBuilder.scala
package com.twitter.ann.annoy

import com.spotify.annoy.jni.base.{Annoy => AnnoyLib}
import com.twitter.ann.annoy.AnnoyCommon.IndexFileName
import com.twitter.ann.annoy.AnnoyCommon.MetaDataFileName
import com.twitter.ann.annoy.AnnoyCommon.MetadataCodec
import com.twitter.ann.common.EmbeddingType._
import com.twitter.ann.common._
import com.twitter.ann.common.thriftscala.AnnoyIndexMetadata
import com.twitter.concurrent.AsyncSemaphore
import com.twitter.mediaservices.commons.codec.ArrayByteBufferCodec
import com.twitter.search.common.file.AbstractFile
import com.twitter.search.common.file.LocalFile
import com.twitter.util.Future
import com.twitter.util.FuturePool
import java.io.File
import java.nio.file.Files
import org.apache.beam.sdk.io.fs.ResourceId
import scala.collection.JavaConverters._

/**
 * Factory for a raw (Long-keyed) Annoy index builder backed by the annoy-jni library.
 */
private[annoy] object RawAnnoyIndexBuilder {

  private[annoy] def apply[D <: Distance[D]](
    dimension: Int,
    numOfTrees: Int,
    metric: Metric[D],
    futurePool: FuturePool
  ): RawAppendable[AnnoyRuntimeParams, D] with Serialization = {
    val indexBuilder = AnnoyLib.newIndex(dimension, annoyMetric(metric))
    new RawAnnoyIndexBuilder(dimension, numOfTrees, metric, indexBuilder, futurePool)
  }

  // Maps the library-agnostic Metric onto the annoy-jni metric enum.
  // Annoy supports only euclidean and angular distances.
  private[this] def annoyMetric(metric: Metric[_]): AnnoyLib.Metric = {
    metric match {
      case L2 => AnnoyLib.Metric.EUCLIDEAN
      case Cosine => AnnoyLib.Metric.ANGULAR
      case _ => throw new RuntimeException("Not supported: " + metric)
    }
  }
}

/**
 * Appendable/serializable wrapper over the annoy-jni builder.
 * Appends are serialized through a single-permit semaphore because the
 * underlying builder is not thread-safe.
 */
private[this] class RawAnnoyIndexBuilder[D <: Distance[D]](
  dimension: Int,
  numOfTrees: Int,
  metric: Metric[D],
  indexBuilder: AnnoyLib.Builder,
  futurePool: FuturePool)
    extends RawAppendable[AnnoyRuntimeParams, D]
    with Serialization {
  // Monotonically increasing annoy id; the first appended vector gets id 1.
  private[this] var counter = 0

  // Note: Only one thread can access the underlying index, multithreaded index building not supported
  private[this] val semaphore = new AsyncSemaphore(1)

  /**
   * Appends an embedding and returns the annoy id assigned to it.
   */
  override def append(embedding: EmbeddingVector): Future[Long] =
    semaphore.acquireAndRun({
      counter += 1
      indexBuilder.addItem(
        counter,
        embedding.toArray
          .map(float => float2Float(float)) // box to java.lang.Float for the JNI API
          .toList
          .asJava
      )

      Future.value(counter)
    })

  /**
   * Builds the index and loads it back as a queryable via a temp directory —
   * annoy can only be queried from a serialized, memory-mapped file.
   */
  override def toQueryable: Queryable[Long, AnnoyRuntimeParams, D] = {
    val tempDirParent = Files.createTempDirectory("raw_annoy_index").toFile
    tempDirParent.deleteOnExit
    val tempDir = new LocalFile(tempDirParent)
    this.toDirectory(tempDir)

    RawAnnoyQueryIndex(
      dimension,
      metric,
      futurePool,
      tempDir
    )
  }

  override def toDirectory(directory: ResourceId): Unit = {
    toDirectory(new IndexOutputFile(directory))
  }

  /**
   * Serialize the annoy index in a directory.
   * @param directory: Directory to save to.
   */
  override def toDirectory(directory: AbstractFile): Unit = {
    toDirectory(new IndexOutputFile(directory))
  }

  // Writes the built tree file plus a small thrift metadata file alongside it.
  private def toDirectory(directory: IndexOutputFile): Unit = {
    val indexFile = directory.createFile(IndexFileName)
    saveIndex(indexFile)

    val metaDataFile = directory.createFile(MetaDataFileName)
    saveMetadata(metaDataFile)
  }

  // Annoy can only save to a local path, so build into a temp file then copy.
  private[this] def saveIndex(indexFile: IndexOutputFile): Unit = {
    val index = indexBuilder
      .build(numOfTrees)
    val temp = new LocalFile(File.createTempFile(IndexFileName, null))
    index.save(temp.getPath)
    indexFile.copyFrom(temp.getByteSource.openStream())
    temp.delete()
  }

  // Persists dimension/metric/trees/count so loading can validate compatibility.
  private[this] def saveMetadata(metadataFile: IndexOutputFile): Unit = {
    val numberOfVectorsIndexed = counter
    val metadata = AnnoyIndexMetadata(
      dimension,
      Metric.toThrift(metric),
      numOfTrees,
      numberOfVectorsIndexed
    )
    val bytes = ArrayByteBufferCodec.decode(MetadataCodec.encode(metadata))
    val temp = new LocalFile(File.createTempFile(MetaDataFileName, null))
    temp.getByteSink.write(bytes)
    metadataFile.copyFrom(temp.getByteSource.openStream())
    temp.delete()
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/annoy/RawAnnoyQueryIndex.scala
package com.twitter.ann.annoy

import com.spotify.annoy.{ANNIndex, IndexType}
import com.twitter.ann.annoy.AnnoyCommon._
import com.twitter.ann.common._
import com.twitter.ann.common.EmbeddingType._
import com.twitter.mediaservices.commons.codec.ArrayByteBufferCodec
import com.twitter.search.common.file.{AbstractFile, LocalFile}
import com.twitter.util.{Future, FuturePool}
import java.io.File
import scala.collection.JavaConverters._

/**
 * Loads a serialized annoy index and exposes it as a Long-keyed Queryable.
 */
private[annoy] object RawAnnoyQueryIndex {

  // Loads the index, validating that the stored dimension and metric in the
  // metadata file match what the caller expects.
  private[annoy] def apply[D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    futurePool: FuturePool,
    directory: AbstractFile
  ): Queryable[Long, AnnoyRuntimeParams, D] = {
    val metadataFile = directory.getChild(MetaDataFileName)
    val indexFile = directory.getChild(IndexFileName)
    val metadata = MetadataCodec.decode(
      ArrayByteBufferCodec.encode(metadataFile.getByteSource.read())
    )

    val existingDimension = metadata.dimension
    assert(
      existingDimension == dimension,
      s"Dimensions do not match. requested: $dimension existing: $existingDimension"
    )

    val existingMetric = Metric.fromThrift(metadata.distanceMetric)
    assert(
      existingMetric == metric,
      s"DistanceMetric do not match. requested: $metric existing: $existingMetric"
    )

    val index = loadIndex(indexFile, dimension, annoyMetric(metric))
    new RawAnnoyQueryIndex[D](
      dimension,
      metric,
      metadata.numOfTrees,
      index,
      futurePool
    )
  }

  // Maps the library-agnostic Metric onto the annoy-java IndexType enum.
  private[this] def annoyMetric(metric: Metric[_]): IndexType = {
    metric match {
      case L2 => IndexType.EUCLIDEAN
      case Cosine => IndexType.ANGULAR
      case _ => throw new RuntimeException("Not supported: " + metric)
    }
  }

  private[this] def loadIndex(
    indexFile: AbstractFile,
    dimension: Int,
    indexType: IndexType
  ): ANNIndex = {
    var localIndexFile = indexFile
    // If not a local file copy to local, so that it can be memory mapped.
    if (!indexFile.isInstanceOf[LocalFile]) {
      val tempFile = File.createTempFile(IndexFileName, null)
      tempFile.deleteOnExit()
      val temp = new LocalFile(tempFile)
      indexFile.copyTo(temp)
      localIndexFile = temp
    }

    new ANNIndex(
      dimension,
      localIndexFile.getPath(),
      indexType
    )
  }
}

/**
 * Long-keyed queryable over a memory-mapped annoy index.
 * AutoCloseable: `close()` releases the underlying memory-mapped file.
 */
private[this] class RawAnnoyQueryIndex[D <: Distance[D]](
  dimension: Int,
  metric: Metric[D],
  numOfTrees: Int,
  index: ANNIndex,
  futurePool: FuturePool)
    extends Queryable[Long, AnnoyRuntimeParams, D]
    with AutoCloseable {

  // Ids only: delegates to queryWithDistance and drops the distances.
  override def query(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: AnnoyRuntimeParams
  ): Future[List[Long]] = {
    queryWithDistance(embedding, numOfNeighbours, runtimeParams)
      .map(_.map(_.neighbor))
  }

  override def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: AnnoyRuntimeParams
  ): Future[List[NeighborWithDistance[Long, D]]] = {
    futurePool {
      val queryVector = embedding.toArray
      // May over-request (see neighboursToRequest) and is capped via take() below.
      val neigboursToRequest = neighboursToRequest(numOfNeighbours, runtimeParams)
      val neigbours = index
        .getNearestWithDistance(queryVector, neigboursToRequest)
        .asScala
        .take(numOfNeighbours)
        .map { nn =>
          val id = nn.getFirst.toLong
          val distance = metric.fromAbsoluteDistance(nn.getSecond)
          NeighborWithDistance(id, distance)
        }
        .toList
      neigbours
    }
  }

  // Annoy java lib do not expose param for numOfNodesToExplore.
  // Default number is numOfTrees*numOfNeigbours.
  // Simple hack is to artificially increase the numOfNeighbours to be requested and then just cap it before returning.
  private[this] def neighboursToRequest(
    numOfNeighbours: Int,
    annoyParams: AnnoyRuntimeParams
  ): Int = {
    annoyParams.nodesToExplore match {
      case Some(nodesToExplore) => {
        val neigboursToRequest = nodesToExplore / numOfTrees
        if (neigboursToRequest < numOfNeighbours)
          numOfNeighbours
        else neigboursToRequest
      }
      case _ => numOfNeighbours
    }
  }

  // To close the memory map based file resource.
  override def close(): Unit = index.close()
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/annoy/TypedAnnoyIndex.scala
package com.twitter.ann.annoy

import com.twitter.ann.common._
import com.twitter.bijection.Injection
import com.twitter.search.common.file.AbstractFile
import com.twitter.util.FuturePool

/** Public entry points for Annoy-backed approximate-nearest-neighbour indices. */
object TypedAnnoyIndex {

  /**
   * Creates an Annoy-based typed index builder that serializes the index to a
   * directory (HDFS/local file system).
   *
   * Note: relies on C/C++ JNI bindings, so it cannot run inside scalding jobs
   * (the native build conflicts with libraries installed on hadoop hosts); use
   * it on aurora, or via the IndexBuilding job which runs scalding but streams
   * data to an aurora machine for the actual build.
   *
   * @param dimension dimension of embedding
   * @param numOfTrees number of trees in the forest; more trees give higher query
   *                   precision at the cost of build-time memory and disk. The
   *                   serialized index is memory-mapped at runtime, so disk is the
   *                   main runtime requirement.
   * @param metric distance metric for nearest neighbour search
   * @param injection converts ids of type T to/from bytes
   * @param futurePool pool used for asynchronous work
   * @tparam T type of id for embedding
   * @tparam D typed distance
   * @return serializable Annoy index builder
   */
  def indexBuilder[T, D <: Distance[D]](
    dimension: Int,
    numOfTrees: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    futurePool: FuturePool
  ): Appendable[T, AnnoyRuntimeParams, D] with Serialization =
    TypedAnnoyIndexBuilderWithFile(dimension, numOfTrees, metric, injection, futurePool)

  /**
   * Loads a queryable Annoy index previously serialized to `directory`.
   *
   * @param dimension dimension of embedding
   * @param metric distance metric for nearest neighbour search
   * @param injection converts ids of type T to/from bytes
   * @param futurePool pool used for asynchronous work
   * @param directory directory (HDFS/local file system) holding the serialized index
   * @tparam T type of id for embedding
   * @tparam D typed distance
   * @return typed queryable Annoy index
   */
  def loadQueryableIndex[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    futurePool: FuturePool,
    directory: AbstractFile
  ): Queryable[T, AnnoyRuntimeParams, D] =
    TypedAnnoyQueryIndexWithFile(dimension, metric, injection, futurePool, directory)
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/annoy/TypedAnnoyIndexBuilderWithFile.scala
package com.twitter.ann.annoy

import com.twitter.ann.annoy.AnnoyCommon.IndexIdMappingFileName
import com.twitter.ann.common._
import com.twitter.ann.file_store.WritableIndexIdFileStore
import com.twitter.bijection.Injection
import com.twitter.search.common.file.AbstractFile
import com.twitter.util.Future
import com.twitter.util.FuturePool
import org.apache.beam.sdk.io.fs.ResourceId

private[annoy] object TypedAnnoyIndexBuilderWithFile {
  private[annoy] def apply[T, D <: Distance[D]](
    dimension: Int,
    numOfTrees: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    futurePool: FuturePool
  ): Appendable[T, AnnoyRuntimeParams, D] with Serialization = {
    val rawIndex = RawAnnoyIndexBuilder(dimension, numOfTrees, metric, futurePool)
    val idStore = WritableIndexIdFileStore(injection)
    new TypedAnnoyIndexBuilderWithFile[T, D](rawIndex, idStore)
  }
}

/**
 * Annoy index builder pairing a raw Long-keyed annoy index with a file-backed
 * store that maps annoy ids back to typed user ids.
 */
private[this] class TypedAnnoyIndexBuilderWithFile[T, D <: Distance[D]](
  indexBuilder: RawAppendable[AnnoyRuntimeParams, D] with Serialization,
  store: WritableIndexIdFileStore[T])
    extends Appendable[T, AnnoyRuntimeParams, D]
    with Serialization {
  // Adapter that records the id mapping on every append.
  private[this] val typedIndex = IndexTransformer.transformAppendable(indexBuilder, store)

  override def append(entity: EntityEmbedding[T]): Future[Unit] = typedIndex.append(entity)

  override def toDirectory(directory: ResourceId): Unit = {
    indexBuilder.toDirectory(directory)
    writeIdMapping(new IndexOutputFile(directory))
  }

  override def toDirectory(directory: AbstractFile): Unit = {
    indexBuilder.toDirectory(directory)
    writeIdMapping(new IndexOutputFile(directory))
  }

  // Persists the annoy-id -> typed-id mapping next to the raw index files.
  private def writeIdMapping(directory: IndexOutputFile): Unit = {
    val indexIdFile = directory.createFile(IndexIdMappingFileName)
    store.save(indexIdFile)
  }

  override def toQueryable: Queryable[T, AnnoyRuntimeParams, D] = typedIndex.toQueryable
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/annoy/TypedAnnoyQueryIndexWithFile.scala
package com.twitter.ann.annoy

import com.twitter.ann.annoy.AnnoyCommon._
import com.twitter.ann.common._
import com.twitter.ann.file_store.ReadableIndexIdFileStore
import com.twitter.bijection.Injection
import com.twitter.search.common.file.AbstractFile
import com.twitter.util.FuturePool

private[annoy] object TypedAnnoyQueryIndexWithFile {
  private[annoy] def apply[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    futurePool: FuturePool,
    directory: AbstractFile
  ): Queryable[T, AnnoyRuntimeParams, D] =
    new TypedAnnoyQueryIndexWithFile(dimension, metric, futurePool, injection)
      .fromDirectory(directory)
}

/**
 * Deserializer that loads a raw annoy index together with its id-mapping file
 * and exposes the pair as one typed queryable index.
 */
private[this] class TypedAnnoyQueryIndexWithFile[T, D <: Distance[D]](
  dimension: Int,
  metric: Metric[D],
  futurePool: FuturePool,
  injection: Injection[T, Array[Byte]])
    extends QueryableDeserialization[
      T,
      AnnoyRuntimeParams,
      D,
      Queryable[T, AnnoyRuntimeParams, D]
    ] {
  override def fromDirectory(directory: AbstractFile): Queryable[T, AnnoyRuntimeParams, D] = {
    // Load the Long-keyed annoy index, then wrap it so query results are
    // translated back to typed ids via the persisted mapping file.
    val rawIndex = RawAnnoyQueryIndex(dimension, metric, futurePool, directory)
    val idStore =
      ReadableIndexIdFileStore(directory.getChild(IndexIdMappingFileName), injection)
    IndexTransformer.transformQueryable(rawIndex, idStore)
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/brute_force/BUILD
# Exact (brute-force) nearest-neighbour index; scans all embeddings per query.
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/scala/com/twitter/ann/serialization",
        "ann/src/main/thrift/com/twitter/ann/serialization:serialization-scala",
        "src/java/com/twitter/search/common/file",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/brute_force/BruteForceDeserialization.scala
package com.twitter.ann.brute_force

import com.google.common.annotations.VisibleForTesting
import com.twitter.ann.common.{Distance, EntityEmbedding, Metric, QueryableDeserialization}
import com.twitter.ann.serialization.{PersistedEmbeddingInjection, ThriftIteratorIO}
import com.twitter.ann.serialization.thriftscala.PersistedEmbedding
import com.twitter.search.common.file.{AbstractFile, LocalFile}
import com.twitter.util.FuturePool
import java.io.File

/**
 * Deserializes a BruteForceIndex previously written by SerializableBruteForceIndex.
 *
 * @param factory creates a BruteForceIndex from the arguments. This is only exposed for testing.
 *                If for some reason you pass this arg in make sure that it eagerly consumes the
 *                iterator. If you don't you might close the input stream that the iterator is
 *                using.
 * @tparam T the id of the embeddings
 */
class BruteForceDeserialization[T, D <: Distance[D]] @VisibleForTesting private[brute_force] (
  metric: Metric[D],
  embeddingInjection: PersistedEmbeddingInjection[T],
  futurePool: FuturePool,
  thriftIteratorIO: ThriftIteratorIO[PersistedEmbedding],
  factory: (Metric[D], FuturePool, Iterator[EntityEmbedding[T]]) => BruteForceIndex[T, D])
    extends QueryableDeserialization[T, BruteForceRuntimeParams.type, D, BruteForceIndex[T, D]] {
  import BruteForceIndex._

  // Public constructor: uses the standard BruteForceIndex factory, which
  // consumes the embedding iterator eagerly.
  def this(
    metric: Metric[D],
    embeddingInjection: PersistedEmbeddingInjection[T],
    futurePool: FuturePool,
    thriftIteratorIO: ThriftIteratorIO[PersistedEmbedding]
  ) = {
    this(
      metric,
      embeddingInjection,
      futurePool,
      thriftIteratorIO,
      factory = BruteForceIndex.apply[T, D]
    )
  }

  /**
   * Loads a brute-force index from `serializationDirectory`.
   *
   * The data file is first copied to a local temp file (the source may be
   * remote, e.g. HDFS), then its thrift-encoded embeddings are streamed into
   * the factory. The temp file and stream are cleaned up afterwards.
   */
  override def fromDirectory(
    serializationDirectory: AbstractFile
  ): BruteForceIndex[T, D] = {
    val file = File.createTempFile(DataFileName, "tmp")
    file.deleteOnExit()
    val temp = new LocalFile(file)
    val dataFile = serializationDirectory.getChild(DataFileName)
    dataFile.copyTo(temp)
    val inputStream = temp.getByteSource.openBufferedStream()
    try {
      val iterator: Iterator[PersistedEmbedding] = thriftIteratorIO.fromInputStream(inputStream)

      val embeddings = iterator.map { thriftEmbedding =>
        embeddingInjection.invert(thriftEmbedding).get
      }

      // The factory must consume `embeddings` eagerly: the backing stream is
      // closed in the finally block below.
      factory(metric, futurePool, embeddings)
    } finally {
      inputStream.close()
      temp.delete()
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/brute_force/BruteForceIndex.scala
package com.twitter.ann.brute_force

import com.twitter.ann.common.Appendable
import com.twitter.ann.common.Distance
import com.twitter.ann.common.EmbeddingType._
import com.twitter.ann.common.EntityEmbedding
import com.twitter.ann.common.IndexOutputFile
import com.twitter.ann.common.Metric
import com.twitter.ann.common.NeighborWithDistance
import com.twitter.ann.common.Queryable
import com.twitter.ann.common.RuntimeParams
import com.twitter.ann.common.Serialization
import com.twitter.ann.serialization.PersistedEmbeddingInjection
import com.twitter.ann.serialization.ThriftIteratorIO
import com.twitter.ann.serialization.thriftscala.PersistedEmbedding
import com.twitter.search.common.file.AbstractFile
import com.twitter.util.Future
import com.twitter.util.FuturePool
import java.util.concurrent.ConcurrentLinkedQueue
import org.apache.beam.sdk.io.fs.ResourceId
import scala.collection.JavaConverters._
import scala.collection.mutable

// Marker params object: brute-force search has nothing to tune at query time.
object BruteForceRuntimeParams extends RuntimeParams

object BruteForceIndex {
  val DataFileName = "BruteForceFileData"

  def apply[T, D <: Distance[D]](
    metric: Metric[D],
    futurePool: FuturePool,
    initialEmbeddings: Iterator[EntityEmbedding[T]] = Iterator()
  ): BruteForceIndex[T, D] = {
    val linkedQueue = new ConcurrentLinkedQueue[EntityEmbedding[T]]
    // Drain the iterator eagerly: callers may close its backing stream after apply() returns.
    initialEmbeddings.foreach(embedding => linkedQueue.add(embedding))
    new BruteForceIndex(metric, futurePool, linkedQueue)
  }
}

/**
 * Exact nearest-neighbour index: every query scans all stored embeddings.
 */
class BruteForceIndex[T, D <: Distance[D]] private (
  metric: Metric[D],
  futurePool: FuturePool,
  // visible for serialization
  private[brute_force] val linkedQueue: ConcurrentLinkedQueue[EntityEmbedding[T]])
    extends Appendable[T, BruteForceRuntimeParams.type, D]
    with Queryable[T, BruteForceRuntimeParams.type, D] {

  override def append(embedding: EntityEmbedding[T]): Future[Unit] = {
    futurePool {
      linkedQueue.add(embedding)
    }
  }

  override def toQueryable: Queryable[T, BruteForceRuntimeParams.type, D] = this

  // Ids only: delegates to queryWithDistance and drops the distances.
  override def query(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: BruteForceRuntimeParams.type
  ): Future[List[T]] = {
    queryWithDistance(embedding, numOfNeighbours, runtimeParams).map { neighborsWithDistance =>
      neighborsWithDistance.map(_.neighbor)
    }
  }

  // Keeps a bounded heap of the current best candidates while scanning the queue,
  // then returns them sorted by ascending distance.
  override def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: BruteForceRuntimeParams.type
  ): Future[List[NeighborWithDistance[T, D]]] = {
    futurePool {
      // Use the reverse ordering so that we can call dequeue to remove the largest element.
      val ordering = Ordering.by[NeighborWithDistance[T, D], D](_.distance)
      val priorityQueue =
        new mutable.PriorityQueue[NeighborWithDistance[T, D]]()(ordering)
      linkedQueue
        .iterator()
        .asScala
        .foreach { entity =>
          val neighborWithDistance =
            NeighborWithDistance(entity.id, metric.distance(entity.embedding, embedding))
          priorityQueue.+=(neighborWithDistance)
          // Cap the heap at numOfNeighbours by evicting the worst candidate.
          if (priorityQueue.size > numOfNeighbours) {
            priorityQueue.dequeue()
          }
        }
      val reverseList: List[NeighborWithDistance[T, D]] =
        priorityQueue.dequeueAll
      reverseList.reverse
    }
  }
}

object SerializableBruteForceIndex {
  def apply[T, D <: Distance[D]](
    metric: Metric[D],
    futurePool: FuturePool,
    embeddingInjection: PersistedEmbeddingInjection[T],
    thriftIteratorIO: ThriftIteratorIO[PersistedEmbedding]
  ): SerializableBruteForceIndex[T, D] = {
    val bruteForceIndex = BruteForceIndex[T, D](metric, futurePool)
    new SerializableBruteForceIndex(bruteForceIndex, embeddingInjection, thriftIteratorIO)
  }
}

/**
 * This is a class that wrapps a BruteForceIndex and provides a method for serialization.
 *
 * @param bruteForceIndex all queries and updates are sent to this index.
 * @param embeddingInjection injection that can convert embeddings to thrift embeddings.
 * @param thriftIteratorIO class that provides a way to write PersistedEmbeddings to disk
 */
class SerializableBruteForceIndex[T, D <: Distance[D]](
  bruteForceIndex: BruteForceIndex[T, D],
  embeddingInjection: PersistedEmbeddingInjection[T],
  thriftIteratorIO: ThriftIteratorIO[PersistedEmbedding])
    extends Appendable[T, BruteForceRuntimeParams.type, D]
    with Queryable[T, BruteForceRuntimeParams.type, D]
    with Serialization {
  import BruteForceIndex._

  override def append(entity: EntityEmbedding[T]): Future[Unit] =
    bruteForceIndex.append(entity)

  override def toQueryable: Queryable[T, BruteForceRuntimeParams.type, D] = this

  override def query(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: BruteForceRuntimeParams.type
  ): Future[List[T]] =
    bruteForceIndex.query(embedding, numOfNeighbours, runtimeParams)

  override def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: BruteForceRuntimeParams.type
  ): Future[List[NeighborWithDistance[T, D]]] =
    bruteForceIndex.queryWithDistance(embedding, numOfNeighbours, runtimeParams)

  override def toDirectory(serializationDirectory: ResourceId): Unit = {
    toDirectory(new IndexOutputFile(serializationDirectory))
  }

  override def toDirectory(serializationDirectory: AbstractFile): Unit = {
    toDirectory(new IndexOutputFile(serializationDirectory))
  }

  // Streams every stored embedding, thrift-encoded, into a single data file.
  private def toDirectory(serializationDirectory: IndexOutputFile): Unit = {
    val outputStream = serializationDirectory.createFile(DataFileName).getOutputStream()
    val thriftEmbeddings =
      bruteForceIndex.linkedQueue.iterator().asScala.map { embedding =>
        embeddingInjection(embedding)
      }
    try {
      thriftIteratorIO.toOutputStream(thriftEmbeddings, outputStream)
    } finally {
      outputStream.close()
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/AnnInjections.scala
package com.twitter.ann.common

import com.twitter.bijection.{Bijection, Injection}

// Class providing commonly used injections that can be used directly with ANN apis.
// Injection prefixed with `J` can be used in java directly with ANN apis.
object AnnInjections {

  // All injections are declared as stable `val`s for consistency; the original
  // mixed `val` and `def` for no apparent reason, re-evaluating the `def`s on
  // every reference.
  val LongInjection: Injection[Long, Array[Byte]] = Injection.long2BigEndian

  val StringInjection: Injection[String, Array[Byte]] = Injection.utf8

  val IntInjection: Injection[Int, Array[Byte]] = Injection.int2BigEndian

  // Boxed-Long variant for Java callers: unbox, then reuse the Scala injection.
  val JLongInjection: Injection[java.lang.Long, Array[Byte]] =
    Bijection.long2Boxed
      .asInstanceOf[Bijection[Long, java.lang.Long]]
      .inverse
      .andThen(LongInjection)

  // Strings are identical across Java and Scala, so the injection is shared.
  val JStringInjection: Injection[java.lang.String, Array[Byte]] =
    StringInjection

  // Boxed-Integer variant for Java callers.
  val JIntInjection: Injection[java.lang.Integer, Array[Byte]] =
    Bijection.int2Boxed
      .asInstanceOf[Bijection[Int, java.lang.Integer]]
      .inverse
      .andThen(IntInjection)
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/Api.scala
package com.twitter.ann.common

import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.ml.api.embedding.Embedding
import com.twitter.ml.api.embedding.EmbeddingMath
import com.twitter.ml.api.embedding.EmbeddingSerDe
import com.twitter.util.Future

object EmbeddingType {
  // An embedding is a dense vector of Floats.
  type EmbeddingVector = Embedding[Float]
  // Thrift (de)serializer for Float embeddings.
  val embeddingSerDe = EmbeddingSerDe.apply[Float]
  // Float vector math used by the Metric implementations in this package.
  private[common] val math = EmbeddingMath.Float
}

/**
 * Typed entity with an embedding associated with it.
 * @param id : Unique Id for an entity.
 * @param embedding : Embedding/Vector of an entity.
 * @tparam T: Type of id.
 */
case class EntityEmbedding[T](id: T, embedding: EmbeddingVector)

// Query interface for ANN
trait Queryable[T, P <: RuntimeParams, D <: Distance[D]] {

  /**
   * ANN query for ids.
   * @param embedding: Embedding/Vector to be queried with.
   * @param numOfNeighbors: Number of neighbours to be queried for.
   * @param runtimeParams: Runtime params associated with index to control accuracy/latency etc.
   * @return List of approximate nearest neighbour ids.
   */
  def query(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Future[List[T]]

  /**
   * ANN query for ids with distance.
   * @param embedding: Embedding/Vector to be queried with.
   * @param numOfNeighbors: Number of neighbours to be queried for.
   * @param runtimeParams: Runtime params associated with index to control accuracy/latency etc.
   * @return List of approximate nearest neighbour ids with distance from the query embedding.
   */
  def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Future[List[NeighborWithDistance[T, D]]]
}

// Query interface for ANN over indexes that are grouped
trait QueryableGrouped[T, P <: RuntimeParams, D <: Distance[D]] extends Queryable[T, P, D] {

  /**
   * ANN query for ids.
   * @param embedding: Embedding/Vector to be queried with.
   * @param numOfNeighbors: Number of neighbours to be queried for.
   * @param runtimeParams: Runtime params associated with index to control accuracy/latency etc.
   * @param key: Optional key to lookup specific ANN index and perform query there
   * @return List of approximate nearest neighbour ids.
   */
  def query(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P,
    key: Option[String]
  ): Future[List[T]]

  /**
   * ANN query for ids with distance.
   * @param embedding: Embedding/Vector to be queried with.
   * @param numOfNeighbors: Number of neighbours to be queried for.
   * @param runtimeParams: Runtime params associated with index to control accuracy/latency etc.
   * @param key: Optional key to lookup specific ANN index and perform query there
   * @return List of approximate nearest neighbour ids with distance from the query embedding.
   */
  def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P,
    key: Option[String]
  ): Future[List[NeighborWithDistance[T, D]]]
}

/**
 * Runtime params associated with index to control accuracy/latency etc while querying.
 */
trait RuntimeParams {}

/**
 * ANN query result with distance.
 * @param neighbor : Id of the neighbours
 * @param distance: Distance of neighbour from query ex: D: CosineDistance, L2Distance, InnerProductDistance
 */
case class NeighborWithDistance[T, D <: Distance[D]](neighbor: T, distance: D)

/**
 * ANN query result with seed entity for which this neighbor was provided.
 * @param seed: Seed Id for which ann query was called
 * @param neighbor : Id of the neighbours
 */
case class NeighborWithSeed[T1, T2](seed: T1, neighbor: T2)

/**
 * ANN query result with distance with seed entity for which this neighbor was provided.
 * @param seed: Seed Id for which ann query was called
 * @param neighbor : Id of the neighbours
 * @param distance: Distance of neighbour from query ex: D: CosineDistance, L2Distance, InnerProductDistance
 */
case class NeighborWithDistanceWithSeed[T1, T2, D <: Distance[D]](
  seed: T1,
  neighbor: T2,
  distance: D)

// Index building interface over raw (auto-assigned Long id) embeddings.
trait RawAppendable[P <: RuntimeParams, D <: Distance[D]] {

  /**
   * Append an embedding in an index.
   * @param embedding: Embedding/Vector
   * @return Future of long id associated with embedding autogenerated.
   */
  def append(embedding: EmbeddingVector): Future[Long]

  /**
   * Convert an Appendable to Queryable interface to query an index.
   */
  def toQueryable: Queryable[Long, P, D]
}

// Index building interface for ANN.
trait Appendable[T, P <: RuntimeParams, D <: Distance[D]] {

  /**
   * Append an entity with embedding in an index.
   * @param entity: Entity with its embedding
   */
  def append(entity: EntityEmbedding[T]): Future[Unit]

  /**
   * Convert an Appendable to Queryable interface to query an index.
   */
  def toQueryable: Queryable[T, P, D]
}

// Updatable index interface for ANN.
trait Updatable[T] {
  // Overwrite the embedding stored for an existing entity.
  def update(entity: EntityEmbedding[T]): Future[Unit]
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/BUILD
# Build target for the shared ANN abstractions (Queryable/Appendable traits,
# distance metrics, serialization helpers) used by the ANN indexes and services.
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/com/google/guava",
        "3rdparty/jvm/com/twitter/bijection:core",
        "3rdparty/jvm/com/twitter/storehaus:core",
        "3rdparty/jvm/org/apache/beam:beam-sdks-java-io-google-cloud-platform",
        "ann/src/main/thrift/com/twitter/ann/common:ann-common-scala",
        "finatra/inject/inject-mdc/src/main/scala",
        "mediaservices/commons/src/main/scala:futuretracker",
        "src/java/com/twitter/search/common/file",
        "src/scala/com/twitter/ml/api/embedding",
        "stitch/stitch-core",
    ],
    # Bijection types appear in this library's public API (AnnInjections),
    # so the dependency is exported to downstream targets.
    exports = [
        "3rdparty/jvm/com/twitter/bijection:core",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/EmbeddingProducer.scala
package com.twitter.ann.common

import com.twitter.stitch.Stitch

/**
 * Produces an embedding for an input of type T.
 * @tparam T type of the input from which an embedding is produced.
 */
trait EmbeddingProducer[T] {

  /**
   * Produce an embedding from type T. Implementations of this could do a lookup from an id to an
   * embedding. Or they could run a deep model on features that output an embedding.
   * @return An embedding Stitch; None if no embedding can be produced for the input.
   *         See go/stitch for details on how to use the Stitch API.
   */
  def produceEmbedding(input: T): Stitch[Option[EmbeddingType.EmbeddingVector]]
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/IndexOutputFile.scala
package com.twitter.ann.common

import com.google.common.io.ByteStreams
import com.twitter.ann.common.thriftscala.AnnIndexMetadata
import com.twitter.mediaservices.commons.codec.ArrayByteBufferCodec
import com.twitter.mediaservices.commons.codec.ThriftByteBufferCodec
import com.twitter.search.common.file.AbstractFile
import java.io.IOException
import java.io.InputStream
import java.io.OutputStream
import java.nio.channels.Channels
import org.apache.beam.sdk.io.FileSystems
import org.apache.beam.sdk.io.fs.MoveOptions
import org.apache.beam.sdk.io.fs.ResolveOptions
import org.apache.beam.sdk.io.fs.ResolveOptions.StandardResolveOptions
import org.apache.beam.sdk.io.fs.ResourceId
import org.apache.beam.sdk.util.MimeTypes
import org.apache.hadoop.io.IOUtils
import scala.collection.JavaConverters._

/**
 * This class creates a wrapper around GCS filesystem and HDFS filesystem for the index
 * generation job. It implements the basic methods required by the index generation job and hides
 * the logic around handling HDFS vs GCS.
 *
 * Exactly one of `abstractFile` (HDFS/local) and `resourceId` (GCS) is non-null.
 */
class IndexOutputFile(val abstractFile: AbstractFile, val resourceId: ResourceId) {

  // Success file name
  private val SUCCESS_FILE = "_SUCCESS"
  private val INDEX_METADATA_FILE = "ANN_INDEX_METADATA"
  private val MetadataCodec = new ThriftByteBufferCodec[AnnIndexMetadata](AnnIndexMetadata)

  /**
   * Constructor for ResourceId. This is used for GCS filesystem
   * @param resourceId GCS resource backing this file/directory.
   */
  def this(resourceId: ResourceId) = {
    this(null, resourceId)
  }

  /**
   * Constructor for AbstractFile. This is used for HDFS and local filesystem
   * @param abstractFile HDFS/local file backing this file/directory.
   */
  def this(abstractFile: AbstractFile) = {
    this(abstractFile, null)
  }

  /**
   * Returns true if this instance is around an AbstractFile (HDFS/local),
   * false if it wraps a Beam ResourceId (GCS).
   */
  def isAbstractFile(): Boolean = {
    abstractFile != null
  }

  /**
   * Creates a _SUCCESS file in the current directory.
   */
  def createSuccessFile(): Unit = {
    if (isAbstractFile()) {
      abstractFile.createSuccessFile()
    } else {
      val successFile =
        resourceId.resolve(SUCCESS_FILE, ResolveOptions.StandardResolveOptions.RESOLVE_FILE)
      // Creating and immediately closing the channel materializes an empty file.
      val successWriterChannel = FileSystems.create(successFile, MimeTypes.BINARY)
      successWriterChannel.close()
    }
  }

  /**
   * Returns whether the current instance represents a directory
   * @return True if the current instance is a directory
   */
  def isDirectory(): Boolean = {
    if (isAbstractFile()) {
      abstractFile.isDirectory
    } else {
      resourceId.isDirectory
    }
  }

  /**
   * Return the current path of the file represented by the current instance
   * @return The path string of the file/directory
   */
  def getPath(): String = {
    if (isAbstractFile()) {
      abstractFile.getPath.toString
    } else {
      if (resourceId.isDirectory) {
        resourceId.getCurrentDirectory.toString
      } else {
        resourceId.getCurrentDirectory.toString + resourceId.getFilename
      }
    }
  }

  /**
   * Creates a new file @param fileName in the current directory.
   * @param fileName name of the file to create.
   * @return A new file inside the current directory
   */
  def createFile(fileName: String): IndexOutputFile = {
    if (isAbstractFile()) {
      // AbstractFile treats files and directories the same way. Hence, not checking for directory
      // here.
      new IndexOutputFile(abstractFile.getChild(fileName))
    } else {
      if (!resourceId.isDirectory) {
        // If this is not a directory, throw exception.
        throw new IllegalArgumentException(getPath() + " is not a directory.")
      }
      new IndexOutputFile(
        resourceId.resolve(fileName, ResolveOptions.StandardResolveOptions.RESOLVE_FILE))
    }
  }

  /**
   * Creates a new directory @param directoryName in the current directory.
   * @param directoryName name of the directory to create.
   * @return A new directory inside the current directory
   */
  def createDirectory(directoryName: String): IndexOutputFile = {
    if (isAbstractFile()) {
      // AbstractFile treats files and directories the same way. Hence, not checking for directory
      // here.
      val dir = abstractFile.getChild(directoryName)
      dir.mkdirs()
      new IndexOutputFile(dir)
    } else {
      if (!resourceId.isDirectory) {
        // If this is not a directory, throw exception.
        throw new IllegalArgumentException(getPath() + " is not a directory.")
      }
      val newResourceId =
        resourceId.resolve(directoryName, ResolveOptions.StandardResolveOptions.RESOLVE_DIRECTORY)

      // Create a tmp file and delete in order to trigger directory creation
      val tmpFile = newResourceId.resolve("tmp", ResolveOptions.StandardResolveOptions.RESOLVE_FILE)
      val tmpWriterChannel = FileSystems.create(tmpFile, MimeTypes.BINARY)
      tmpWriterChannel.close()
      FileSystems.delete(List(tmpFile).asJava, MoveOptions.StandardMoveOptions.IGNORE_MISSING_FILES)

      new IndexOutputFile(newResourceId)
    }
  }

  /**
   * Resolves a child of the current directory without creating it.
   * @param fileName name of the child.
   * @param isDirectory whether the child should be resolved as a directory (GCS only).
   */
  def getChild(fileName: String, isDirectory: Boolean = false): IndexOutputFile = {
    if (isAbstractFile()) {
      new IndexOutputFile(abstractFile.getChild(fileName))
    } else {
      val resolveOption = if (isDirectory) {
        StandardResolveOptions.RESOLVE_DIRECTORY
      } else {
        StandardResolveOptions.RESOLVE_FILE
      }
      new IndexOutputFile(resourceId.resolve(fileName, resolveOption))
    }
  }

  /**
   * Returns an OutputStream for the underlying file.
   * Note: Close the OutputStream after writing
   */
  def getOutputStream(): OutputStream = {
    if (isAbstractFile()) {
      abstractFile.getByteSink.openStream()
    } else {
      if (resourceId.isDirectory) {
        // If this is a directory, throw exception.
        throw new IllegalArgumentException(getPath() + " is a directory.")
      }
      val writerChannel = FileSystems.create(resourceId, MimeTypes.BINARY)
      Channels.newOutputStream(writerChannel)
    }
  }

  /**
   * Returns an InputStream for the underlying file.
   * Note: Close the InputStream after reading
   */
  def getInputStream(): InputStream = {
    if (isAbstractFile()) {
      abstractFile.getByteSource.openStream()
    } else {
      if (resourceId.isDirectory) {
        // If this is a directory, throw exception.
        throw new IllegalArgumentException(getPath() + " is a directory.")
      }
      val readChannel = FileSystems.open(resourceId)
      Channels.newInputStream(readChannel)
    }
  }

  /**
   * Copies content from the srcIn into the current file.
   *
   * Fixed: the output stream is now released on ANY failure, not only
   * IOException (the original leaked `out` for other throwables).
   * @param srcIn source stream; the caller remains responsible for closing it.
   */
  @throws[IOException]("if reading from srcIn or writing to this file fails")
  def copyFrom(srcIn: InputStream): Unit = {
    val out = getOutputStream()
    var copySucceeded = false
    try {
      IOUtils.copyBytes(srcIn, out, 4096)
      copySucceeded = true
    } finally {
      if (copySucceeded) {
        // Propagate close failures for a successful copy (matches the
        // original behavior of calling out.close() directly).
        out.close()
      } else {
        // Quiet close so the pending copy exception propagates unmasked.
        IOUtils.closeStream(out)
      }
    }
  }

  /**
   * Writes the thrift-encoded index metadata into the ANN_INDEX_METADATA file.
   * Fixed: the stream is now closed even if the write fails.
   */
  def writeIndexMetadata(annIndexMetadata: AnnIndexMetadata): Unit = {
    val out = createFile(INDEX_METADATA_FILE).getOutputStream()
    try {
      val bytes = ArrayByteBufferCodec.decode(MetadataCodec.encode(annIndexMetadata))
      out.write(bytes)
    } finally {
      out.close()
    }
  }

  /**
   * Reads the thrift-encoded index metadata from this file.
   * Fixed: the input stream is now closed (the original leaked it).
   */
  def loadIndexMetadata(): AnnIndexMetadata = {
    val in = getInputStream()
    try {
      val bytes = ByteStreams.toByteArray(in)
      MetadataCodec.decode(ArrayByteBufferCodec.encode(bytes))
    } finally {
      in.close()
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/IndexTransformer.scala
package com.twitter.ann.common

import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.storehaus.{ReadableStore, Store}
import com.twitter.util.Future

// Utility to transform raw index to typed index using Store
object IndexTransformer {

  /**
   * Transform a long type queryable index to Typed queryable index
   * @param index: Raw Queryable index
   * @param store: Readable store to provide mappings between Long and T
   * @tparam T: Type to transform to
   * @tparam P: Runtime params
   * @return Queryable index typed on T
   */
  def transformQueryable[T, P <: RuntimeParams, D <: Distance[D]](
    index: Queryable[Long, P, D],
    store: ReadableStore[Long, T]
  ): Queryable[T, P, D] = {
    new Queryable[T, P, D] {
      override def query(
        embedding: EmbeddingVector,
        numOfNeighbors: Int,
        runtimeParams: P
      ): Future[List[T]] = {
        val neighbors = index.query(embedding, numOfNeighbors, runtimeParams)
        neighbors
          .flatMap(nn => {
            // NOTE(review): `.get` assumes every raw id has a mapping in the store;
            // a missing id fails the whole query Future with NoSuchElementException.
            val ids = nn.map(id => store.get(id).map(_.get))
            Future
              .collect(ids)
              .map(_.toList)
          })
      }

      override def queryWithDistance(
        embedding: EmbeddingVector,
        numOfNeighbors: Int,
        runtimeParams: P
      ): Future[List[NeighborWithDistance[T, D]]] = {
        val neighbors = index.queryWithDistance(embedding, numOfNeighbors, runtimeParams)
        neighbors
          .flatMap(nn => {
            // Same `.get` caveat as above: ids absent from the store fail the Future.
            val ids =
              nn.map(obj =>
                store.get(obj.neighbor).map(id => NeighborWithDistance(id.get, obj.distance)))
            Future
              .collect(ids)
              .map(_.toList)
          })
      }
    }
  }

  /**
   * Transform a long type appendable index to Typed appendable index
   * @param index: Raw Appendable index
   * @param store: Writable store to store mappings between Long and T
   * @tparam T: Type to transform to
   * @return Appendable index typed on T
   */
  def transformAppendable[T, P <: RuntimeParams, D <: Distance[D]](
    index: RawAppendable[P, D],
    store: Store[Long, T]
  ): Appendable[T, P, D] = {
    new Appendable[T, P, D]() {
      override def append(entity: EntityEmbedding[T]): Future[Unit] = {
        // Append the embedding first, then persist the generated-id -> entity-id mapping.
        index
          .append(entity.embedding)
          .flatMap(id => store.put((id, Some(entity.id))))
      }

      override def toQueryable: Queryable[T, P, D] = {
        transformQueryable(index.toQueryable, store)
      }
    }
  }

  /**
   * Transform a long type appendable and queryable index to Typed appendable and queryable index
   * @param index: Raw Appendable and queryable index
   * @param store: Store to provide/store mappings between Long and T
   * @tparam T: Type to transform to
   * @tparam Index: Index
   * @return Appendable and queryable index typed on T
   */
  def transform1[
    Index <: RawAppendable[P, D] with Queryable[Long, P, D],
    T,
    P <: RuntimeParams,
    D <: Distance[D]
  ](
    index: Index,
    store: Store[Long, T]
  ): Queryable[T, P, D] with Appendable[T, P, D] = {
    val queryable = transformQueryable(index, store)
    val appendable = transformAppendable(index, store)

    // Combine both views, delegating to the wrappers built above.
    new Queryable[T, P, D] with Appendable[T, P, D] {
      override def query(
        embedding: EmbeddingVector,
        numOfNeighbors: Int,
        runtimeParams: P
      ) = queryable.query(embedding, numOfNeighbors, runtimeParams)

      override def queryWithDistance(
        embedding: EmbeddingVector,
        numOfNeighbors: Int,
        runtimeParams: P
      ) = queryable.queryWithDistance(embedding, numOfNeighbors, runtimeParams)

      override def append(entity: EntityEmbedding[T]) = appendable.append(entity)

      override def toQueryable: Queryable[T, P, D] = appendable.toQueryable
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/MemoizedInEpochs.scala
package com.twitter.ann.common

import com.twitter.util.Return
import com.twitter.util.Throw
import com.twitter.util.Try
import com.twitter.util.logging.Logging

// Memoization with a twist
// New epoch reuses K:V pairs from the previous epoch and recycles everything else
class MemoizedInEpochs[K, V](f: K => Try[V]) extends Logging {
  // Keys memoized during the most recent epoch, mapped to their computed values.
  private var memoizedCalls: Map[K, V] = Map.empty

  /**
   * Runs one epoch: computes `f` for keys not seen in the previous epoch, keeps
   * previously computed values for keys that are still requested, and drops the rest.
   * Keys whose computation fails are logged and omitted from the result.
   *
   * @return the successfully computed values, in the order of `keys`.
   */
  def epoch(keys: Seq[K]): Seq[V] = {
    val newSet = keys.toSet
    val keysToBeComputed = newSet.diff(memoizedCalls.keySet)
    val computedKeysAndValues = keysToBeComputed.map { key =>
      info(s"Memoize ${key}")
      (key, f(key))
    }
    val keysAndValuesAfterFilteringFailures = computedKeysAndValues
      .flatMap {
        case (key, Return(value)) => Some((key, value))
        case (key, Throw(e)) =>
          warn(s"Calling f for ${key} has failed", e)
          None
      }
    // Fixed: `filterKeys` returns a non-strict view (deprecated in Scala 2.13 for
    // exactly this pitfall); build a strict map so no lazy view is retained in
    // `memoizedCalls` across epochs.
    val keysReusedFromLastEpoch = memoizedCalls.filter { case (key, _) => newSet.contains(key) }
    memoizedCalls = keysReusedFromLastEpoch ++ keysAndValuesAfterFilteringFailures

    debug(s"Final memoization is ${memoizedCalls.keys.mkString(", ")}")

    // Preserve caller ordering; failed keys are simply absent.
    keys.flatMap(memoizedCalls.get)
  }

  def currentEpochKeys: Set[K] = memoizedCalls.keySet
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/Metric.scala
package com.twitter.ann.common

import com.google.common.collect.ImmutableBiMap
import com.twitter.ann.common.EmbeddingType._
import com.twitter.ann.common.thriftscala.DistanceMetric
import com.twitter.ann.common.thriftscala.{CosineDistance => ServiceCosineDistance}
import com.twitter.ann.common.thriftscala.{Distance => ServiceDistance}
import com.twitter.ann.common.thriftscala.{InnerProductDistance => ServiceInnerProductDistance}
import com.twitter.ann.common.thriftscala.{EditDistance => ServiceEditDistance}
import com.twitter.ann.common.thriftscala.{L2Distance => ServiceL2Distance}
import com.twitter.bijection.Injection
import scala.util.Failure
import scala.util.Success
import scala.util.Try

// Ann distance metrics
trait Distance[D] extends Any with Ordered[D] {
  def distance: Float
}

case class L2Distance(distance: Float) extends AnyVal with Distance[L2Distance] {
  override def compare(that: L2Distance): Int =
    Ordering.Float.compare(this.distance, that.distance)
}

case class CosineDistance(distance: Float) extends AnyVal with Distance[CosineDistance] {
  override def compare(that: CosineDistance): Int =
    Ordering.Float.compare(this.distance, that.distance)
}

case class InnerProductDistance(distance: Float)
    extends AnyVal
    with Distance[InnerProductDistance] {
  override def compare(that: InnerProductDistance): Int =
    Ordering.Float.compare(this.distance, that.distance)
}

case class EditDistance(distance: Float) extends AnyVal with Distance[EditDistance] {
  override def compare(that: EditDistance): Int =
    Ordering.Float.compare(this.distance, that.distance)
}

object Metric {
  // Bidirectional mapping between in-memory metrics and their thrift enum values.
  private[this] val thriftMetricMapping = ImmutableBiMap.of(
    L2,
    DistanceMetric.L2,
    Cosine,
    DistanceMetric.Cosine,
    InnerProduct,
    DistanceMetric.InnerProduct,
    Edit,
    DistanceMetric.EditDistance
  )

  def fromThrift(metric: DistanceMetric): Metric[_ <: Distance[_]] = {
    // NOTE(review): ImmutableBiMap.get returns null for an unmapped enum value;
    // keep the mapping above in sync with DistanceMetric.
    thriftMetricMapping.inverse().get(metric)
  }

  def toThrift(metric: Metric[_ <: Distance[_]]): DistanceMetric = {
    thriftMetricMapping.get(metric)
  }

  def fromString(metricName: String): Metric[_ <: Distance[_]] with Injection[_, ServiceDistance] = {
    metricName match {
      case "Cosine" => Cosine
      case "L2" => L2
      case "InnerProduct" => InnerProduct
      case "EditDistance" => Edit
      case _ =>
        throw new IllegalArgumentException(s"No Metric with the name $metricName")
    }
  }
}

sealed trait Metric[D <: Distance[D]] {

  /** Typed distance between two embeddings. */
  def distance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): D

  /** Raw float distance between two embeddings. */
  def absoluteDistance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): Float

  /** Wraps a raw float distance into this metric's typed distance. */
  def fromAbsoluteDistance(distance: Float): D
}

case object L2 extends Metric[L2Distance] with Injection[L2Distance, ServiceDistance] {
  override def distance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): L2Distance = {
    fromAbsoluteDistance(MetricUtil.l2distance(embedding1, embedding2).toFloat)
  }

  override def fromAbsoluteDistance(distance: Float): L2Distance = {
    L2Distance(distance)
  }

  override def absoluteDistance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): Float = distance(embedding1, embedding2).distance

  override def apply(scalaDistance: L2Distance): ServiceDistance = {
    ServiceDistance.L2Distance(ServiceL2Distance(scalaDistance.distance))
  }

  override def invert(serviceDistance: ServiceDistance): Try[L2Distance] = {
    serviceDistance match {
      case ServiceDistance.L2Distance(l2Distance) =>
        Success(L2Distance(l2Distance.distance.toFloat))
      case distance =>
        Failure(new IllegalArgumentException(s"Expected an l2 distance but got $distance"))
    }
  }
}

case object Cosine extends Metric[CosineDistance] with Injection[CosineDistance, ServiceDistance] {
  // Cosine distance = 1 - cosine similarity.
  override def distance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): CosineDistance = {
    fromAbsoluteDistance(1 - MetricUtil.cosineSimilarity(embedding1, embedding2))
  }

  override def fromAbsoluteDistance(distance: Float): CosineDistance = {
    CosineDistance(distance)
  }

  override def absoluteDistance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): Float = distance(embedding1, embedding2).distance

  override def apply(scalaDistance: CosineDistance): ServiceDistance = {
    ServiceDistance.CosineDistance(ServiceCosineDistance(scalaDistance.distance))
  }

  override def invert(serviceDistance: ServiceDistance): Try[CosineDistance] = {
    serviceDistance match {
      case ServiceDistance.CosineDistance(cosineDistance) =>
        Success(CosineDistance(cosineDistance.distance.toFloat))
      case distance =>
        Failure(new IllegalArgumentException(s"Expected a cosine distance but got $distance"))
    }
  }
}

case object InnerProduct
    extends Metric[InnerProductDistance]
    with Injection[InnerProductDistance, ServiceDistance] {
  // 1 - dot product, so that a larger inner product yields a smaller distance.
  override def distance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): InnerProductDistance = {
    fromAbsoluteDistance(1 - MetricUtil.dot(embedding1, embedding2))
  }

  override def fromAbsoluteDistance(distance: Float): InnerProductDistance = {
    InnerProductDistance(distance)
  }

  override def absoluteDistance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): Float = distance(embedding1, embedding2).distance

  override def apply(scalaDistance: InnerProductDistance): ServiceDistance = {
    ServiceDistance.InnerProductDistance(ServiceInnerProductDistance(scalaDistance.distance))
  }

  override def invert(
    serviceDistance: ServiceDistance
  ): Try[InnerProductDistance] = {
    serviceDistance match {
      // Fixed: pattern variable was misleadingly named `cosineDistance`.
      case ServiceDistance.InnerProductDistance(innerProductDistance) =>
        Success(InnerProductDistance(innerProductDistance.distance.toFloat))
      case distance =>
        Failure(
          new IllegalArgumentException(s"Expected an inner product distance but got $distance")
        )
    }
  }
}

case object Edit extends Metric[EditDistance] with Injection[EditDistance, ServiceDistance] {

  // Memoized Levenshtein-style distance over the prefixes of lengths pos1/pos2.
  private def intDistance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector,
    pos1: Int,
    pos2: Int,
    precomputedDistances: scala.collection.mutable.Map[(Int, Int), Int]
  ): Int = {
    // return the remaining characters of other String
    if (pos1 == 0) return pos2
    if (pos2 == 0) return pos1

    // To check if the recursive tree for given n & m has already been executed
    precomputedDistances.getOrElse(
      (pos1, pos2), {
        // We might want to change this so that capitals are considered the same.
        // Also maybe some characters that look similar should also be the same.
        val computed = if (embedding1(pos1 - 1) == embedding2(pos2 - 1)) {
          intDistance(embedding1, embedding2, pos1 - 1, pos2 - 1, precomputedDistances)
        } else {
          // If characters are not equal, we need to
          // find the minimum cost out of all 3 operations.
          val insert = intDistance(embedding1, embedding2, pos1, pos2 - 1, precomputedDistances)
          val del = intDistance(embedding1, embedding2, pos1 - 1, pos2, precomputedDistances)
          val replace =
            intDistance(embedding1, embedding2, pos1 - 1, pos2 - 1, precomputedDistances)
          1 + Math.min(insert, Math.min(del, replace))
        }
        precomputedDistances.put((pos1, pos2), computed)
        computed
      }
    )
  }

  override def distance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): EditDistance = {
    val editDistance = intDistance(
      embedding1,
      embedding2,
      embedding1.length,
      embedding2.length,
      scala.collection.mutable.Map[(Int, Int), Int]()
    )
    EditDistance(editDistance)
  }

  override def fromAbsoluteDistance(distance: Float): EditDistance = {
    EditDistance(distance.toInt)
  }

  override def absoluteDistance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): Float = distance(embedding1, embedding2).distance

  override def apply(scalaDistance: EditDistance): ServiceDistance = {
    ServiceDistance.EditDistance(ServiceEditDistance(scalaDistance.distance.toInt))
  }

  override def invert(
    serviceDistance: ServiceDistance
  ): Try[EditDistance] = {
    serviceDistance match {
      case ServiceDistance.EditDistance(editDistance) =>
        Success(EditDistance(editDistance.distance.toFloat))
      case distance =>
        // Fixed: the error message previously claimed "inner product distance".
        Failure(
          new IllegalArgumentException(s"Expected an edit distance but got $distance")
        )
    }
  }
}

object MetricUtil {
  private[ann] def dot(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): Float = {
    math.dotProduct(embedding1, embedding2)
  }

  private[ann] def l2distance(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): Double = {
    math.l2Distance(embedding1, embedding2)
  }

  private[ann] def cosineSimilarity(
    embedding1: EmbeddingVector,
    embedding2: EmbeddingVector
  ): Float = {
    math.cosineSimilarity(embedding1, embedding2).toFloat
  }

  private[ann] def norm(
    embedding: EmbeddingVector
  ): EmbeddingVector = {
    math.normalize(embedding)
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/QueryableById.scala
package com.twitter.ann.common

import com.twitter.stitch.Stitch

/**
 * This is a trait that allows you to query for nearest neighbors given an arbitrary type T1. This is
 * in contrast to a regular com.twitter.ann.common.Appendable, which takes an embedding as the input
 * argument.
 *
 * This interface uses the Stitch API for batching. See go/stitch for details on how to use it.
 *
 * @tparam T1 type of the query.
 * @tparam T2 type of the result.
 * @tparam P runtime parameters supported by the index.
 * @tparam D distance function used in the index.
 */
trait QueryableById[T1, T2, P <: RuntimeParams, D <: Distance[D]] {

  /** Nearest-neighbor ids for a single query id. */
  def queryById(
    id: T1,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Stitch[List[T2]]

  /** Nearest-neighbor ids with distances for a single query id. */
  def queryByIdWithDistance(
    id: T1,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Stitch[List[NeighborWithDistance[T2, D]]]

  /** Nearest neighbors for many seed ids; each result carries its seed. */
  def batchQueryById(
    ids: Seq[T1],
    numOfNeighbors: Int,
    runtimeParams: P
  ): Stitch[List[NeighborWithSeed[T1, T2]]]

  /** Nearest neighbors with distances for many seed ids; each result carries its seed. */
  def batchQueryWithDistanceById(
    ids: Seq[T1],
    numOfNeighbors: Int,
    runtimeParams: P
  ): Stitch[List[NeighborWithDistanceWithSeed[T1, T2, D]]]
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/QueryableByIdImplementation.scala
package com.twitter.ann.common

import com.twitter.stitch.Stitch

/**
 * Implementation of QueryableById that composes an EmbeddingProducer and a Queryable so that we
 * can get nearest neighbors given an id of type T1
 * @param embeddingProducer provides an embedding given an id.
 * @param queryable provides a list of neighbors given an embedding.
 * @tparam T1 type of the query.
 * @tparam T2 type of the result.
 * @tparam P runtime parameters supported by the index.
 * @tparam D distance function used in the index.
 */
class QueryableByIdImplementation[T1, T2, P <: RuntimeParams, D <: Distance[D]](
  embeddingProducer: EmbeddingProducer[T1],
  queryable: Queryable[T2, P, D])
    extends QueryableById[T1, T2, P, D] {

  override def queryById(
    id: T1,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Stitch[List[T2]] = {
    // If no embedding can be produced for the id, resolve to an empty list.
    embeddingProducer.produceEmbedding(id).flatMap { embeddingOption =>
      embeddingOption
        .map { embedding =>
          Stitch.callFuture(queryable.query(embedding, numOfNeighbors, runtimeParams))
        }.getOrElse {
          Stitch.value(List.empty)
        }
    }
  }

  override def queryByIdWithDistance(
    id: T1,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Stitch[List[NeighborWithDistance[T2, D]]] = {
    // Same shape as queryById, but the index also returns distances.
    embeddingProducer.produceEmbedding(id).flatMap { embeddingOption =>
      embeddingOption
        .map { embedding =>
          Stitch.callFuture(queryable.queryWithDistance(embedding, numOfNeighbors, runtimeParams))
        }.getOrElse {
          Stitch.value(List.empty)
        }
    }
  }

  override def batchQueryById(
    ids: Seq[T1],
    numOfNeighbors: Int,
    runtimeParams: P
  ): Stitch[List[NeighborWithSeed[T1, T2]]] = {
    // NOTE(review): the `handle` below silently maps any per-seed failure to an
    // empty list, so the flattened result cannot distinguish "query failed"
    // from "no neighbors found" for a given seed.
    Stitch
      .traverse(ids) { id =>
        embeddingProducer.produceEmbedding(id).flatMap { embeddingOption =>
          embeddingOption
            .map { embedding =>
              Stitch
                .callFuture(queryable.query(embedding, numOfNeighbors, runtimeParams)).map(
                  _.map(neighbor => NeighborWithSeed(id, neighbor)))
            }.getOrElse {
              Stitch.value(List.empty)
            }.handle { case _ => List.empty }
        }
      }.map {
        _.toList.flatten
      }
  }

  override def batchQueryWithDistanceById(
    ids: Seq[T1],
    numOfNeighbors: Int,
    runtimeParams: P
  ): Stitch[List[NeighborWithDistanceWithSeed[T1, T2, D]]] = {
    // Same failure-swallowing caveat as batchQueryById.
    Stitch
      .traverse(ids) { id =>
        embeddingProducer.produceEmbedding(id).flatMap { embeddingOption =>
          embeddingOption
            .map { embedding =>
              Stitch
                .callFuture(queryable.queryWithDistance(embedding, numOfNeighbors, runtimeParams))
                .map(_.map(neighbor =>
                  NeighborWithDistanceWithSeed(id, neighbor.neighbor, neighbor.distance)))
            }.getOrElse {
              Stitch.value(List.empty)
            }.handle { case _ => List.empty }
        }
      }.map {
        _.toList.flatten
      }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/QueryableOperations.scala
package com.twitter.ann.common

import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.util.Future

object QueryableOperations {

  /** Enriches a [[Queryable]] with combinators over its runtime parameters. */
  implicit class Map[T, P <: RuntimeParams, D <: Distance[D]](
    val q: Queryable[T, P, D]) {

    /**
     * Returns a view of the underlying index that rewrites the caller-supplied
     * runtime parameters with `f` before every query.
     */
    def mapRuntimeParameters(f: P => P): Queryable[T, P, D] = new Queryable[T, P, D] {
      override def query(
        embedding: EmbeddingVector,
        numOfNeighbors: Int,
        runtimeParams: P
      ): Future[List[T]] = {
        val rewritten = f(runtimeParams)
        q.query(embedding, numOfNeighbors, rewritten)
      }

      override def queryWithDistance(
        embedding: EmbeddingVector,
        numOfNeighbors: Int,
        runtimeParams: P
      ): Future[List[NeighborWithDistance[T, D]]] = {
        val rewritten = f(runtimeParams)
        q.queryWithDistance(embedding, numOfNeighbors, rewritten)
      }
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/ReadWriteFuturePool.scala
package com.twitter.ann.common

import com.google.common.annotations.VisibleForTesting
import com.twitter.util.{Future, FuturePool}

/** Runs read and write work on (possibly distinct) future pools. */
trait ReadWriteFuturePool {

  /** Schedules `f` on the read pool. */
  def read[T](f: => T): Future[T]

  /** Schedules `f` on the write pool. */
  def write[T](f: => T): Future[T]
}

object ReadWriteFuturePool {

  /** Builds a pool with separate executors for reads and writes. */
  def apply(readPool: FuturePool, writePool: FuturePool): ReadWriteFuturePool =
    new ReadWriteFuturePoolANN(readPool, writePool)

  /** Builds a pool that serves both reads and writes from a single executor. */
  def apply(commonPool: FuturePool): ReadWriteFuturePool =
    new ReadWriteFuturePoolANN(commonPool, commonPool)
}

@VisibleForTesting
private[ann] class ReadWriteFuturePoolANN(readPool: FuturePool, writePool: FuturePool)
    extends ReadWriteFuturePool {

  def read[T](f: => T): Future[T] = readPool(f)

  def write[T](f: => T): Future[T] = writePool(f)
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/Serialization.scala
package com.twitter.ann.common

import com.twitter.search.common.file.AbstractFile
import org.apache.beam.sdk.io.fs.ResourceId

/**
 * Interface for writing an Appendable to a directory.
 *
 * Two overloads exist because indices may be persisted either via the
 * search-common file abstraction or via a Beam filesystem resource
 * (e.g. a GCS path).
 */
trait Serialization {
  // Persist to an AbstractFile-backed directory (local/HDFS-style).
  def toDirectory(
    serializationDirectory: AbstractFile
  ): Unit

  // Persist to a Beam ResourceId directory (e.g. GCS).
  def toDirectory(
    serializationDirectory: ResourceId
  ): Unit
}

/**
 * Interface for reading a Queryable from a directory
 * @tparam T the id of the embeddings
 * @tparam P runtime parameter type accepted by the resulting queryable
 * @tparam D distance metric type of the resulting queryable
 * @tparam Q type of the Queryable that is deserialized.
 */
trait QueryableDeserialization[T, P <: RuntimeParams, D <: Distance[D], Q <: Queryable[T, P, D]] {
  // Load a previously serialized index from the given directory.
  def fromDirectory(
    serializationDirectory: AbstractFile
  ): Q
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/ServiceClientQueryable.scala
package com.twitter.ann.common

import com.twitter.ann.common.EmbeddingType._
import com.twitter.ann.common.thriftscala.{
  NearestNeighborQuery,
  NearestNeighborResult,
  Distance => ServiceDistance,
  RuntimeParams => ServiceRuntimeParams
}
import com.twitter.bijection.Injection
import com.twitter.finagle.Service
import com.twitter.mediaservices.commons.codec.ArrayByteBufferCodec
import com.twitter.util.Future

/**
 * A [[Queryable]] backed by a remote ANN service.  Queries are serialized to
 * thrift, sent over the supplied Finagle service, and responses are decoded
 * back into ids (and optionally distances) via the supplied injections.
 */
class ServiceClientQueryable[T, P <: RuntimeParams, D <: Distance[D]](
  service: Service[NearestNeighborQuery, NearestNeighborResult],
  runtimeParamInjection: Injection[P, ServiceRuntimeParams],
  distanceInjection: Injection[D, ServiceDistance],
  idInjection: Injection[T, Array[Byte]])
    extends Queryable[T, P, D] {

  // Build the thrift request shared by both query flavours.
  private[this] def buildRequest(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P,
    withDistance: Boolean
  ): NearestNeighborQuery =
    NearestNeighborQuery(
      embeddingSerDe.toThrift(embedding),
      withDistance = withDistance,
      runtimeParamInjection(runtimeParams),
      numOfNeighbors
    )

  override def query(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Future[List[T]] =
    service(buildRequest(embedding, numOfNeighbors, runtimeParams, withDistance = false))
      .map { response =>
        response.nearestNeighbors.iterator
          // .get: an undecodable id from the server is treated as fatal,
          // same as the original contract.
          .map(neighbor => idInjection.invert(ArrayByteBufferCodec.decode(neighbor.id)).get)
          .toList
      }

  override def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Future[List[NeighborWithDistance[T, D]]] =
    service(buildRequest(embedding, numOfNeighbors, runtimeParams, withDistance = true))
      .map { response =>
        response.nearestNeighbors.iterator
          .map { neighbor =>
            NeighborWithDistance(
              idInjection.invert(ArrayByteBufferCodec.decode(neighbor.id)).get,
              // .get on distance: the request asked for distances, so an
              // absent field is a protocol violation and will throw.
              distanceInjection.invert(neighbor.distance.get).get
            )
          }
          .toList
      }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/ShardApi.scala
package com.twitter.ann.common

import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.util.Future
import scala.util.Random

/**
 * Maps an embedding to a shard index.
 */
trait ShardFunction[T] {

  /**
   * Shard function to shard embedding based on total shards and embedding data.
   * @param shards total number of shards
   * @param entity the embedding being placed
   * @return Shard index, from 0(Inclusive) to shards(Exclusive))
   */
  def apply(shards: Int, entity: EntityEmbedding[T]): Int
}

/**
 * Randomly shards the embeddings based on number of total shards.
 */
class RandomShardFunction[T] extends ShardFunction[T] {
  def apply(shards: Int, entity: EntityEmbedding[T]): Int =
    Random.nextInt(shards)
}

/**
 * Sharded appendable that routes each embedding to one of several
 * underlying appendable indices.
 * @param indices sequence of appendable indices
 * @param shardFn shard function choosing the destination index
 * @param shards total shard count
 * @tparam T type of id.
 */
class ShardedAppendable[T, P <: RuntimeParams, D <: Distance[D]](
  indices: Seq[Appendable[T, P, D]],
  shardFn: ShardFunction[T],
  shards: Int)
    extends Appendable[T, P, D] {

  override def append(entity: EntityEmbedding[T]): Future[Unit] = {
    val destination = indices(shardFn(shards, entity))
    destination.append(entity)
  }

  override def toQueryable: Queryable[T, P, D] =
    new ComposedQueryable[T, P, D](indices.map(_.toQueryable))
}

/**
 * Composition of a sequence of queryable indices: fans the query out to
 * every index, then merges the per-shard results in memory and keeps the
 * K nearest by distance.
 * @param indices sequence of queryable indices
 * @tparam T type of id
 * @tparam P type of runtime param
 * @tparam D type of distance metric
 */
class ComposedQueryable[T, P <: RuntimeParams, D <: Distance[D]](
  indices: Seq[Queryable[T, P, D]])
    extends Queryable[T, P, D] {

  // Neighbors are merged in ascending distance order.
  private[this] val ordering = Ordering.by[NeighborWithDistance[T, D], D](_.distance)

  override def query(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Future[List[T]] =
    queryWithDistance(embedding, numOfNeighbors, runtimeParams)
      .map(neighbors => neighbors.map(_.neighbor))

  override def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: P
  ): Future[List[NeighborWithDistance[T, D]]] = {
    // Each shard already returns its own top-numOfNeighbors, so merging the
    // union and re-taking the top K is sufficient.
    val perShard =
      indices.map(index => index.queryWithDistance(embedding, numOfNeighbors, runtimeParams))
    Future.collect(perShard).map { shardResults =>
      shardResults.flatten
        .sorted(ordering)
        .take(numOfNeighbors)
        .toList
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/ShardedSerialization.scala
package com.twitter.ann.common

import com.twitter.search.common.file.AbstractFile
import com.twitter.search.common.file.AbstractFile.Filter
import com.twitter.util.Future
import org.apache.beam.sdk.io.fs.ResourceId
import scala.collection.JavaConverters._

object ShardConstants {
  // Directory-name prefix for each shard, e.g. "shard_0", "shard_1", ...
  val ShardPrefix = "shard_"
}

/**
 * Serializes a sequence of shards, each into its own "shard_<i>"
 * subdirectory of the target directory.
 * @param shards list of shard serializers, one per shard
 */
class ShardedSerialization(
  shards: Seq[Serialization])
    extends Serialization {

  override def toDirectory(directory: AbstractFile): Unit =
    writeShards(new IndexOutputFile(directory))

  override def toDirectory(directory: ResourceId): Unit =
    writeShards(new IndexOutputFile(directory))

  // Shared implementation: shard i is written to <directory>/shard_<i>,
  // dispatching on whichever backing file type the directory uses.
  private def writeShards(directory: IndexOutputFile): Unit =
    shards.zipWithIndex.foreach {
      case (serialization, shardId) =>
        val shardDirectory = directory.createDirectory(ShardConstants.ShardPrefix + shardId)
        if (shardDirectory.isAbstractFile) {
          serialization.toDirectory(shardDirectory.abstractFile)
        } else {
          serialization.toDirectory(shardDirectory.resourceId)
        }
    }
}

/**
 * Deserializes a directory of "shard_*" subdirectories into a single
 * composed queryable.
 * @param deserializationFn function to deserialize one shard directory into a Queryable
 * @tparam T the id of the embeddings
 * @tparam P runtime params type
 * @tparam D distance metric type
 */
class ComposedQueryableDeserialization[T, P <: RuntimeParams, D <: Distance[D]](
  deserializationFn: (AbstractFile) => Queryable[T, P, D])
    extends QueryableDeserialization[T, P, D, Queryable[T, P, D]] {

  override def fromDirectory(directory: AbstractFile): Queryable[T, P, D] = {
    val shardFilter = new Filter {
      override def accept(file: AbstractFile): Boolean =
        file.getName.startsWith(ShardConstants.ShardPrefix)
    }
    val shardDirectories = directory.listFiles(shardFilter).asScala.toList
    val deserializedShards = shardDirectories.map(deserializationFn)
    new ComposedQueryable[T, P, D](deserializedShards)
  }
}

/**
 * Couples a sharded index with its sharded serializer so one object can
 * both accept embeddings and persist the resulting shards.
 */
class ShardedIndexBuilderWithSerialization[T, P <: RuntimeParams, D <: Distance[D]](
  shardedIndex: ShardedAppendable[T, P, D],
  shardedSerialization: ShardedSerialization)
    extends Appendable[T, P, D]
    with Serialization {

  override def append(entity: EntityEmbedding[T]): Future[Unit] =
    shardedIndex.append(entity)

  override def toDirectory(directory: AbstractFile): Unit =
    shardedSerialization.toDirectory(directory)

  override def toDirectory(directory: ResourceId): Unit =
    shardedSerialization.toDirectory(directory)

  override def toQueryable: Queryable[T, P, D] =
    shardedIndex.toQueryable
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/common/Task.scala
package com.twitter.ann.common

import com.twitter.finagle.stats.CategorizingExceptionStatsHandler
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.tracing.DefaultTracer
import com.twitter.finagle.tracing.Trace
import com.twitter.finagle.util.DefaultTimer
import com.twitter.finagle.util.Rng
import com.twitter.inject.logging.MDCKeys
import com.twitter.util.Closable
import com.twitter.util.Duration
import com.twitter.util.Future
import com.twitter.util.Time
import com.twitter.util.Timer
import java.util.concurrent.atomic.AtomicInteger
import com.twitter.util.logging.Logging
import org.slf4j.MDC

/**
 * A Task that will be scheduled to execute periodically on every interval. If a task takes
 * longer than an interval to complete, it will be immediately scheduled to run.
 */
trait Task extends Closable { self: Logging =>
  // Exposed if the implementation of `task` need to report failures
  val exnStatsHandler = new CategorizingExceptionStatsHandler(categorizer = _ => Some("failures"))

  protected val statsReceiver: StatsReceiver

  // Counters/stats for task executions: total started, total succeeded,
  // per-run wall-clock latency, and the number currently in flight.
  private val totalTasks = statsReceiver.counter("total")
  private val successfulTasks = statsReceiver.counter("success")
  private val taskLatency = statsReceiver.stat("latency_ms")
  private val activeTasks = new AtomicInteger(0)

  // Overridable for deterministic jitter/scheduling in tests.
  protected[common] val rng: Rng = Rng.threadLocal
  protected[common] val timer: Timer = DefaultTimer

  // Handle to the running loop; null until one of the start methods is called.
  @volatile private var taskLoop: Future[Unit] = null

  /** Execute the task with bookkeeping **/
  private def run(): Future[Unit] = {
    totalTasks.incr()
    activeTasks.getAndIncrement()
    val start = Time.now
    val runningTask =
      // Setup a new trace root for this task. We also want logs to contain
      // the same trace information finatra populates for requests.
      // See com.twitter.finatra.thrift.filters.TraceIdMDCFilter
      Trace.letTracerAndNextId(DefaultTracer) {
        val trace = Trace()
        MDC.put(MDCKeys.TraceId, trace.id.traceId.toString)
        MDC.put(MDCKeys.TraceSampled, trace.id._sampled.getOrElse(false).toString)
        MDC.put(MDCKeys.TraceSpanId, trace.id.spanId.toString)
        info(s"starting task ${getClass.toString}")
        task()
          .onSuccess({ _ =>
            info(s"completed task ${getClass.toString}")
            successfulTasks.incr()
          })
          .onFailure({ e =>
            // Failures are logged and counted but do not stop the loop.
            warn(s"failed task. ", e)
            exnStatsHandler.record(statsReceiver, e)
          })
      }
    // `transform` runs on both success and failure, so the task is always
    // rescheduled one interval after the current run finishes.
    runningTask.transform { _ =>
      val elapsed = Time.now - start
      activeTasks.getAndDecrement()
      taskLatency.add(elapsed.inMilliseconds)
      Future
        .sleep(taskInterval)(timer)
        .before(run())
    }
  }

  // Body of a task to run
  protected def task(): Future[Unit]

  // Task interval
  protected def taskInterval: Duration

  /**
   * Start the task after random jitter
   *
   * Jitter is uniform in [0, taskInterval) so that many instances started
   * together do not all fire at once.
   */
  final def jitteredStart(): Unit = synchronized {
    if (taskLoop != null) {
      throw new RuntimeException(s"task already started")
    } else {
      val jitterNs = rng.nextLong(taskInterval.inNanoseconds)
      val jitter = Duration.fromNanoseconds(jitterNs)
      taskLoop = Future
        .sleep(jitter)(timer)
        .before(run())
    }
  }

  /**
   * Start the task without applying any delay
   */
  final def startImmediately(): Unit = synchronized {
    if (taskLoop != null) {
      throw new RuntimeException(s"task already started")
    } else {
      taskLoop = run()
    }
  }

  /**
   * Close the task. A closed task cannot be restarted.
   *
   * NOTE(review): `taskLoop` is never reset to null here, which is what makes
   * restarting impossible; the interrupt is delivered via `raise` on the loop
   * future. Whether an in-flight `task()` observes it depends on that task's
   * interrupt handling — TODO confirm for implementations with long-running bodies.
   */
  override def close(deadline: Time): Future[Unit] = {
    if (taskLoop != null) {
      taskLoop.raise(new InterruptedException("task closed"))
    }
    Future.Done
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/dataflow/offline/ANNIndexBuilderBeamJob.scala
package com.twitter.ann.dataflow.offline

import com.spotify.scio.ScioContext
import com.spotify.scio.ScioMetrics
import com.twitter.ann.annoy.TypedAnnoyIndex
import com.twitter.ann.brute_force.SerializableBruteForceIndex
import com.twitter.ann.common.thriftscala.AnnIndexMetadata
import com.twitter.ann.common.Distance
import com.twitter.ann.common.Cosine
import com.twitter.ann.common.EntityEmbedding
import com.twitter.ann.common.IndexOutputFile
import com.twitter.ann.common.Metric
import com.twitter.ann.common.ReadWriteFuturePool
import com.twitter.ann.faiss.FaissIndexer
import com.twitter.ann.hnsw.TypedHnswIndex
import com.twitter.ann.serialization.PersistedEmbeddingInjection
import com.twitter.ann.serialization.ThriftIteratorIO
import com.twitter.ann.serialization.thriftscala.PersistedEmbedding
import com.twitter.ann.util.IndexBuilderUtils
import com.twitter.beam.io.bigquery.BigQueryIO
import com.twitter.beam.io.dal.DalObservedDatasetRegistration
import com.twitter.beam.job.DateRange
import com.twitter.beam.job.DateRangeOptions
import com.twitter.cortex.ml.embeddings.common._
import com.twitter.ml.api.embedding.Embedding
import com.twitter.ml.api.embedding.EmbeddingMath
import com.twitter.ml.api.embedding.EmbeddingSerDe
import com.twitter.ml.api.thriftscala.{Embedding => TEmbedding}
import com.twitter.ml.featurestore.lib.EntityId
import com.twitter.ml.featurestore.lib.SemanticCoreId
import com.twitter.ml.featurestore.lib.TfwId
import com.twitter.ml.featurestore.lib.TweetId
import com.twitter.ml.featurestore.lib.UserId
import com.twitter.scalding.DateOps
import com.twitter.scalding.RichDate
import com.twitter.scio_internal.job.ScioBeamJob
import com.twitter.statebird.v2.thriftscala.{Environment => StatebirdEnvironment}
import com.twitter.util.Await
import com.twitter.util.FuturePool
import com.twitter.wtf.beam.bq_embedding_export.BQQueryUtils
import java.time.Instant
import java.util.TimeZone
import java.util.concurrent.Executors
import org.apache.beam.sdk.io.FileSystems
import org.apache.beam.sdk.io.fs.ResolveOptions
import org.apache.beam.sdk.io.fs.ResourceId
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead
import org.apache.beam.sdk.options.Default
import org.apache.beam.sdk.options.Description
import org.apache.beam.sdk.transforms.DoFn
import org.apache.beam.sdk.transforms.DoFn._
import org.apache.beam.sdk.transforms.PTransform
import org.apache.beam.sdk.transforms.ParDo
import org.apache.beam.sdk.values.KV
import org.apache.beam.sdk.values.PCollection
import org.apache.beam.sdk.values.PDone
import org.slf4j.Logger
import org.slf4j.LoggerFactory

/**
 * Pipeline options for the ANN index builder job. Each option documents
 * itself via its @Description annotation; getters/setters follow the Beam
 * PipelineOptions bean convention.
 */
trait ANNOptions extends DateRangeOptions {
  @Description("Output GCS path for the generated index")
  def getOutputPath(): String
  def setOutputPath(value: String): Unit

  @Description("If set, the index is grouped")
  @Default.Boolean(false)
  def getGrouped: Boolean
  def setGrouped(value: Boolean): Unit

  @Description(
    "If set, a segment will be registered for the provided DAL dataset module which will trigger " +
      "DAL registration.")
  @Default.Boolean(false)
  def getEnableDalRegistration: Boolean
  def setEnableDalRegistration(value: Boolean): Unit

  @Description(
    "Output GCS path for the generated index. The OutputPath should be of the format " +
      "'gs://user.{{user_name}}.dp.gcp.twttr.net/subDir/outputDir' and OutputDALPath will be " +
      "'subDir/outputDir' for this to work")
  def getOutputDALPath: String
  def setOutputDALPath(value: String): Unit

  @Description("Get ANN index dataset name")
  def getDatasetModuleName: String
  def setDatasetModuleName(value: String): Unit

  @Description("Get ANN index dataset owner role")
  def getDatasetOwnerRole: String
  def setDatasetOwnerRole(value: String): Unit

  @Description("If set, index is written in <output>/<timestamp>")
  @Default.Boolean(false)
  def getOutputWithTimestamp: Boolean
  def setOutputWithTimestamp(value: Boolean): Unit

  @Description("File which contains a SQL query to retrieve embeddings from BQ")
  def getDatasetSqlPath: String
  def setDatasetSqlPath(value: String): Unit

  @Description("Dimension of embedding in the input data. See go/ann")
  def getDimension: Int
  def setDimension(value: Int): Unit

  @Description("The type of entity ID that is used with the embeddings. See go/ann")
  def getEntityKind: String
  def setEntityKind(value: String): Unit

  @Description("The kind of index you want to generate (HNSW/Annoy/Brute Force/faiss). See go/ann")
  def getAlgo: String
  def setAlgo(value: String): Unit

  @Description("Distance metric (InnerProduct/Cosine/L2). See go/ann")
  def getMetric: String
  def setMetric(value: String): Unit

  @Description("Specifies how many parallel inserts happen to the index. See go/ann")
  def getConcurrencyLevel: Int
  def setConcurrencyLevel(value: Int): Unit

  @Description(
    "Used by HNSW algo. Larger value increases build time but will give better recall. See go/ann")
  def getEfConstruction: Int
  def setEfConstruction(value: Int): Unit

  @Description(
    "Used by HNSW algo. Larger value increases the index size but will give better recall. " +
      "See go/ann")
  def getMaxM: Int
  def setMaxM(value: Int): Unit

  @Description("Used by HNSW algo. Approximate number of elements that will be indexed. See go/ann")
  def getExpectedElements: Int
  def setExpectedElements(value: Int): Unit

  @Description(
    "Used by Annoy. num_trees is provided during build time and affects the build time and the " +
      "index size. A larger value will give more accurate results, but larger indexes. See go/ann")
  def getAnnoyNumTrees: Int
  def setAnnoyNumTrees(value: Int): Unit

  @Description(
    "FAISS factory string determines the ANN algorithm and compression. " +
      "See https://github.com/facebookresearch/faiss/wiki/The-index-factory")
  def getFAISSFactoryString: String
  def setFAISSFactoryString(value: String): Unit

  @Description("Sample rate for training during creation of FAISS index. Default is 0.05f")
  @Default.Float(0.05f)
  def getTrainingSampleRate: Float
  def setTrainingSampleRate(value: Float): Unit
}

/**
 * Builds ANN index.
 *
 * The input embeddings are read from BigQuery using the input SQL query. The output from this SQL
 * query needs to have two columns, "entityID" [Long] and "embedding" [List[Double]]
 *
 * Output directory supported is GCS bucket
 */
object ANNIndexBuilderBeamJob extends ScioBeamJob[ANNOptions] {
  val counterNameSpace = "ANNIndexBuilderBeamJob"

  val LOG: Logger = LoggerFactory.getLogger(this.getClass)

  implicit val timeZone: TimeZone = DateOps.UTC

  /**
   * Wires the pipeline: read embeddings from BQ (grouped or flat schema),
   * key them by group, write index metadata, and hand each group to the
   * appropriate index-building sink.
   */
  def configurePipeline(sc: ScioContext, opts: ANNOptions): Unit = {
    val startDate: RichDate = RichDate(opts.interval.getStart.toDate)
    val endDate: RichDate = RichDate(opts.interval.getEnd.toDate)
    val instant = Instant.now()
    // Optionally nest the output under an epoch-millis timestamp directory.
    val out = {
      val base = FileSystems.matchNewResource(opts.getOutputPath, /*isDirectory=*/ true)
      if (opts.getOutputWithTimestamp) {
        base.resolve(
          instant.toEpochMilli.toString,
          ResolveOptions.StandardResolveOptions.RESOLVE_DIRECTORY)
      } else {
        base
      }
    }
    // Define template variables which we would like to be replaced in the corresponding sql file
    val templateVariables = Map(
      "START_DATE" -> startDate.toString(DateOps.DATETIME_HMS_WITH_DASH),
      "END_DATE" -> endDate.toString(DateOps.DATETIME_HMS_WITH_DASH)
    )
    val embeddingFetchQuery =
      BQQueryUtils.getBQQueryFromSqlFile(opts.getDatasetSqlPath, templateVariables)
    // Grouped and flat inputs differ only in the row schema they deserialize to.
    val sCollection = if (opts.getGrouped) {
      sc.customInput(
        "Read grouped data from BQ",
        BigQueryIO
          .readClass[GroupedEmbeddingData]()
          .fromQuery(embeddingFetchQuery).usingStandardSql()
          .withMethod(TypedRead.Method.DIRECT_READ)
      )
    } else {
      sc.customInput(
        "Read flat data from BQ",
        BigQueryIO
          .readClass[FlatEmbeddingData]().fromQuery(embeddingFetchQuery).usingStandardSql()
          .withMethod(TypedRead.Method.DIRECT_READ)
      )
    }
    // One Map element per group: groupName -> all (id, embedding) pairs.
    // Ungrouped data all lands under the "" group key.
    val processedCollection = sCollection
      .flatMap(transformTableRowToKeyVal)
      .groupBy(_.getKey)
      .map {
        case (groupName, groupValue) => Map(groupName -> groupValue.map(_.getValue))
      }

    val annIndexMetadata =
      AnnIndexMetadata(timestamp = Some(instant.getEpochSecond), withGroups = Some(opts.getGrouped))

    // Count the number of groups and output the ANN index metadata
    processedCollection.count.map(count => {
      val annGroupedIndexMetadata = annIndexMetadata.copy(
        numGroups = Some(count.intValue())
      )
      val indexOutDir = new IndexOutputFile(out)
      indexOutDir.writeIndexMetadata(annGroupedIndexMetadata)
    })

    // Generate Index
    processedCollection.saveAsCustomOutput(
      "Serialise to Disk",
      OutputSink(
        out,
        opts.getAlgo.equals("faiss"),
        opts.getOutputDALPath,
        opts.getEnableDalRegistration,
        opts.getDatasetModuleName,
        opts.getDatasetOwnerRole,
        instant,
        opts.getDate(),
        counterNameSpace
      )
    )
  }

  /**
   * Converts one BQ row into a (groupName -> (entityId, thrift embedding))
   * pair. Rows with a null entityId are dropped (the for over the Option).
   * NOTE(review): for grouped rows, `groupId.get` throws if groupId is null
   * in BQ — TODO confirm the query guarantees it is always populated.
   */
  def transformTableRowToKeyVal(
    data: BaseEmbeddingData
  ): Option[KV[String, KV[Long, TEmbedding]]] = {
    val transformTable = ScioMetrics.counter(counterNameSpace, "transform_table_row_to_kv")
    for {
      id <- data.entityId
    } yield {
      transformTable.inc()
      val groupName: String = if (data.isInstanceOf[GroupedEmbeddingData]) {
        (data.asInstanceOf[GroupedEmbeddingData]).groupId.get
      } else {
        ""
      }
      KV.of[String, KV[Long, TEmbedding]](
        groupName,
        KV.of[Long, TEmbedding](
          id,
          EmbeddingSerDe.toThrift(Embedding(data.embedding.map(_.toFloat).toArray)))
      )
    }
  }

  /**
   * Terminal sink: dispatches each group to the FAISS or generic index
   * builder, and optionally registers the output with DAL.
   */
  case class OutputSink(
    outDir: ResourceId,
    isFaiss: Boolean,
    outputDALPath: String,
    enableDalRegistration: Boolean,
    datasetModuleName: String,
    datasetOwnerRole: String,
    instant: Instant,
    date: DateRange,
    counterNameSpace: String)
      extends PTransform[PCollection[Map[String, Iterable[KV[Long, TEmbedding]]]], PDone] {
    override def expand(input: PCollection[Map[String, Iterable[KV[Long, TEmbedding]]]]): PDone = {
      PDone.in {
        // The index-building ParDo is always attached to the pipeline; which
        // branch's pipeline handle is returned depends on DAL registration.
        val dummyOutput = {
          if (isFaiss) {
            input
              .apply(
                "Build&WriteFaissANNIndex",
                ParDo.of(new BuildFaissANNIndex(outDir, counterNameSpace))
              )
          } else {
            input
              .apply(
                "Build&WriteANNIndex",
                ParDo.of(new BuildANNIndex(outDir, counterNameSpace))
              )
          }
        }
        if (enableDalRegistration) {
          input
            .apply(
              "Register DAL Dataset",
              DalObservedDatasetRegistration(
                datasetModuleName,
                datasetOwnerRole,
                outputDALPath,
                instant,
                Some(StatebirdEnvironment.Prod),
                Some("ANN Index Data Files"))
            )
            .getPipeline
        } else {
          dummyOutput.getPipeline
        }
      }
    }
  }

  /**
   * DoFn that builds a brute_force/annoy/hnsw index for each group in the
   * element and serializes it to the group's output directory.
   */
  class BuildANNIndex(outDir: ResourceId, counterNameSpace: String)
      extends DoFn[Map[String, Iterable[KV[Long, TEmbedding]]], Unit] {

    // Converts an (id, thrift embedding) pair to a typed EntityEmbedding for
    // the configured entity kind; unknown kinds fail fast.
    def transformKeyValToEmbeddingWithEntity[T <: EntityId](
      entityKind: EntityKind[T]
    )(
      keyVal: KV[Long, TEmbedding]
    ): EntityEmbedding[T] = {
      val entityId = entityKind match {
        case UserKind => UserId(keyVal.getKey).toThrift
        case TweetKind => TweetId(keyVal.getKey).toThrift
        case TfwKind => TfwId(keyVal.getKey).toThrift
        case SemanticCoreKind => SemanticCoreId(keyVal.getKey).toThrift
        case _ => throw new IllegalArgumentException(s"Unsupported embedding kind: $entityKind")
      }
      EntityEmbedding[T](
        EntityId.fromThrift(entityId).asInstanceOf[T],
        EmbeddingSerDe.fromThrift(keyVal.getValue))
    }

    @ProcessElement
    def processElement[T <: EntityId, D <: Distance[D]](
      @Element dataGrouped: Map[String, Iterable[KV[Long, TEmbedding]]],
      context: ProcessContext
    ): Unit = {
      val opts = context.getPipelineOptions.as(classOf[ANNOptions])
      val uncastEntityKind = EntityKind.getEntityKind(opts.getEntityKind)
      val entityKind = uncastEntityKind.asInstanceOf[EntityKind[T]]
      val transformKVtoEmbeddings =
        ScioMetrics.counter(counterNameSpace, "transform_kv_to_embeddings")
      val _ = dataGrouped.map {
        case (groupName, data) =>
          val annEmbeddings = data.map { kv =>
            transformKVtoEmbeddings.inc()
            transformKeyValToEmbeddingWithEntity(entityKind)(kv)
          }
          // Grouped output lands in a per-group subdirectory; ungrouped
          // (or empty group name) writes to the root output dir.
          val out = {
            if (opts.getGrouped && groupName != "") {
              outDir.resolve(groupName, ResolveOptions.StandardResolveOptions.RESOLVE_DIRECTORY)
            } else {
              outDir
            }
          }
          LOG.info(s"Writing output to ${out}")
          val metric = Metric.fromString(opts.getMetric).asInstanceOf[Metric[D]]
          val concurrencyLevel = opts.getConcurrencyLevel
          val dimension = opts.getDimension
          val threadPool = Executors.newFixedThreadPool(concurrencyLevel)
          LOG.info(s"Building ANN index of type ${opts.getAlgo}")
          // NOTE(review): this match has no default case, so any algo other
          // than brute_force/annoy/hnsw throws scala.MatchError here
          // ("faiss" is routed to BuildFaissANNIndex before this DoFn).
          val serialization = opts.getAlgo match {
            case "brute_force" =>
              val PersistedEmbeddingIO = new ThriftIteratorIO[PersistedEmbedding](PersistedEmbedding)
              SerializableBruteForceIndex(
                metric,
                FuturePool.apply(threadPool),
                new PersistedEmbeddingInjection(entityKind.byteInjection),
                PersistedEmbeddingIO
              )
            case "annoy" =>
              TypedAnnoyIndex.indexBuilder(
                dimension,
                opts.getAnnoyNumTrees,
                metric,
                entityKind.byteInjection,
                FuturePool.apply(threadPool)
              )
            case "hnsw" =>
              val efConstruction = opts.getEfConstruction
              val maxM = opts.getMaxM
              val expectedElements = opts.getExpectedElements
              TypedHnswIndex.serializableIndex(
                dimension,
                metric,
                efConstruction,
                maxM,
                expectedElements,
                entityKind.byteInjection,
                ReadWriteFuturePool(FuturePool.apply(threadPool))
              )
          }
          // Blocking here is intentional: DoFn must finish the write before
          // the bundle completes.
          val future =
            IndexBuilderUtils.addToIndex(serialization, annEmbeddings.toSeq, concurrencyLevel)
          Await.result(future.map { _ => serialization.toDirectory(out) })
      }
    }
  }

  /**
   * DoFn that builds a FAISS index for each group. Embeddings are
   * L2-normalized first when the metric is Cosine.
   */
  class BuildFaissANNIndex(outDir: ResourceId, counterNameSpace: String)
      extends DoFn[Map[String, Iterable[KV[Long, TEmbedding]]], Unit] {

    @ProcessElement
    def processElement[D <: Distance[D]](
      @Element dataGrouped: Map[String, Iterable[KV[Long, TEmbedding]]],
      context: ProcessContext
    ): Unit = {
      val opts = context.getPipelineOptions.as(classOf[ANNOptions])
      val transformKVtoEmbeddings =
        ScioMetrics.counter(counterNameSpace, "transform_kv_to_embeddings")
      val _ = dataGrouped.map {
        case (groupName, data) =>
          val out = {
            if (opts.getGrouped && groupName != "") {
              outDir.resolve(groupName, ResolveOptions.StandardResolveOptions.RESOLVE_DIRECTORY)
            } else {
              outDir
            }
          }
          LOG.info(s"Writing output to ${out}")
          val metric = Metric.fromString(opts.getMetric).asInstanceOf[Metric[D]]
          val maybeNormalizedPipe = data.map { kv =>
            transformKVtoEmbeddings.inc()
            val embedding = EmbeddingSerDe.floatEmbeddingSerDe.fromThrift(kv.getValue)
            EntityEmbedding[Long](
              kv.getKey,
              // Cosine distance on FAISS is implemented as inner product over
              // normalized vectors.
              if (metric == Cosine) {
                EmbeddingMath.Float.normalize(embedding)
              } else {
                embedding
              }
            )
          }
          // Generate Index
          FaissIndexer.buildAndWriteFaissIndex(
            maybeNormalizedPipe,
            opts.getTrainingSampleRate,
            opts.getFAISSFactoryString,
            metric,
            new IndexOutputFile(out))
      }
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/dataflow/offline/BUILD
# Library target for the offline ANN index builder Beam/Scio job
# (ANNIndexBuilderBeamJob and its row-schema case classes).
scala_library(
    name = "index_builder_lib",
    sources = [
        "*.scala",
    ],
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/com/spotify:scio-core",
        "3rdparty/jvm/org/apache/beam:beam-sdks-java-core",
        "ann/src/main/java/com/twitter/ann/faiss",
        "ann/src/main/scala/com/twitter/ann/annoy",
        "ann/src/main/scala/com/twitter/ann/brute_force",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/scala/com/twitter/ann/faiss",
        "ann/src/main/scala/com/twitter/ann/hnsw",
        "ann/src/main/scala/com/twitter/ann/serialization",
        "ann/src/main/scala/com/twitter/ann/util",
        "ann/src/main/thrift/com/twitter/ann/common:ann-common-scala",
        "beam-internal/src/main/scala/com/twitter/beam/io/bigquery",
        "beam-internal/src/main/scala/com/twitter/beam/io/dal",
        "beam-internal/src/main/scala/com/twitter/beam/job",
        "beam-internal/src/main/scala/com/twitter/scio_internal/runner/dataflow",
        "src/scala/com/twitter/cortex/ml/embeddings/common:Helpers",
        "src/scala/com/twitter/ml/featurestore/lib",
        "src/scala/com/twitter/wtf/beam/bq_embedding_export:bq_embedding_export_lib",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/dataflow/offline/BaseEmbeddingData.scala
package com.twitter.ann.dataflow.offline

/**
 * Minimal shape of one embedding row read from BigQuery by the index
 * builder job.
 */
trait BaseEmbeddingData {
  // Entity identifier; None when the BQ row's entityId column is null.
  val entityId: Option[Long]
  // Raw embedding vector components for this entity.
  val embedding: Seq[Double]
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/dataflow/offline/FlatEmbeddingData.scala
package com.twitter.ann.dataflow.offline

import com.twitter.beam.schemas.SchemaFieldName

// Row schema for ungrouped (flat) embedding input: (entityId, embedding).
// SchemaFieldName maps each field to its BigQuery column name.
case class FlatEmbeddingData(
  @SchemaFieldName("entityId") entityId: Option[Long],
  @SchemaFieldName("embedding") embedding: Seq[Double])
    extends BaseEmbeddingData
the-algorithm-main/ann/src/main/scala/com/twitter/ann/dataflow/offline/GroupedEmbeddingData.scala
package com.twitter.ann.dataflow.offline

import com.twitter.beam.schemas.SchemaFieldName

// Row schema for grouped embedding input; groupId selects the per-group
// sub-index the row belongs to. SchemaFieldName maps fields to BQ columns.
case class GroupedEmbeddingData(
  @SchemaFieldName("entityId") entityId: Option[Long],
  @SchemaFieldName("embedding") embedding: Seq[Double],
  @SchemaFieldName("groupId") groupId: Option[String],
) extends BaseEmbeddingData
the-algorithm-main/ann/src/main/scala/com/twitter/ann/experimental/BUILD.bazel
# Experimental ANN benchmarking harness: library target plus a hadoop
# binary whose entry point is com.twitter.ann.experimental.Runner.
scala_library(
    name = "server",
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-only"],
    dependencies = [
        "ann/src/main/scala/com/twitter/ann/annoy",
        "ann/src/main/scala/com/twitter/ann/brute_force",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/scala/com/twitter/ann/hnsw",
    ],
)

hadoop_binary(
    name = "benchmarking",
    basename = "benchmarking",
    main = "com.twitter.ann.experimental.Runner",
    platform = "java8",
    runtime_platform = "java8",
    tags = [
        "bazel-compatible",
        "bazel-compatible:migrated",
        "bazel-only",
    ],
    dependencies = [
        ":server",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/experimental/Runner.scala
package com.twitter.ann.experimental

import com.twitter.ann.annoy.{AnnoyRuntimeParams, TypedAnnoyIndex}
import com.twitter.ann.brute_force.{BruteForceIndex, BruteForceRuntimeParams}
import com.twitter.ann.common.{Cosine, CosineDistance, EntityEmbedding, ReadWriteFuturePool}
import com.twitter.ann.hnsw.{HnswParams, TypedHnswIndex}
import com.twitter.bijection.Injection
import com.twitter.ml.api.embedding.Embedding
import com.twitter.search.common.file.LocalFile
import com.twitter.util.{Await, Future, FuturePool}
import java.nio.file.Files
import java.util
import java.util.concurrent.Executors
import java.util.{Collections, Random}
import scala.collection.JavaConverters._
import scala.collection.mutable

/**
 * Ad-hoc benchmark comparing HNSW and Annoy against a brute-force index:
 * builds each index over the same random cosine-metric dataset, then measures
 * per-query latency and recall (vs. the brute-force ground truth) across a
 * sweep of runtime parameters (HNSW `ef`, Annoy `nodesToExplore`), printing
 * averages to stdout. Results are printed, not returned.
 */
object Runner {
  def main(args: Array[String]): Unit = {
    val rng = new Random()
    // Benchmark dimensions: 300-dim embeddings, top-20 neighbours,
    // 2000 indexed vectors, 30 query vectors.
    val dimen = 300
    val neighbours = 20
    val trainDataSetSize = 2000
    val testDataSetSize = 30

    // Hnsw (ef -> (time, recall)); accumulators are summed over all test
    // queries and divided by testDataSetSize when reported.
    val hnswEfConfig = new mutable.HashMap[Int, (Float, Float)]
    val efConstruction = 200
    val maxM = 16
    val threads = 24
    val efSearch = Seq(20, 30, 50, 70, 100, 120)
    efSearch.foreach(hnswEfConfig.put(_, (0.0f, 0.0f)))

    // Annoy (nodes to explore -> (time, recall))
    val numOfTrees = 80
    val annoyConfig = new mutable.HashMap[Int, (Float, Float)]
    val nodesToExplore = Seq(0, 2000, 3000, 5000, 7000, 10000, 15000, 20000, 30000, 35000, 40000, 50000)
    nodesToExplore.foreach(annoyConfig.put(_, (0.0f, 0.0f)))

    val injection = Injection.int2BigEndian
    val distance = Cosine
    // Shared pool used for concurrent index appends; Annoy build/query use an
    // immediate pool instead.
    val exec = Executors.newFixedThreadPool(threads)
    val pool = FuturePool.apply(exec)
    val hnswMultiThread = TypedHnswIndex.index[Int, CosineDistance](
      dimen,
      distance,
      efConstruction = efConstruction,
      maxM = maxM,
      trainDataSetSize,
      ReadWriteFuturePool(pool)
    )
    val bruteforce = BruteForceIndex[Int, CosineDistance](distance, pool)
    val annoyBuilder = TypedAnnoyIndex.indexBuilder(dimen, numOfTrees, distance, injection, FuturePool.immediatePool)
    // Annoy builds to disk; use a throwaway temp directory.
    val temp = new LocalFile(Files.createTempDirectory("test").toFile)

    println("Creating bruteforce.........")
    // Generate the dataset while filling the brute-force index; `data` is a
    // synchronized list because appends run concurrently on the pool.
    val data = Collections.synchronizedList(new util.ArrayList[EntityEmbedding[Int]]())
    val bruteforceFutures = 1 to trainDataSetSize map { id =>
      val vec = Array.fill(dimen)(rng.nextFloat() * 50)
      val emb = EntityEmbedding[Int](id, Embedding(vec))
      data.add(emb)
      bruteforce.append(emb)
    }
    Await.result(Future.collect(bruteforceFutures))

    println("Creating hnsw multithread test.........")
    val (_, multiThreadInsertion) = time {
      Await.result(Future.collect(data.asScala.toList.map { emb =>
        hnswMultiThread.append(emb)
      }))
    }

    println("Creating annoy.........")
    // Annoy build time includes serializing the index to disk.
    val (_, annoyTime) = time {
      Await.result(Future.collect(data.asScala.toList.map(emb => annoyBuilder.append(emb))))
      annoyBuilder.toDirectory(temp)
    }
    val annoyQuery = TypedAnnoyIndex.loadQueryableIndex(
      dimen,
      Cosine,
      injection,
      FuturePool.immediatePool,
      temp
    )

    val hnswQueryable = hnswMultiThread.toQueryable
    println(s"Total train size : $trainDataSetSize")
    println(s"Total querySize : $testDataSetSize")
    println(s"Dimension : $dimen")
    println(s"Distance type : $distance")
    println(s"Annoy index creation time trees: $numOfTrees => $annoyTime ms")
    println(
      s"Hnsw multi thread creation time : $multiThreadInsertion ms efCons: $efConstruction maxM $maxM thread : $threads")

    println("Querying.........")
    var bruteForceTime = 0.0f
    1 to testDataSetSize foreach { id =>
      println("Querying id " + id)
      val embedding = Embedding(Array.fill(dimen)(rng.nextFloat()))
      // Brute-force result set is the ground truth used for recall below.
      val (list, timeTakenB) = time(
        Await
          .result(
            bruteforce.query(embedding, neighbours, BruteForceRuntimeParams))
          .toSet)
      bruteForceTime += timeTakenB

      // Snapshot the key sets before iterating: the maps are mutated inside
      // the loops below, and iterating a map while putting into it is unsafe.
      val annoyConfigCopy = annoyConfig.toMap
      val hnswEfConfigCopy = hnswEfConfig.toMap
      hnswEfConfigCopy.keys.foreach { ef =>
        val (nn, timeTaken) = time(Await
          .result(hnswQueryable.query(embedding, neighbours, HnswParams(ef)))
          .toSet)
        // Recall = fraction of ground-truth neighbours found by HNSW.
        val recall = (list.intersect(nn).size) * 1.0f / neighbours
        val (oldTime, oldRecall) = hnswEfConfig(ef)
        hnswEfConfig.put(ef, (oldTime + timeTaken, oldRecall + recall))
      }
      annoyConfigCopy.keys.foreach { nodes =>
        val (nn, timeTaken) = time(
          Await.result(
            annoyQuery
              .query(embedding, neighbours, AnnoyRuntimeParams(nodesToExplore = Some(nodes)))
              .map(_.toSet)))
        val recall = (list.intersect(nn).size) * 1.0f / neighbours
        val (oldTime, oldRecall) = annoyConfig(nodes)
        annoyConfig.put(nodes, (oldTime + timeTaken, oldRecall + recall))
      }
    }

    println(
      s"Bruteforce avg query time : ${bruteForceTime / testDataSetSize} ms")

    efSearch.foreach { ef =>
      val data = hnswEfConfig(ef)
      println(
        s"Hnsw avg recall and time with query ef : $ef => ${data._2 / testDataSetSize} ${data._1 / testDataSetSize} ms"
      )
    }

    nodesToExplore.foreach { n =>
      val data = annoyConfig(n)
      println(
        s"Annoy avg recall and time with nodes_to_explore : $n => ${data._2 / testDataSetSize} ${data._1 / testDataSetSize} ms"
      )
    }
    exec.shutdown()
  }

  /**
   * Runs `fn` once and returns its result paired with the wall-clock duration
   * in milliseconds.
   */
  def time[T](fn: => T): (T, Long) = {
    val start = System.currentTimeMillis()
    val result = fn
    val end = System.currentTimeMillis()
    (result, (end - start))
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/faiss/BUILD
# Scala wrappers around the SWIG-generated faiss JNI bindings
# (ann/src/main/java/com/twitter/ann/faiss), exposing them through the common
# ANN Queryable/indexing interfaces.
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/org/mapdb",
        "ann/src/main/java/com/twitter/ann/faiss",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/scala/com/twitter/ann/serialization",
        "ann/src/main/thrift/com/twitter/ann/common:ann-common-scala",
        "mediaservices/commons/src/main/scala:futuretracker",
        "src/java/com/twitter/common_internal/hadoop",
        "src/java/com/twitter/search/common/file",
        "src/scala/com/twitter/ml/api/embedding",
    ],
    exports = [
        "ann/src/main/scala/com/twitter/ann/common",
        "src/java/com/twitter/common_internal/hadoop",
        "src/java/com/twitter/search/common/file",
        "src/scala/com/twitter/ml/api/embedding",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/faiss/FaissCommon.scala
package com.twitter.ann.faiss

import com.twitter.ann.common.thriftscala.FaissRuntimeParam
import com.twitter.bijection.Injection
import scala.util.Failure
import scala.util.Success
import scala.util.Try
import com.twitter.ann.common.thriftscala.{RuntimeParams => ServiceRuntimeParams}
import com.twitter.search.common.file.AbstractFile

/**
 * Shared helpers for the faiss ANN implementation: conversion between
 * [[FaissParams]] and their thrift wire form, plus on-disk index validation.
 */
object FaissCommon {

  /** Bidirectional mapping between [[FaissParams]] and the thrift runtime params. */
  val RuntimeParamsInjection: Injection[FaissParams, ServiceRuntimeParams] =
    new Injection[FaissParams, ServiceRuntimeParams] {

      override def apply(params: FaissParams): ServiceRuntimeParams = {
        val thriftParam = FaissRuntimeParam(
          params.nprobe,
          params.quantizerEf,
          params.quantizerKFactorRF,
          params.quantizerNprobe,
          params.ht)
        ServiceRuntimeParams.FaissParam(thriftParam)
      }

      override def invert(thriftParams: ServiceRuntimeParams): Try[FaissParams] =
        thriftParams match {
          case ServiceRuntimeParams.FaissParam(param) =>
            Success(
              FaissParams(
                param.nprobe,
                param.quantizerEf,
                param.quantizerKfactorRf,
                param.quantizerNprobe,
                param.ht))
          // Any other runtime-params variant cannot be inverted into FaissParams.
          case p =>
            Failure(new IllegalArgumentException(s"Expected FaissParams got $p"))
        }
    }

  /**
   * A directory holds a valid faiss index iff it is a directory containing a
   * _SUCCESS marker and a "faiss.index" file.
   */
  def isValidFaissIndex(path: AbstractFile): Boolean =
    path.isDirectory && path.hasSuccessFile && path.getChild("faiss.index").exists()
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/faiss/FaissIndex.scala
package com.twitter.ann.faiss

import com.twitter.ann.common.Queryable
import com.twitter.ann.common._
import com.twitter.search.common.file.AbstractFile
import com.twitter.util.logging.Logging

/**
 * Search-time faiss parameters. Every field is optional; only the fields that
 * are set are forwarded to faiss's ParameterSpace.
 */
case class FaissParams(
  nprobe: Option[Int],
  quantizerEf: Option[Int],
  quantizerKFactorRF: Option[Int],
  quantizerNprobe: Option[Int],
  ht: Option[Int])
    extends RuntimeParams {

  override def toString: String = s"FaissParams(${toLibraryString})"

  /** Renders the set parameters as faiss's comma-separated "key=value" string. */
  def toLibraryString: String = {
    val pieces = List(
      nprobe.map { n => s"nprobe=${n}" },
      quantizerEf.map { ef => s"quantizer_efSearch=${ef}" },
      quantizerKFactorRF.map { k => s"quantizer_k_factor_rf=${k}" },
      quantizerNprobe.map { n => s"quantizer_nprobe=${n}" },
      ht.map { ht => s"ht=${ht}" }
    )
    pieces.flatten.mkString(",")
  }
}

object FaissIndex {

  /**
   * Loads a serialized faiss index from `directory` and wraps it as a
   * [[Queryable]] via [[QueryableIndexAdapter]].
   */
  def loadIndex[T, D <: Distance[D]](
    outerDimension: Int,
    outerMetric: Metric[D],
    directory: AbstractFile
  ): Queryable[T, FaissParams, D] =
    new QueryableIndexAdapter[T, D] with Logging {
      protected val metric: Metric[D] = outerMetric
      protected val dimension: Int = outerDimension
      protected val index: Index = {
        info(s"Loading faiss with ${swigfaiss.get_compile_options()}")
        QueryableIndexAdapter.loadJavaIndex(directory)
      }
    }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/faiss/FaissIndexer.scala
package com.twitter.ann.faiss

import com.google.common.base.Preconditions
import com.twitter.ann.common.Cosine
import com.twitter.ann.common.Distance
import com.twitter.ann.common.EntityEmbedding
import com.twitter.ann.common.IndexOutputFile
import com.twitter.ann.common.InnerProduct
import com.twitter.ann.common.L2
import com.twitter.ann.common.Metric
import com.twitter.ml.api.embedding.EmbeddingMath
import com.twitter.scalding.Execution
import com.twitter.scalding.TypedPipe
import com.twitter.search.common.file.AbstractFile
import com.twitter.search.common.file.FileUtils
import com.twitter.util.logging.Logging
import java.io.File
import scala.util.Random

/**
 * Builds a faiss index from a scalding pipe of embeddings and writes it
 * (plus a _SUCCESS marker) to an output directory.
 */
trait FaissIndexer extends Logging {

  /**
   * Produce faiss index file specified by factory string
   *
   * @param pipe Embeddings to be indexed
   * @param sampleRate Fraction of embeddings used for training. Regardless of this parameter, all embeddings are present in the output.
   * @param factoryString Faiss factory string, see https://github.com/facebookresearch/faiss/wiki/The-index-factory
   * @param metric Metric to use
   * @param outputDirectory Directory where _SUCCESS and faiss.index will be written.
   */
  def build[D <: Distance[D]](
    pipe: TypedPipe[EntityEmbedding[Long]],
    sampleRate: Float,
    factoryString: String,
    metric: Metric[D],
    outputDirectory: AbstractFile
  ): Execution[Unit] = {
    outputDirectory.mkdirs()
    Preconditions.checkState(
      outputDirectory.canRead,
      "Failed to create parent directories for %s",
      outputDirectory.toString)

    // Cosine is implemented as inner product over L2-normalized vectors, so
    // normalize up front when the metric requires it.
    val maybeNormalizedPipe = if (l2Normalize(metric)) {
      pipe.map { idAndEmbedding =>
        EntityEmbedding(idAndEmbedding.id, EmbeddingMath.Float.normalize(idAndEmbedding.embedding))
      }
    } else {
      pipe
    }

    maybeNormalizedPipe.toIterableExecution.flatMap { annEmbeddings =>
      logger.info(s"${factoryString}")
      val t1 = System.nanoTime
      // Shuffle so the training sample (a prefix of the shuffled data) is unbiased.
      buildAndWriteFaissIndex(
        Random.shuffle(annEmbeddings),
        sampleRate,
        factoryString,
        metric,
        new IndexOutputFile(outputDirectory))
      val duration = (System.nanoTime - t1) / 1e9d
      logger.info(s"It took ${duration}s to build and index")
      Execution.unit
    }
  }

  /**
   * Trains the index on a `sampleRate` fraction of `entities`, adds all of
   * them with their ids, and writes the result to `outputDirectory` followed
   * by a _SUCCESS marker.
   *
   * Note: `entities` must be non-empty (dimensionality is taken from the head).
   */
  def buildAndWriteFaissIndex[D <: Distance[D]](
    entities: Iterable[EntityEmbedding[Long]],
    sampleRate: Float,
    factoryString: String,
    metricType: Metric[D],
    outputDirectory: IndexOutputFile
  ): Unit = {
    val metric = parseMetric(metricType)
    val datasetSize = entities.size.toLong
    val dimensions = entities.head.embedding.length
    logger.info(s"There are $datasetSize embeddings")
    logger.info(s"Faiss compile options are ${swigfaiss.get_compile_options()}")
    logger.info(s"OMP threads count is ${swigfaiss.omp_get_max_threads()}")

    val index = swigfaiss.index_factory(dimensions, factoryString, metric)
    index.setVerbose(true)
    // IndexIDMap lets us associate our own 64-bit entity ids with vectors.
    val idMap = new IndexIDMap(index)

    val trainingSetSize = Math.min(datasetSize, Math.round(datasetSize * sampleRate))
    // Bridge ids and vectors into native (SWIG) containers before training/adding.
    val ids = toIndexVector(entities)
    val fullDataset = toFloatVector(dimensions, entities)
    logger.info("Finished bridging full dataset")

    // Train on the first trainingSetSize vectors of the (pre-shuffled) dataset.
    idMap.train(trainingSetSize, fullDataset.data())
    logger.info("Finished training")

    idMap.add_with_ids(datasetSize, fullDataset.data(), ids)
    logger.info("Added data to the index")

    // write_index works on local paths only; write to a temp file first, then
    // copy to the (possibly remote) output directory.
    val tmpFile = File.createTempFile("faiss.index", ".tmp")
    swigfaiss.write_index(idMap, tmpFile.toString)
    logger.info(s"Wrote to tmp file ${tmpFile.toString}")

    copyToOutputAndCreateSuccess(FileUtils.getFileHandle(tmpFile.toString), outputDirectory)
    logger.info("Copied file")
  }

  // Copies the locally-written index into the output directory and drops the
  // _SUCCESS marker last, so readers only see complete indices.
  private def copyToOutputAndCreateSuccess(
    tmpFile: AbstractFile,
    outputDirectory: IndexOutputFile
  ) = {
    val outputFile = outputDirectory.createFile("faiss.index")
    logger.info(s"Final output file is ${outputFile.getPath()}")
    outputFile.copyFrom(tmpFile.getByteSource.openStream())
    outputDirectory.createSuccessFile()
  }

  // Flattens all embeddings row-major into one native FloatVector
  // (dimensions * count entries), preallocating to avoid regrowth.
  private def toFloatVector(
    dimensions: Int,
    entities: Iterable[EntityEmbedding[Long]]
  ): FloatVector = {
    require(entities.nonEmpty)
    val vector = new FloatVector()
    vector.reserve(dimensions.toLong * entities.size.toLong)
    for (entity <- entities) {
      for (value <- entity.embedding) {
        vector.push_back(value)
      }
    }
    vector
  }

  // Collects entity ids into a native LongVector, in the same order as
  // toFloatVector flattens the vectors.
  private def toIndexVector(embeddings: Iterable[EntityEmbedding[Long]]): LongVector = {
    require(embeddings.nonEmpty)
    val vector = new LongVector()
    vector.reserve(embeddings.size)
    for (embedding <- embeddings) {
      vector.push_back(embedding.id)
    }
    vector
  }

  // Maps our Metric to faiss's MetricType. Cosine uses inner product; the
  // vectors are L2-normalized beforehand (see l2Normalize / build).
  private def parseMetric[D <: Distance[D]](metric: Metric[D]): MetricType = metric match {
    case L2 => MetricType.METRIC_L2
    case InnerProduct => MetricType.METRIC_INNER_PRODUCT
    case Cosine => MetricType.METRIC_INNER_PRODUCT
    case _ => throw new AbstractMethodError(s"Not implemented for metric ${metric}")
  }

  // True when the metric requires vectors to be L2-normalized before indexing.
  private def l2Normalize[D <: Distance[D]](metric: Metric[D]): Boolean = metric match {
    case Cosine => true
    case _ => false
  }
}

object FaissIndexer extends FaissIndexer {}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/faiss/HourlyDirectoryWithSuccessFileListing.scala
package com.twitter.ann.faiss

import com.twitter.conversions.DurationOps.richDurationFromInt
import com.twitter.search.common.file.AbstractFile
import com.twitter.search.common.file.FileUtils
import com.twitter.util.Return
import com.twitter.util.Throw
import com.twitter.util.Time
import com.twitter.util.Try
import com.twitter.util.logging.Logging
import java.util.Locale

/**
 * Walks an hourly-partitioned directory tree (root/yyyy/MM/dd/HH) backwards
 * in time, collecting directories that contain a _SUCCESS marker.
 */
object HourlyDirectoryWithSuccessFileListing extends Logging {
  private val SUCCESS_FILE_NAME = "_SUCCESS"

  /**
   * Returns up to `count` successful hourly directories, searching backwards
   * hour-by-hour from `startingFrom`, checking at most `lookbackInterval` hours.
   */
  def listHourlyIndexDirectories(
    root: AbstractFile,
    startingFrom: Time,
    count: Int,
    lookbackInterval: Int
  ): Seq[AbstractFile] = listingStep(root, startingFrom, count, lookbackInterval)

  // One step of the backwards walk: consumes one attempt per hour examined,
  // and one "directory to find" per successful hour.
  private def listingStep(
    root: AbstractFile,
    startingFrom: Time,
    remainingDirectoriesToFind: Int,
    remainingAttempts: Int
  ): List[AbstractFile] =
    if (remainingDirectoriesToFind == 0 || remainingAttempts == 0) {
      List.empty
    } else {
      val previousHour = startingFrom - 1.hour
      getSuccessfulDirectoryForDate(root, startingFrom) match {
        case Return(directory) =>
          directory :: listingStep(
            root,
            previousHour,
            remainingDirectoriesToFind - 1,
            remainingAttempts - 1)
        case Throw(_) =>
          // Hour is missing or unreadable: skip it and keep looking back.
          listingStep(root, previousHour, remainingDirectoriesToFind, remainingAttempts - 1)
      }
    }

  // Resolves root/yyyy/MM/dd/HH for `date` and returns it iff its _SUCCESS
  // marker exists and is readable.
  private def getSuccessfulDirectoryForDate(
    root: AbstractFile,
    date: Time
  ): Try[AbstractFile] = {
    val folder = root.getPath + "/" + date.format("yyyy/MM/dd/HH", Locale.ROOT)
    val successPath = folder + "/" + SUCCESS_FILE_NAME
    debug(s"Checking ${successPath}")

    Try(FileUtils.getFileHandle(successPath)).flatMap { file =>
      if (file.canRead) {
        Try(FileUtils.getFileHandle(folder))
      } else {
        Throw(new IllegalArgumentException(s"Found ${file.toString} but can't read it"))
      }
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/faiss/HourlyShardedIndex.scala
package com.twitter.ann.faiss

import com.twitter.ann.common.Distance
import com.twitter.ann.common.MemoizedInEpochs
import com.twitter.ann.common.Metric
import com.twitter.ann.common.Task
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.search.common.file.AbstractFile
import com.twitter.util.Duration
import com.twitter.util.Future
import com.twitter.util.Time
import com.twitter.util.Try
import com.twitter.util.logging.Logging
import java.util.concurrent.atomic.AtomicReference

object HourlyShardedIndex {
  /** Factory that just reorders the arguments into the class constructor. */
  def loadIndex[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    directory: AbstractFile,
    shardsToLoad: Int,
    shardWatchInterval: Duration,
    lookbackInterval: Int,
    statsReceiver: StatsReceiver
  ): HourlyShardedIndex[T, D] = {
    new HourlyShardedIndex[T, D](
      metric,
      dimension,
      directory,
      shardsToLoad,
      shardWatchInterval,
      lookbackInterval,
      statsReceiver)
  }
}

/**
 * A queryable faiss index composed of hourly shards on disk. A periodic Task
 * (every `shardWatchInterval`) re-lists the newest `shardsToLoad` successful
 * hourly directories (looking back at most `lookbackInterval` hours) and, when
 * the set changed, rebuilds an IndexShards over them and swaps it in.
 */
class HourlyShardedIndex[T, D <: Distance[D]](
  outerMetric: Metric[D],
  outerDimension: Int,
  directory: AbstractFile,
  shardsToLoad: Int,
  shardWatchInterval: Duration,
  lookbackInterval: Int,
  override protected val statsReceiver: StatsReceiver)
    extends QueryableIndexAdapter[T, D]
    with Logging
    with Task {
  // QueryableIndexAdapter
  protected val metric: Metric[D] = outerMetric
  protected val dimension: Int = outerDimension
  // Queries always read the most recently swapped-in index.
  protected def index: Index = {
    castedIndex.get()
  }

  // Task trait
  protected def task(): Future[Unit] = Future.value(reloadShards())
  protected def taskInterval: Duration = shardWatchInterval

  private def loadIndex(directory: AbstractFile): Try[Index] =
    Try(QueryableIndexAdapter.loadJavaIndex(directory))

  // Caches per-directory loaded shards across reloads, so unchanged shards are
  // not re-read from disk each epoch.
  private val shardsCache = new MemoizedInEpochs[AbstractFile, Index](loadIndex)

  // Destroying original index invalidate casted index. Keep a reference to both.
  private val originalIndex = new AtomicReference[IndexShards]()
  private val castedIndex = new AtomicReference[Index]()

  // Lists fresh hourly shard directories and, if the set differs from the
  // current epoch, builds a new IndexShards and atomically swaps it in
  // (under the adapter's write lock via replaceIndex).
  private def reloadShards(): Unit = {
    val freshDirectories = HourlyDirectoryWithSuccessFileListing.listHourlyIndexDirectories(
      directory,
      Time.now,
      shardsToLoad,
      lookbackInterval)

    if (shardsCache.currentEpochKeys == freshDirectories.toSet) {
      info("Not reloading shards, as they're exactly same")
    } else {
      val shards = shardsCache.epoch(freshDirectories)
      val indexShards = new IndexShards(dimension, false, false)
      for (shard <- shards) {
        indexShards.add_shard(shard)
      }

      replaceIndex(() => {
        castedIndex.set(swigfaiss.upcast_IndexShards(indexShards))
        originalIndex.set(indexShards)
      })

      // Potentially it's time to drop huge native index from memory, ask for GC
      System.gc()
    }

    // After the first reload an index must exist; fail fast otherwise.
    require(castedIndex.get() != null, "Failed to find any shards during startup")
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/faiss/QueryableIndexAdapter.scala
package com.twitter.ann.faiss

import com.twitter.ann.common.Cosine
import com.twitter.ann.common.Distance
import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.ann.common.Metric
import com.twitter.ann.common.NeighborWithDistance
import com.twitter.ann.common.Queryable
import com.twitter.ml.api.embedding.EmbeddingMath
import com.twitter.search.common.file.AbstractFile
import com.twitter.search.common.file.FileUtils
import com.twitter.util.Future
import com.twitter.util.logging.Logging
import java.io.File
import java.util.concurrent.locks.ReentrantReadWriteLock

object QueryableIndexAdapter extends Logging {

  // swigfaiss.read_index doesn't support hdfs files, hence a copy to temporary directory
  def loadJavaIndex(directory: AbstractFile): Index = {
    val indexFile = directory.getChild("faiss.index")
    val tmpFile = File.createTempFile("faiss.index", ".tmp")
    val tmpAbstractFile = FileUtils.getFileHandle(tmpFile.toString)
    indexFile.copyTo(tmpAbstractFile)
    val index = swigfaiss.read_index(tmpAbstractFile.getPath)
    // Best effort cleanup of the local copy; the index is already in memory.
    if (!tmpFile.delete()) {
      error(s"Failed to delete ${tmpFile.toString}")
    }
    index
  }
}

/**
 * Adapts a native faiss Index to the common ANN [[Queryable]] interface.
 * Handles Cosine emulation (normalize + inner product), translation of
 * similarities to distances, and thread-safe application of runtime
 * parameters to the shared native index.
 */
trait QueryableIndexAdapter[T, D <: Distance[D]] extends Queryable[T, FaissParams, D] {
  this: Logging =>

  private val MAX_COSINE_DISTANCE = 1f

  // Implementations supply the native index plus its metric and dimensionality.
  protected def index: Index
  protected val metric: Metric[D]
  protected val dimension: Int

  private def maybeNormalizeEmbedding(embeddingVector: EmbeddingVector): EmbeddingVector = {
    // There is no direct support for Cosine, but l2norm + ip == Cosine by definition
    if (metric == Cosine) {
      EmbeddingMath.Float.normalize(embeddingVector)
    } else {
      embeddingVector
    }
  }

  // Converts the first `len` entries of `array` from cosine similarity to
  // cosine distance (1 - similarity), clamping out-of-range values.
  private def maybeTranslateToCosineDistanceInplace(array: floatArray, len: Int): Unit = {
    // Faiss reports Cosine similarity while we need Cosine distance.
    if (metric == Cosine) {
      for (index <- 0 until len) {
        val similarity = array.getitem(index)
        if (similarity < 0 || similarity > 1) {
          warn(s"Expected similarity to be between 0 and 1, got ${similarity} instead")
          array.setitem(index, MAX_COSINE_DISTANCE)
        } else {
          array.setitem(index, 1 - similarity)
        }
      }
    }
  }

  // Guards both `currentParams` and the native index's parameter state:
  // queries hold the read lock, parameter changes / index swaps the write lock.
  private val paramsLock = new ReentrantReadWriteLock()
  private var currentParams: Option[String] = None

  // Assume that parameters rarely change and try read lock first
  private def ensuringParams[R](parameterString: String, f: () => R): R = {
    paramsLock.readLock().lock()
    try {
      if (currentParams.contains(parameterString)) {
        return f()
      }
    } finally {
      paramsLock.readLock().unlock()
    }

    // Params differ: take the write lock, apply them to the native index,
    // then run the query while still holding the lock.
    paramsLock.writeLock().lock()
    try {
      currentParams = Some(parameterString)
      new ParameterSpace().set_index_parameters(index, parameterString)
      f()
    } finally {
      paramsLock.writeLock().unlock()
    }
  }

  /**
   * Runs `f` (which replaces the underlying native index) under the write
   * lock, resetting the cached parameter string so the next query re-applies
   * its parameters to the new index.
   */
  def replaceIndex(f: () => Unit): Unit = {
    paramsLock.writeLock().lock()
    try {
      currentParams = None
      f()
    } finally {
      paramsLock.writeLock().unlock()
    }
  }

  /** Returns the ids of the `numOfNeighbors` nearest neighbours of `embedding`. */
  def query(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: FaissParams
  ): Future[List[T]] = {
    Future.value(
      ensuringParams(
        runtimeParams.toLibraryString,
        () => {
          val distances = new floatArray(numOfNeighbors)
          val indexes = new LongVector()
          indexes.resize(numOfNeighbors)
          val normalizedEmbedding = maybeNormalizeEmbedding(embedding)

          index.search(
            // Number of query embeddings
            1,
            // Array of query embeddings
            toFloatArray(normalizedEmbedding).cast(),
            // Number of neighbours to return
            numOfNeighbors,
            // Location to store neighbour distances
            distances.cast(),
            // Location to store neighbour identifiers
            indexes
          )

          // This is a shortcoming of current swig bindings
          // Nothing prevents JVM from freeing distances while inside index.search
          // This might be removed once we start passing FloatVector
          // Why java.lang.ref.Reference.reachabilityFence doesn't compile?
          debug(distances)

          toSeq(indexes, numOfNeighbors).toList.asInstanceOf[List[T]]
        }
      ))
  }

  /** Like [[query]], but pairs each neighbour id with its typed distance. */
  def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbors: Int,
    runtimeParams: FaissParams
  ): Future[List[NeighborWithDistance[T, D]]] = {
    Future.value(
      ensuringParams(
        runtimeParams.toLibraryString,
        () => {
          val distances = new floatArray(numOfNeighbors)
          val indexes = new LongVector()
          indexes.resize(numOfNeighbors)
          val normalizedEmbedding = maybeNormalizeEmbedding(embedding)

          index.search(
            // Number of query embeddings
            1,
            // Array of query embeddings
            toFloatArray(normalizedEmbedding).cast(),
            // Number of neighbours to return
            numOfNeighbors,
            // Location to store neighbour distances
            distances.cast(),
            // Location to store neighbour identifiers
            indexes
          )

          val ids = toSeq(indexes, numOfNeighbors).toList.asInstanceOf[List[T]]

          maybeTranslateToCosineDistanceInplace(distances, numOfNeighbors)
          val distancesSeq = toSeq(distances, numOfNeighbors)

          ids.zip(distancesSeq).map {
            case (id, distance) =>
              NeighborWithDistance(id, metric.fromAbsoluteDistance(distance))
          }
        }
      ))
  }

  // Copies a JVM embedding into a native float array for the SWIG bindings.
  private def toFloatArray(emb: EmbeddingVector): floatArray = {
    val nativeArray = new floatArray(emb.length)
    for ((value, aIdx) <- emb.iterator.zipWithIndex) {
      nativeArray.setitem(aIdx, value)
    }
    nativeArray
  }

  private def toSeq(vector: LongVector, len: Long): Seq[Long] = {
    (0L until len).map(vector.at)
  }

  private def toSeq(array: floatArray, len: Int): Seq[Float] = {
    (0 until len).map(array.getitem)
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/featurestore/BUILD
# Bridges the online feature store to the ANN EmbeddingProducer interface.
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    tags = ["bazel-compatible"],
    dependencies = [
        "ann/src/main/scala/com/twitter/ann/common",
        "src/scala/com/twitter/ml/featurestore/lib",
        "src/scala/com/twitter/ml/featurestore/lib/online",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/featurestore/FeatureStoreEmbeddingProducer.scala
package com.twitter.ann.featurestore

import com.twitter.ann.common.EmbeddingProducer
import com.twitter.finagle.stats.{InMemoryStatsReceiver, StatsReceiver}
import com.twitter.ml.api.embedding.{Embedding, EmbeddingSerDe}
import com.twitter.ml.api.thriftscala
import com.twitter.ml.api.thriftscala.{Embedding => TEmbedding}
import com.twitter.ml.featurestore.lib.dataset.online.VersionedOnlineAccessDataset
import com.twitter.ml.featurestore.lib.{EntityId, RawFloatTensor}
import com.twitter.ml.featurestore.lib.dataset.DatasetParams
import com.twitter.ml.featurestore.lib.entity.EntityWithId
import com.twitter.ml.featurestore.lib.feature.{BoundFeature, BoundFeatureSet}
import com.twitter.ml.featurestore.lib.online.{FeatureStoreClient, FeatureStoreRequest}
import com.twitter.ml.featurestore.lib.params.FeatureStoreParams
import com.twitter.stitch.Stitch
import com.twitter.strato.opcontext.Attribution
import com.twitter.strato.client.Client

object FeatureStoreEmbeddingProducer {

  /**
   * Builds an [[EmbeddingProducer]] that reads a specific version of an
   * embedding feature from the online feature store.
   *
   * @param dataset versioned online dataset the feature lives in
   * @param version dataset version to pin requests to
   * @param boundFeature the raw-float-tensor embedding feature to fetch
   * @param client strato client used by the feature store client
   * @param statsReceiver stats scope (defaults to in-memory, i.e. unexported)
   * @param featureStoreAttributions attribution tags attached to all requests
   */
  def apply[T <: EntityId](
    dataset: VersionedOnlineAccessDataset[T, TEmbedding],
    version: Long,
    boundFeature: BoundFeature[T, RawFloatTensor],
    client: Client,
    statsReceiver: StatsReceiver = new InMemoryStatsReceiver,
    featureStoreAttributions: Seq[Attribution] = Seq.empty
  ): EmbeddingProducer[EntityWithId[T]] = {
    val featureStoreParams = FeatureStoreParams(
      perDataset = Map(
        dataset.id -> DatasetParams(datasetVersion = Some(version))
      ),
      global = DatasetParams(attributions = featureStoreAttributions)
    )

    val featureStoreClient = FeatureStoreClient(
      BoundFeatureSet(boundFeature),
      client,
      statsReceiver,
      featureStoreParams
    )
    new FeatureStoreEmbeddingProducer(boundFeature, featureStoreClient)
  }
}

private[featurestore] class FeatureStoreEmbeddingProducer[T <: EntityId](
  boundFeature: BoundFeature[T, RawFloatTensor],
  featureStoreClient: FeatureStoreClient)
    extends EmbeddingProducer[EntityWithId[T]] {

  // Looks up embedding from online feature store for an entity.
  // Returns None when the feature store has no value for the entity.
  override def produceEmbedding(input: EntityWithId[T]): Stitch[Option[Embedding[Float]]] = {
    val featureStoreRequest = FeatureStoreRequest(
      entityIds = Seq(input)
    )
    Stitch.callFuture(featureStoreClient(featureStoreRequest).map { predictionRecord =>
      predictionRecord.getFeatureValue(boundFeature) match {
        case Some(featureValue) => {
          // Convert the raw tensor back into the ml-api Embedding type via thrift.
          val embedding = EmbeddingSerDe.floatEmbeddingSerDe.fromThrift(
            thriftscala.Embedding(Some(featureValue.value))
          )
          Some(embedding)
        }
        case _ => None
      }
    })
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/file_store/BUILD
# File-backed storehaus stores mapping long index ids to typed entity ids,
# serialized as thrift (FileBasedIndexIdStore).
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/com/twitter/storehaus:core",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/thrift/com/twitter/ann/common:ann-common-scala",
        "mediaservices/commons/src/main/scala",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/file_store/ReadableIndexIdFileStore.scala
package com.twitter.ann.file_store

import com.twitter.ann.common.thriftscala.FileBasedIndexIdStore
import com.twitter.bijection.Injection
import com.twitter.mediaservices.commons.codec.{ArrayByteBufferCodec, ThriftByteBufferCodec}
import com.twitter.search.common.file.AbstractFile
import com.twitter.storehaus.ReadableStore
import java.nio.ByteBuffer

object ReadableIndexIdFileStore {

  /**
   * @param file : File path to read serialized long indexId <-> Id mapping from.
   * @param injection: Injection to convert bytes to Id.
   * @tparam V: Type of Id
   * @return File based Readable Store
   */
  def apply[V](
    file: AbstractFile,
    injection: Injection[V, Array[Byte]]
  ): ReadableStore[Long, V] = {
    val codec = new ThriftByteBufferCodec(FileBasedIndexIdStore)
    // Decode every value eagerly with a strict `map` + `toMap`. The previous
    // `.mapValues(...)` produced a lazy view (Scala 2.12 semantics), which
    // re-ran injection.invert on every store lookup and deferred decode
    // failures (`.get` throwing) from load time to arbitrary read time.
    val store: Map[Long, V] = codec
      .decode(loadFile(file))
      .indexIdMap
      .getOrElse(Map.empty[Long, ByteBuffer])
      .toMap
      .map {
        case (indexId, bytes) =>
          indexId -> injection.invert(ArrayByteBufferCodec.decode(bytes)).get
      }
    ReadableStore.fromMap[Long, V](store)
  }

  // Reads the entire file into a ByteBuffer for thrift decoding.
  private[this] def loadFile(file: AbstractFile): ByteBuffer = {
    ArrayByteBufferCodec.encode(file.getByteSource.read())
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/file_store/WritableIndexIdFileStore.scala
package com.twitter.ann.file_store

import com.twitter.ann.common.IndexOutputFile
import com.twitter.ann.common.thriftscala.FileBasedIndexIdStore
import com.twitter.bijection.Injection
import com.twitter.mediaservices.commons.codec.ArrayByteBufferCodec
import com.twitter.mediaservices.commons.codec.ThriftByteBufferCodec
import com.twitter.storehaus.Store
import com.twitter.util.Future
import java.util.concurrent.{ConcurrentHashMap => JConcurrentHashMap}
import scala.collection.JavaConverters._

object WritableIndexIdFileStore {

  /**
   * @param injection: Injection to convert typed Id to bytes.
   * @tparam V: Type of Id
   * @return File based Writable Store
   */
  def apply[V](
    injection: Injection[V, Array[Byte]]
  ): WritableIndexIdFileStore[V] =
    new WritableIndexIdFileStore[V](
      new JConcurrentHashMap[Long, Option[V]],
      injection
    )
}

/**
 * In-memory writable store of long indexId -> typed Id, backed by a
 * concurrent map, which can be serialized to a file in thrift form via [[save]].
 */
class WritableIndexIdFileStore[V] private (
  map: JConcurrentHashMap[Long, Option[V]],
  injection: Injection[V, Array[Byte]])
    extends Store[Long, V] {

  private[this] val store = Store.fromJMap(map)

  override def get(k: Long): Future[Option[V]] = store.get(k)

  override def put(kv: (Long, Option[V])): Future[Unit] = store.put(kv)

  /**
   * Serialize and store the mapping in thrift format
   * @param file : File path to store serialized long indexId <-> Id mapping
   */
  def save(file: IndexOutputFile): Unit = saveThrift(toThrift(), file)

  def getInjection: Injection[V, Array[Byte]] = injection

  // Builds the thrift representation, skipping entries whose value is None.
  private[this] def toThrift(): FileBasedIndexIdStore = {
    val encodedEntries = map.asScala.collect {
      case (indexId, Some(id)) => (indexId, ArrayByteBufferCodec.encode(injection.apply(id)))
    }
    FileBasedIndexIdStore(Some(encodedEntries))
  }

  // Encodes the thrift struct and writes the bytes out through the index file.
  private[this] def saveThrift(thriftObj: FileBasedIndexIdStore, file: IndexOutputFile): Unit = {
    val codec = new ThriftByteBufferCodec(FileBasedIndexIdStore)
    val bytes = ArrayByteBufferCodec.decode(codec.encode(thriftObj))
    val outputStream = file.getOutputStream()
    outputStream.write(bytes)
    outputStream.close()
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/BUILD
# Scala layer over the Java HNSW implementation, adapting it to the common
# ANN interfaces (indexing, querying, serialization).
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/org/mapdb",
        "ann/src/main/java/com/twitter/ann/hnsw",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/scala/com/twitter/ann/serialization",
        "ann/src/main/thrift/com/twitter/ann/common:ann-common-scala",
        "mediaservices/commons/src/main/scala:futuretracker",
        "src/java/com/twitter/common_internal/hadoop",
        "src/java/com/twitter/search/common/file",
        "src/scala/com/twitter/ml/api/embedding",
    ],
    exports = [
        "ann/src/main/scala/com/twitter/ann/common",
        "src/java/com/twitter/common_internal/hadoop",
        "src/java/com/twitter/search/common/file",
        "src/scala/com/twitter/ml/api/embedding",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/DistanceFunctionGenerator.scala
package com.twitter.ann.hnsw

import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.ann.common.{Cosine, Distance, InnerProduct, Metric}

/**
 * Builds the pair of distance functions HNSW needs: one comparing two stored
 * ids, one comparing a query vector against a stored id.
 */
private[hnsw] object DistanceFunctionGenerator {
  def apply[T, D <: Distance[D]](
    metric: Metric[D],
    idToEmbeddingFn: (T) => EmbeddingVector
  ): DistanceFunctionGenerator[T] = {
    // Use InnerProduct for cosine and normalize the vectors before appending and querying.
    val isCosine = metric == Cosine
    val effectiveMetric = if (isCosine) InnerProduct else metric

    val indexDistance = new DistanceFunction[T, T] {
      override def distance(id1: T, id2: T) =
        effectiveMetric.absoluteDistance(idToEmbeddingFn(id1), idToEmbeddingFn(id2))
    }

    val queryDistance = new DistanceFunction[EmbeddingVector, T] {
      override def distance(embedding: EmbeddingVector, id: T) =
        effectiveMetric.absoluteDistance(embedding, idToEmbeddingFn(id))
    }

    DistanceFunctionGenerator(indexDistance, queryDistance, isCosine)
  }
}

/**
 * @param index distance between two already-indexed items
 * @param query distance between a raw query vector and an indexed item
 * @param shouldNormalize true when vectors must be L2-normalized (Cosine metric)
 */
private[hnsw] case class DistanceFunctionGenerator[T](
  index: DistanceFunction[T, T],
  query: DistanceFunction[EmbeddingVector, T],
  shouldNormalize: Boolean)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/Hnsw.scala
package com.twitter.ann.hnsw

import com.google.common.annotations.VisibleForTesting
import com.twitter.ann.common.EmbeddingType._
import com.twitter.ann.common.Metric.toThrift
import com.twitter.ann.common._
import com.twitter.ann.common.thriftscala.DistanceMetric
import com.twitter.ann.hnsw.HnswIndex.RandomProvider
import com.twitter.util.Future
import java.util.Random
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.ThreadLocalRandom
import java.util.concurrent.locks.Lock
import java.util.concurrent.locks.ReentrantLock
import scala.collection.JavaConverters._

private[hnsw] object Hnsw {

  /**
   * Builds an in-memory HNSW index backed by `idEmbeddingMap`.
   *
   * @param dimension        dimension of the embeddings to be indexed
   * @param metric           distance metric; Cosine is served as InnerProduct over
   *                         normalized vectors (see [[DistanceFunctionGenerator]])
   * @param efConstruction   index-time accuracy/speed trade-off parameter
   * @param maxM             max bi-directional links per element
   * @param expectedElements sizing hint for internal structures
   * @param futurePool       pool separating read (query) and write (append/update) work
   * @param idEmbeddingMap   backing store mapping ids to embeddings
   */
  private[hnsw] def apply[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    efConstruction: Int,
    maxM: Int,
    expectedElements: Int,
    futurePool: ReadWriteFuturePool,
    idEmbeddingMap: IdEmbeddingMap[T]
  ): Hnsw[T, D] = {
    // Per-thread RNG avoids contention between concurrent inserts.
    val randomProvider = new RandomProvider {
      override def get(): Random = ThreadLocalRandom.current()
    }
    val distFn = DistanceFunctionGenerator(metric, (key: T) => idEmbeddingMap.get(key))
    val internalIndex = new HnswIndex[T, EmbeddingVector](
      distFn.index,
      distFn.query,
      efConstruction,
      maxM,
      expectedElements,
      randomProvider
    )
    new Hnsw[T, D](
      dimension,
      metric,
      internalIndex,
      futurePool,
      idEmbeddingMap,
      distFn.shouldNormalize,
      LockedAccess.apply(expectedElements)
    )
  }
}

private[hnsw] object LockedAccess {

  /** Lock registry pre-sized for the expected number of items. */
  protected[hnsw] def apply[T](expectedElements: Int): LockedAccess[T] =
    DefaultLockedAccess(new ConcurrentHashMap[T, Lock](expectedElements))

  /** Lock registry with default sizing. */
  protected[hnsw] def apply[T](): LockedAccess[T] =
    DefaultLockedAccess(new ConcurrentHashMap[T, Lock]())
}

private[hnsw] case class DefaultLockedAccess[T](locks: ConcurrentHashMap[T, Lock])
    extends LockedAccess[T] {
  // computeIfAbsent guarantees exactly one Lock instance per item across threads.
  override def lockProvider(item: T) = locks.computeIfAbsent(item, (_: T) => new ReentrantLock())
}

/** Per-item mutual exclusion used to serialize writes touching the same id. */
private[hnsw] trait LockedAccess[T] {
  protected def lockProvider(item: T): Lock

  /** Runs `fn` while holding the lock dedicated to `item`. */
  def lock[K](item: T)(fn: => K): K = {
    val lock = lockProvider(item)
    lock.lock()
    try {
      fn
    } finally {
      lock.unlock()
    }
  }
}

@VisibleForTesting
private[hnsw] class Hnsw[T, D <: Distance[D]](
  dimension: Int,
  metric: Metric[D],
  hnswIndex: HnswIndex[T, EmbeddingVector],
  readWriteFuturePool: ReadWriteFuturePool,
  idEmbeddingMap: IdEmbeddingMap[T],
  shouldNormalize: Boolean,
  lockedAccess: LockedAccess[T] = LockedAccess.apply[T]())
    extends Appendable[T, HnswParams, D]
    with Queryable[T, HnswParams, D]
    with Updatable[T] {

  /**
   * Adds a new entity to the index.
   *
   * Fails the returned future with IllegalDuplicateInsertException if the id is
   * already present (use [[update]] for overwrites). Failures propagate to the
   * caller via the returned future directly; no extra error callback is needed.
   */
  override def append(entity: EntityEmbedding[T]): Future[Unit] = {
    readWriteFuturePool.write {
      val indexDimension = entity.embedding.length
      // Edit distance works on variable-length inputs, so the check is skipped for it.
      // Note: `dimension` is the index's configured dimension, `indexDimension` is
      // the incoming embedding's length (previous message had the labels swapped).
      assert(
        toThrift(metric) == DistanceMetric.EditDistance || indexDimension == dimension,
        s"Dimension mismatch for index($dimension) and embedding($indexDimension)"
      )
      lockedAccess.lock(entity.id) {
        // To make this thread-safe, we are using ConcurrentHashMap#putIfAbsent underneath,
        // so if there is a pre-existing item, putIfAbsent() returns something that is not null.
        val embedding = idEmbeddingMap.putIfAbsent(entity.id, updatedEmbedding(entity.embedding))
        if (embedding == null) {
          // New element - insert into the index
          hnswIndex.insert(entity.id)
        } else {
          // Existing element - append does not allow duplicates, reject the insert.
          throw new IllegalDuplicateInsertException(
            "Append method does not permit duplicates (try using update method): " + entity.id)
        }
      }
    }
  }

  override def toQueryable: Queryable[T, HnswParams, D] = this

  /** Returns the ids of the `numOfNeighbours` nearest neighbours of `embedding`. */
  override def query(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: HnswParams
  ): Future[List[T]] = {
    queryWithDistance(embedding, numOfNeighbours, runtimeParams)
      .map(_.map(_.neighbor))
  }

  /** Returns the nearest neighbours of `embedding` together with their distances. */
  override def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: HnswParams
  ): Future[List[NeighborWithDistance[T, D]]] = {
    val indexDimension = embedding.length
    assert(
      toThrift(metric) == DistanceMetric.EditDistance || indexDimension == dimension,
      s"Dimension mismatch for index($dimension) and embedding($indexDimension)"
    )
    readWriteFuturePool.read {
      hnswIndex
        .searchKnn(updatedEmbedding(embedding), numOfNeighbours, runtimeParams.ef)
        .asScala
        .map { nn =>
          NeighborWithDistance(
            nn.getItem,
            metric.fromAbsoluteDistance(nn.getDistance)
          )
        }
        .toList
    }
  }

  // Cosine is served via inner product over normalized vectors; other metrics
  // use the embedding unchanged.
  private[this] def updatedEmbedding(embedding: EmbeddingVector): EmbeddingVector = {
    if (shouldNormalize) {
      MetricUtil.norm(embedding)
    } else {
      embedding
    }
  }

  def getIndex: HnswIndex[T, EmbeddingVector] = hnswIndex

  def getDimen: Int = dimension

  def getMetric: Metric[D] = metric

  def getIdEmbeddingMap: IdEmbeddingMap[T] = idEmbeddingMap

  /** Inserts the embedding for `entity.id`, or overwrites and re-links it if present. */
  override def update(
    entity: EntityEmbedding[T]
  ): Future[Unit] = {
    readWriteFuturePool.write {
      val indexDimension = entity.embedding.length
      assert(
        toThrift(metric) == DistanceMetric.EditDistance || indexDimension == dimension,
        s"Dimension mismatch for index($dimension) and embedding($indexDimension)"
      )
      lockedAccess.lock(entity.id) {
        val embedding = idEmbeddingMap.put(entity.id, updatedEmbedding(entity.embedding))
        if (embedding == null) {
          // New element - insert into the index
          hnswIndex.insert(entity.id)
        } else {
          // Existing element - update the embedding and graph structure
          hnswIndex.reInsert(entity.id)
        }
      }
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/HnswCommon.scala
package com.twitter.ann.hnsw

import com.twitter.ann.common.RuntimeParams
import com.twitter.ann.common.thriftscala.HnswIndexMetadata
import com.twitter.ann.common.thriftscala.HnswRuntimeParam
import com.twitter.ann.common.thriftscala.{RuntimeParams => ServiceRuntimeParams}
import com.twitter.bijection.Injection
import com.twitter.mediaservices.commons.codec.ThriftByteBufferCodec
import com.twitter.search.common.file.AbstractFile
import scala.util.Failure
import scala.util.Success
import scala.util.Try

/** Shared constants and (de)serialization helpers for the HNSW on-disk index layout. */
object HnswCommon {
  // Thrift codec for the index metadata file; lazy so it is only built when first used.
  private[hnsw] lazy val MetadataCodec = new ThriftByteBufferCodec(HnswIndexMetadata)

  // File and directory names that make up a serialized HNSW index.
  private[hnsw] val MetaDataFileName = "hnsw_index_metadata"
  private[hnsw] val EmbeddingMappingFileName = "hnsw_embedding_mapping"
  private[hnsw] val InternalIndexDir = "hnsw_internal_index"
  private[hnsw] val HnswInternalMetadataFileName = "hnsw_internal_metadata"
  private[hnsw] val HnswInternalGraphFileName = "hnsw_internal_graph"

  /** Converts between the scala [[HnswParams]] and their thrift service representation. */
  val RuntimeParamsInjection: Injection[HnswParams, ServiceRuntimeParams] =
    new Injection[HnswParams, ServiceRuntimeParams] {
      override def apply(scalaParams: HnswParams): ServiceRuntimeParams = {
        ServiceRuntimeParams.HnswParam(
          HnswRuntimeParam(
            scalaParams.ef
          )
        )
      }

      // Inversion fails for any non-HNSW runtime param variant.
      override def invert(thriftParams: ServiceRuntimeParams): Try[HnswParams] =
        thriftParams match {
          case ServiceRuntimeParams.HnswParam(hnswParam) =>
            Success(
              HnswParams(hnswParam.ef)
            )
          case p => Failure(new IllegalArgumentException(s"Expected HnswRuntimeParam got $p"))
        }
    }

  /**
   * Returns true when `path` holds a complete serialized HNSW index: a directory with a
   * success marker and all of the expected metadata, embedding-mapping and graph files.
   */
  def isValidHnswIndex(path: AbstractFile): Boolean = {
    path.isDirectory &&
    path.hasSuccessFile &&
    path.getChild(MetaDataFileName).exists() &&
    path.getChild(EmbeddingMappingFileName).exists() &&
    path.getChild(InternalIndexDir).exists() &&
    path.getChild(InternalIndexDir).getChild(HnswInternalMetadataFileName).exists() &&
    path.getChild(InternalIndexDir).getChild(HnswInternalGraphFileName).exists()
  }
}

/**
 * Hnsw runtime params
 * @param ef: The size of the dynamic list for the nearest neighbors (used during the search).
 *            Higher ef leads to more accurate but slower search.
 *            ef cannot be set lower than the number of queried nearest neighbors k.
 *            The value of ef can be anything between k and the size of the dataset.
 */
case class HnswParams(ef: Int) extends RuntimeParams {
  override def toString: String =
    s"HnswParams(ef = $ef)"
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/HnswIOUtil.scala
package com.twitter.ann.hnsw

import com.google.common.annotations.VisibleForTesting
import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.ann.common.thriftscala.HnswIndexMetadata
import com.twitter.ann.common.Distance
import com.twitter.ann.common.EntityEmbedding
import com.twitter.ann.common.Metric
import com.twitter.ann.hnsw.HnswCommon._
import com.twitter.ann.serialization.PersistedEmbeddingInjection
import com.twitter.ann.serialization.ThriftIteratorIO
import com.twitter.ann.serialization.thriftscala.PersistedEmbedding
import com.twitter.bijection.Injection
import com.twitter.mediaservices.commons.codec.ArrayByteBufferCodec
import com.twitter.search.common.file.AbstractFile
import java.io.BufferedInputStream
import java.io.BufferedOutputStream
import java.io.OutputStream

/** Serialization helpers for HNSW embedding mappings and index metadata. */
private[hnsw] object HnswIOUtil {
  // Stream buffer size for reading/writing embeddings. Default 64Kb.
  private val BufferSize = 64 * 1024

  /**
   * Reads thrift-serialized embeddings from `embeddingFile` into `idEmbeddingMap`
   * and returns the (same) populated map. The input stream is always closed.
   *
   * @param embeddingFile  file containing PersistedEmbedding thrift records
   * @param injection      injection for typed id T to Array[Byte]
   * @param idEmbeddingMap destination map; existing ids are NOT overwritten
   */
  @VisibleForTesting
  private[hnsw] def loadEmbeddings[T](
    embeddingFile: AbstractFile,
    injection: Injection[T, Array[Byte]],
    idEmbeddingMap: IdEmbeddingMap[T],
  ): IdEmbeddingMap[T] = {
    // Avoid double-buffering when the source stream is already buffered.
    val inputStream = embeddingFile.getByteSource.openStream() match {
      case buffered: BufferedInputStream => buffered
      case raw => new BufferedInputStream(raw, BufferSize)
    }
    val thriftIteratorIO = new ThriftIteratorIO[PersistedEmbedding](PersistedEmbedding)
    val iterator = thriftIteratorIO.fromInputStream(inputStream)
    val embeddingInjection = new PersistedEmbeddingInjection(injection)
    try {
      iterator.foreach { persistedEmbedding =>
        val embedding = embeddingInjection.invert(persistedEmbedding).get
        // Previous mapping (if any) is deliberately discarded.
        val _ = idEmbeddingMap.putIfAbsent(embedding.id, embedding.embedding)
      }
    } finally {
      inputStream.close()
    }
    idEmbeddingMap
  }

  /**
   * Writes all (id, embedding) pairs from `iter` to `stream` as PersistedEmbedding
   * thrift records. The stream is always closed, even on failure.
   */
  @VisibleForTesting
  private[hnsw] def saveEmbeddings[T](
    stream: OutputStream,
    injection: Injection[T, Array[Byte]],
    iter: Iterator[(T, EmbeddingVector)]
  ): Unit = {
    val thriftIteratorIO = new ThriftIteratorIO[PersistedEmbedding](PersistedEmbedding)
    val embeddingInjection = new PersistedEmbeddingInjection(injection)
    val iterator = iter.map {
      case (id, emb) =>
        embeddingInjection(EntityEmbedding(id, emb))
    }
    // Avoid double-buffering when the destination stream is already buffered.
    val outputStream = stream match {
      case buffered: BufferedOutputStream => buffered
      case raw => new BufferedOutputStream(raw, BufferSize)
    }
    try {
      thriftIteratorIO.toOutputStream(iterator, outputStream)
    } finally {
      outputStream.close()
    }
  }

  /**
   * Writes the index metadata (dimension, metric, element count) to `metadataStream`
   * and closes it. The close is in a finally block so the stream cannot leak if the
   * write fails.
   */
  @VisibleForTesting
  private[hnsw] def saveIndexMetadata(
    dimension: Int,
    metric: Metric[_ <: Distance[_]],
    numElements: Int,
    metadataStream: OutputStream
  ): Unit = {
    val metadata = HnswIndexMetadata(
      dimension,
      Metric.toThrift(metric),
      numElements
    )
    val bytes = ArrayByteBufferCodec.decode(MetadataCodec.encode(metadata))
    try {
      metadataStream.write(bytes)
    } finally {
      metadataStream.close()
    }
  }

  /** Reads the serialized index metadata back from `metadataFile`. */
  @VisibleForTesting
  private[hnsw] def loadIndexMetadata(
    metadataFile: AbstractFile
  ): HnswIndexMetadata = {
    MetadataCodec.decode(
      ArrayByteBufferCodec.encode(metadataFile.getByteSource.read())
    )
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/IdEmbeddingMap.scala
package com.twitter.ann.hnsw

import com.twitter.ann.common.EmbeddingType._
import java.io.OutputStream

/**
 * Mutable mapping from item id to embedding vector, used as the backing store for
 * the HNSW index.
 *
 * NOTE(review): the null-means-absent return contract described below is inferred
 * from the ConcurrentHashMap/MapDB based implementations in this package — confirm
 * before relying on it for new implementations.
 */
trait IdEmbeddingMap[T] {
  /** Stores `embedding` only if `id` is absent; returns the previous embedding, or null if none. */
  def putIfAbsent(id: T, embedding: EmbeddingVector): EmbeddingVector

  /** Stores `embedding` for `id`, overwriting; returns the previous embedding, or null if none. */
  def put(id: T, embedding: EmbeddingVector): EmbeddingVector

  /** Returns the embedding stored for `id`. */
  def get(id: T): EmbeddingVector

  /** Iterates over all (id, embedding) pairs currently stored. */
  def iter(): Iterator[(T, EmbeddingVector)]

  /** Number of stored embeddings. */
  def size(): Int

  /** Serializes all stored embeddings to the given output stream. */
  def toDirectory(embeddingFileOutputStream: OutputStream): Unit
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/JMapBasedIdEmbeddingMap.scala
package com.twitter.ann.hnsw

import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.bijection.Injection
import com.twitter.search.common.file.AbstractFile
import java.io.OutputStream
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._

private[hnsw] object JMapBasedIdEmbeddingMap {

  /**
   * Creates an in-memory, ConcurrentHashMap-backed id-to-embedding store.
   * @param expectedElements Expected num of elements for sizing hint, need not be exact.
   */
  def applyInMemory[T](expectedElements: Int): IdEmbeddingMap[T] =
    new JMapBasedIdEmbeddingMap[T](
      new ConcurrentHashMap[T, EmbeddingVector](expectedElements),
      Option.empty
    )

  /**
   * Creates an in-memory store that can additionally be serialized to disk.
   * @param expectedElements Expected num of elements for sizing hint, need not be exact.
   * @param injection Injection for typed Id T to Array[Byte].
   */
  def applyInMemoryWithSerialization[T](
    expectedElements: Int,
    injection: Injection[T, Array[Byte]]
  ): IdEmbeddingMap[T] =
    new JMapBasedIdEmbeddingMap[T](
      new ConcurrentHashMap[T, EmbeddingVector](expectedElements),
      Some(injection)
    )

  /**
   * Loads a previously serialized id-to-embedding mapping into memory.
   * @param embeddingFile Local/Hdfs file path for embeddings.
   * @param injection Injection for typed Id T to Array[Byte].
   * @param numElements Expected num of elements for sizing hint, need not be exact.
   */
  def loadInMemory[T](
    embeddingFile: AbstractFile,
    injection: Injection[T, Array[Byte]],
    numElements: Option[Int] = Option.empty
  ): IdEmbeddingMap[T] = {
    // Pre-size the map when a hint is available.
    val backing = numElements.fold(new ConcurrentHashMap[T, EmbeddingVector]()) { hint =>
      new ConcurrentHashMap[T, EmbeddingVector](hint)
    }
    HnswIOUtil.loadEmbeddings(
      embeddingFile,
      injection,
      new JMapBasedIdEmbeddingMap(backing, Some(injection))
    )
  }
}

private[this] class JMapBasedIdEmbeddingMap[T](
  backing: java.util.concurrent.ConcurrentHashMap[T, EmbeddingVector],
  injection: Option[Injection[T, Array[Byte]]])
    extends IdEmbeddingMap[T] {

  // Straight delegation: ConcurrentHashMap returns null when no previous mapping exists.
  override def putIfAbsent(id: T, embedding: EmbeddingVector): EmbeddingVector =
    backing.putIfAbsent(id, embedding)

  override def put(id: T, embedding: EmbeddingVector): EmbeddingVector =
    backing.put(id, embedding)

  override def get(id: T): EmbeddingVector =
    backing.get(id)

  // Weakly-consistent iteration over the live map.
  override def iter(): Iterator[(T, EmbeddingVector)] =
    backing.asScala.iterator

  override def size(): Int = backing.size()

  // Serialization requires an injection; stores built via applyInMemory cannot be saved.
  override def toDirectory(embeddingFile: OutputStream): Unit =
    HnswIOUtil.saveEmbeddings(embeddingFile, injection.get, iter())
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/MapDbBasedIdEmbeddingMap.scala
package com.twitter.ann.hnsw

import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.bijection.Injection
import com.twitter.ml.api.embedding.Embedding
import com.twitter.search.common.file.AbstractFile
import java.io.OutputStream
import org.mapdb.DBMaker
import org.mapdb.HTreeMap
import org.mapdb.Serializer
import scala.collection.JavaConverters._

/**
 * This class currently only supports querying; it creates a map db on the fly from the
 * thrift-serialized embedding mapping.
 * TODO: Implement index creation with this, or replace mapdb altogether with a better
 * performing solution (it takes a lot of time to create/query), or pre-create the db
 * while serializing thrift embeddings.
 */
private[hnsw] object MapDbBasedIdEmbeddingMap {

  /**
   * Loads id embedding mapping in mapDB based container leveraging memory mapped files.
   * @param embeddingFile: Local/Hdfs file path for embeddings
   * @param injection : Injection for typed Id T to Array[Byte]
   */
  def loadAsReadonly[T](
    embeddingFile: AbstractFile,
    injection: Injection[T, Array[Byte]]
  ): IdEmbeddingMap[T] = {
    // Temp-file backed DB, memory-mapped where the platform supports it, cleaned up
    // on JVM shutdown.
    val diskDb = DBMaker
      .tempFileDB()
      .concurrencyScale(32)
      .fileMmapEnable()
      .fileMmapEnableIfSupported()
      .fileMmapPreclearDisable()
      .cleanerHackEnable()
      .closeOnJvmShutdown()
      .make()
    val mapDb = diskDb
      .hashMap("mapdb", Serializer.BYTE_ARRAY, Serializer.FLOAT_ARRAY)
      .createOrOpen()
    HnswIOUtil.loadEmbeddings(
      embeddingFile,
      injection,
      new MapDbBasedIdEmbeddingMap(mapDb, injection)
    )
  }
}

private[this] class MapDbBasedIdEmbeddingMap[T](
  mapDb: HTreeMap[Array[Byte], Array[Float]],
  injection: Injection[T, Array[Byte]])
    extends IdEmbeddingMap[T] {

  override def putIfAbsent(id: T, embedding: EmbeddingVector): EmbeddingVector = {
    val value = mapDb.putIfAbsent(injection.apply(id), embedding.toArray)
    if (value == null) null else Embedding(value)
  }

  override def put(id: T, embedding: EmbeddingVector): EmbeddingVector = {
    val value = mapDb.put(injection.apply(id), embedding.toArray)
    if (value == null) null else Embedding(value)
  }

  override def get(id: T): EmbeddingVector = {
    // Bug fix: HTreeMap#get returns null for a missing key. Previously that null was
    // wrapped as Embedding(null), breaking the null-means-absent contract followed by
    // putIfAbsent/put above and by the JMap-based implementation.
    val value = mapDb.get(injection.apply(id))
    if (value == null) null else Embedding(value)
  }

  override def iter(): Iterator[(T, EmbeddingVector)] = {
    mapDb
      .entrySet()
      .iterator()
      .asScala
      .map(entry => (injection.invert(entry.getKey).get, Embedding(entry.getValue)))
  }

  override def size(): Int = mapDb.size()

  override def toDirectory(embeddingFile: OutputStream): Unit = {
    HnswIOUtil.saveEmbeddings(embeddingFile, injection, iter())
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/SerializableHnsw.scala
package com.twitter.ann.hnsw

import com.google.common.annotations.VisibleForTesting
import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.ann.common._
import com.twitter.ann.common.thriftscala.HnswIndexMetadata
import com.twitter.ann.hnsw.HnswCommon._
import com.twitter.ann.hnsw.HnswIndex.RandomProvider
import com.twitter.bijection.Injection
import com.twitter.search.common.file.AbstractFile
import com.twitter.search.common.file.FileUtils
import com.twitter.util.Future
import java.io.IOException
import java.util.concurrent.ThreadLocalRandom
import java.util.Random
import org.apache.beam.sdk.io.fs.ResourceId

/** Factory for HNSW indexes that can be serialized to / deserialized from a directory. */
private[hnsw] object SerializableHnsw {

  /** Wraps an existing in-memory [[Hnsw]] index so it can be serialized. */
  private[hnsw] def apply[T, D <: Distance[D]](
    index: Hnsw[T, D],
    injection: Injection[T, Array[Byte]]
  ): SerializableHnsw[T, D] = {
    new SerializableHnsw[T, D](
      index,
      injection
    )
  }

  /** Loads a serialized index with embeddings held fully in an in-memory hash map. */
  private[hnsw] def loadMapBasedQueryableIndex[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    futurePool: ReadWriteFuturePool,
    directory: AbstractFile
  ): SerializableHnsw[T, D] = {
    val metadata = HnswIOUtil.loadIndexMetadata(directory.getChild(MetaDataFileName))
    validateMetadata(dimension, metric, metadata)
    val idEmbeddingMap = JMapBasedIdEmbeddingMap.loadInMemory(
      directory.getChild(EmbeddingMappingFileName),
      injection,
      Some(metadata.numElements)
    )
    loadIndex(
      dimension,
      metric,
      injection,
      futurePool,
      directory,
      idEmbeddingMap,
      metadata
    )
  }

  /** Loads a serialized index with embeddings served from memory-mapped files (MapDB). */
  private[hnsw] def loadMMappedBasedQueryableIndex[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    futurePool: ReadWriteFuturePool,
    directory: AbstractFile
  ): SerializableHnsw[T, D] = {
    val metadata = HnswIOUtil.loadIndexMetadata(directory.getChild(MetaDataFileName))
    validateMetadata(dimension, metric, metadata)
    loadIndex(
      dimension,
      metric,
      injection,
      futurePool,
      directory,
      MapDbBasedIdEmbeddingMap
        .loadAsReadonly(directory.getChild(EmbeddingMappingFileName), injection),
      metadata
    )
  }

  /** Reconstructs the internal graph and wires it up to the supplied embedding store. */
  private[hnsw] def loadIndex[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    futurePool: ReadWriteFuturePool,
    directory: AbstractFile,
    idEmbeddingMap: IdEmbeddingMap[T],
    metadata: HnswIndexMetadata
  ): SerializableHnsw[T, D] = {
    val distFn = DistanceFunctionGenerator(metric, (key: T) => idEmbeddingMap.get(key))
    // Per-thread RNG avoids contention between concurrent inserts.
    val randomProvider = new RandomProvider {
      override def get(): Random = ThreadLocalRandom.current()
    }
    val internalIndex = HnswIndex.loadHnswIndex[T, EmbeddingVector](
      distFn.index,
      distFn.query,
      directory.getChild(InternalIndexDir),
      injection,
      randomProvider
    )
    val index = new Hnsw[T, D](
      dimension,
      metric,
      internalIndex,
      futurePool,
      idEmbeddingMap,
      distFn.shouldNormalize,
      LockedAccess.apply(metadata.numElements)
    )
    new SerializableHnsw(index, injection)
  }

  // Fails fast when the requested dimension/metric disagree with what was serialized.
  private[this] def validateMetadata[D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    existingMetadata: HnswIndexMetadata
  ): Unit = {
    assert(
      existingMetadata.dimension == dimension,
      s"Dimensions do not match. requested: $dimension existing: ${existingMetadata.dimension}"
    )
    val existingMetric = Metric.fromThrift(existingMetadata.distanceMetric)
    assert(
      existingMetric == metric,
      s"DistanceMetric do not match. requested: $metric existing: $existingMetric"
    )
  }
}

/**
 * HNSW index wrapper adding directory (de)serialization; querying, appending and
 * updating are all delegated to the wrapped [[Hnsw]] index.
 */
@VisibleForTesting
private[hnsw] class SerializableHnsw[T, D <: Distance[D]](
  index: Hnsw[T, D],
  injection: Injection[T, Array[Byte]])
    extends Appendable[T, HnswParams, D]
    with Queryable[T, HnswParams, D]
    with Serialization
    with Updatable[T] {

  override def append(entity: EntityEmbedding[T]) = index.append(entity)

  override def toQueryable: Queryable[T, HnswParams, D] = index.toQueryable

  override def query(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: HnswParams
  ) = index.query(embedding, numOfNeighbours, runtimeParams)

  override def queryWithDistance(
    embedding: EmbeddingVector,
    numOfNeighbours: Int,
    runtimeParams: HnswParams
  ) = index.queryWithDistance(embedding, numOfNeighbours, runtimeParams)

  /** Serializes the index to a Beam filesystem location. */
  def toDirectory(directory: ResourceId): Unit = {
    toDirectory(new IndexOutputFile(directory))
  }

  /** Serializes the index to `directory`, writing to a temp dir first then renaming. */
  def toDirectory(directory: AbstractFile): Unit = {
    // Create a temp dir with time prefix, and then do a rename after serialization
    val tmpDir = FileUtils.getTmpFileHandle(directory)
    if (!tmpDir.exists()) {
      tmpDir.mkdirs()
    }
    toDirectory(new IndexOutputFile(tmpDir))
    // Rename tmp dir to original directory supplied
    if (!tmpDir.rename(directory)) {
      throw new IOException(s"Failed to rename ${tmpDir.getPath} to ${directory.getPath}")
    }
  }

  // Writes the graph, metadata, embedding mapping and the _SUCCESS marker in order.
  private def toDirectory(indexFile: IndexOutputFile): Unit = {
    // Save java based hnsw index
    index.getIndex.toDirectory(indexFile.createDirectory(InternalIndexDir), injection)

    // Save index metadata
    HnswIOUtil.saveIndexMetadata(
      index.getDimen,
      index.getMetric,
      index.getIdEmbeddingMap.size(),
      indexFile.createFile(MetaDataFileName).getOutputStream()
    )

    // Save embedding mapping
    index.getIdEmbeddingMap.toDirectory(
      indexFile.createFile(EmbeddingMappingFileName).getOutputStream())

    // Create _SUCCESS file
    indexFile.createSuccessFile()
  }

  override def update(
    entity: EntityEmbedding[T]
  ): Future[Unit] = {
    index.update(entity)
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/hnsw/TypedHnswIndex.scala
package com.twitter.ann.hnsw

import com.twitter.ann.common._
import com.twitter.bijection.Injection
import com.twitter.search.common.file.AbstractFile

// Class to provide HNSW based approximate nearest neighbour index
object TypedHnswIndex {

  /**
   * Creates in-memory HNSW based index which supports querying/addition/updates of the entity embeddings.
   * See https://docbird.twitter.biz/ann/hnsw.html to check information about arguments.
   *
   * @param dimension Dimension of the embedding to be indexed
   * @param metric Distance metric (InnerProduct/Cosine/L2)
   * @param efConstruction The parameter has the same meaning as ef, but controls the
   *                       index_time/index_accuracy ratio. Bigger ef_construction leads to longer
   *                       construction, but better index quality. At some point, increasing
   *                       ef_construction does not improve the quality of the index. One way to
   *                       check if the selection of ef_construction was ok is to measure a recall
   *                       for M nearest neighbor search when ef = ef_construction: if the recall is
   *                       lower than 0.9, then there is room for improvement.
   * @param maxM The number of bi-directional links created for every new element during construction.
   *             Reasonable range for M is 2-100. Higher M work better on datasets with high
   *             intrinsic dimensionality and/or high recall, while low M work better for datasets
   *             with low intrinsic dimensionality and/or low recalls. The parameter also determines
   *             the algorithm's memory consumption: the bigger the param, the higher the memory
   *             requirement. For high dimensional datasets (word embeddings, good face descriptors),
   *             higher M are required (e.g. M=48, 64) for optimal performance at high recall.
   *             The range M=12-48 is ok for most of the use cases.
   * @param expectedElements Approximate number of elements to be indexed
   * @param readWriteFuturePool Future pool for performing read (query) and write operation (addition/updates).
   * @tparam T Type of item to index
   * @tparam D Type of distance
   */
  def index[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    efConstruction: Int,
    maxM: Int,
    expectedElements: Int,
    readWriteFuturePool: ReadWriteFuturePool
  ): Appendable[T, HnswParams, D] with Queryable[T, HnswParams, D] with Updatable[T] = {
    Hnsw[T, D](
      dimension,
      metric,
      efConstruction,
      maxM,
      expectedElements,
      readWriteFuturePool,
      JMapBasedIdEmbeddingMap.applyInMemory[T](expectedElements)
    )
  }

  /**
   * Creates in-memory HNSW based index which supports querying/addition/updates of the entity embeddings.
   * It can be serialized to a directory (HDFS/Local file system)
   * See https://docbird.twitter.biz/ann/hnsw.html to check information about arguments.
   *
   * @param dimension Dimension of the embedding to be indexed
   * @param metric Distance metric (InnerProduct/Cosine/L2)
   * @param efConstruction The parameter has the same meaning as ef, but controls the
   *                       index_time/index_accuracy ratio. Bigger ef_construction leads to longer
   *                       construction, but better index quality. At some point, increasing
   *                       ef_construction does not improve the quality of the index. One way to
   *                       check if the selection of ef_construction was ok is to measure a recall
   *                       for M nearest neighbor search when ef = ef_construction: if the recall is
   *                       lower than 0.9, then there is room for improvement.
   * @param maxM The number of bi-directional links created for every new element during construction.
   *             Reasonable range for M is 2-100. Higher M work better on datasets with high
   *             intrinsic dimensionality and/or high recall, while low M work better for datasets
   *             with low intrinsic dimensionality and/or low recalls. The parameter also determines
   *             the algorithm's memory consumption: the bigger the param, the higher the memory
   *             requirement. For high dimensional datasets (word embeddings, good face descriptors),
   *             higher M are required (e.g. M=48, 64) for optimal performance at high recall.
   *             The range M=12-48 is ok for most of the use cases.
   * @param expectedElements Approximate number of elements to be indexed
   * @param injection Injection for typed Id T to Array[Byte]
   * @param readWriteFuturePool Future pool for performing read (query) and write operation (addition/updates).
   * @tparam T Type of item to index
   * @tparam D Type of distance
   */
  def serializableIndex[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    efConstruction: Int,
    maxM: Int,
    expectedElements: Int,
    injection: Injection[T, Array[Byte]],
    readWriteFuturePool: ReadWriteFuturePool
  ): Appendable[T, HnswParams, D]
    with Queryable[T, HnswParams, D]
    with Updatable[T]
    with Serialization = {
    val index = Hnsw[T, D](
      dimension,
      metric,
      efConstruction,
      maxM,
      expectedElements,
      readWriteFuturePool,
      JMapBasedIdEmbeddingMap
        .applyInMemoryWithSerialization[T](expectedElements, injection)
    )
    SerializableHnsw[T, D](
      index,
      injection
    )
  }

  /**
   * Loads HNSW index from a directory to in-memory
   * @param dimension dimension of the embedding to be indexed
   * @param metric Distance metric
   * @param readWriteFuturePool Future pool for performing read (query) and write operation (addition/updates).
   * @param injection : Injection for typed Id T to Array[Byte]
   * @param directory : Directory(HDFS/Local file system) where hnsw index is stored
   * @tparam T : Type of item to index
   * @tparam D : Type of distance
   */
  def loadIndex[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    readWriteFuturePool: ReadWriteFuturePool,
    directory: AbstractFile
  ): Appendable[T, HnswParams, D]
    with Queryable[T, HnswParams, D]
    with Updatable[T]
    with Serialization = {
    SerializableHnsw.loadMapBasedQueryableIndex[T, D](
      dimension,
      metric,
      injection,
      readWriteFuturePool,
      directory
    )
  }

  /**
   * Loads a HNSW index from a directory and memory map it.
   * It will take less memory but rely more on disk as it leverages memory mapped file backed by disk.
   * Latency will go up considerably (could be by a factor of > 10x) if used on an instance with low
   * memory, since a lot of page faults may occur. The best use case would be scalding jobs
   * where mapper/reducer instances are limited to 8gb memory.
   * @param dimension dimension of the embedding to be indexed
   * @param metric Distance metric
   * @param readWriteFuturePool Future pool for performing read (query) and write operation (addition/updates).
   * @param injection Injection for typed Id T to Array[Byte]
   * @param directory Directory(HDFS/Local file system) where hnsw index is stored
   * @tparam T Type of item to index
   * @tparam D Type of distance
   */
  def loadMMappedIndex[T, D <: Distance[D]](
    dimension: Int,
    metric: Metric[D],
    injection: Injection[T, Array[Byte]],
    readWriteFuturePool: ReadWriteFuturePool,
    directory: AbstractFile
  ): Appendable[T, HnswParams, D]
    with Queryable[T, HnswParams, D]
    with Updatable[T]
    with Serialization = {
    SerializableHnsw.loadMMappedBasedQueryableIndex[T, D](
      dimension,
      metric,
      injection,
      readWriteFuturePool,
      directory
    )
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/manhattan/BUILD
# Manhattan-backed embedding producer used for ANN lookups against static datasets.
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    tags = ["bazel-compatible"],
    dependencies = [
        "3rdparty/jvm/com/twitter/bijection:core",
        "3rdparty/jvm/com/twitter/bijection:scrooge",
        "ann/src/main/scala/com/twitter/ann/common",
        "src/scala/com/twitter/ml/api/embedding",
        "storage/clients/manhattan",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/manhattan/ManhattanEmbeddingProducer.scala
package com.twitter.ann.manhattan

import com.twitter.ann.common.EmbeddingType.EmbeddingVector
import com.twitter.ann.common.{EmbeddingProducer, EmbeddingType}
import com.twitter.bijection.Injection
import com.twitter.ml.api.embedding.{EmbeddingBijection, EmbeddingSerDe}
import com.twitter.ml.api.{thriftscala => thrift}
import com.twitter.stitch.Stitch
import com.twitter.storage.client.manhattan.bijections.Bijections
import com.twitter.storage.client.manhattan.bijections.Bijections.BinaryScalaInjection
import com.twitter.storage.client.manhattan.kv.ManhattanKVEndpoint
import com.twitter.storage.client.manhattan.kv.impl.{
  DescriptorP1L0,
  ReadOnlyKeyDescriptor,
  ValueDescriptor
}

/** Embedding producer backed by a read-only Manhattan key-value dataset. */
private[manhattan] class ManhattanEmbeddingProducer[T](
  keyDescriptor: DescriptorP1L0.DKey[T],
  valueDescriptor: ValueDescriptor.EmptyValue[EmbeddingVector],
  manhattanEndpoint: ManhattanKVEndpoint)
    extends EmbeddingProducer[T] {

  /**
   * Lookup an embedding from manhattan given a key of type T.
   *
   * @return An embedding stitch. An easy way to get a Future from a Stitch is to
   *         run Stitch.run(stitch)
   */
  override def produceEmbedding(input: T): Stitch[Option[EmbeddingVector]] =
    manhattanEndpoint
      .get(keyDescriptor.withPkey(input), valueDescriptor)
      .map(_.map(_.contents))
}

object ManhattanEmbeddingProducer {

  /** Builds the read-only key descriptor for `dataset`, keyed by the injected bytes. */
  private[manhattan] def keyDescriptor[T](
    injection: Injection[T, Array[Byte]],
    dataset: String
  ): DescriptorP1L0.DKey[T] =
    ReadOnlyKeyDescriptor(injection.andThen(Bijections.BytesBijection))
      .withDataset(dataset)

  /** Value descriptor decoding stored thrift bytes into a float embedding. */
  private[manhattan] val EmbeddingDescriptor: ValueDescriptor.EmptyValue[
    EmbeddingType.EmbeddingVector
  ] = {
    val floatEmbeddingBijection = new EmbeddingBijection(EmbeddingSerDe.floatEmbeddingSerDe)
    val embeddingThriftInjection = BinaryScalaInjection[thrift.Embedding](thrift.Embedding)
    ValueDescriptor(floatEmbeddingBijection.andThen(embeddingThriftInjection))
  }

  /** Creates an [[EmbeddingProducer]] reading from the given Manhattan `dataset`. */
  def apply[T](
    dataset: String,
    injection: Injection[T, Array[Byte]],
    manhattanEndpoint: ManhattanKVEndpoint
  ): EmbeddingProducer[T] =
    new ManhattanEmbeddingProducer(
      keyDescriptor(injection, dataset),
      EmbeddingDescriptor,
      manhattanEndpoint)
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/manhattan/README
# Description
The ManhattanEmbeddingProducer is an EmbeddingProducer that is backed by a static manhattan dataset.

# Setting up Data
Data needs to be set up correctly in manhattan in order to be able to read the data using the ManhattanEmbeddingProducer. You can use the EmbeddingSamplingJob to do this. The job reads embedding data from HDFS and re-writes it in the manhattan data format on HDFS.
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/benchmark/BUILD
# Scalding job library for generating KNN ground-truth/benchmark data.
scala_library(
    name = "benchmark",
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = [
        "bazel-compatible",
        "bazel-compatible:migrated",
        "bazel-only",
    ],
    dependencies = [
        ":user_item_knn-scala",
        "3rdparty/src/jvm/com/twitter/scalding:args",
        "3rdparty/src/jvm/com/twitter/scalding:core",
        "3rdparty/src/jvm/com/twitter/scalding:date",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/scala/com/twitter/ann/scalding/offline",
        "src/scala/com/twitter/scalding_internal/dalv2",
        "src/scala/com/twitter/scalding_internal/job",
        "src/scala/com/twitter/scalding_internal/job/analytics_batch",
        "src/scala/com/twitter/scalding_internal/multiformat/format",
    ],
)

# Deployable hadoop bundle for running the benchmark job ad hoc via `scalding remote run`.
hadoop_binary(
    name = "benchmark-adhoc",
    main = "com.twitter.scalding.Tool",
    platform = "java8",
    runtime_platform = "java8",
    tags = [
        "bazel-compatible",
        "bazel-compatible:migrated",
        "bazel-only",
    ],
    dependencies = [
        ":benchmark",
        "3rdparty/jvm/org/slf4j:slf4j-jdk14",
    ],
)

# DAL dataset definition for the KNN output (top recommendations per search entity).
create_datasets(
    base_name = "user_item_knn",
    description = "List of the top recommendations per search entity (user)",
    java_schema = "com.twitter.ann.knn.thriftjava.Knn",
    platform = "java8",
    role = "cortex-mlx",
    scala_schema = "com.twitter.ann.knn.thriftscala.Knn",
    segment_type = "partitioned",
    tags = ["bazel-compatible"],
    java_dependencies = [
        "ann/src/main/thrift/com/twitter/ann/knn:thrift-java",
    ],
    scala_dependencies = [
        "ann/src/main/thrift/com/twitter/ann/knn:thrift-scala",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/benchmark/Knn.scala
// NOTE(review): the package name looks accidentally nested
// ("offline.com.twitter.ann.scalding.benchmark") — the example command below
// depends on it, so confirm before renaming.
package com.twitter.ann.scalding.offline.com.twitter.ann.scalding.benchmark

/* This job will generate KNN ground truth based user and item embeddings. */
import com.twitter.scalding.typed.TypedPipe
import com.twitter.scalding._
// NOTE(review): both `DALWrite.D` and `DALWrite._` are imported below — the
// second wildcard import appears to make the first redundant; verify and dedupe.
import com.twitter.scalding_internal.dalv2.DALWrite.D
import com.twitter.ann.knn.thriftscala.Knn
import com.twitter.ann.knn.thriftscala.Neighbor
import com.twitter.ann.scalding.offline.IndexingStrategy
import com.twitter.ann.scalding.offline.KnnHelper
import com.twitter.ann.common.Distance
import com.twitter.ml.featurestore.lib.embedding.EmbeddingWithEntity
import com.twitter.cortex.ml.embeddings.common.EmbeddingFormatArgsParser
import com.twitter.cortex.ml.embeddings.common.EntityKind
import java.util.TimeZone
import com.twitter.scalding_internal.dalv2.DALWrite._
import com.twitter.ann.scalding.benchmark.UserItemKnnScalaDataset
import com.twitter.scalding_internal.job.TwitterExecutionApp
import com.twitter.ml.featurestore.lib.EntityId
import com.twitter.ml.featurestore.lib.UserId

/**
 * This job will take consumer and item embeddings (either url or tweet) and output Knn entities (user id, (distance, item id)).
 *
 * Example command to run this adhoc job:
 *
 * scalding remote run \
 * --target ann/src/main/scala/com/twitter/ann/scalding/benchmark:benchmark-adhoc \
 * --hadoop-properties "mapreduce.map.memory.mb=8192 mapreduce.map.java.opts='-Xmx7618M' mapreduce.reduce.memory.mb=8192 mapreduce.reduce.java.opts='-Xmx7618M' mapred.task.timeout=0" \
 * --submitter hadoopnest3.smf1.twitter.com \
 * --user cortex-mlx \
 * --submitter-memory 8000.megabyte \
 * --main-class com.twitter.ann.scalding.offline.com.twitter.ann.scalding.benchmark.KnnJob -- \
 * --dalEnvironment Prod \
 * --search_space_entity_type user \
 * --user.feature_store_embedding ConsumerFollowEmbedding300Dataset \
 * --user.feature_store_major_version 1569196895 \
 * --user.date_range 2019-10-23 \
 * --search_space.feature_store_embedding ConsumerFollowEmbedding300Dataset \
 * --search_space.feature_store_major_version 1569196895 \
 * --search_space.date_range 2019-10-23 \
 * --date 2019-10-25 \
 * --version "consumer_follower_test" \
 * --reducers 10000 \
 * --num_of_random_groups 20 \
 * --num_replicas 1000 \
 * --indexing_strategy.metric InnerProduct \
 * --indexing_strategy.type hnsw \
 * --indexing_strategy.dimension 300 \
 * --indexing_strategy.ef_construction 30 \
 * --indexing_strategy.max_m 10 \
 * --indexing_strategy.ef_query 50 \
 * --search_space_shards 3000 \
 * --query_shards 3000 \
 * --search_space.read_sample_ratio 0.038
 */
trait KnnJobBase {
  // NOTE(review): `seed` is never referenced in this trait or in KnnJob —
  // the search-group assignment in KnnHelper uses an unseeded Random.
  // Confirm whether this was meant to seed that, or remove it.
  val seed: Long = 123

  /**
   * Builds the KNN dataset: reads consumer ("user.*" args) and item
   * ("search_space.*" args) embeddings, runs sharded nearest-neighbour search
   * via KnnHelper, and converts results to Knn thrift records.
   *
   * @tparam B entity type of the search space (cast from the parsed EntityKind)
   * @tparam D distance type of the indexing strategy (cast from parsed args)
   */
  def getKnnDataset[B <: EntityId, D <: Distance[D]](
    args: Args
  )(
    implicit uniqueID: UniqueID
  ): TypedPipe[Knn] = {
    // Query side: user embeddings from the feature store.
    val consumerPipe: TypedPipe[EmbeddingWithEntity[UserId]] = EmbeddingFormatArgsParser.User
      .getEmbeddingFormat(args, "user")
      .getEmbeddings

    // Search space: entity kind is chosen at runtime ("search_space_entity_type"),
    // hence the asInstanceOf cast to B below.
    val itemPipe = EntityKind
      .getEntityKind(args("search_space_entity_type"))
      .parser
      .getEmbeddingFormat(args, "search_space")
      .getEmbeddings

    KnnHelper
    // Refer to the documentation of findNearestNeighboursWithIndexingStrategy for more
    // information about how to set these settings.
      .findNearestNeighboursWithIndexingStrategy[UserId, B, D](
        queryEmbeddings = consumerPipe,
        searchSpaceEmbeddings = itemPipe.asInstanceOf[TypedPipe[EmbeddingWithEntity[B]]],
        numNeighbors = args.int("candidate_per_user", 20),
        reducersOption = args.optional("reducers").map(_.toInt),
        numOfSearchGroups = args.int("num_of_random_groups"),
        numReplicas = args.int("num_replicas"),
        indexingStrategy = IndexingStrategy.parse(args).asInstanceOf[IndexingStrategy[D]],
        queryShards = args.optional("query_shards").map(_.toInt),
        searchSpaceShards = args.optional("search_space_shards").map(_.toInt)
      )
      // Convert (user, [(item, distance)]) pairs into thrift Knn records.
      .map {
        case (user, items) =>
          val neighbors = items.map {
            case (item, distance) =>
              Neighbor(
                distance.distance,
                item.toThrift
              )
          }
          Knn(user.toThrift, neighbors)
      }
  }
}

/**
 * Entry point: writes the KNN ground-truth dataset to DAL as daily
 * parquet partitions, partitioned by the "version" argument.
 */
object KnnJob extends TwitterExecutionApp with KnnJobBase {
  // NOTE(review): "qualatative" is a typo ("qualitative") but is part of the
  // existing output path — do not change without migrating the data.
  val KnnPathSuffix: String = "/user/cortex-mlx/qualatative_analysis/knn_ground_truth/"
  val partitionKey: String = "version"

  override def job: Execution[Unit] = Execution.withId { implicit uniqueId =>
    Execution.getArgs.flatMap { args: Args =>
      implicit val timeZone: TimeZone = TimeZone.getDefault
      implicit val dateParser: DateParser = DateParser.default
      implicit val dateRange: DateRange =
        DateRange.parse(args.list("date"))(timeZone, dateParser)

      getKnnDataset(args).writeDALExecution(
        UserItemKnnScalaDataset,
        D.Daily,
        D.Suffix(KnnPathSuffix),
        D.Parquet,
        Set(D.Partition(partitionKey, args("version"), D.PartitionType.String))
      )
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/BUILD.bazel
# BUILD targets for the offline ANN (approximate nearest neighbour)
# scalding jobs in this directory.

# Library with the offline KNN helpers and jobs.
scala_library(
    sources = ["*.scala"],
    compiler_option_sets = ["fatal_warnings"],
    platform = "java8",
    tags = [
        "bazel-compatible",
        "bazel-only",
    ],
    dependencies = [
        "3rdparty/jvm/com/twitter/bijection:scrooge",
        "3rdparty/src/jvm/com/twitter/scalding:args",
        "3rdparty/src/jvm/com/twitter/scalding:commons",
        "3rdparty/src/jvm/com/twitter/scalding:core",
        "ann/src/main/scala/com/twitter/ann/brute_force",
        "ann/src/main/scala/com/twitter/ann/common",
        "ann/src/main/scala/com/twitter/ann/hnsw",
        "ann/src/main/scala/com/twitter/ann/util",
        "cortex-core/entity-embeddings/src/thrift/com/twitter/entityembeddings/neighbors:embeddings-knn-thrift-scala",
        "src/scala/com/twitter/cortex/ml/embeddings/common:Helpers-deploy",
        "src/scala/com/twitter/pluck/source/core_workflows/user_model:condensed_user_state-scala",
        "src/scala/com/twitter/scalding_internal/dalv2",
        "src/scala/com/twitter/scalding_internal/job",
        "src/scala/com/twitter/scalding_internal/multiformat/format",
        "src/scala/com/twitter/scalding_internal/parquet_thrift",
        "usersource/snapshot/src/main/scala/com/twitter/usersource/snapshot/flat:usersource_flat-scala",
        "usersource/snapshot/src/main/thrift/com/twitter/usersource/snapshot/flat:flat-scala",
    ],
)

# Deployable hadoop binary for running the offline jobs
# (e.g. KnnOfflineJob, KnnEntityRecoDebugJob) via com.twitter.scalding.Tool.
hadoop_binary(
    name = "ann-offline-deploy",
    main = "com.twitter.scalding.Tool",
    platform = "java8",
    runtime_platform = "java8",
    tags = [
        "bazel-compatible",
        "bazel-compatible:migrated",
        "bazel-only",
    ],
    dependencies = [
        ":offline",
        "3rdparty/jvm/org/slf4j:slf4j-jdk14",
    ],
)
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/IndexingStrategy.scala
package com.twitter.ann.scalding.offline

import com.twitter.ann.brute_force.{BruteForceIndex, BruteForceRuntimeParams}
import com.twitter.ann.common.{Distance, EntityEmbedding, Metric, ReadWriteFuturePool}
import com.twitter.ann.hnsw.{HnswParams, TypedHnswIndex}
import com.twitter.ann.util.IndexBuilderUtils
import com.twitter.scalding.Args
import com.twitter.util.logging.Logger
import com.twitter.util.{Await, FuturePool}

/**
 * IndexingStrategy is used for determining how we will build the index when doing a KNN in
 * scalding. Right now there are 2 strategies: a BruteForce and HNSW strategy.
 * @tparam D distance that the index uses.
 */
sealed trait IndexingStrategy[D <: Distance[D]] {

  /**
   * Builds an in-memory index over the given items and wraps it in a
   * queryable whose runtime parameters are already fixed.
   */
  private[offline] def buildIndex[T](
    indexItems: TraversableOnce[EntityEmbedding[T]]
  ): ParameterlessQueryable[T, _, D]
}

object IndexingStrategy {

  /**
   * Parse an indexing strategy from scalding args.
   * ${argumentName}.type is "hnsw" or "brute_force".
   * ${argumentName}.metric is the metric to use. See Metric.fromString for options.
   *
   * hnsw has these additional parameters:
   * ${argumentName}.dimension the number of dimensions for the embeddings.
   * ${argumentName}.ef_construction, ${argumentName}.max_m and ${argumentName}.ef_query.
   * See TypedHnswIndex for more details on these parameters.
   * @param args scalding arguments to parse.
   * @param argumentName A specifier to use in case you want to parse more than one indexing
   *                     strategy. indexing_strategy by default.
   * @return parsed indexing strategy
   * @throws IllegalArgumentException if ${argumentName}.type is not a known strategy.
   */
  def parse(
    args: Args,
    argumentName: String = "indexing_strategy"
  ): IndexingStrategy[_] = {
    // Metric is parsed lazily per strategy; the cast pins the distance type D.
    def metricArg[D <: Distance[D]] =
      Metric.fromString(args(s"$argumentName.metric")).asInstanceOf[Metric[D]]

    args(s"$argumentName.type") match {
      case "brute_force" =>
        BruteForceIndexingStrategy(metricArg)
      case "hnsw" =>
        val dimensionArg = args.int(s"$argumentName.dimension")
        val efConstructionArg = args.int(s"$argumentName.ef_construction")
        val maxMArg = args.int(s"$argumentName.max_m")
        val efQuery = args.int(s"$argumentName.ef_query")
        HnswIndexingStrategy(
          dimension = dimensionArg,
          metric = metricArg,
          efConstruction = efConstructionArg,
          maxM = maxMArg,
          hnswParams = HnswParams(efQuery)
        )
      case unknown =>
        // Fail with a descriptive error rather than an opaque MatchError.
        throw new IllegalArgumentException(
          s"Unknown $argumentName.type '$unknown'; expected 'brute_force' or 'hnsw'.")
    }
  }
}

/**
 * Exhaustive-search strategy: appends every item to a BruteForceIndex.
 * Exact but O(items) per query.
 */
case class BruteForceIndexingStrategy[D <: Distance[D]](metric: Metric[D])
    extends IndexingStrategy[D] {

  private[offline] def buildIndex[T](
    indexItems: TraversableOnce[EntityEmbedding[T]]
  ): ParameterlessQueryable[T, _, D] = {
    val appendable = BruteForceIndex[T, D](metric, FuturePool.immediatePool)
    // Appends run on the immediate pool, so each Await completes synchronously.
    indexItems.foreach { item =>
      Await.result(appendable.append(item))
    }
    val queryable = appendable.toQueryable
    ParameterlessQueryable[T, BruteForceRuntimeParams.type, D](
      queryable,
      BruteForceRuntimeParams
    )
  }
}

/**
 * Approximate strategy backed by an HNSW index.
 *
 * @param dimension        embedding dimensionality
 * @param metric           distance metric
 * @param efConstruction   HNSW build-time search width (see TypedHnswIndex)
 * @param maxM             HNSW max connections per node
 * @param hnswParams       query-time parameters (ef)
 * @param concurrencyLevel parallelism used while adding items to the index
 */
case class HnswIndexingStrategy[D <: Distance[D]](
  dimension: Int,
  metric: Metric[D],
  efConstruction: Int,
  maxM: Int,
  hnswParams: HnswParams,
  concurrencyLevel: Int = 1)
    extends IndexingStrategy[D] {

  private[offline] def buildIndex[T](
    indexItems: TraversableOnce[EntityEmbedding[T]]
  ): ParameterlessQueryable[T, _, D] = {
    val log: Logger = Logger(getClass)
    val appendable = TypedHnswIndex.index[T, D](
      dimension = dimension,
      metric = metric,
      efConstruction = efConstruction,
      maxM = maxM,
      // This is not really that important.
      expectedElements = 1000,
      readWriteFuturePool = ReadWriteFuturePool(FuturePool.immediatePool)
    )

    // Block until all items have been inserted before exposing the queryable.
    val future = IndexBuilderUtils
      .addToIndex(appendable, indexItems.toStream, concurrencyLevel)
      .map { numberUpdates =>
        log.info(s"Performed $numberUpdates updates")
      }
    Await.result(future)

    val queryable = appendable.toQueryable
    ParameterlessQueryable(
      queryable,
      hnswParams
    )
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/KnnDebug.scala
package com.twitter.ann.scalding.offline

import com.twitter.core_workflows.user_model.thriftscala.CondensedUserState
import com.twitter.cortex.ml.embeddings.common.{DataSourceManager, GraphEdge, Helpers, UserKind}
import com.twitter.ml.featurestore.lib.UserId
import com.twitter.entityembeddings.neighbors.thriftscala.{EntityKey, NearestNeighbors}
import com.twitter.pluck.source.core_workflows.user_model.CondensedUserStateScalaDataset
import com.twitter.scalding._
import com.twitter.scalding.typed.TypedPipe
import com.twitter.scalding_internal.dalv2.DAL
import com.twitter.usersource.snapshot.flat.UsersourceFlatScalaDataset
import com.twitter.usersource.snapshot.flat.thriftscala.FlatUser

// A consumer and the list of screen names associated with them
// (either followed producers or nearest neighbours, depending on the input graph).
case class ConsumerAssoc(consumerId: UserId, assoc: List[String])

object KnnDebug {

  /**
   * Joins a consumer->item edge graph against (userId, screenName) pairs and
   * aggregates, per consumer, the screen names of all their associated items.
   *
   * @param graph     edges whose itemId is a user we want a screen name for
   * @param usernames (userId, screenName) lookup pipe
   * @param reducers  reducer count used for both the join and the group-by
   */
  def getConsumerAssociations(
    graph: TypedPipe[GraphEdge[UserId, UserId]],
    usernames: TypedPipe[(UserId, String)],
    reducers: Int
  ): TypedPipe[ConsumerAssoc] = {
    graph
      // Key by itemId so the join attaches the item's screen name.
      .groupBy(_.itemId)
      .join(usernames).withReducers(reducers)
      .values
      .map {
        case (edge: GraphEdge[UserId, UserId], producerScreenName: String) =>
          ConsumerAssoc(consumerId = edge.consumerId, assoc = List(producerScreenName))
      }
      // Merge all single-name records per consumer into one list.
      .groupBy(_.consumerId).withReducers(reducers)
      .reduce[ConsumerAssoc] {
        case (uFollow1: ConsumerAssoc, uFollow2: ConsumerAssoc) =>
          ConsumerAssoc(consumerId = uFollow1.consumerId, assoc = uFollow1.assoc ++ uFollow2.assoc)
      }
      .values
  }

  /**
   * Write the neighbors and a set of follows to a tsv for easier analysis during debugging.
   * We take the set of users with between 25-50 follows and grab only those users.
   *
   * This returns 4 strings of the form:
   * consumerId, state, followUserName<f>followUserName<f>followUserName, neighborName<n>neighborName<n>neighborName
   *
   * @param neighborsPipe         precomputed nearest neighbours per entity
   * @param shards                number of output shards for the result pipe
   * @param reducers              reducer count for the joins
   * @param limit                 max number of neighbour records to inspect
   * @param userDataset           override for the usersource snapshot (tests)
   * @param followDataset         override for the follow graph (tests)
   * @param consumerStatesDataset override for the user-state dataset (tests)
   * @param minFollows/maxFollows exclusive bounds on follow-list size
   */
  def getDebugTable(
    neighborsPipe: TypedPipe[(EntityKey, NearestNeighbors)],
    shards: Int,
    reducers: Int,
    limit: Int = 10000,
    userDataset: Option[TypedPipe[FlatUser]] = None,
    followDataset: Option[TypedPipe[GraphEdge[UserId, UserId]]] = None,
    consumerStatesDataset: Option[TypedPipe[CondensedUserState]] = None,
    minFollows: Int = 25,
    maxFollows: Int = 50
  )(
    implicit dateRange: DateRange
  ): TypedPipe[(String, String, String, String)] = {
    // Fall back to production datasets when no overrides are supplied.
    val usersourcePipe: TypedPipe[FlatUser] = userDataset
      .getOrElse(DAL.readMostRecentSnapshot(UsersourceFlatScalaDataset, dateRange).toTypedPipe)
    val followGraph: TypedPipe[GraphEdge[UserId, UserId]] = followDataset
      .getOrElse(new DataSourceManager().getFollowGraph())
    val consumerStates: TypedPipe[CondensedUserState] = consumerStatesDataset
      .getOrElse(DAL.read(CondensedUserStateScalaDataset).toTypedPipe)

    // (userId, screenName) pairs; drop users missing either field.
    // .fork because this pipe feeds two downstream joins.
    val usernames: TypedPipe[(UserId, String)] = usersourcePipe.flatMap { flatUser =>
      (flatUser.screenName, flatUser.id) match {
        case (Some(name: String), Some(userId: Long)) => Some((UserId(userId), name))
        case _ => None
      }
    }.fork

    // Keep only consumers with a moderate follow count (exclusive bounds).
    val consumerFollows: TypedPipe[ConsumerAssoc] =
      getConsumerAssociations(followGraph, usernames, reducers)
        .filter { uFollow =>
          (uFollow.assoc.size > minFollows && uFollow.assoc.size < maxFollows)
        }

    // Re-shape the neighbour records into a graph so we can reuse
    // getConsumerAssociations for neighbour screen names. Non-numeric ids are dropped.
    val neighborGraph: TypedPipe[GraphEdge[UserId, UserId]] = neighborsPipe
      .limit(limit)
      .flatMap {
        case (entityKey: EntityKey, neighbors: NearestNeighbors) =>
          Helpers.optionalToLong(entityKey.id) match {
            case Some(entityId: Long) =>
              neighbors.neighbors.flatMap { neighbor =>
                Helpers
                  .optionalToLong(neighbor.neighbor.id)
                  .map { neighborId =>
                    GraphEdge[UserId, UserId](
                      consumerId = UserId(entityId),
                      itemId = UserId(neighborId),
                      weight = 1.0F)
                  }
              }
            case None => List()
          }
      }

    val consumerNeighbors: TypedPipe[ConsumerAssoc] =
      getConsumerAssociations(neighborGraph, usernames, reducers)

    // Inner-join follows, state and neighbours per consumer, then format for TSV.
    consumerFollows
      .groupBy(_.consumerId)
      .join(consumerStates.groupBy { consumer => UserId(consumer.uid) }).withReducers(reducers)
      .join(consumerNeighbors.groupBy(_.consumerId)).withReducers(reducers)
      .values
      .map {
        case ((uFollow: ConsumerAssoc, state: CondensedUserState), uNeighbors: ConsumerAssoc) =>
          (
            UserKind.stringInjection(uFollow.consumerId),
            state.state.toString,
            uFollow.assoc mkString "<f>",
            uNeighbors.assoc mkString "<n>")
      }
      .shard(shards)
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/KnnEntityRecoDebugJob.scala
package com.twitter.ann.scalding.offline

import com.twitter.ann.common.Distance
import com.twitter.ann.common.Metric
import com.twitter.cortex.ml.embeddings.common.EntityKind
import com.twitter.ml.featurestore.lib.EntityId
import com.twitter.scalding.typed.TypedPipe
import com.twitter.scalding._
import com.twitter.scalding_internal.job.TwitterExecutionApp

/**
 * This job does an exhaustive search for nearest neighbours, helpful for debugging recommendations
 * for a given list of sample queryIds and entity embeddings for the recos to be made.
 * Sample job script:

 ./bazel bundle ann/src/main/scala/com/twitter/ann/scalding/offline:ann-offline-deploy

 oscar hdfs \
 --screen --tee log.txt \
 --hadoop-client-memory 6000 \
 --hadoop-properties "yarn.app.mapreduce.am.resource.mb=6000;yarn.app.mapreduce.am.command-opts='-Xmx7500m';mapreduce.map.memory.mb=7500;mapreduce.reduce.java.opts='-Xmx6000m';mapreduce.reduce.memory.mb=7500;mapred.task.timeout=36000000;" \
 --bundle ann-offline-deploy \
 --min-split-size 284217728 \
 --host hadoopnest1.smf1.twitter.com \
 --tool com.twitter.ann.scalding.offline.KnnEntityRecoDebugJob -- \
 --neighbors 10 \
 --metric InnerProduct \
 --query_entity_kind user \
 --search_space_entity_kind user \
 --query.embedding_path /user/apoorvs/sample_embeddings \
 --query.embedding_format tab \
 --search_space.embedding_path /user/apoorvs/sample_embeddings \
 --search_space.embedding_format tab \
 --query_ids 974308319300149248 988871266244464640 2719685122 2489777564 \
 --output_path /user/apoorvs/adhochadoop/test \
 --reducers 100
 */
object KnnEntityRecoDebugJob extends TwitterExecutionApp {

  override def job: Execution[Unit] = Execution.withId { implicit uniqueId =>
    Execution.getArgs.flatMap { args: Args =>
      // Entity kinds and metric are chosen at runtime; `run` pins the
      // wildcard types via casts.
      val queryEntityKind = EntityKind.getEntityKind(args("query_entity_kind"))
      val searchSpaceEntityKind = EntityKind.getEntityKind(args("search_space_entity_kind"))
      val metric = Metric.fromString(args("metric"))
      run(queryEntityKind, searchSpaceEntityKind, metric, args)
    }
  }

  /**
   * Runs the exhaustive KNN search and writes one TSV line per query:
   * queryId followed by neighbourId:distance pairs (see nearestNeighborsToString).
   *
   * @tparam A query entity type   (cast from uncastQueryEntityKind)
   * @tparam B search-space entity (cast from uncastSearchSpaceEntityKind)
   * @tparam D distance type       (cast from uncastMetric)
   */
  private[this] def run[A <: EntityId, B <: EntityId, D <: Distance[D]](
    uncastQueryEntityKind: EntityKind[_],
    uncastSearchSpaceEntityKind: EntityKind[_],
    uncastMetric: Metric[_],
    args: Args
  )(
    implicit uniqueID: UniqueID
  ): Execution[Unit] = {
    import KnnHelper._
    val numNeighbors = args.int("neighbors")
    val reducers = args.getOrElse("reducers", "100").toInt
    val queryEntityKind = uncastQueryEntityKind.asInstanceOf[EntityKind[A]]
    val searchSpaceEntityKind = uncastSearchSpaceEntityKind.asInstanceOf[EntityKind[B]]
    val metric = uncastMetric.asInstanceOf[Metric[D]]

    // Filter the query entity embeddings with the queryIds
    val queryIds = args.list("query_ids")
    assert(queryIds.nonEmpty)
    // NOTE(review): `.invert(_).get` throws if a query id fails to parse for
    // this entity kind — acceptable for a debug job, but fails the whole run.
    val filterQueryIds: TypedPipe[A] = TypedPipe
      .from(queryIds)
      .map(queryEntityKind.stringInjection.invert(_).get)

    val queryEmbeddings = queryEntityKind.parser.getEmbeddingFormat(args, "query").getEmbeddings
    // Get the neighbour embeddings
    val searchSpaceEmbeddings =
      searchSpaceEntityKind.parser.getEmbeddingFormat(args, "search_space").getEmbeddings

    val nearestNeighborString = findNearestNeighbours(
      queryEmbeddings,
      searchSpaceEmbeddings,
      metric,
      numNeighbors,
      Some(filterQueryIds),
      reducers
    )(queryEntityKind.ordering, uniqueID).map(
      nearestNeighborsToString(_, queryEntityKind, searchSpaceEntityKind)
    )

    // Write the nearest neighbor string to one part file.
    nearestNeighborString
      .shard(1)
      .writeExecution(TypedTsv(args("output_path")))
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/KnnHelper.scala
package com.twitter.ann.scalding.offline

import com.twitter.ann.common._
import com.twitter.ann.hnsw.{HnswParams, TypedHnswIndex}
import com.twitter.bijection.Injection
import com.twitter.cortex.ml.embeddings.common.{EntityKind, Helpers, UserKind}
import com.twitter.entityembeddings.neighbors.thriftscala.{EntityKey, NearestNeighbors, Neighbor}
import com.twitter.ml.api.embedding.Embedding
import com.twitter.ml.api.embedding.EmbeddingMath.{Float => math}
import com.twitter.ml.featurestore.lib.embedding.EmbeddingWithEntity
import com.twitter.ml.featurestore.lib.{EntityId, UserId}
import com.twitter.scalding.typed.{TypedPipe, UnsortedGrouped}
import com.twitter.scalding.{Args, DateRange, Stat, TextLine, UniqueID}
import com.twitter.search.common.file.AbstractFile
import com.twitter.util.{Await, FuturePool}
import scala.util.Random

/**
 * Handle to a serialized HNSW index on disk. The index is loaded lazily on
 * first access of `annIndex`.
 */
case class Index[T, D <: Distance[D]](
  injection: Injection[T, Array[Byte]],
  metric: Metric[D],
  dimension: Int,
  directory: AbstractFile) {
  lazy val annIndex = TypedHnswIndex.loadIndex[T, D](
    dimension,
    metric,
    injection,
    ReadWriteFuturePool(FuturePool.immediatePool),
    directory
  )
}

object KnnHelper {

  /**
   * Reads user embeddings from the "consumer.*" args and, when filterPath is
   * set, keeps only the users whose ids appear (one per line) in that file.
   */
  def getFilteredUserEmbeddings(
    args: Args,
    filterPath: Option[String],
    reducers: Int,
    useHashJoin: Boolean
  )(
    implicit dateRange: DateRange
  ): TypedPipe[EmbeddingWithEntity[UserId]] = {
    val userEmbeddings: TypedPipe[EmbeddingWithEntity[UserId]] =
      UserKind.parser.getEmbeddingFormat(args, "consumer").getEmbeddings
    filterPath match {
      case Some(fileName: String) =>
        // Non-numeric lines in the filter file are silently dropped.
        val filterUserIds: TypedPipe[UserId] = TypedPipe
          .from(TextLine(fileName))
          .flatMap { idLine =>
            Helpers.optionalToLong(idLine)
          }
          .map { id =>
            UserId(id)
          }
        Helpers
          .adjustableJoin(
            left = userEmbeddings.groupBy(_.entityId),
            right = filterUserIds.asKeys,
            useHashJoin = useHashJoin,
            reducers = Some(reducers)
          ).map {
            case (_, (embedding, _)) => embedding
          }
      case None =>
        userEmbeddings
    }
  }

  /**
   * Computes neighbours for each consumer embedding. When a serialized HNSW
   * index directory is provided, queries it per consumer; otherwise falls
   * back to brute force against "producer.*" embeddings read from args.
   *
   * @param ef HNSW query-time search width
   */
  def getNeighborsPipe[T <: EntityId, D <: Distance[D]](
    args: Args,
    uncastEntityKind: EntityKind[_],
    uncastMetric: Metric[_],
    ef: Int,
    consumerEmbeddings: TypedPipe[EmbeddingWithEntity[UserId]],
    abstractFile: Option[AbstractFile],
    reducers: Int,
    numNeighbors: Int,
    dimension: Int
  )(
    implicit dateRange: DateRange
  ): TypedPipe[(EntityKey, NearestNeighbors)] = {
    val entityKind = uncastEntityKind.asInstanceOf[EntityKind[T]]
    val injection = entityKind.byteInjection
    val metric = uncastMetric.asInstanceOf[Metric[D]]
    abstractFile match {
      case Some(directory: AbstractFile) =>
        val index = Index(injection, metric, dimension, directory)
        consumerEmbeddings
          .map { embedding =>
            // Blocking query per record; the index future pool is immediate.
            val knn = Await.result(
              index.annIndex.queryWithDistance(
                Embedding(embedding.embedding.toArray),
                numNeighbors,
                HnswParams(ef)
              )
            )
            val neighborList = knn
              // Drop self-matches (string comparison of ids).
              .filter(_.neighbor.toString != embedding.entityId.userId.toString)
              .map(nn =>
                Neighbor(
                  neighbor = EntityKey(nn.neighbor.toString),
                  // NOTE(review): assumes distance is in [0, 2] style where
                  // 1 - distance is a similarity — confirm for the metric used.
                  similarity = Some(1 - nn.distance.distance)))
            EntityKey(embedding.entityId.toString) -> NearestNeighbors(neighborList)
          }
      case None =>
        val producerEmbeddings: TypedPipe[EmbeddingWithEntity[UserId]] =
          UserKind.parser.getEmbeddingFormat(args, "producer").getEmbeddings
        bruteForceNearestNeighbors(
          consumerEmbeddings,
          producerEmbeddings,
          numNeighbors,
          reducers
        )
    }
  }

  /**
   * Exhaustive consumer x producer cross product ranked by cosine similarity.
   * `distance` is the negated cosine similarity, so ascending distance order
   * means descending similarity; emitted `similarity` negates it back.
   */
  def bruteForceNearestNeighbors(
    consumerEmbeddings: TypedPipe[EmbeddingWithEntity[UserId]],
    producerEmbeddings: TypedPipe[EmbeddingWithEntity[UserId]],
    numNeighbors: Int,
    reducers: Int
  ): TypedPipe[(EntityKey, NearestNeighbors)] = {
    consumerEmbeddings
      .cross(producerEmbeddings)
      .map {
        case (cEmbed: EmbeddingWithEntity[UserId], pEmbed: EmbeddingWithEntity[UserId]) =>
          // Cosine similarity
          val cEmbedNorm = math.l2Norm(cEmbed.embedding).toFloat
          val pEmbedNorm = math.l2Norm(pEmbed.embedding).toFloat
          val distance: Float = -math.dotProduct(
            (math.scalarProduct(cEmbed.embedding, 1 / cEmbedNorm)),
            math.scalarProduct(pEmbed.embedding, 1 / pEmbedNorm))
          (
            UserKind.stringInjection(cEmbed.entityId),
            (distance, UserKind.stringInjection(pEmbed.entityId)))
      }
      .groupBy(_._1).withReducers(reducers)
      // Keep the numNeighbors smallest distances (i.e. highest similarities).
      .sortWithTake(numNeighbors) {
        case ((_: String, (sim1: Float, _: String)), (_: String, (sim2: Float, _: String))) =>
          sim1 < sim2
      }
      .map {
        case (consumerId: String, (prodSims: Seq[(String, (Float, String))])) =>
          EntityKey(consumerId) -> NearestNeighbors(
            prodSims.map {
              // NOTE(review): this inner `consumerId` binding shadows the outer
              // one (both are the group key here, so behavior is unaffected) —
              // consider renaming for clarity.
              case (consumerId: String, (sim: Float, prodId: String)) =>
                Neighbor(neighbor = EntityKey(prodId), similarity = Some(-sim.toDouble))
            }
          )
      }
  }

  /**
   * Calculate the nearest neighbors exhaustively between two entity embeddings using one as query and other as the search space.
   * @param queryEmbeddings entity embeddings for queries
   * @param searchSpaceEmbeddings entity embeddings for search space
   * @param metric distance metric
   * @param numNeighbors number of neighbors
   * @param queryIdsFilter optional query ids to filter to query entity embeddings
   * @param reducers number of reducers for grouping
   * @param isSearchSpaceLarger Used for optimization: Is the search space larger than the query space? Ignored if numOfSearchGroups > 1.
   * @param numOfSearchGroups we divide the search space into these groups (randomly). Useful when the search space is too large. Overrides isSearchSpaceLarger.
   * @param numReplicas Each search group will be responsible for 1/numReplicas queryEmebeddings.
   *                    This might speed up the search when the size of the index embeddings is
   *                    large.
   * @tparam A type of query entity
   * @tparam B type of search space entity
   * @tparam D type of distance
   */
  def findNearestNeighbours[A <: EntityId, B <: EntityId, D <: Distance[D]](
    queryEmbeddings: TypedPipe[EmbeddingWithEntity[A]],
    searchSpaceEmbeddings: TypedPipe[EmbeddingWithEntity[B]],
    metric: Metric[D],
    numNeighbors: Int = 10,
    queryIdsFilter: Option[TypedPipe[A]] = Option.empty,
    reducers: Int = 100,
    mappers: Int = 100,
    isSearchSpaceLarger: Boolean = true,
    numOfSearchGroups: Int = 1,
    numReplicas: Int = 1,
    useCounters: Boolean = true
  )(
    implicit ordering: Ordering[A],
    uid: UniqueID
  ): TypedPipe[(A, Seq[(B, D)])] = {
    val filteredQueryEmbeddings = queryIdsFilter match {
      case Some(filter) => {
        queryEmbeddings.groupBy(_.entityId).hashJoin(filter.asKeys).map {
          case (x, (embedding, _)) => embedding
        }
      }
      case None => queryEmbeddings
    }
    if (numOfSearchGroups > 1) {
      val indexingStrategy = BruteForceIndexingStrategy(metric)
      // NOTE(review): this branch passes the UNfiltered `queryEmbeddings`, not
      // `filteredQueryEmbeddings`, so `queryIdsFilter` is silently ignored when
      // numOfSearchGroups > 1 — confirm whether this is intentional.
      findNearestNeighboursWithIndexingStrategy(
        queryEmbeddings,
        searchSpaceEmbeddings,
        numNeighbors,
        numOfSearchGroups,
        indexingStrategy,
        numReplicas,
        Some(reducers),
        useCounters = useCounters
      )
    } else {
      findNearestNeighboursViaCross(
        filteredQueryEmbeddings,
        searchSpaceEmbeddings,
        metric,
        numNeighbors,
        reducers,
        mappers,
        isSearchSpaceLarger)
    }
  }

  /**
   * Calculate the nearest neighbors using the specified indexing strategy between two entity
   * embeddings using one as query and other as the search space.
   * @param queryEmbeddings entity embeddings for queries
   * @param searchSpaceEmbeddings entity embeddings for search space. You should be able to fit
   *                              searchSpaceEmbeddings.size / numOfSearchGroups into memory.
   * @param numNeighbors number of neighbors
   * @param reducersOption number of reducers for the final sortedTake.
   * @param numOfSearchGroups we divide the search space into these groups (randomly). Useful when
   *                          the search space is too large. Search groups are shards. Choose this
   *                          number by ensuring searchSpaceEmbeddings.size / numOfSearchGroups
   *                          embeddings will fit into memory.
   * @param numReplicas Each search group will be responsible for 1/numReplicas queryEmebeddings.
   *                    By increasing this number, we can parallelize the work and reduce end to end
   *                    running times.
   * @param indexingStrategy How we will search for nearest neighbors within a search group
   * @param queryShards one step we have is to fan out the query embeddings. We create one entry
   *                    per search group. If numOfSearchGroups is large, then this fan out can take
   *                    a long time. You can shard the query shard first to parallelize this
   *                    process. One way to estimate what value to use:
   *                    queryEmbeddings.size * numOfSearchGroups / queryShards should be around 1GB.
   * @param searchSpaceShards this param is similar to queryShards. Except it shards the search
   *                          space when numReplicas is too large. One way to estimate what value
   *                          to use: searchSpaceEmbeddings.size * numReplicas / searchSpaceShards
   *                          should be around 1GB.
   * @tparam A type of query entity
   * @tparam B type of search space entity
   * @tparam D type of distance
   * @return a pipe keyed by the index embedding. The values are the list of numNeighbors nearest
   *         neighbors along with their distances.
   */
  def findNearestNeighboursWithIndexingStrategy[A <: EntityId, B <: EntityId, D <: Distance[D]](
    queryEmbeddings: TypedPipe[EmbeddingWithEntity[A]],
    searchSpaceEmbeddings: TypedPipe[EmbeddingWithEntity[B]],
    numNeighbors: Int,
    numOfSearchGroups: Int,
    indexingStrategy: IndexingStrategy[D],
    numReplicas: Int = 1,
    reducersOption: Option[Int] = None,
    queryShards: Option[Int] = None,
    searchSpaceShards: Option[Int] = None,
    useCounters: Boolean = true
  )(
    implicit ordering: Ordering[A],
    uid: UniqueID
  ): UnsortedGrouped[A, Seq[(B, D)]] = {
    implicit val ord: Ordering[NNKey] = Ordering.by(NNKey.unapply)
    val entityEmbeddings = searchSpaceEmbeddings.map { embedding: EmbeddingWithEntity[B] =>
      val entityEmbedding =
        EntityEmbedding(embedding.entityId, Embedding(embedding.embedding.toArray))
      entityEmbedding
    }
    // Each search-space item lands in ONE random search group but is copied to
    // EVERY replica of that group. NOTE(review): Random here is unseeded, so
    // group assignment (and therefore exact results) are nondeterministic per run.
    val shardedSearchSpace = shard(entityEmbeddings, searchSpaceShards)
    val groupedSearchSpaceEmbeddings = shardedSearchSpace
      .flatMap { entityEmbedding =>
        val searchGroup = Random.nextInt(numOfSearchGroups)
        (0 until numReplicas).map { replica =>
          (NNKey(searchGroup, replica, Some(numReplicas)), entityEmbedding)
        }
      }
    // Each query lands in ONE random replica but is fanned out to EVERY search
    // group, so it is compared against the full search space exactly once.
    val shardedQueries = shard(queryEmbeddings, queryShards)
    val groupedQueryEmbeddings = shardedQueries
      .flatMap { entity =>
        val replica = Random.nextInt(numReplicas)
        (0 until numOfSearchGroups).map { searchGroup =>
          (NNKey(searchGroup, replica, Some(numReplicas)), entity)
        }
      }.group
      .withReducers(reducersOption.getOrElse(numOfSearchGroups * numReplicas))
    val numberAnnIndexQueries = Stat("NumberAnnIndexQueries")
    val annIndexQueryTotalMs = Stat("AnnIndexQueryTotalMs")
    val numberIndexBuilds = Stat("NumberIndexBuilds")
    val annIndexBuildTotalMs = Stat("AnnIndexBuildTotalMs")
    val groupedKnn = groupedQueryEmbeddings
      .cogroup(groupedSearchSpaceEmbeddings) {
        case (_, queryIter, searchSpaceIter) =>
          // This index build happens numReplicas times. Ideally we could serialize the queryable.
          // And only build the index once per search group.
          // The issues with that now are:
          // - The HNSW queryable is not serializable in scalding
          // - The way that map reduce works requires that there is a job that write out the search
          //   space embeddings numReplicas times. In the current setup, we can do that by sharding
          //   the embeddings first and then fanning out. But if we had a single queryable, we would
          //   not be able to shard it easily and writing this out would take a long time.
          val indexBuildStartTime = System.currentTimeMillis()
          val queryable = indexingStrategy.buildIndex(searchSpaceIter)
          if (useCounters) {
            numberIndexBuilds.inc()
            annIndexBuildTotalMs.incBy(System.currentTimeMillis() - indexBuildStartTime)
          }
          queryIter.flatMap { query =>
            val queryStartTime = System.currentTimeMillis()
            val embedding = Embedding(query.embedding.toArray)
            val result = Await.result(
              queryable.queryWithDistance(embedding, numNeighbors)
            )
            val queryToTopNeighbors = result
              .map { neighbor =>
                (query.entityId, (neighbor.neighbor, neighbor.distance))
              }
            if (useCounters) {
              numberAnnIndexQueries.inc()
              annIndexQueryTotalMs.incBy(System.currentTimeMillis() - queryStartTime)
            }
            queryToTopNeighbors
          }
      }
      .values
      .group
    val groupedKnnWithReducers = reducersOption
      .map { reducers =>
        groupedKnn
          .withReducers(reducers)
      }.getOrElse(groupedKnn)
    // Merge the per-search-group candidates into a global top-numNeighbors
    // per query, smallest distance first.
    groupedKnnWithReducers
      .sortedTake(numNeighbors) {
        Ordering
          .by[(B, D), D] {
            case (_, distance) => distance
          }
      }
  }

  // Applies pipe.shard(n) only when a shard count is given.
  private[this] def shard[T](
    pipe: TypedPipe[T],
    numberOfShards: Option[Int]
  ): TypedPipe[T] = {
    numberOfShards
      .map { shards =>
        pipe.shard(shards)
      }.getOrElse(pipe)
  }

  /**
   * Cross-product KNN. The smaller side (per isSearchSpaceLarger) is sharded
   * across mappers and crossed with the other, then grouped by query id and
   * the numNeighbors closest results are kept.
   */
  private[this] def findNearestNeighboursViaCross[A <: EntityId, B <: EntityId, D <: Distance[D]](
    queryEmbeddings: TypedPipe[EmbeddingWithEntity[A]],
    searchSpaceEmbeddings: TypedPipe[EmbeddingWithEntity[B]],
    metric: Metric[D],
    numNeighbors: Int,
    reducers: Int,
    mappers: Int,
    isSearchSpaceLarger: Boolean
  )(
    implicit ordering: Ordering[A]
  ): TypedPipe[(A, Seq[(B, D)])] = {
    val crossed: TypedPipe[(A, (B, D))] = if (isSearchSpaceLarger) {
      searchSpaceEmbeddings
        .shard(mappers)
        .cross(queryEmbeddings).map {
          case (searchSpaceEmbedding, queryEmbedding) =>
            val distance = metric.distance(searchSpaceEmbedding.embedding, queryEmbedding.embedding)
            (queryEmbedding.entityId, (searchSpaceEmbedding.entityId, distance))
        }
    } else {
      queryEmbeddings
        .shard(mappers)
        .cross(searchSpaceEmbeddings).map {
          case (queryEmbedding, searchSpaceEmbedding) =>
            val distance = metric.distance(searchSpaceEmbedding.embedding, queryEmbedding.embedding)
            (queryEmbedding.entityId, (searchSpaceEmbedding.entityId, distance))
        }
    }
    crossed
      .groupBy(_._1)
      .withReducers(reducers)
      .sortedTake(numNeighbors) {
        Ordering
          .by[(A, (B, D)), D] {
            case (_, (_, distance)) => distance
          } // Sort by distance metric in ascending order
      }.map {
        case (queryId, neighbors) =>
          (queryId, neighbors.map(_._2))
      }
  }

  /**
   * Convert nearest neighbors to string format.
   * By default format would be (queryId neighbourId:distance neighbourId:distance .....) in ascending order of distance.
   * @param nearestNeighbors nearest neighbors tuple in form of (queryId, Seq[(neighborId, distance)]
   * @param queryEntityKind entity kind of query
   * @param neighborEntityKind entity kind of search space/neighbors
   * @param idDistanceSeparator String separator to separate a single neighborId and distance. Default to colon (:)
   * @param neighborSeparator String operator to separate neighbors. Default to tab
   * @tparam A type of query entity
   * @tparam B type of search space entity
   * @tparam D type of distance
   */
  def nearestNeighborsToString[A <: EntityId, B <: EntityId, D <: Distance[D]](
    nearestNeighbors: (A, Seq[(B, D)]),
    queryEntityKind: EntityKind[A],
    neighborEntityKind: EntityKind[B],
    idDistanceSeparator: String = ":",
    neighborSeparator: String = "\t"
  ): String = {
    val (queryId, neighbors) = nearestNeighbors
    val formattedNeighbors = neighbors.map {
      case (neighbourId, distance) =>
        s"${neighborEntityKind.stringInjection.apply(neighbourId)}$idDistanceSeparator${distance.distance}"
    }
    (queryEntityKind.stringInjection.apply(queryId) +: formattedNeighbors)
      .mkString(neighborSeparator)
  }

  // Key for (search group, replica) pairs used to cogroup queries with
  // search-space partitions.
  private[this] case class NNKey(
    searchGroup: Int,
    replica: Int,
    maxReplica: Option[Int] = None) {
    // When maxReplica is known, hash to a dense, deterministic bucket id
    // (searchGroup * maxReplica + replica) so partitioning is stable.
    override def hashCode(): Int =
      maxReplica.map(_ * searchGroup + replica).getOrElse(super.hashCode())
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/KnnOfflineJob.scala
package com.twitter.ann.scalding.offline

import com.twitter.ann.common.Metric
import com.twitter.bijection.scrooge.BinaryScalaCodec
import com.twitter.ml.featurestore.lib.UserId
import com.twitter.ml.featurestore.lib.embedding.EmbeddingWithEntity
import com.twitter.cortex.ml.embeddings.common.EntityKind
import com.twitter.entityembeddings.neighbors.thriftscala.{EntityKey, NearestNeighbors}
import com.twitter.scalding.commons.source.VersionedKeyValSource
import com.twitter.scalding.typed.TypedPipe
import com.twitter.scalding.{Args, DateOps, DateParser, DateRange, Execution, TypedTsv, UniqueID}
import com.twitter.scalding_internal.job.TwitterExecutionApp
import com.twitter.search.common.file.{AbstractFile, LocalFile}
import java.util.TimeZone

/**
 * Generates the nearest neighbours for users and stores them in Manhattan
 * format, i.e. sequence files. See README for oscar usage.
 */
object KnnOfflineJob extends TwitterExecutionApp {

  override def job: Execution[Unit] = Execution.withId { implicit uniqueId =>
    Execution.getArgs.flatMap { args: Args =>
      // If a knn index directory is supplied, pull it down to the local
      // filesystem before executing; otherwise run without one.
      args.optional("knn_directory") match {
        case Some(knnDirectory) =>
          Execution.withCachedFile(knnDirectory) { cached =>
            execute(args, Some(new LocalFile(cached.file)))
          }
        case None =>
          execute(args, None)
      }
    }
  }

  /**
   * Execute KnnOfflineJob.
   *
   * @param args the args object for this job
   * @param abstractFile an optional producer embedding path
   */
  def execute(
    args: Args,
    abstractFile: Option[AbstractFile]
  )(
    implicit uniqueID: UniqueID
  ): Execution[Unit] = {
    implicit val tz: TimeZone = TimeZone.getDefault()
    implicit val dp: DateParser = DateParser.default
    implicit val dateRange = DateRange.parse(args.list("date"))(DateOps.UTC, DateParser.default)
    // Thrift codecs required by the versioned key/value sink below.
    implicit val keyInject = BinaryScalaCodec(EntityKey)
    implicit val valueInject = BinaryScalaCodec(NearestNeighbors)

    val producerKind = EntityKind.getEntityKind(args("producer_entity_kind"))
    val distanceMetric = Metric.fromString(args("metric"))
    val outputPath: String = args("output_path")
    val numNeighbors: Int = args("neighbors").toInt
    // Search expansion factor; defaults to the requested neighbour count.
    val ef: Int = args.getOrElse("ef", numNeighbors.toString).toInt
    val reducers: Int = args("reducers").toInt
    val knnDimension: Int = args("dimension").toInt
    val debugOutputPath: Option[String] = args.optional("debug_output_path")
    val filterPath: Option[String] = args.optional("users_filter_path")
    val shards: Int = args.getOrElse("shards", "100").toInt
    val useHashJoin: Boolean = args.getOrElse("use_hash_join", "false").toBoolean

    // Manhattan-compatible versioned key/value sink for the neighbour lists.
    val manhattanSink = VersionedKeyValSource[EntityKey, NearestNeighbors](
      path = outputPath,
      sourceVersion = None,
      sinkVersion = None,
      maxFailures = 0,
      versionsToKeep = 1
    )

    val consumerEmbeddings: TypedPipe[EmbeddingWithEntity[UserId]] =
      KnnHelper.getFilteredUserEmbeddings(
        args,
        filterPath,
        reducers,
        useHashJoin
      )

    val neighborsPipe: TypedPipe[(EntityKey, NearestNeighbors)] =
      KnnHelper.getNeighborsPipe(
        args,
        producerKind,
        distanceMetric,
        ef,
        consumerEmbeddings,
        abstractFile,
        reducers,
        numNeighbors,
        knnDimension
      )

    val writeNeighbors: Execution[Unit] = neighborsPipe.writeExecution(manhattanSink)

    // Optionally also emit a human-readable TSV for manual inspection.
    debugOutputPath match {
      case Some(path: String) =>
        val writeDebug: Execution[Unit] = KnnDebug
          .getDebugTable(
            neighborsPipe = neighborsPipe,
            shards = shards,
            reducers = reducers
          )
          .writeExecution(TypedTsv(path))
        Execution.zip(writeDebug, writeNeighbors).unit
      case None =>
        writeNeighbors
    }
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/KnnTruthSetGenerator.scala
package com.twitter.ann.scalding.offline

import com.twitter.ann.common.Distance
import com.twitter.ann.common.Metric
import com.twitter.ann.scalding.offline.KnnHelper.nearestNeighborsToString
import com.twitter.cortex.ml.embeddings.common.EntityKind
import com.twitter.ml.featurestore.lib.EntityId
import com.twitter.scalding.source.TypedText
import com.twitter.scalding.Args
import com.twitter.scalding.Execution
import com.twitter.scalding.UniqueID
import com.twitter.scalding_internal.job.TwitterExecutionApp

/**
 * Reads index embedding data and query embedding data, then produces three
 * outputs: a sampled query set, a sampled index set, and the true nearest
 * neighbours from each query to the index.
 */
object KnnTruthSetGenerator extends TwitterExecutionApp {

  override def job: Execution[Unit] = Execution.withId { implicit uniqueId =>
    Execution.getArgs.flatMap { args: Args =>
      run(
        EntityKind.getEntityKind(args("query_entity_kind")),
        EntityKind.getEntityKind(args("index_entity_kind")),
        Metric.fromString(args("metric")),
        args
      )
    }
  }

  private[this] def run[A <: EntityId, B <: EntityId, D <: Distance[D]](
    uncastQueryEntityKind: EntityKind[_],
    uncastIndexSpaceEntityKind: EntityKind[_],
    uncastMetric: Metric[_],
    args: Args
  )(
    implicit uniqueID: UniqueID
  ): Execution[Unit] = {
    // Bind the wildcard kinds/metric to this method's type parameters.
    val queryKind = uncastQueryEntityKind.asInstanceOf[EntityKind[A]]
    val indexKind = uncastIndexSpaceEntityKind.asInstanceOf[EntityKind[B]]
    val distanceMetric = uncastMetric.asInstanceOf[Metric[D]]

    val reducers = args.int("reducers")
    val mappers = args.int("mappers")
    val numNeighbors = args.int("neighbors")
    val knnOutputPath = args("truth_set_output_path")
    // Sample rates arrive as percentages; convert them to fractions.
    val queryFraction = args.double("query_sample_percent", 100) / 100
    val indexFraction = args.double("index_sample_percent", 100) / 100

    val sampledQueries = queryKind.parser
      .getEmbeddingFormat(args, "query")
      .getEmbeddings
      .sample(queryFraction)

    val sampledIndex = indexKind.parser
      .getEmbeddingFormat(args, "index")
      .getEmbeddings
      .sample(indexFraction)

    // Compute the exact nearest neighbours and write them as a single TSV shard.
    val knnExecution = KnnHelper
      .findNearestNeighbours(
        sampledQueries,
        sampledIndex,
        distanceMetric,
        numNeighbors,
        reducers = reducers,
        mappers = mappers
      )(queryKind.ordering, uniqueID)
      .map(nearestNeighborsToString(_, queryKind, indexKind))
      .shard(1)
      .writeExecution(TypedText.tsv(knnOutputPath))

    // Persist the sampled query embeddings alongside the truth set.
    val querySetExecution = queryKind.parser
      .getEmbeddingFormat(args, "query_set_output")
      .writeEmbeddings(sampledQueries)

    // Persist the sampled index embeddings alongside the truth set.
    val indexSetExecution = indexKind.parser
      .getEmbeddingFormat(args, "index_set_output")
      .writeEmbeddings(sampledIndex)

    Execution.zip(knnExecution, querySetExecution, indexSetExecution).unit
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/ParameterlessQueryable.scala
package com.twitter.ann.scalding.offline import com.twitter.ann.common.EmbeddingType.EmbeddingVector import com.twitter.ann.common.{Distance, NeighborWithDistance, Queryable, RuntimeParams} import com.twitter.util.Future private[offline] case class ParameterlessQueryable[T, P <: RuntimeParams, D <: Distance[D]]( queryable: Queryable[T, P, D], runtimeParamsForAllQueries: P) { /** * ANN query for ids with distance. * * @param embedding : Embedding/Vector to be queried with. * @param numOfNeighbors : Number of neighbours to be queried for. * * @return List of approximate nearest neighbour ids with distance from the query embedding. */ def queryWithDistance( embedding: EmbeddingVector, numOfNeighbors: Int ): Future[List[NeighborWithDistance[T, D]]] = queryable.queryWithDistance(embedding, numOfNeighbors, runtimeParamsForAllQueries) }
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/README
# Description
This pipeline uses hnsw and scalding to create an hnsw index based on producer embeddings, which it then uses to construct lists of producer suggestions for each user.
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/faissindexbuilder/BUILD.bazel
scala_library( sources = ["*.scala"], compiler_option_sets = ["fatal_warnings"], platform = "java11", tags = [ "bazel-compatible", "bazel-only", ], dependencies = [ "3rdparty/src/jvm/com/twitter/scalding:args", "3rdparty/src/jvm/com/twitter/scalding:core", "ann/src/main/scala/com/twitter/ann/annoy", "ann/src/main/scala/com/twitter/ann/brute_force", "ann/src/main/scala/com/twitter/ann/common", "ann/src/main/scala/com/twitter/ann/faiss", "ann/src/main/scala/com/twitter/ann/serialization", "ann/src/main/scala/com/twitter/ann/util", "src/scala/com/twitter/cortex/ml/embeddings/common:Helpers", "src/scala/com/twitter/scalding_internal/job", ], ) hadoop_binary( name = "faissindexbuilder-deploy", main = "com.twitter.ann.scalding.offline.faissindexbuilder.IndexBuilderApp", platform = "java11", runtime_platform = "java11", tags = [ "bazel-compatible", "bazel-compatible:migrated", "bazel-only", ], dependencies = [ ":faissindexbuilder", "3rdparty/jvm/org/slf4j:slf4j-jdk14", ], )
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/faissindexbuilder/IndexBuilder.scala
package com.twitter.ann.scalding.offline.faissindexbuilder import com.twitter.ann.common.Distance import com.twitter.ann.common.EntityEmbedding import com.twitter.ann.common.Metric import com.twitter.ann.faiss.FaissIndexer import com.twitter.cortex.ml.embeddings.common.EmbeddingFormat import com.twitter.ml.api.embedding.Embedding import com.twitter.ml.featurestore.lib.UserId import com.twitter.scalding.Execution import com.twitter.search.common.file.AbstractFile import com.twitter.util.logging.Logging object IndexBuilder extends FaissIndexer with Logging { def run[T <: UserId, D <: Distance[D]]( embeddingFormat: EmbeddingFormat[T], embeddingLimit: Option[Int], sampleRate: Float, factoryString: String, metric: Metric[D], outputDirectory: AbstractFile, numDimensions: Int ): Execution[Unit] = { val embeddingsPipe = embeddingFormat.getEmbeddings val limitedEmbeddingsPipe = embeddingLimit .map { limit => embeddingsPipe.limit(limit) }.getOrElse(embeddingsPipe) val annEmbeddingPipe = limitedEmbeddingsPipe.map { embedding => val embeddingSize = embedding.embedding.length assert( embeddingSize == numDimensions, s"Specified number of dimensions $numDimensions does not match the dimensions of the " + s"embedding $embeddingSize" ) EntityEmbedding[Long](embedding.entityId.userId, Embedding(embedding.embedding.toArray)) } build(annEmbeddingPipe, sampleRate, factoryString, metric, outputDirectory) } }
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/faissindexbuilder/IndexBuilderApp.scala
package com.twitter.ann.scalding.offline.faissindexbuilder

import com.twitter.ann.common.Distance
import com.twitter.ann.common.Metric
import com.twitter.cortex.ml.embeddings.common._
import com.twitter.ml.featurestore.lib.UserId
import com.twitter.scalding.Args
import com.twitter.scalding.DateOps
import com.twitter.scalding.DateParser
import com.twitter.scalding.DateRange
import com.twitter.scalding.Execution
import com.twitter.scalding_internal.job.TwitterExecutionApp
import com.twitter.search.common.file.FileUtils
import com.twitter.util.logging.Logging
import java.util.Calendar
import java.util.TimeZone

trait IndexBuilderExecutable extends Logging {

  /**
   * Parses job arguments, casts the entity kind and metric to concrete type
   * parameters, and runs the Faiss index build.
   *
   * @param args job arguments (entity_kind, metric, input, num_dimensions,
   *             output_dir, factory_string, optional embedding_date_range /
   *             embedding_limit / training_sample_rate)
   */
  def indexBuilderExecution[T <: UserId, D <: Distance[D]](
    args: Args
  ): Execution[Unit] = {
    // parse the arguments for this job
    val uncastEntityKind = EntityKind.getEntityKind(args("entity_kind"))
    val uncastMetric = Metric.fromString(args("metric"))
    val entityKind = uncastEntityKind.asInstanceOf[EntityKind[T]]
    val metric = uncastMetric.asInstanceOf[Metric[D]]

    // Optional date range restricting which embeddings are read.
    val uncastDateRange = args.list("embedding_date_range")
    val embeddingDateRange = if (uncastDateRange.nonEmpty) {
      Some(DateRange.parse(uncastDateRange)(DateOps.UTC, DateParser.default))
    } else {
      None
    }

    val embeddingFormat =
      entityKind.parser.getEmbeddingFormat(args, "input", providedDateRange = embeddingDateRange)
    val numDimensions = args.int("num_dimensions")
    val embeddingLimit = args.optional("embedding_limit").map(_.toInt)
    val outputDirectory = FileUtils.getFileHandle(args("output_dir"))
    // args(...) raises a descriptive error for a missing required argument,
    // unlike the previous `args.optional(...).get`, which threw a bare
    // NoSuchElementException with no hint about which argument was missing.
    val factoryString = args("factory_string")
    val sampleRate = args.float("training_sample_rate", 0.05f)

    logger.debug(s"Job args: ${args.toString}")

    // When a date range is supplied, partition the output directory as
    // output_dir/YYYY/MM/DD using the (UTC) end date of the range.
    val finalOutputDirectory = embeddingDateRange
      .map { range =>
        val cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
        cal.setTime(range.end)
        outputDirectory
          .getChild(s"${cal.get(Calendar.YEAR)}")
          .getChild(f"${cal.get(Calendar.MONTH) + 1}%02d")
          .getChild(f"${cal.get(Calendar.DAY_OF_MONTH)}%02d")
      }.getOrElse(outputDirectory)

    logger.info(s"Final output directory is ${finalOutputDirectory.getPath}")

    IndexBuilder
      .run(
        embeddingFormat,
        embeddingLimit,
        sampleRate,
        factoryString,
        metric,
        finalOutputDirectory,
        numDimensions
      ).onComplete { _ =>
        // Fixed: the callback previously returned the `Unit` companion object
        // (`_ => Unit`), a discarded-value smell; `()` is the unit value.
        ()
      }
  }
}

object IndexBuilderApp extends TwitterExecutionApp with IndexBuilderExecutable {
  override def job: Execution[Unit] = Execution.getArgs.flatMap { args: Args =>
    indexBuilderExecution(args)
  }
}
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/indexbuilder/BUILD.bazel
scala_library( sources = ["*.scala"], compiler_option_sets = ["fatal_warnings"], platform = "java8", tags = [ "bazel-compatible", "bazel-only", ], dependencies = [ "3rdparty/src/jvm/com/twitter/scalding:args", "3rdparty/src/jvm/com/twitter/scalding:core", "ann/src/main/scala/com/twitter/ann/annoy", "ann/src/main/scala/com/twitter/ann/brute_force", "ann/src/main/scala/com/twitter/ann/common", "ann/src/main/scala/com/twitter/ann/hnsw", "ann/src/main/scala/com/twitter/ann/serialization", "ann/src/main/scala/com/twitter/ann/util", "src/scala/com/twitter/cortex/ml/embeddings/common:Helpers", "src/scala/com/twitter/scalding_internal/job", ], ) hadoop_binary( name = "indexbuilder-deploy", main = "com.twitter.ann.scalding.offline.indexbuilder.IndexBuilderApp", platform = "java8", runtime_platform = "java8", tags = [ "bazel-compatible", "bazel-compatible:migrated", "bazel-only", ], dependencies = [ ":indexbuilder", "3rdparty/jvm/org/slf4j:slf4j-jdk14", ], )
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/indexbuilder/IndexBuilder.scala
package com.twitter.ann.scalding.offline.indexbuilder import com.twitter.ann.common.Appendable import com.twitter.ann.common.Distance import com.twitter.ann.common.EntityEmbedding import com.twitter.ann.common.Serialization import com.twitter.ann.util.IndexBuilderUtils import com.twitter.cortex.ml.embeddings.common.EmbeddingFormat import com.twitter.ml.api.embedding.Embedding import com.twitter.ml.featurestore.lib.EntityId import com.twitter.scalding.Execution import com.twitter.scalding_internal.job.FutureHelper import com.twitter.search.common.file.AbstractFile import com.twitter.util.logging.Logger object IndexBuilder { private[this] val Log = Logger.apply[IndexBuilder.type] def run[T <: EntityId, _, D <: Distance[D]]( embeddingFormat: EmbeddingFormat[T], embeddingLimit: Option[Int], index: Appendable[T, _, D] with Serialization, concurrencyLevel: Int, outputDirectory: AbstractFile, numDimensions: Int ): Execution[Unit] = { val embeddingsPipe = embeddingFormat.getEmbeddings val limitedEmbeddingsPipe = embeddingLimit .map { limit => embeddingsPipe.limit(limit) }.getOrElse(embeddingsPipe) val annEmbeddingPipe = limitedEmbeddingsPipe.map { embedding => val embeddingSize = embedding.embedding.length assert( embeddingSize == numDimensions, s"Specified number of dimensions $numDimensions does not match the dimensions of the " + s"embedding $embeddingSize" ) EntityEmbedding[T](embedding.entityId, Embedding(embedding.embedding.toArray)) } annEmbeddingPipe.toIterableExecution.flatMap { annEmbeddings => val future = IndexBuilderUtils.addToIndex(index, annEmbeddings.toStream, concurrencyLevel) val result = future.map { numberUpdates => Log.info(s"Performed $numberUpdates updates") index.toDirectory(outputDirectory) Log.info(s"Finished writing to $outputDirectory") } FutureHelper.executionFrom(result).unit } } }
the-algorithm-main/ann/src/main/scala/com/twitter/ann/scalding/offline/indexbuilder/IndexBuilderApp.scala
package com.twitter.ann.scalding.offline.indexbuilder

import com.twitter.ann.annoy.TypedAnnoyIndex
import com.twitter.ann.brute_force.SerializableBruteForceIndex
import com.twitter.ann.common.Distance
import com.twitter.ann.common.Metric
import com.twitter.ann.common.ReadWriteFuturePool
import com.twitter.ann.hnsw.TypedHnswIndex
import com.twitter.ann.serialization.thriftscala.PersistedEmbedding
import com.twitter.ann.serialization.PersistedEmbeddingInjection
import com.twitter.ann.serialization.ThriftIteratorIO
import com.twitter.cortex.ml.embeddings.common._
import com.twitter.ml.featurestore.lib.EntityId
import com.twitter.scalding.Args
import com.twitter.scalding.Execution
import com.twitter.scalding_internal.job.TwitterExecutionApp
import com.twitter.search.common.file.FileUtils
import com.twitter.util.FuturePool
import java.util.concurrent.Executors

trait IndexBuilderExecutable {

  /**
   * Parses job arguments, casts the entity kind and metric to concrete type
   * parameters, selects the index implementation from the `algo` argument
   * (brute_force | annoy | hnsw), and runs the index build.
   */
  def indexBuilderExecution[T <: EntityId, D <: Distance[D]](
    args: Args
  ): Execution[Unit] = {
    // parse the arguments for this job
    val uncastEntityKind = EntityKind.getEntityKind(args("entity_kind"))
    val uncastMetric = Metric.fromString(args("metric"))
    val entityKind = uncastEntityKind.asInstanceOf[EntityKind[T]]
    val metric = uncastMetric.asInstanceOf[Metric[D]]
    val embeddingFormat = entityKind.parser.getEmbeddingFormat(args, "input")
    val injection = entityKind.byteInjection
    val numDimensions = args.int("num_dimensions")
    val embeddingLimit = args.optional("embedding_limit").map(_.toInt)
    val concurrencyLevel = args.int("concurrency_level")
    val outputDirectory = FileUtils.getFileHandle(args("output_dir"))

    println(s"Job args: ${args.toString}")

    // Worker pool used for concurrent index insertions; released in onComplete.
    val threadPool = Executors.newFixedThreadPool(concurrencyLevel)

    val serialization = args("algo") match {
      case "brute_force" =>
        // Renamed from `PersistedEmbeddingIO`: local vals are lowerCamelCase.
        val persistedEmbeddingIO = new ThriftIteratorIO[PersistedEmbedding](PersistedEmbedding)
        SerializableBruteForceIndex[T, D](
          metric,
          FuturePool.apply(threadPool),
          new PersistedEmbeddingInjection[T](injection),
          persistedEmbeddingIO
        )
      case "annoy" =>
        TypedAnnoyIndex.indexBuilder[T, D](
          numDimensions,
          args.int("annoy_num_trees"),
          metric,
          injection,
          FuturePool.apply(threadPool)
        )
      case "hnsw" =>
        val efConstruction = args.int("ef_construction")
        val maxM = args.int("max_m")
        val expectedElements = args.int("expected_elements")
        TypedHnswIndex.serializableIndex[T, D](
          numDimensions,
          metric,
          efConstruction,
          maxM,
          expectedElements,
          injection,
          ReadWriteFuturePool(FuturePool.apply(threadPool))
        )
      case unknownAlgo =>
        // Previously an unknown algo surfaced as a bare MatchError and leaked
        // the thread pool; shut the pool down and fail with a clear message.
        threadPool.shutdown()
        throw new IllegalArgumentException(
          s"Unknown algo '$unknownAlgo'; expected one of: brute_force, annoy, hnsw")
    }

    IndexBuilder
      .run(
        embeddingFormat,
        embeddingLimit,
        serialization,
        concurrencyLevel,
        outputDirectory,
        numDimensions
      ).onComplete { _ =>
        // Fixed: the callback previously ended with the `Unit` companion
        // object as a discarded trailing expression; shutdown() already
        // returns Unit.
        threadPool.shutdown()
      }
  }
}

object IndexBuilderApp extends TwitterExecutionApp with IndexBuilderExecutable {
  override def job: Execution[Unit] = Execution.getArgs.flatMap { args: Args =>
    indexBuilderExecution(args)
  }
}