Dataset Viewer

Columns:
- repo_name — string (length 1–62)
- dataset — string (1 distinct value)
- lang — string (11 distinct values)
- pr_id — int64 (1–20.1k)
- owner — string (length 2–34)
- reviewer — string (length 2–39)
- diff_hunk — string (length 15–262k)
- code_review_comment — string (length 1–99.6k)
fugit2.nvim
|
github_2023
|
others
| 42 |
SuperBo
|
SuperBo
|
@@ -0,0 +1,37 @@
+package = 'fugit2.nvim'
+
+version = 'scm-1'
+
+rockspec_format = '3.0'
+
+source = {
+ url = 'git://github.com/SuperBo/fugit2.nvim.git'
+}
+
+description = {
+ summary = 'Git plugin for Neovim (based on libgit2)',
+ homepage = 'https://github.com/SuperBo/fugit2.nvim',
+ license = 'MIT',
+}
+
+dependencies = {
+ 'lua >= 5.1',
+ 'nui.nvim',
+ 'nvim-web-devicons',
+ 'plenary.nvim',
+}
+
+external_dependencies = {
+ GIT2 = {
+ library = 'git2',
+ }
+}
|
Can we add gpgme as an optional dependency? I use it for GPG-signing git commits.
|
fugit2.nvim
|
github_2023
|
others
| 42 |
SuperBo
|
SuperBo
|
@@ -0,0 +1,37 @@
+package = 'fugit2.nvim'
+
+version = 'scm-1'
+
+rockspec_format = '3.0'
+
+source = {
+ url = 'git://github.com/SuperBo/fugit2.nvim.git'
+}
+
+description = {
+ summary = 'Git plugin for Neovim (based on libgit2)',
+ homepage = 'https://github.com/SuperBo/fugit2.nvim',
+ license = 'MIT',
+}
+
+dependencies = {
+ 'lua >= 5.1',
|
Can we specify the Lua version as LuaJIT? The implementation is based on the LuaJIT FFI and doesn't work with plain Lua.
|
fugit2.nvim
|
github_2023
|
others
| 42 |
SuperBo
|
SuperBo
|
@@ -0,0 +1,33 @@
+name: Push to Luarocks
+
+on:
+ push:
+ tags:
+ - '*'
+ release:
+ types:
+ - created
+ pull_request: # Runs test install without uploading
+ workflow_dispatch: # Allows to trigger manually
+
+jobs:
+ luarocks-upload:
+ runs-on: ubuntu-23.10
|
Can we use ubuntu-22.04 here? An LTS is a better choice than a non-LTS Ubuntu release.
|
fugit2.nvim
|
github_2023
|
others
| 42 |
SuperBo
|
SuperBo
|
@@ -0,0 +1,35 @@
+local repo_url = '$repo_url'
+
+rockspec_format = '3.0'
+package = '$package'
+version = modrev ..'-'.. specrev
+
+description = {
+ summary = '$summary',
+ labels = $labels,
+ homepage = '$homepage',
+ $license
+}
+
+dependencies = {
+ 'lua >= 5.1',
+ 'nui.nvim',
+ 'nvim-web-devicons',
+ 'plenary.nvim',
+}
+
+external_dependencies = {
+ GIT2 = {
+ library = 'git2',
+ }
+}
|
Can we add GPGME as an optional dependency?
|
fugit2.nvim
|
github_2023
|
others
| 42 |
SuperBo
|
SuperBo
|
@@ -0,0 +1,35 @@
+local repo_url = '$repo_url'
+
+rockspec_format = '3.0'
+package = '$package'
+version = modrev ..'-'.. specrev
+
+description = {
+ summary = '$summary',
+ labels = $labels,
+ homepage = '$homepage',
+ $license
+}
+
+dependencies = {
+ 'lua >= 5.1',
+ 'nui.nvim',
+ 'nvim-web-devicons',
+ 'plenary.nvim',
+}
|
Let's leave it like this; I plan to replace all of those dependencies later.
|
fugit2.nvim
|
github_2023
|
others
| 39 |
SuperBo
|
ryancobb
|
@@ -1687,8 +1687,11 @@ function GitStatus:_init_branch_menu()
if vim.fn.exists ":Telescope" then
self:unmount()
vim.cmd { cmd = "Telescope", args = { "git_branches" } }
+ elseif vim.fn.exists ":FzfLua" then
|
I'm still getting `Command not found: Telescope` with these changes.
Looks like `vim.fn.exists` returns `0` if the command doesn't exist, and Lua treats `0` as truthy, so we're still trying the `Telescope` cmd.
I think we have to check `vim.fn.exists ... > 0` on this line and the one above.
|
differential
|
github_2023
|
typescript
| 213 |
differentialhq
|
nadeesha
|
@@ -0,0 +1,64 @@
+import os from "os";
+import path from "path";
+import fs from "fs";
+
+export type CliContext = {
+ apiUrl?: string;
+ consoleUrl?: string;
+ cluster?: string;
+ service?: string;
+ deployment?: string;
+};
+
+const BASE_CONTEXT_PATH = path.join(os.homedir(), ".differential");
|
Nice one!
Another idea:
Save context per Differential project, and instead put the `.differential` file in the directory where the user is running the operation?
So, when a command is run, it gets the context from the directory it's being run from.
It could even be a `differential.toml`, and we could follow what Fly does.
I'll leave it up to you.
|
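As a rough illustration of the per-project idea above, here is a minimal sketch of how a CLI could resolve context by walking up from the working directory to a project-level `.differential` file before falling back to the home-directory one. The lookup helper, the JSON file contents, and the fallback file name are assumptions for illustration, not the project's actual implementation; a `differential.toml` variant would additionally need a TOML parser.

```typescript
import os from "os";
import path from "path";
import fs from "fs";

type CliContext = {
  apiUrl?: string;
  consoleUrl?: string;
  cluster?: string;
  service?: string;
  deployment?: string;
};

// Walk up from `cwd` until a project-level `.differential` file is found.
const findProjectContextFile = (cwd: string): string | null => {
  let dir = path.resolve(cwd);
  while (true) {
    const candidate = path.join(dir, ".differential");
    if (fs.existsSync(candidate) && fs.statSync(candidate).isFile()) return candidate;
    const parent = path.dirname(dir);
    if (parent === dir) return null; // reached the filesystem root
    dir = parent;
  }
};

// Prefer the project file; otherwise fall back to a global file under the home directory.
export const loadContext = (cwd = process.cwd()): CliContext => {
  const globalFile = path.join(os.homedir(), ".differential", "context.json"); // assumed name
  const file = findProjectContextFile(cwd) ?? (fs.existsSync(globalFile) ? globalFile : null);
  return file ? (JSON.parse(fs.readFileSync(file, "utf-8")) as CliContext) : {};
};
```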
differential
|
github_2023
|
others
| 215 |
differentialhq
|
nadeesha
|
@@ -0,0 +1,85 @@
+# On-demand Compute (BETA)
+
+> On-demand compute is currently in private beta. To gain early access, please sign up for the waitlist [here](https://forms.fillout.com/t/9M1VhL8Wxyus).
|
Yeah, let's just do hello@differential.dev?
|
differential
|
github_2023
|
typescript
| 211 |
differentialhq
|
github-advanced-security[bot]
|
@@ -0,0 +1,133 @@
+import { upsertAccessPointForCluster } from "./management";
+import {
+ validateAccessPointAccess,
+ validateAccessPointOrClusterTokenAccess,
+ validateClusterTokenAccess,
+} from "./routing-helpers";
+import { createOwner } from "./test/util";
+
+describe("routing-helpers", () => {
+ describe("validateClusterTokenAccess", () => {
+ it("should reject bad tokens", async () => {
+ ["bad token", "Bearer bad token", "Bearerbadtoken"].forEach(
+ async (authorization) => {
+ await validateClusterTokenAccess(authorization);
+ },
+ );
+ });
+
+ it("should accept good tokens", async () => {
+ const owner = await createOwner();
+
+ const result = await validateClusterTokenAccess(
+ `Bearer ${owner.apiSecret}`,
+ );
+
+ expect(result).toEqual({
+ clusterId: owner.clusterId,
+ cloudEnabled: false,
+ organizationId: null,
+ });
+ });
+ });
+
+ describe("validateAccessPointAccess", () => {
+ it("should reject bad tokens", async () => {
+ const owner = await createOwner();
+
+ [
+ {
+ clusterId: owner.clusterId,
+ authorization: "bad token",
|
## Hard-coded credentials
The hard-coded value "bad token" is used as [authorization header](1).
[Show more details](https://github.com/differentialhq/differential/security/code-scanning/5)
|
differential
|
github_2023
|
typescript
| 211 |
differentialhq
|
github-advanced-security[bot]
|
@@ -0,0 +1,133 @@
+import { upsertAccessPointForCluster } from "./management";
+import {
+ validateAccessPointAccess,
+ validateAccessPointOrClusterTokenAccess,
+ validateClusterTokenAccess,
+} from "./routing-helpers";
+import { createOwner } from "./test/util";
+
+describe("routing-helpers", () => {
+ describe("validateClusterTokenAccess", () => {
+ it("should reject bad tokens", async () => {
+ ["bad token", "Bearer bad token", "Bearerbadtoken"].forEach(
+ async (authorization) => {
+ await validateClusterTokenAccess(authorization);
+ },
+ );
+ });
+
+ it("should accept good tokens", async () => {
+ const owner = await createOwner();
+
+ const result = await validateClusterTokenAccess(
+ `Bearer ${owner.apiSecret}`,
+ );
+
+ expect(result).toEqual({
+ clusterId: owner.clusterId,
+ cloudEnabled: false,
+ organizationId: null,
+ });
+ });
+ });
+
+ describe("validateAccessPointAccess", () => {
+ it("should reject bad tokens", async () => {
+ const owner = await createOwner();
+
+ [
+ {
+ clusterId: owner.clusterId,
+ authorization: "bad token",
+ },
+ {
+ clusterId: owner.clusterId,
+ authorization: "Bearer bad token",
|
## Hard-coded credentials
The hard-coded value "Bearer bad token" is used as [authorization header](1).
[Show more details](https://github.com/differentialhq/differential/security/code-scanning/6)
|
differential
|
github_2023
|
typescript
| 211 |
differentialhq
|
github-advanced-security[bot]
|
@@ -3,13 +3,15 @@
export const createOwner = async (params?: { clusterId?: string }) => {
const clusterId = params?.clusterId || `test-cluster-${Math.random()}`;
+ const apiSecret = `test-secret-${Math.random()}`;
|
## Insecure randomness
This uses a cryptographically insecure random number generated at [Math.random()](1) in a security context.
[Show more details](https://github.com/differentialhq/differential/security/code-scanning/7)
|
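For what it's worth, a small sketch of the alternative the scanner is nudging toward: derive the test secret from Node's `crypto.randomBytes` rather than `Math.random()`. Whether that matters for a throwaway test fixture is a judgment call; the helper name below is illustrative.

```typescript
import { randomBytes } from "crypto";

// randomBytes draws from a cryptographically secure source, so the generated
// secret is unpredictable, unlike one derived from Math.random().
const generateTestSecret = (): string =>
  `test-secret-${randomBytes(16).toString("hex")}`;

// e.g. api_secret: generateTestSecret()
```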
differential
|
github_2023
|
typescript
| 198 |
differentialhq
|
github-advanced-security[bot]
|
@@ -1,17 +1,13 @@
import * as data from "../data";
-export const createOwner = async (params?: {
- clusterId?: string;
- predictiveRetriesEnabled?: boolean;
-}) => {
+export const createOwner = async (params?: { clusterId?: string }) => {
const clusterId = params?.clusterId || `test-cluster-${Math.random()}`;
await data.db
.insert(data.clusters)
.values({
id: clusterId,
- api_secret: "test",
- predictive_retries_enabled: params?.predictiveRetriesEnabled,
+ api_secret: `test-secret-${Math.random()}`,
|
## Insecure randomness
This uses a cryptographically insecure random number generated at [Math.random()](1) in a security context.
[Show more details](https://github.com/differentialhq/differential/security/code-scanning/4)
|
differential
|
github_2023
|
typescript
| 167 |
differentialhq
|
github-advanced-security[bot]
|
@@ -0,0 +1,40 @@
+import { ConfirmSubscriptionCommand, SNSClient } from "@aws-sdk/client-sns";
+import MessageValidator from "sns-validator";
+
+export const DELOYMENT_SNS_TOPIC = process.env.DELOYMENT_SNS_TOPIC;
+
+const validator = new MessageValidator();
+const sns = new SNSClient();
+
+// "'stackId'='XXXX'\n";
+export const parseCloudFormationMessage = (notification: string) =>
+ notification
+ .replace('"', "")
|
## Incomplete string escaping or encoding
This replaces only the first occurrence of '"'.
[Show more details](https://github.com/differentialhq/differential/security/code-scanning/3)
|
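A quick sketch of the distinction the alert is pointing at: `String.prototype.replace` with a string pattern only removes the first quote, whereas a global regex (or `replaceAll`) strips them all. The sample notification string is made up for illustration.

```typescript
const notification = `'stackId'='arn:aws:cloudformation:...'\n'status'='"CREATE_COMPLETE"'`;

const firstOnly = notification.replace('"', "");            // removes just the first "
const allViaRegex = notification.replace(/"/g, "");          // removes every "
const allViaReplaceAll = notification.replaceAll('"', "");   // Node 15+ / ES2021

console.log(firstOnly.includes('"'));        // true  - quotes remain
console.log(allViaRegex.includes('"'));      // false
console.log(allViaReplaceAll.includes('"')); // false
```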
differential
|
github_2023
|
others
| 166 |
differentialhq
|
johnjcsmith
|
@@ -18,4 +18,4 @@ It accepts an array of encryption keys. This is useful if you want to rotate you
Since this essentially makes function arguments opaque to the control plane, it is important to note that Differential will not be able to provide any of its usual features for these encrypted function arguments, such as predictive retries or predictive alerting.
-Caching and idempotency are still supported, as they are based on the function arguments and return values as supplied by the client.
+Caching is still supported, as they it is based on the function arguments and return values as supplied by the client.
|
```suggestion
Caching is still supported, as it is based on the function arguments and return values as supplied by the client.
```
|
differential
|
github_2023
|
others
| 166 |
differentialhq
|
johnjcsmith
|
@@ -0,0 +1,70 @@
+# Failure Modes and Error Handling
+
+Differential failure modes and error handling is not too dissimilar from the traditional failure modes and error handling in a service-oriented architecture.
+
+## A function call results in a rejection
+
+When a promise gets rejected from a remote function, Differential will serialize the error and send it back to the caller. The caller can then handle the error as needed.
+
+Differential does not do any retries or error handling on behalf of the caller in this case. It is up to the caller to decide how to handle the error.
+
+However, Differential does serialize the error and send it back to the caller, which allows the caller to handle the error as needed.
+
+For example, given this service function:
+
+```typescript
+class MyCustomError extends Error {
+ constructor(message: string) {
+ super(message);
+ this.name = "MyCustomError";
+ }
+}
+
+async function myFunction() {
+ throw new MyCustomError("This is a custom error");
+}
+```
+
+The caller can handle the error like this:
+
+```typescript
+try {
+ await myFunction();
+} catch (e) {
+ if (e.name === "MyCustomError") {
+ console.log("Caught a custom error:", e.message);
+ } else {
+ console.log("Caught an unknown error:", e.message);
+ }
+}
+```
+
+### Stack trace
+
+Differential does preserve the stack trace that is generated when the error is thrown, in the remote function.
|
🤯 that's neat. I didn't realise this.
|
differential
|
github_2023
|
others
| 166 |
differentialhq
|
johnjcsmith
|
@@ -0,0 +1,70 @@
+# Failure Modes and Error Handling
+
+Differential failure modes and error handling is not too dissimilar from the traditional failure modes and error handling in a service-oriented architecture.
+
+## A function call results in a rejection
+
+When a promise gets rejected from a remote function, Differential will serialize the error and send it back to the caller. The caller can then handle the error as needed.
+
+Differential does not do any retries or error handling on behalf of the caller in this case. It is up to the caller to decide how to handle the error.
+
+However, Differential does serialize the error and send it back to the caller, which allows the caller to handle the error as needed.
+
+For example, given this service function:
+
+```typescript
+class MyCustomError extends Error {
+ constructor(message: string) {
+ super(message);
+ this.name = "MyCustomError";
+ }
+}
+
+async function myFunction() {
+ throw new MyCustomError("This is a custom error");
+}
+```
+
+The caller can handle the error like this:
+
+```typescript
+try {
+ await myFunction();
+} catch (e) {
+ if (e.name === "MyCustomError") {
+ console.log("Caught a custom error:", e.message);
+ } else {
+ console.log("Caught an unknown error:", e.message);
+ }
+}
+```
+
+### Stack trace
+
+Differential does preserve the stack trace that is generated when the error is thrown, in the remote function.
+
+### Error prototype
+
+Differential does preserve the prototype of all the native JavaScript errors as defined in the [Well-Known Intrinsic Objects](https://262.ecma-international.org/12.0/#sec-well-known-intrinsic-objects). This means that the caller can check the error type using the `instanceof` operator for these errors.
+
+However, custom errors are not preserved across the boundary. This means that the caller cannot check the error type using the `instanceof` operator for custom errors. However, the caller can still check the error name and message.
|
It can be any property, not just `name` and `message`?
|
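To make the `instanceof` behaviour described above concrete, here is a hedged sketch of a name-based rehydration step that would produce it. This is not Differential's actual implementation; only the observable behaviour (native prototypes preserved, custom errors reduced to `name` and `message`) is taken from the doc.

```typescript
// Map well-known intrinsic error names back to their constructors.
const intrinsicErrors: Record<string, new (message?: string) => Error> = {
  Error,
  TypeError,
  RangeError,
  SyntaxError,
  ReferenceError,
  EvalError,
  URIError,
};

type SerializedError = { name: string; message: string; stack?: string };

const rehydrate = ({ name, message, stack }: SerializedError): Error => {
  const Ctor = intrinsicErrors[name] ?? Error; // unknown (custom) names fall back to Error
  const err = new Ctor(message);
  err.name = name;              // the custom name still survives as a string
  if (stack) err.stack = stack; // remote stack trace is carried over
  return err;
};

const native = rehydrate({ name: "TypeError", message: "boom" });
console.log(native instanceof TypeError); // true - prototype preserved

const custom = rehydrate({ name: "MyCustomError", message: "boom" });
console.log(custom instanceof Error);     // true, but not instanceof MyCustomError
console.log(custom.name, custom.message); // "MyCustomError" "boom"
```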
differential
|
github_2023
|
others
| 166 |
differentialhq
|
johnjcsmith
|
@@ -0,0 +1,70 @@
+# Failure Modes and Error Handling
+
+Differential failure modes and error handling is not too dissimilar from the traditional failure modes and error handling in a service-oriented architecture.
+
+## A function call results in a rejection
+
+When a promise gets rejected from a remote function, Differential will serialize the error and send it back to the caller. The caller can then handle the error as needed.
+
+Differential does not do any retries or error handling on behalf of the caller in this case. It is up to the caller to decide how to handle the error.
+
+However, Differential does serialize the error and send it back to the caller, which allows the caller to handle the error as needed.
+
+For example, given this service function:
+
+```typescript
+class MyCustomError extends Error {
+ constructor(message: string) {
+ super(message);
+ this.name = "MyCustomError";
+ }
+}
+
+async function myFunction() {
+ throw new MyCustomError("This is a custom error");
+}
+```
+
+The caller can handle the error like this:
+
+```typescript
+try {
+ await myFunction();
+} catch (e) {
+ if (e.name === "MyCustomError") {
+ console.log("Caught a custom error:", e.message);
+ } else {
+ console.log("Caught an unknown error:", e.message);
+ }
+}
+```
+
+### Stack trace
+
+Differential does preserve the stack trace that is generated when the error is thrown, in the remote function.
+
+### Error prototype
+
+Differential does preserve the prototype of all the native JavaScript errors as defined in the [Well-Known Intrinsic Objects](https://262.ecma-international.org/12.0/#sec-well-known-intrinsic-objects). This means that the caller can check the error type using the `instanceof` operator for these errors.
+
+However, custom errors are not preserved across the boundary. This means that the caller cannot check the error type using the `instanceof` operator for custom errors. However, the caller can still check the error name and message.
+
+## A function call results in a timeout
+
+When a function call results in a timeout, Differential will reject the promise with a `DifferentialError`, with a message of `DifferentialError.REMOTE_EXECUTION_ERROR`. The caller can then handle the error as needed.
+
+## A function call results in continued machine failure
+
+Differential can detect when a machine has stopped responding and will automatically failover to another machine. This also means that the functions that were running on the failed machine will be marked as stalled, so that they can be re-executed on another machine.
+
+By default, a cluster will auto-retry a stalled function up to 3 times. If the function still fails after 3 retries, the function will be marked as failed and the caller will receive a `DifferentialError` with a message of `DifferentialError.REMOTE_EXECUTION_ERROR`.
+
+Differential will prevent retrying the function indefinitely, to prevent a machine from continuously failing.
+
+## Network gets interrupted during a function call
+
+When the network gets interrupted during a function call, Differential will try to re-establish the connection to the control-plane.
|
I think these are fine; I don't know if there is better naming.
In the context of portioning, it kind of feels like these are describing _where_ the error happens in relation to the control plane?
`client -> MAX_ERROR_CYCLES -> control-plane -> REMOTE_EXECUTION_ERROR -> service`
|
differential
|
github_2023
|
others
| 165 |
differentialhq
|
tomellis91
|
@@ -0,0 +1,98 @@
+AWSTemplateFormatVersion: "2010-09-09"
+Description: Differential service configuration
+
+Parameters:
+ ClusterId:
+ Type: String
+ Description: Differential Cluster Id
+
+ ServiceName:
+ Type: String
+ Description: Differential Service name
+
+ DeploymentId:
+ Type: String
+ Description: Differential Deployment Id
+
+ FunctionName:
+ Type: String
+ Description: The Lambda function name
+
+ Runtime:
+ Type: String
+ Description: The Lambda runtime
+ Default: nodejs20.x
+
+ UploadBucketName:
+ Type: String
+ Description: The S3 Upload bucket name
+
+ UploadBucketKey:
+ Type: String
+ Description: The S3 Upload bucket key
+
+ APISecret:
+ Type: String
+ NoEcho: true
+ Description: Differential API key for handler
+
+ Handler:
+ Type: String
+ Description: The name of the method that Lambda calls to run the function
+ Default: "differential-index.handler"
+
+ Timeout:
+ Type: String
+ Description: The Lambda function timeout
+ Default: "60"
+
+ ReservedConcurrency:
+ Type: String
+ Description: The Lambda function reserved concurrency
+ Default: "1"
+
+ ExecutionRole:
+ Type: String
+ Description: Lambda execution role
+ Default: ""
+
+Conditions:
+ IsExecutionRoleEmpty:
+ "Fn::Equals":
+ - !Ref "ExecutionRole"
+ - ""
+
+Resources:
+ DifferentialService:
+ Type: AWS::Lambda::Function
+ Properties:
+ FunctionName: !Ref FunctionName
+ Handler: !Ref Handler
+ Role:
+ 'Fn::If':
+ - IsExecutionRoleEmpty
+ - !Sub 'arn:aws:iam::${AWS::AccountId}:role/DeploymentLambdaRuntimeRole'
|
Is this a role that was already created in AWS?
Would it be worth creating a role per lambda in this template and associating that with the function?
As well as providing the ability to override the role that is used?
|
differential
|
github_2023
|
typescript
| 154 |
differentialhq
|
github-advanced-security[bot]
|
@@ -14,6 +19,15 @@
fs.mkdirSync(dir, { recursive: true });
}
fs.writeFileSync(TOKEN_PATH, token);
+ setNpmConfig(`${CLIENT_PACKAGE_SCOPE}:registry`, NPM_REGISTRY_URL);
+ setNpmConfig(
+ `${NPM_REGISTRY_URL.replace(/^http(s?):/, "")}:_authToken`,
+ token,
+ );
+};
+
+const setNpmConfig = async (key: string, value: string) => {
+ childProcess.execSync(`npm config set ${key}=${value}`);
|
## Uncontrolled command line
This command line depends on a [user-provided value](1).
[Show more details](https://github.com/differentialhq/differential/security/code-scanning/2)
|
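One common way to address this class of finding, sketched here as a suggestion rather than the project's chosen fix, is to switch from `execSync` with an interpolated string to `execFileSync` with an argument array, so the user-provided token never passes through a shell.

```typescript
import { execFileSync } from "child_process";

// No shell is involved: key and value are passed as discrete arguments, so
// metacharacters in the token cannot inject extra commands.
const setNpmConfig = (key: string, value: string): void => {
  execFileSync("npm", ["config", "set", `${key}=${value}`], { stdio: "inherit" });
};

// e.g. setNpmConfig(`${CLIENT_PACKAGE_SCOPE}:registry`, NPM_REGISTRY_URL);
```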
differential
|
github_2023
|
others
| 143 |
differentialhq
|
nadeesha
|
@@ -2,9 +2,9 @@
Status: **General Availability**
-In a cloud environment, machines can fail at any time. Differential transparently handles machine failures, and retries the operation on a healthy worker. This means that you don't have to worry about your service being unavailable due to a machine failure.
+In a cloud environment, machines can fail at any time. Differential transparently handles machine failures by periodically sending heartbeats to the control-plane, quickly catching and retrying failed operations on a healthy worker. This means that you don't have to worry about your service being unavailable due to a machine failure.
-Machines periodically send heartbeats to the control-plane. If a machine fails to send any heartbeats within a certain interval (default 90 seconds):
+If a machine fails to send any heartbeats within a definable interval (default 90 seconds):
|
```suggestion
If a machine fails to send any heartbeats within an interval (default 90 seconds):
```
|
differential
|
github_2023
|
others
| 141 |
differentialhq
|
ShawnSomething
|
@@ -0,0 +1,42 @@
+# Recovering from machine failures
+
+Status: **General Availability**
+
+In a cloud environment, machines can fail at any time. Differential transparently handles machine failures, and retries the operation on a healthy worker. This means that you don't have to worry about your service being unavailable due to a machine failure.
+
+Machines periodically send heartbeats to the control-plane. If a machine fails to send any heartbeats within a certain interval (default 90 seconds):
|
Can we merge these two lines?
In a cloud environment, machines can fail at any time. Differential transparently handles machine failures by periodically sending heartbeats to the control-plane, quickly catching and retrying failed operations on a healthy worker. This means that you don't have to worry about your service being unavailable due to a machine failure.
If a machine fails to send any heartbeats within a definable interval (default 90 seconds):
|
differential
|
github_2023
|
others
| 141 |
differentialhq
|
ShawnSomething
|
@@ -0,0 +1,42 @@
+# Recovering from machine failures
+
+Status: **General Availability**
+
+In a cloud environment, machines can fail at any time. Differential transparently handles machine failures, and retries the operation on a healthy worker. This means that you don't have to worry about your service being unavailable due to a machine failure.
+
+Machines periodically send heartbeats to the control-plane. If a machine fails to send any heartbeats within a certain interval (default 90 seconds):
+
+1. It is marked as unhealthy, and Differential will not send any new requests to it.
+2. The functions in progress are marked as failed, and Differential will retry them on a healthy worker.
+
+If the machine comes back online, Differential will mark it as healthy, and start sending new requests to it. However, it will disregard any results from the machine for the functions that were marked as failed.
|
When the machine comes back online
|
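A minimal sketch of the liveness sweep this doc describes, with all names and data shapes assumed: machines that have missed the heartbeat window are marked unhealthy, and their in-flight work is returned to the queue so a healthy worker can pick it up.

```typescript
type Machine = { id: string; lastHeartbeatAt: number; healthy: boolean };
type Job = { id: string; machineId: string | null; status: "running" | "pending" | "failed" };

const HEARTBEAT_WINDOW_MS = 90_000; // default interval from the doc above

// Periodically sweep machines; anything silent for longer than the window is
// marked unhealthy and its running jobs are put back on the queue for retry.
const sweep = (machines: Machine[], jobs: Job[], now = Date.now()): void => {
  for (const machine of machines) {
    if (machine.healthy && now - machine.lastHeartbeatAt > HEARTBEAT_WINDOW_MS) {
      machine.healthy = false;
      for (const job of jobs) {
        if (job.machineId === machine.id && job.status === "running") {
          job.status = "pending"; // stalled: eligible to run on a healthy worker
          job.machineId = null;
        }
      }
    }
  }
};
```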
differential
|
github_2023
|
others
| 141 |
differentialhq
|
ShawnSomething
|
@@ -0,0 +1,45 @@
+# Distributed Caching
+
+Status: **General Availability**
+
+When a function returns a value, Differential will store it in the control-plane state. It's then available to be used by other calls, without having to call the function again. This is useful for caching expensive operations, and speeding up your application.
|
Can we swap the two sentences?
Differential is useful for caching expensive operations and speeding up your application by storing returned function values in the control-plane. The value is then available to other calls without having to invoke that specific function again.
(Value + How): it also flows better with the next part, because currently it's how, then value, and then the next paragraph is how again.
|
differential
|
github_2023
|
others
| 141 |
differentialhq
|
ShawnSomething
|
@@ -0,0 +1,33 @@
+# Predictive Alerting
+
+Status: **In Development**
+
+Differential can predict if a function is failing due to an unrecoverable error that requires a code change, and alert you with reproduction steps.
+
+The control-plane has a lot of context on a particular failure when it happens. It knows the function, some metadata about the source, payload, and the error message. It can use this information to predict if the error is unrecoverable or not.
|
The control-plane has all the required context on a particular failure, when it happens.
|
differential
|
github_2023
|
others
| 141 |
differentialhq
|
ShawnSomething
|
@@ -0,0 +1,42 @@
+# Predictive Retries
+
+Status: **Technical Preview**
+
+Differential can predict transient errors and retry the operations without the developer having to write custom code.
+
+The control-plane has a lot of context on a particular failure when it happens. It knows the function, some metadata about the source, payload, and the error message. It can use this information to predict if the error is transient or not.
|
has all the required context
|
differential
|
github_2023
|
typescript
| 113 |
differentialhq
|
nadeesha
|
@@ -134,6 +134,7 @@ const functionRegistry: { [key: string]: ServiceRegistryFunction } = {};
class PollingAgent {
private errorCount = 0;
+ private idleCycleCount = 0;
|
Hey, should we make this an `idleTimeout`?
Since the polling interval can be configured, it might be confusing to think in terms of cycles.
Was there a specific reason you chose cycles instead of time?
|
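A small sketch of the time-based alternative suggested here, with illustrative names: track the timestamp of the last poll that returned work and compare the elapsed time against an `idleTimeout`, so "idle" no longer depends on how the polling interval is configured.

```typescript
class IdleTracker {
  private lastActivityAt = Date.now();

  constructor(private readonly idleTimeoutMs: number) {}

  // Call whenever a poll returns work.
  recordActivity(): void {
    this.lastActivityAt = Date.now();
  }

  // Idle means no work for idleTimeoutMs, regardless of how often we poll.
  isIdle(now = Date.now()): boolean {
    return now - this.lastActivityAt >= this.idleTimeoutMs;
  }
}

// Inside the polling loop (sketch):
//   if (jobs.length > 0) tracker.recordActivity();
//   if (tracker.isIdle()) stopPolling();
```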
differential
|
github_2023
|
typescript
| 110 |
differentialhq
|
nadeesha
|
@@ -116,8 +116,8 @@ export const releaseDeployment = async (
): Promise<Deployment> => {
// Check if the service has been previously "released" (active or inactive) deployment
let meta = (await previouslyReleased(deployment))
|
```suggestion
const meta = (await previouslyReleased(deployment))
```
|
differential
|
github_2023
|
typescript
| 100 |
differentialhq
|
johnjcsmith
|
@@ -8,28 +8,59 @@ export class AuthenticationError extends Error {
}
}
-if (!process.env.JWKS_URL) {
- throw new Error("JWKS_URL must be set");
+if (!process.env.JWKS_URL && !process.env.MANAGEMENT_SECRET) {
+ throw new Error("No JWKS_URL or MANAGEMENT_SECRET in env. One is required.");
}
-const client = jwksClient({
- jwksUri: process.env.JWKS_URL,
-});
+if (process.env.MANAGEMENT_SECRET) {
+ const hasPrefix = process.env.MANAGEMENT_SECRET.startsWith("sk_management_");
+ const hasLength = process.env.MANAGEMENT_SECRET.length > 64;
+
+ if (!hasPrefix) {
+ throw new Error("MANAGEMENT_SECRET must start with sk_management_");
+ }
+
+ if (!hasLength) {
+ throw new Error("MANAGEMENT_SECRET must be longer than 64 characters");
+ }
+}
+
+const client = process.env.JWKS_URL
+ ? jwksClient({
+ jwksUri: process.env.JWKS_URL,
+ })
+ : null;
const getKey: GetPublicKeyOrSecret = (header, callback) => {
- return client.getSigningKey(header.kid, function (err, key) {
+ return client?.getSigningKey(header.kid, function (err, key) {
|
We could probably throw here if `client` is `undefined`?
|
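A hedged sketch of the early-failure path suggested here, following the shape of the diff above: if no JWKS client was configured (only `MANAGEMENT_SECRET` auth), surface an explicit error through the `jsonwebtoken` callback instead of letting the optional chain skip it. The exact error message and wiring are assumptions.

```typescript
import jwksClient from "jwks-rsa";
import { GetPublicKeyOrSecret } from "jsonwebtoken";

const client = process.env.JWKS_URL
  ? jwksClient({ jwksUri: process.env.JWKS_URL })
  : null;

const getKey: GetPublicKeyOrSecret = (header, callback) => {
  if (!client) {
    // Fail loudly: with `client?.getSigningKey(...)` the callback is never
    // invoked, so verification silently stalls instead of erroring.
    return callback(new Error("JWKS_URL is not configured; cannot verify JWTs"));
  }
  client.getSigningKey(header.kid, (err, key) => {
    if (err) return callback(err);
    callback(null, key?.getPublicKey());
  });
};
```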
differential
|
github_2023
|
typescript
| 95 |
differentialhq
|
johnjcsmith
|
@@ -301,33 +301,24 @@ export const definition = {
authorization: z.string(),
}),
responses: {
- 200: z.object({
- start: z.date(),
- stop: z.date(),
- success: z.object({
- count: z.array(z.object({ timestamp: z.date(), value: z.number() })),
- avgExecutionTime: z.array(
- z.object({ timestamp: z.date(), value: z.number() }),
- ),
- }),
- failure: z.object({
|
Ahh nice, I was thinking these should be combined 👌
|
differential
|
github_2023
|
typescript
| 81 |
differentialhq
|
johnjcsmith
|
@@ -1,7 +1,13 @@
import { and, eq } from "drizzle-orm";
+import NodeCache from "node-cache";
+import { z } from "zod";
import * as data from "./data";
import { backgrounded } from "./util";
-import { z } from "zod";
+
+const cache = new NodeCache({
|
I wonder if we could also cache function results in `ts-core`, based on the cache key, so they don't even hit the control-plane?
|
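Sketching the idea in the comment above (names and TTL are illustrative, not an existing `ts-core` API): a small `node-cache`-backed wrapper keyed on the cache key, so repeated calls with the same key are served locally and never reach the control-plane.

```typescript
import NodeCache from "node-cache";

const localCache = new NodeCache({ stdTTL: 60 }); // seconds; value is illustrative

// Wrap a remote call so that repeated invocations with the same cache key are
// served from the local cache instead of hitting the control-plane.
const withLocalCache = async <T>(
  cacheKey: string,
  call: () => Promise<T>,
): Promise<T> => {
  const hit = localCache.get<T>(cacheKey);
  if (hit !== undefined) {
    return hit;
  }
  const result = await call();
  localCache.set(cacheKey, result);
  return result;
};

// e.g. withLocalCache(`getOrder:${orderId}`, () => client.getOrder(orderId));
```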
differential
|
github_2023
|
typescript
| 81 |
differentialhq
|
isaacsu
|
@@ -2,44 +2,164 @@ import { and, eq, sql } from "drizzle-orm";
import { ulid } from "ulid";
import * as cron from "./cron";
import * as data from "./data";
-import { backgrounded } from "./util";
import { writeEvent, writeJobActivity } from "./events";
import {
ServiceDefinition,
+ getServiceDefinitions,
storeServiceDefinitionBG,
} from "./service-definitions";
+import { backgrounded } from "./util";
-export const createJob = async ({
+const createJobStrategies = {
+ idempotece: async ({
|
speling
```suggestion
idempotence: async ({
```
|
differential
|
github_2023
|
typescript
| 43 |
differentialhq
|
nadeesha
|
@@ -247,4 +266,40 @@ export const router = s.router(contract, {
body: cluster,
};
},
+ getFunctionMetrics: async (request) => {
+ const managementToken = request.headers.authorization.split(" ")[1];
+
+ // TODO: Validate serviceName and functionName
+ // We don't currently store and service/function names in the database to validate against.
|
> We don't currently store and service/function names in the database to validate against.
Sorry, what do you mean by this?
|
differential
|
github_2023
|
typescript
| 37 |
differentialhq
|
nadeesha
|
@@ -163,6 +175,28 @@ export function LiveTables({
noDataMessage="No services with function calls have been detected in the cluster lately."
/>
</div>
+
+ <div className="mt-12">
+ {data.services.map((service) => (
|
No, this is the correct way to do it. Nice 🎉
|
differential
|
github_2023
|
typescript
| 36 |
differentialhq
|
github-advanced-security[bot]
|
@@ -10,20 +20,27 @@
});
});
-// describe("it should be able to unfurl promisis", () => {
-// it("when it succeeds", async () => {
-// const value = await unpack(pack(Promise.resolve(1)));
-// expect(value).toEqual(1);
-// });
+describe("encryption", () => {
+ const cryptoSettings = {
+ keys: [Buffer.from("abcdefghijklmnopqrstuvwxzy123456")],
|
## Hard-coded credentials
The hard-coded value "abcdefghijklmnopqrstuvwxzy123456" is used as [key](1).
[Show more details](https://github.com/differentialhq/differential/security/code-scanning/1)
|
differential
|
github_2023
|
typescript
| 36 |
differentialhq
|
johnjcsmith
|
@@ -0,0 +1,12 @@
+export class DifferentialError extends Error {
+ static UNAUTHORISED =
+ "Invalid API Key or API Secret. Make sure you are using the correct API Secret.";
+
+ static UNKNOWN_ENCRYPTION_KEY =
+ "Encounterd an encrypted message with an unknown encryption key. Make sure you are providing all encryption keys to the client.";
|
```suggestion
"Encountered an encrypted message with an unknown encryption key. Make sure you are providing all encryption keys to the client.";
```
|
differential
|
github_2023
|
others
| 28 |
differentialhq
|
nadeesha
|
@@ -0,0 +1,4 @@
+DROP TABLE "users";--> statement-breakpoint
|
Yes. Probably. Thanks!
|
differential
|
github_2023
|
others
| 28 |
differentialhq
|
nadeesha
|
@@ -0,0 +1,4 @@
+DROP TABLE "users";--> statement-breakpoint
+ALTER TABLE "jobs" ADD COLUMN "resulted_at" timestamp with time zone;--> statement-breakpoint
+ALTER TABLE "jobs" ADD COLUMN "function_execution_time_ms" integer;--> statement-breakpoint
+ALTER TABLE "jobs" ADD COLUMN "service" varchar(1024);
|
```suggestion
ALTER TABLE "jobs" ADD COLUMN IF NOT EXISTS "service" varchar(1024);
```
This was created, but not committed as a part of a previous deployment. This should fix your build.
|
differential
|
github_2023
|
typescript
| 27 |
differentialhq
|
nadeesha
|
@@ -22,6 +22,10 @@ export const DataTable = <T = any,>({
noDataMessage?: string;
columnDef?: ColumnDef<T>[];
}) => {
+ if (data === undefined || data.length === 0) {
+ return <div className="text-center">{noDataMessage}</div>;
|
Thank you. Can we update it so that it follows the style guide?
```suggestion
return <p className="text-gray-400 mt-2">{noDataMessage}</p>;
```
|
differential
|
github_2023
|
others
| 8 |
differentialhq
|
johnjcsmith
|
@@ -51,116 +64,116 @@ Initializes a new Differential instance.
| Name | Type | Description |
| :------ | :------ | :------ |
-| `apiSecret` | `string` | The API Secret for your Differential cluster. Obtain this from [your Differential dashboard](https://admin.differential.dev/dashboard). |
+| `apiSecret` | `string` | The API Secret for your Differential cluster. You can obtain one from https://api.differential.dev/demo/token. |
#### Returns
[`Differential`](Differential.md)
#### Defined in
-[src/Differential.ts:374](https://github.com/differentialHQ/differential/blob/b306aab/ts-core/src/Differential.ts#L374)
+[src/Differential.ts:398](https://github.com/differentialHQ/differential/blob/44e4229/ts-core/src/Differential.ts#L398)
## Methods
-### background
+### client
-▸ **background**\<`T`, `U`\>(`fn`, `...args`): `Promise`\<\{ `id`: `string` }\>
+▸ **client**\<`T`\>(`service`): `ServiceClient`\<`T`\>
-Calls a function on a registered service, while ensuring the type safety of the function call through generics.
-Returns the job id of the function call, and doesn't wait for the function to complete.
+Provides a type safe client for performing calls to a registered service.
+Waits for the function to complete before returning, and returns the result of the function call.
#### Type parameters
| Name | Type |
| :------ | :------ |
-| `T` | extends `RegisteredService` |
-| `U` | extends `string` \| `number` \| `symbol` |
+| `T` | extends `RegisteredService`\<`any`\> |
#### Parameters
-| Name | Type | Description |
-| :------ | :------ | :------ |
-| `fn` | `U` | The function name to call. |
-| `...args` | `Parameters`\<`T`[``"definition"``][``"functions"``][`U`]\> | The arguments to pass to the function. |
+| Name | Type |
+| :------ | :------ |
+| `service` | `T`[``"definition"``][``"name"``] |
#### Returns
-`Promise`\<\{ `id`: `string` }\>
+`ServiceClient`\<`T`\>
-The job id of the function call.
+ServiceClient<T>
**`Example`**
```ts
import { d } from "./differential";
+import type { helloService } from "./hello-service";
-const result = await d.background<typeof helloService, "hello">("hello", "world");
+const client = d.buildClient<typeof helloService>();
|
Might need to re-generate the docs? (`buildClient` -> `client`)
|
differential
|
github_2023
|
typescript
| 7 |
differentialhq
|
nadeesha
|
@@ -36,6 +36,8 @@ const createClient = (baseUrl: string, machineId: string) =>
});
class DifferentialError extends Error {
+ static UNAUTHORISED = "Invalid API Key or API Secret. Make sure you are using the correct API Key and API Secret.";
|
Nice!
|
differential
|
github_2023
|
others
| 7 |
differentialhq
|
nadeesha
|
@@ -1,4 +1,5 @@
-439e942, 2023-12-24, Enforce service name param
+ca03e82, 2023-12-24, Raise exception on unauthorised polling errors
|
You don't have to generate these manually btw. The workflow on `main` does it.
|
differential
|
github_2023
|
typescript
| 2 |
differentialhq
|
nadeesha
|
@@ -510,6 +515,31 @@ export class Differential {
};
}
+ /**
+ * Provides a type safe client for performing calls to a registered service.
+ * Waits for the function to complete before returning, and returns the result of the function call.
+ * @returns ServiceClient<T>
+ * @example
+ * ```ts
+ * import { d } from "./differential";
+ * import { helloService } from "./hello-service";
|
```suggestion
* import type { helloService } from "./hello-service";
```
🙏🏽
|
sveltestrap
|
github_2023
|
javascript
| 92 |
sveltestrap
|
dysfunc
|
@@ -52,6 +53,21 @@ describe('Popover test', () => {
expect(popover).toMatchSnapshot();
});
+ it('should render text and auto placement', () => {
+ const containerPopover = renderPopover({
+ children: 'Hello',
+ target: 'btn',
+ placement: 'auto',
+ isOpen: true
+ });
+ console.log('containerPopover.outerHTML:', containerPopover.outerHTML);
|
```suggestion
```
|
sveltestrap
|
github_2023
|
others
| 45 |
sveltestrap
|
planetsLightningArrester
|
@@ -1,6 +1,18 @@
<script>
import { classnames } from '../utils';
+ /**
+ * Text to be read by screen readers.
+ * @type {string}
+ */
+ export let ariaLabel = '';
+
+ /**
+ * Determines if the badge should have a border
+ * @type {string}
|
Shouldn't this be `@type {boolean}`?
|
sveltestrap
|
github_2023
|
others
| 45 |
sveltestrap
|
planetsLightningArrester
|
@@ -26,13 +38,40 @@
*/
export let href = '';
+ /**
+ * Create a circular indicator for absolute positioned badge.
+ * @type {string}
|
Shouldn't this be `@type {boolean}` too?
|
sveltestrap
|
github_2023
|
typescript
| 45 |
sveltestrap
|
bestguy
|
@@ -3,9 +3,14 @@ import { HTMLAnchorAttributes } from 'svelte/elements';
import { Color } from '../shared';
export interface BadgeProps extends HTMLAnchorAttributes {
+ ariaLabel?: string;
+ border?: boolean;
|
Minor, but this seems like a slippery slope, as some users may rather use a shadow, etc. The styling may be better left for people to customize in their own theme (e.g. our company theme used all-uppercase badges, defined in the CSS build).
Would this be better defined in user themes, or passed via the existing `class` prop?
|
sveltestrap
|
github_2023
|
others
| 35 |
sveltestrap
|
dysfunc
|
@@ -38,24 +35,21 @@
],
"repository": {
"type": "git",
- "url": "git@github.com:sveltestrap/sveltestrap.git"
+ "url": "https://github.com/sveltestrap/sveltestrap.git"
},
"files": [
"dist",
"src"
],
"exports": {
".": {
- "types": "./src/index.d.ts",
- "svelte": "./src/index.js",
- "require": "./dist/index.js",
- "module": "./dist/index.ejs",
- "import": "./dist/index.ejs"
+ "types": "./dist/index.d.ts",
+ "svelte": "./dist/index.js"
}
},
"scripts": {
"start": "storybook dev -p 6006",
- "build": "vite build",
+ "build": "svelte-package --input ./src && publint --strict",
|
```suggestion
"build": "pnpm lint:package && svelte-package --input ./src",
```
|
sveltestrap
|
github_2023
|
others
| 35 |
sveltestrap
|
dysfunc
|
@@ -38,24 +35,21 @@
],
"repository": {
"type": "git",
- "url": "git@github.com:sveltestrap/sveltestrap.git"
+ "url": "https://github.com/sveltestrap/sveltestrap.git"
},
"files": [
"dist",
"src"
],
"exports": {
".": {
- "types": "./src/index.d.ts",
- "svelte": "./src/index.js",
- "require": "./dist/index.js",
- "module": "./dist/index.ejs",
- "import": "./dist/index.ejs"
+ "types": "./dist/index.d.ts",
+ "svelte": "./dist/index.js"
}
},
"scripts": {
"start": "storybook dev -p 6006",
- "build": "vite build",
+ "build": "svelte-package --input ./src && publint --strict",
"docs:build": "storybook build -o docs && cp -r ./.storybook/docs/* ./docs",
"docs:deploy": "pnpm run docs:build && npx gh-pages -d docs",
"check": "svelte-check --tsconfig ./tsconfig.json",
|
add
```js
"lint:package": "publint --strict"
```
|
sveltestrap
|
github_2023
|
others
| 35 |
sveltestrap
|
dysfunc
|
@@ -0,0 +1,3 @@
+module.exports = {
+ plugins: [require('autoprefixer')]
+};
|
please rename this to postcss.config.js and update the contents:
```suggestion
import autoprefixer from 'autoprefixer';
export default {
plugins: [autoprefixer()]
};
```
|
sveltestrap
|
github_2023
|
typescript
| 36 |
sveltestrap
|
dysfunc
|
@@ -1,27 +1,25 @@
-declare module 'sveltestrap' {
- import { SvelteComponent } from 'svelte';
- import { HTMLDivElement } from 'svelte/elements';
- import { Direction } from '../shared';
+import { SvelteComponent } from 'svelte';
+import { HTMLDivElement } from 'svelte/elements';
+import { Direction } from '../shared';
- export interface DropdownItemProps extends HTMLAttributes<HTMLDivElement> {
- active?: boolean;
- autoClose?: boolean | string;
- direction?: Direction;
- dropup?: boolean;
- group?: boolean;
- inNavbar?: boolean;
- isOpen?: boolean;
- nav?: boolean;
- setActiveFromChild?: boolean;
- size?: string;
- toggle?: () => void;
- }
-
- export interface DropdownItemEvents {}
+export interface DropdownItemProps extends HTMLAttributes<HTMLDivElement> {
|
```suggestion
export interface DropdownProps extends HTMLAttributes<HTMLDivElement> {
```
|
sveltestrap
|
github_2023
|
typescript
| 36 |
sveltestrap
|
dysfunc
|
@@ -1,27 +1,25 @@
-declare module 'sveltestrap' {
- import { SvelteComponent } from 'svelte';
- import { HTMLDivElement } from 'svelte/elements';
- import { Direction } from '../shared';
+import { SvelteComponent } from 'svelte';
+import { HTMLDivElement } from 'svelte/elements';
+import { Direction } from '../shared';
- export interface DropdownItemProps extends HTMLAttributes<HTMLDivElement> {
- active?: boolean;
- autoClose?: boolean | string;
- direction?: Direction;
- dropup?: boolean;
- group?: boolean;
- inNavbar?: boolean;
- isOpen?: boolean;
- nav?: boolean;
- setActiveFromChild?: boolean;
- size?: string;
- toggle?: () => void;
- }
-
- export interface DropdownItemEvents {}
+export interface DropdownItemProps extends HTMLAttributes<HTMLDivElement> {
+ active?: boolean;
+ autoClose?: boolean | string;
+ direction?: Direction;
+ dropup?: boolean;
+ group?: boolean;
+ inNavbar?: boolean;
+ isOpen?: boolean;
+ nav?: boolean;
+ setActiveFromChild?: boolean;
+ size?: string;
+ toggle?: () => void;
+}
- export interface DropdownItemSlots {
- default: {};
- }
+export interface DropdownItemEvents {}
|
```suggestion
export interface DropdownEvents {}
```
|
sveltestrap
|
github_2023
|
typescript
| 36 |
sveltestrap
|
dysfunc
|
@@ -1,27 +1,25 @@
-declare module 'sveltestrap' {
- import { SvelteComponent } from 'svelte';
- import { HTMLDivElement } from 'svelte/elements';
- import { Direction } from '../shared';
+import { SvelteComponent } from 'svelte';
+import { HTMLDivElement } from 'svelte/elements';
+import { Direction } from '../shared';
- export interface DropdownItemProps extends HTMLAttributes<HTMLDivElement> {
- active?: boolean;
- autoClose?: boolean | string;
- direction?: Direction;
- dropup?: boolean;
- group?: boolean;
- inNavbar?: boolean;
- isOpen?: boolean;
- nav?: boolean;
- setActiveFromChild?: boolean;
- size?: string;
- toggle?: () => void;
- }
-
- export interface DropdownItemEvents {}
+export interface DropdownItemProps extends HTMLAttributes<HTMLDivElement> {
+ active?: boolean;
+ autoClose?: boolean | string;
+ direction?: Direction;
+ dropup?: boolean;
+ group?: boolean;
+ inNavbar?: boolean;
+ isOpen?: boolean;
+ nav?: boolean;
+ setActiveFromChild?: boolean;
+ size?: string;
+ toggle?: () => void;
+}
- export interface DropdownItemSlots {
- default: {};
- }
+export interface DropdownItemEvents {}
- export default class DropdownItem extends SvelteComponent<DropdownItemProps, DropdownItemEvents, DropdownItemSlots> {}
+export interface DropdownItemSlots {
|
```suggestion
export interface DropdownSlots {
```
|
sveltestrap
|
github_2023
|
typescript
| 36 |
sveltestrap
|
dysfunc
|
@@ -1,27 +1,25 @@
-declare module 'sveltestrap' {
- import { SvelteComponent } from 'svelte';
- import { HTMLDivElement } from 'svelte/elements';
- import { Direction } from '../shared';
+import { SvelteComponent } from 'svelte';
+import { HTMLDivElement } from 'svelte/elements';
+import { Direction } from '../shared';
- export interface DropdownItemProps extends HTMLAttributes<HTMLDivElement> {
- active?: boolean;
- autoClose?: boolean | string;
- direction?: Direction;
- dropup?: boolean;
- group?: boolean;
- inNavbar?: boolean;
- isOpen?: boolean;
- nav?: boolean;
- setActiveFromChild?: boolean;
- size?: string;
- toggle?: () => void;
- }
-
- export interface DropdownItemEvents {}
+export interface DropdownItemProps extends HTMLAttributes<HTMLDivElement> {
+ active?: boolean;
+ autoClose?: boolean | string;
+ direction?: Direction;
+ dropup?: boolean;
+ group?: boolean;
+ inNavbar?: boolean;
+ isOpen?: boolean;
+ nav?: boolean;
+ setActiveFromChild?: boolean;
+ size?: string;
+ toggle?: () => void;
+}
- export interface DropdownItemSlots {
- default: {};
- }
+export interface DropdownItemEvents {}
- export default class DropdownItem extends SvelteComponent<DropdownItemProps, DropdownItemEvents, DropdownItemSlots> {}
+export interface DropdownItemSlots {
+ default: {};
}
+
+export default class DropdownItem extends SvelteComponent<DropdownItemProps, DropdownItemEvents, DropdownItemSlots> {}
|
```suggestion
export default class Dropdown extends SvelteComponent<DropdownProps, DropdownEvents, DropdownSlots> {}
```
|
sveltestrap
|
github_2023
|
typescript
| 36 |
sveltestrap
|
dysfunc
|
@@ -0,0 +1 @@
+export { default as Dropdown, DropdownItemProps as DropdownProps } from './Dropdown';
|
```suggestion
export { default as Dropdown } from './Dropdown';
```
|
sveltestrap
|
github_2023
|
others
| 25 |
sveltestrap
|
dysfunc
|
@@ -58,11 +63,12 @@ The `<Alert>` component provide contextual feedback messages for typical user ac
<script lang="ts">
import { Alert } from '@sveltestrap/sveltestrap';
- let isOpen = true;
+let isOpen = true;
+
+const toggle = () => {
+visible = !visible;
|
The formatting is messed up. This shouldn't have changed.
|
sveltestrap
|
github_2023
|
others
| 25 |
sveltestrap
|
dysfunc
|
@@ -22,19 +23,21 @@ The `<Alert>` component provide contextual feedback messages for typical user ac
<script lang="ts">
import { Alert } from '@sveltestrap/sveltestrap';
- const colors = [
- 'primary',
- 'secondary',
- 'success',
- 'danger',
- 'warning',
- 'info',
- 'light',
- 'dark'
- ];
+const colors = [
|
The formatting is messed up. This shouldn't have changed.
|
sveltestrap
|
github_2023
|
others
| 25 |
sveltestrap
|
dysfunc
|
@@ -99,21 +107,25 @@ The `<Alert>` component provide contextual feedback messages for typical user ac
<script lang="ts">
import { Alert, Button } from '@sveltestrap/sveltestrap';
- let isOpen = true;
+let isOpen = true;
+
+const toggle = () => {
|
The formatting is messed up. This shouldn't have changed.
|
sveltestrap
|
github_2023
|
others
| 13 |
sveltestrap
|
dysfunc
|
@@ -1,14 +1,56 @@
<script>
import { classnames } from '../utils';
+ /**
+ * Additional CSS class name for the component
+ * @type {string}
+ */
export let className = '';
+ /**
+ * Additional CSS class name for the component
+ * @type {string}
+ */
export { className as class };
+ /**
+ * Indicates whether the FormGroup is checked.
+ * @type {boolean}
+ */
export let check = false;
+
+ /**
+ * Indicates whether the FormGroup is disabled.
+ * @type {boolean}
+ */
export let disabled = false;
+
+ /**
+ * Indicates whether the FormGroup is rendered in a floating style.
+ * @type {boolean}
+ */
export let floating = false;
+
+ /**
+ * Indicates whether the FormGroup is rendered inline.
+ * @type {boolean}
+ */
export let inline = false;
+
+ /**
+ * The label associated with the FormGroup.
+ * @type {string}
+ */
export let label = '';
+
+ /**
+ * Indicates whether the FormGroup is part of a row layout.
+ * @type {boolean}
+ */
export let row = false;
+
+ /**
+ * A custom tag for the FormGroup
+ * @type {?string}
|
```suggestion
* @type {string}
```
|
sveltestrap
|
github_2023
|
others
| 13 |
sveltestrap
|
dysfunc
|
@@ -0,0 +1,315 @@
+<script context="module">
+ import Input from './Input.svelte';
+
+ export const meta = {
+ title: 'Stories/Inputs',
+ component: Input,
+ parameters: {},
+ argTypes: {
+ class: {
+ className: 'string',
+ table: {
+ disable: true
+ }
+ },
+ bsSize: {
+ control: {
+ type: 'select'
+ },
+ options: ['sm', '', 'lg']
+ },
+ type: {
+ control: {
+ type: 'select'
+ },
+ options: [
+ 'checkbox',
+ 'color',
+ 'date',
+ 'datetime-local',
+ 'email',
+ 'file',
+ 'number',
+ 'password',
+ 'radio',
+ 'range',
+ 'search',
+ 'select',
+ 'switch',
+ 'text',
+ 'textarea',
+ 'time',
+ 'url'
+ ]
+ },
+ plaintext: {
+ control: {
+ type: 'boolean'
+ },
+ table: {
+ disable: false
+ }
+ },
+ size: {
+ control: {
+ type: 'select'
+ },
+ options: ['sm', 'lg'],
+ table: {
+ disable: true
+ }
+ },
+ color: {
+ control: 'color',
+ table: {
+ disable: true
+ }
+ },
+ feedback: {
+ control: 'text',
+ table: {
+ disable: true
+ }
+ },
+ disabled: {
+ control: 'boolean'
+ },
+ checked: {
+ control: 'boolean',
+ table: {
+ disable: true
+ }
+ },
+ files: {
+ control: 'array',
+ table: {
+ disable: true
+ }
+ },
+ group: {
+ control: 'text',
+ table: {
+ disable: true
+ }
+ },
+ inner: {
+ control: 'text',
+ table: {
+ disable: true
+ }
+ },
+ label: {
+ control: 'text',
+ table: {
+ disable: true
+ }
+ },
+ multiple: {
+ control: 'boolean',
+ table: {
+ disable: true
+ }
+ },
+ name: {
+ control: 'text',
+ table: {
+ disable: true
+ }
+ },
+ placeholder: {
+ control: 'text'
+ },
+ readonly: {
+ control: 'boolean',
+ table: {
+ disable: true
+ }
+ },
+ reverse: {
+ control: 'boolean'
+ },
+ value: {
+ control: 'text'
+ }
+ },
+ args: {
+ type: 'text',
+ plaintext: false,
+ invalid: false,
+ valid: false,
+ bsSize: undefined,
+ disabled: false,
+ placeholder: 'placeholder',
+ reverse: false,
+ value: ''
+ }
+ };
+</script>
+
+<script>
+ import { Story, Template } from '@storybook/addon-svelte-csf';
+ import { FormGroup, FormText, Label } from '@sveltestrap/sveltestrap';
+
+ let changeValue = '';
+ let focused = false;
+ let inner = '';
+ let inputValue = '';
+ let radioGroup;
+
+ const resize = () => {
+ inner.style.height = 'auto';
+ inner.style.height = 4 + inner.scrollHeight + 'px';
+ };
+
+ const changeEvent = (e) => {
+ changeValue = e.target.value;
+ };
+
+ const inputEvent = (e) => {
+ inputValue = e.target.value;
+ };
+</script>
+
+<Template let:args>
+ <Input {...args} />
+</Template>
+
+<Story name="Basic" />
+
+<Story name="Text">
+ <div class="input-example">
|
I would restrict the width of all the examples to something like 300px or 400px. It looks ugly spanning the full width.
|
sveltestrap
|
github_2023
|
typescript
| 11 |
sveltestrap
|
dysfunc
|
@@ -1,14 +1,18 @@
-import { SvelteComponent } from 'svelte';
+declare module 'sveltestrap' {
+ import { SvelteComponent } from 'svelte';
+ import { HTMLAttributes } from 'svelte/elements';
-export interface PaginationProps
- extends svelte.JSX.HTMLAttributes<HTMLElementTagNameMap['nav']> {
- listClassName?: string;
- size?: string;
- arialabel?: string;
-}
+ export interface PaginationProps extends HTMLAttributes<HTMLElementTagNameMap['nav']> {
|
```suggestion
export interface PaginationProps extends HTMLAttributes<HTMLElement> {
```
|
sveltestrap
|
github_2023
|
others
| 11 |
sveltestrap
|
dysfunc
|
@@ -0,0 +1,55 @@
+import { Meta, Canvas, Controls, Story, Source } from '@storybook/blocks';
+import * as PaginationStories from './Pagination.stories';
+
+<Meta title="Components/Pagination" />
+
+# Pagination <small class="bootstrap-docs">[Bootstrap Pagination](https://getbootstrap.com/docs/5.3/components/pagination/)</small>
+
+The `<Pagination>` component is a user interface element designed to enhance navigation within a set of content, breaking it into manageable segments. It enables users to easily traverse through multiple pages of information, promoting a streamlined and accessible browsing experience.
|
```suggestion
The `<Pagination>` component is used to enhance navigation within a set of content, breaking it into manageable segments. It enables users to easily traverse through multiple pages of information, promoting a streamlined and accessible browsing experience.
```
|
sveltestrap
|
github_2023
|
others
| 11 |
sveltestrap
|
dysfunc
|
@@ -0,0 +1,55 @@
+import { Meta, Canvas, Controls, Story, Source } from '@storybook/blocks';
+import * as PaginationStories from './Pagination.stories';
+
+<Meta title="Components/Pagination" />
+
+# Pagination <small class="bootstrap-docs">[Bootstrap Pagination](https://getbootstrap.com/docs/5.3/components/pagination/)</small>
+
+The `<Pagination>` component is a user interface element designed to enhance navigation within a set of content, breaking it into manageable segments. It enables users to easily traverse through multiple pages of information, promoting a streamlined and accessible browsing experience.
+
+<Canvas>
+ <Story of={PaginationStories.Basic} />
+</Canvas>
+
+<Controls of={PaginationStories.Basic} />
+
+## Sizes
+
+You can change the `Pagination` size by setting the `size` prop to `sm` or `lg`.
+
+<Canvas withSource="none">
+ <Story of={PaginationStories.Sizes} />
+</Canvas>
+
+<Source
+ dark
+ language="html"
+ code={`
+<script lang="ts">
+ import {(Pagination, PaginationItem, PaginationLink)} from 'sveltestrap';
|
```suggestion
import { Pagination, PaginationItem, PaginationLink } from '@sveltestrap/sveltestrap';
```
|
sveltestrap
|
github_2023
|
typescript
| 8 |
sveltestrap
|
dysfunc
|
@@ -1,18 +1,22 @@
-import { SvelteComponent } from 'svelte';
+declare module 'sveltestrap' {
+ import { SvelteComponent } from 'svelte';
+ import { HTMLDivAttributes } from 'svelte/elements';
|
I don't think `svelte/elements` exports that directly, does it?
I've been using:
```ts
import { HTMLAttributes } from 'svelte/elements';
export interface ContainerProps extends HTMLAttributes<HTMLDivElement> {
```
|
sveltestrap
|
github_2023
|
others
| 5 |
sveltestrap
|
dysfunc
|
@@ -0,0 +1,58 @@
+import { Meta, Canvas, Controls, Story, Source } from '@storybook/blocks';
+import * as ImageStories from './Image.stories';
+
+<Meta title="Components/Image" />
|
```suggestion
<Meta title="Content/Image" />
```
|
Power-Ampache-2
|
github_2023
|
others
| 181 |
icefields
|
icefields
|
@@ -184,7 +184,7 @@ Before starting any contribution, read the above document and if necessary conta
- [x] Multi-account
- [ ] UI for Tablets
- [x] bulk-delete downloaded songs
-- [x] playlist edit dran and drop
+- [x] playlist edit drag and drop
|
Thank you for your submission!
I will merge your fix into the new version coming out soon.
But that checklist is outdated; I should probably update the full README at some point soon.
|
tlrc
|
github_2023
|
others
| 48 |
tldr-pages
|
acuteenvy
|
@@ -26,9 +26,17 @@ A [tldr](https://tldr.sh) client written in Rust.
Install [tlrc](https://aur.archlinux.org/packages/tlrc) (from source) or [tlrc-bin](https://aur.archlinux.org/packages/tlrc-bin) (prebuilt) from the AUR.
+### Homebrew
+
+Install [tlrc](https://formulae.brew.sh/formula/tlrc) with [Homebrew](https://brew.sh/):
|
Every header has the OS in it and none of the descriptions link to the package manager itself.
```suggestion
### macOS/Linux using Homebrew
Install [tlrc](https://formulae.brew.sh/formula/tlrc) with Homebrew:
```
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -699,10 +723,25 @@ bool VecSearchExecutor::BruteForceSearch(
if (brute_force_queue_.size() < end - start) {
brute_force_queue_.resize(end - start);
}
+ float dist;
+ if (std::holds_alternative<DenseVectorPtr>(vector_column_)) {
#pragma omp parallel for
- for (int64_t v_id = start; v_id < end; ++v_id) {
- float dist = fstdistfunc_(vector_table_ + dimension_ * v_id, query_data, dist_func_param_);
- brute_force_queue_[v_id - start] = Candidate(v_id, dist, false);
+ for (int64_t v_id = start; v_id < end; ++v_id) {
+ float dist = std::get<DenseVecDistFunc<float>>(fstdistfunc_)(
+ std::get<DenseVectorPtr>(vector_column_) + dimension_ * v_id,
+ std::get<DenseVectorPtr>(query_data),
+ dist_func_param_);
+ brute_force_queue_[v_id - start] = Candidate(v_id, dist, false);
+ }
+ } else {
+ // it holds sparse vector
|
Add "#pragma omp parallel for" here?
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,107 @@
+#include "db/vector.hpp"
+
+#include <iostream>
+namespace vectordb {
+namespace engine {
+
+float GetInnerProductDist(const SparseVector &v1, const SparseVector &v2) {
+ return 1.0f - GetInnerProduct(v1, v2);
+}
+
+float GetInnerProduct(const SparseVector &v1, const SparseVector &v2) {
|
Have we checked that the index is increasing for sparse vectors during data insertion?
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,107 @@
+#include "db/vector.hpp"
+
+#include <iostream>
+namespace vectordb {
+namespace engine {
+
+float GetInnerProductDist(const SparseVector &v1, const SparseVector &v2) {
+ return 1.0f - GetInnerProduct(v1, v2);
+}
+
+float GetInnerProduct(const SparseVector &v1, const SparseVector &v2) {
+ float dot_prod = 0;
+ for (int i1 = 0, i2 = 0; i1 < v1.size() && i2 < v2.size();) {
+ if (v1[i1].index == v2[i2].index) {
+ dot_prod += v1[i1].value * v2[i2].value;
+ i1++;
+ i2++;
+ } else if (v1[i1].index > v2[i2].index) {
+ i2++;
+ } else {
+ i1++;
+ }
+ }
+ return dot_prod;
+}
+
+float GetCosineDist(const SparseVector &v1, const SparseVector &v2) {
+ float dot_prod = 0, v1_prod = 0, v2_prod = 0;
+ for (int i = 0; i < v1.size(); i++) {
+ v1_prod += v1[i].value * v1[i].value;
+ }
+ for (int i = 0; i < v2.size(); i++) {
+ v2_prod += v2[i].value * v2[i].value;
+ }
+ for (int i1 = 0, i2 = 0; i1 < v1.size() && i2 < v2.size();) {
+ if (v1[i1].index == v2[i2].index) {
+ dot_prod += v1[i1].value * v2[i2].value;
+ i1++;
+ i2++;
+ } else if (v1[i1].index > v2[i2].index) {
+ i2++;
+ } else {
+ i1++;
+ }
+ }
+ return -dot_prod / std::sqrt(v1_prod * v2_prod);
+}
+void Normalize(SparseVector &v) {
+ float sum = 0;
+ for (auto &elem : v) {
+ sum += elem.value * elem.value;
+ }
+ sum = std::sqrt(sum);
+ for (auto &elem : v) {
+ elem.value /= sum;
+ }
+}
+
+void Normalize(DenseVectorPtr v, size_t dimension) {
+ float sum = 0;
+ for (int i = 0; i < dimension; i++) {
+ sum += v[i] * v[i];
+ }
+ sum = std::sqrt(sum);
+ for (int i = 0; i < dimension; i++) {
+ v[i] /= sum;
+ }
+}
+
+float GetL2DistSqr(const SparseVector &v1, const SparseVector &v2) {
+ float sum = 0;
+ for (int i1 = 0, i2 = 0; i1 < v1.size() && i2 < v2.size();) {
+ if (v1[i1].index == v2[i2].index) {
+ auto diff = v1[i1].value - v2[i2].value;
+ sum += diff * diff;
+ i1++;
+ i2++;
+ } else if (v1[i1].index > v2[i2].index) {
+ sum += v2[i2].value * v2[i2].value;
+ i2++;
+ } else {
+ sum += v1[i1].value * v1[i1].value;
+ i1++;
+ }
+ }
|
Here we need to loop until i1 == v1.size() and i2 == v2.size(). Otherwise some components are missing from the L2 distance.
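For illustration, a minimal sketch of the fix this comment seems to ask for, with the leftover tail of either vector folded into the sum; the `SparseVector` layout is assumed from the diff above and the names are illustrative only:

```cpp
#include <cstddef>
#include <vector>

// Assumed stand-ins for the types used in the diff.
struct SparseVectorElement {
  size_t index;
  float value;
};
using SparseVector = std::vector<SparseVectorElement>;

// Squared L2 distance that also drains whichever vector still has elements,
// so no component is dropped.
float GetL2DistSqr(const SparseVector &v1, const SparseVector &v2) {
  float sum = 0;
  size_t i1 = 0, i2 = 0;
  while (i1 < v1.size() && i2 < v2.size()) {
    if (v1[i1].index == v2[i2].index) {
      float diff = v1[i1].value - v2[i2].value;
      sum += diff * diff;
      ++i1;
      ++i2;
    } else if (v1[i1].index > v2[i2].index) {
      sum += v2[i2].value * v2[i2].value;
      ++i2;
    } else {
      sum += v1[i1].value * v1[i1].value;
      ++i1;
    }
  }
  // The leftover tail of either vector still contributes to the distance.
  for (; i1 < v1.size(); ++i1) sum += v1[i1].value * v1[i1].value;
  for (; i2 < v2.size(); ++i2) sum += v2[i2].value * v2[i2].value;
  return sum;
}
```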
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -28,32 +30,34 @@ TableMVP::TableMVP(meta::TableSchema &table_schema,
// Replay operations in write ahead log.
wal_ = std::make_shared<WriteAheadLog>(db_catalog_path_, table_schema.id_, is_leader_);
- wal_->Replay(table_schema, field_name_type_map_, table_segment_);
+ wal_->Replay(table_schema, field_name_field_type_map_, table_segment_);
for (int i = 0; i < table_schema_.fields_.size(); ++i) {
- if (table_schema_.fields_[i].field_type_ == meta::FieldType::VECTOR_FLOAT ||
- table_schema_.fields_[i].field_type_ ==
- meta::FieldType::VECTOR_DOUBLE) {
+ auto fType = table_schema_.fields_[i].field_type_;
+ auto mType = table_schema_.fields_[i].metric_type_;
+ if (fType == meta::FieldType::VECTOR_FLOAT ||
+ fType == meta::FieldType::VECTOR_DOUBLE ||
+ fType == meta::FieldType::SPARSE_VECTOR_FLOAT ||
+ fType == meta::FieldType::SPARSE_VECTOR_DOUBLE) {
+ VectorColumnData columnData;
+ if (fType == meta::FieldType::VECTOR_FLOAT || fType == meta::FieldType::VECTOR_DOUBLE) {
+ columnData = table_segment_
+ ->vector_tables_[table_segment_->field_name_mem_offset_map_
+ [table_schema_.fields_[i].name_]];
+ } else {
+ // sparse vector
+ columnData = &table_segment_
|
Why does this line have & but line 44 doesn't?
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -106,6 +115,19 @@ Status TableMVP::Rebuild(const std::string &db_catalog_path) {
++index;
continue;
}
+
+ VectorColumnData columnData;
+ if (fType == meta::FieldType::VECTOR_FLOAT || fType == meta::FieldType::VECTOR_DOUBLE) {
+ columnData = table_segment_
+ ->vector_tables_[table_segment_->field_name_mem_offset_map_
+ [table_schema_.fields_[i].name_]];
+ } else {
+ // sparse vector
+ columnData = &table_segment_
|
Same here
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -898,14 +1035,11 @@ TableSegmentMVP::~TableSegmentMVP() {
delete[] attribute_table_;
}
if (vector_tables_ != nullptr) {
- for (auto i = 0; i < vector_num_; ++i) {
+ for (auto i = 0; i < dense_vector_num_; ++i) {
delete[] vector_tables_[i];
}
delete[] vector_tables_;
}
- if (string_table_ != nullptr) {
- delete[] string_table_;
|
Let's do some stress testing (for example, run the benchmark with --skip load multiple times and observe memory usage; unload the DB and see if memory shrinks back to the baseline, then reload the DB and compare with the peak memory usage) and make sure there is no memory leak?
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -193,26 +207,47 @@ TableSegmentMVP::TableSegmentMVP(meta::TableSchema& table_schema, const std::str
}
}
+ var_len_attr_table_.resize(var_len_attr_num_);
+ for (auto attrIdx = 0; attrIdx < var_len_attr_num_; ++attrIdx) {
+ var_len_attr_table_[attrIdx].resize(record_number_);
+ }
+
// Read the string table
- for (auto i = 0; i < record_number_; ++i) {
- for (auto j = 0; j < string_num_; ++j) {
- int64_t offset = i * string_num_ + j;
- int64_t string_length = 0;
- file.read(reinterpret_cast<char*>(&string_length), sizeof(string_length));
- std::string str(string_length, '\0');
- file.read(&str[0], string_length);
- string_table_[offset] = str;
-
- // add pk into set
- if (!deleted_->test(i) && string_pk_offset_ && *string_pk_offset_ == j) {
- // do not check existance to avoid additional overhead
- primary_key_.addKeyIfNotExist(str, i);
+ // Loop order matters: on disk, the var-len attributes of whole entry is stored together
+ for (auto recordIdx = 0; recordIdx < record_number_; ++recordIdx) {
+ for (auto attrIdx = 0; attrIdx < var_len_attr_num_; ++attrIdx) {
+ int64_t dataLen = 0;
+ file.read(reinterpret_cast<char*>(&dataLen), sizeof(dataLen));
+ switch (var_len_attr_field_type_[attrIdx]) {
+ case meta::FieldType::STRING: {
+ std::string str(dataLen, '\0');
+ file.read(&str[0], dataLen);
+ var_len_attr_table_[attrIdx][recordIdx] = std::move(str);
|
Will std::move introduce overhead compared to the old implementation?
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -433,17 +465,24 @@ Status TableSegmentMVP::Insert(meta::TableSchema& table_schema, Json& records, i
if (!record.HasMember(field.name_)) {
return Status(INVALID_RECORD, "Record " + std::to_string(i) + " missing field: " + field.name_);
}
- if (field.field_type_ == meta::FieldType::VECTOR_FLOAT ||
- field.field_type_ == meta::FieldType::VECTOR_DOUBLE) {
- if (record.GetArraySize(field.name_) != field.vector_dimension_) {
+ if ((field.field_type_ == meta::FieldType::VECTOR_FLOAT ||
+ field.field_type_ == meta::FieldType::VECTOR_DOUBLE) &&
+ record.GetArraySize(field.name_) != field.vector_dimension_) {
+ return Status(INVALID_RECORD, "Record " + std::to_string(i) + " field " + field.name_ + " has wrong dimension.");
+ }
+ if ((field.field_type_ == meta::FieldType::SPARSE_VECTOR_FLOAT ||
+ field.field_type_ == meta::FieldType::SPARSE_VECTOR_DOUBLE)) {
+ auto indices = record.GetObject(field.name_).GetArray(SparseVecObjIndicesKey);
|
Here we need to check that the indices are all non-negative and increasing consecutively.
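A rough sketch of the kind of validation being requested, reading "increasing consecutively" as strictly increasing; the helper name and return convention below are placeholders, not the project's API:

```cpp
#include <cstdint>
#include <string>
#include <vector>

// Returns an empty string when the indices are valid, otherwise an error message.
std::string ValidateSparseIndices(const std::vector<int64_t> &indices) {
  int64_t prev = -1;
  for (size_t i = 0; i < indices.size(); ++i) {
    if (indices[i] < 0) {
      return "index at position " + std::to_string(i) + " is negative";
    }
    if (indices[i] <= prev) {
      return "indices must be strictly increasing at position " + std::to_string(i);
    }
    prev = indices[i];
  }
  return "";
}
```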
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -457,6 +496,22 @@ Status TableSegmentMVP::Insert(meta::TableSchema& table_schema, Json& records, i
// Process the insert.
size_t cursor = record_number_;
+
+ // reserve vector memory size
+ for (auto& field : table_schema.fields_) {
+ switch (field.field_type_) {
+ case meta::FieldType::STRING:
+ case meta::FieldType::JSON:
+ case meta::FieldType::SPARSE_VECTOR_FLOAT:
+ case meta::FieldType::SPARSE_VECTOR_DOUBLE:
+ var_len_attr_table_[field_id_mem_offset_map_[field.id_]].resize(new_record_size + record_number_);
|
Won't resize introduce a memory copy?
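For reference, a small self-contained check of the concern: growing a std::vector past its capacity reallocates and moves/copies the existing elements (the sizes and names here are illustrative only):

```cpp
#include <iostream>
#include <vector>

int main() {
  std::vector<int> column(4, 0);
  const int *before = column.data();
  column.resize(100000);  // growing past capacity reallocates and moves the old elements
  const int *after = column.data();
  std::cout << std::boolalpha << "reallocated: " << (before != after) << '\n';
  return 0;
}
```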
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -466,16 +521,48 @@ Status TableSegmentMVP::Insert(meta::TableSchema& table_schema, Json& records, i
if (field.is_primary_key_) {
auto exist = !primary_key_.addKeyIfNotExist(value, cursor);
if (exist) {
- std::cerr << "primary key [" << value << "] already exists, skipping." << std::endl;
+ // std::cerr << "primary key [" << value << "] already exists, skipping." << std::endl;
skipped_entry++;
goto LOOP_END;
}
}
- string_table_[cursor * string_num_ + field_id_mem_offset_map_[field.id_]] = value;
+ var_len_attr_table_[field_id_mem_offset_map_[field.id_]][cursor] = value;
} else if (field.field_type_ == meta::FieldType::JSON) {
// Insert json dumped string attribute.
- auto value = record.Get(field.name_);
- string_table_[cursor * string_num_ + field_id_mem_offset_map_[field.id_]] = value.DumpToString();
+ auto value = record.Get(field.name_).DumpToString();
+ var_len_attr_table_[field_id_mem_offset_map_[field.id_]][cursor] = value;
+ } else if (field.field_type_ == meta::FieldType::SPARSE_VECTOR_FLOAT ||
+ field.field_type_ == meta::FieldType::SPARSE_VECTOR_DOUBLE) {
+ // Insert vector attribute.
+ auto sparseVecObject = record.GetObject(field.name_);
+
+ auto indices = record.Get(field.name_).GetArray(SparseVecObjIndicesKey);
+ auto values = record.Get(field.name_).GetArray(SparseVecObjValuesKey);
+ auto nonZeroValueSize = indices.GetSize();
+ if (indices.GetSize() != values.GetSize()) {
+ std::cerr << "mismatched indices array length (" << indices.GetSize() << ") and value array length (" << values.GetSize() << "), skipping." << std::endl;
+ skipped_entry++;
+ goto LOOP_END;
+ }
+
+ auto vec = std::make_shared<SparseVector>();
+
+ float sum = 0;
+ for (auto j = 0; j < indices.GetSize(); ++j) {
+ size_t index = static_cast<size_t>(indices.GetArrayElement(j).GetInt());
+ float value = static_cast<float>(values.GetArrayElement(j).GetDouble());
+ sum += value * value;
+ vec->emplace_back(SparseVectorElement{index, value});
+ }
+ // covert to length
|
typo: convert
|
vectordb
|
github_2023
|
cpp
| 88 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,1609 @@
+
+#include "db/db_server.hpp"
+
+#include <gtest/gtest.h>
+
+#include <algorithm>
+#include <chrono> // std::chrono::seconds
+#include <cstdio>
+#include <cstdlib>
+#include <filesystem>
+#include <future>
+#include <iterator>
+#include <memory>
+#include <random>
+#include <string>
+#include <thread> // std::this_thread::sleep_for
+#include <vector>
+
+TEST(DbServer, CreateTable) {
+ std::string tempDir = std::filesystem::temp_directory_path() / std::filesystem::path("ut_db_server_create_table");
+ vectordb::engine::DBServer database;
+ std::filesystem::remove_all(tempDir);
+ const auto dbName = "MyDb";
+ auto loadDbStatus = database.LoadDB(dbName, tempDir, 150000, true);
+ EXPECT_TRUE(loadDbStatus.ok()) << "message:" << loadDbStatus.message();
+
+ size_t tableId = 0;
+
+ const std::string schema = R"_(
+{
+ "name": "MyTable",
+ "fields": [
+ {
+ "name": "ID",
+ "dataType": "INT",
+ "primaryKey": true
+ },
+ {
+ "name": "Doc",
+ "dataType": "STRING"
+ },
+ {
+ "name": "EmbeddingEuclidean",
+ "dataType": "VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "EUCLIDEAN"
+ },
+ {
+ "name": "EmbeddingDotProduct",
+ "dataType": "VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "DOT_PRODUCT"
+ },
+ {
+ "name": "EmbeddingCosine",
+ "dataType": "VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "COSINE"
+ },
+ {
+ "name": "EmbeddingSparseEuclidean",
+ "dataType": "SPARSE_VECTOR_DOUBLE",
+ "dimensions": 4,
+ "metricType": "EUCLIDEAN"
+ },
+ {
+ "name": "EmbeddingSparseDotProduct",
+ "dataType": "SPARSE_VECTOR_DOUBLE",
+ "dimensions": 4,
+ "metricType": "DOT_PRODUCT"
+ },
+ {
+ "name": "EmbeddingSparseCosine",
+ "dataType": "SPARSE_VECTOR_DOUBLE",
+ "dimensions": 4,
+ "metricType": "COSINE"
+ }
+ ]
+}
+ )_";
+
+ auto createTableStatus = database.CreateTable(dbName, schema, tableId);
+ EXPECT_TRUE(createTableStatus.ok()) << "message:" << createTableStatus.message();
+ auto dropTableStatus = database.DropTable(dbName, "MyTable");
+ EXPECT_TRUE(dropTableStatus.ok()) << "message:" << dropTableStatus.message();
+ auto unloadDbStatus = database.UnloadDB(dbName);
+ EXPECT_TRUE(unloadDbStatus.ok()) << "message:" << unloadDbStatus.message();
+}
+
+TEST(DbServer, DenseVector) {
+ std::string tempDir = std::filesystem::temp_directory_path() / std::filesystem::path("ut_db_server_dense_vector");
+ vectordb::engine::DBServer database;
+ std::filesystem::remove_all(tempDir);
+ const auto dbName = "MyDb";
+ const auto tableName = "MyTable";
+ size_t queryDimension = 4;
+ database.LoadDB(dbName, tempDir, 150000, true);
+ size_t tableId = 0;
+
+ const std::string schema = R"_(
+{
+ "name": "MyTable",
+ "fields": [
+ {
+ "name": "ID",
+ "dataType": "INT",
+ "primaryKey": true
+ },
+ {
+ "name": "Doc",
+ "dataType": "STRING"
+ },
+ {
+ "name": "EmbeddingEuclidean",
+ "dataType": "VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "EUCLIDEAN"
+ },
+ {
+ "name": "EmbeddingDotProduct",
+ "dataType": "VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "DOT_PRODUCT"
+ },
+ {
+ "name": "EmbeddingCosine",
+ "dataType": "VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "COSINE"
+ }
+ ]
+}
+ )_";
+
+ const std::string records = R"_(
+[
+ {
+ "ID": 1,
+ "Doc": "Berlin",
+ "EmbeddingEuclidean": [
+ 0.05,
+ 0.61,
+ 0.76,
+ 0.74
+ ],
+ "EmbeddingDotProduct": [
+ 0.05,
+ 0.61,
+ 0.76,
+ 0.74
+ ],
+ "EmbeddingCosine": [
+ 0.05,
+ 0.61,
+ 0.76,
+ 0.74
+ ]
+ },
+ {
+ "ID": 2,
+ "Doc": "London",
+ "EmbeddingEuclidean": [
+ 0.19,
+ 0.81,
+ 0.75,
+ 0.11
+ ],
+ "EmbeddingDotProduct": [
+ 0.19,
+ 0.81,
+ 0.75,
+ 0.11
+ ],
+ "EmbeddingCosine": [
+ 0.19,
+ 0.81,
+ 0.75,
+ 0.11
+ ]
+ },
+ {
+ "ID": 3,
+ "Doc": "Moscow",
+ "EmbeddingEuclidean": [
+ 0.36,
+ 0.55,
+ 0.47,
+ 0.94
+ ],
+ "EmbeddingDotProduct": [
+ 0.36,
+ 0.55,
+ 0.47,
+ 0.94
+ ],
+ "EmbeddingCosine": [
+ 0.36,
+ 0.55,
+ 0.47,
+ 0.94
+ ]
+ },
+ {
+ "ID": 4,
+ "Doc": "San Francisco",
+ "EmbeddingEuclidean": [
+ 0.18,
+ 0.01,
+ 0.85,
+ 0.8
+ ],
+ "EmbeddingDotProduct": [
+ 0.18,
+ 0.01,
+ 0.85,
+ 0.8
+ ],
+ "EmbeddingCosine": [
+ 0.18,
+ 0.01,
+ 0.85,
+ 0.8
+ ]
+ },
+ {
+ "ID": 5,
+ "Doc": "Shanghai",
+ "EmbeddingEuclidean": [
+ 0.24,
+ 0.18,
+ 0.22,
+ 0.44
+ ],
+ "EmbeddingDotProduct": [
+ 0.24,
+ 0.18,
+ 0.22,
+ 0.44
+ ],
+ "EmbeddingCosine": [
+ 0.24,
+ 0.18,
+ 0.22,
+ 0.44
+ ]
+ },
+ {
+ "ID": 1,
+ "Doc": "Berlin",
+ "EmbeddingEuclidean": [
+ 0.05,
+ 0.61,
+ 0.76,
+ 0.74
+ ],
+ "EmbeddingDotProduct": [
+ 0.05,
+ 0.61,
+ 0.76,
+ 0.74
+ ],
+ "EmbeddingCosine": [
+ 0.05,
+ 0.61,
+ 0.76,
+ 0.74
+ ]
+ }
+]
+ )_";
+
+ auto createTableStatus = database.CreateTable(dbName, schema, tableId);
+ EXPECT_TRUE(createTableStatus.ok()) << createTableStatus.message();
+ vectordb::Json recordsJson;
+ EXPECT_TRUE(recordsJson.LoadFromString(records));
+ auto insertStatus = database.Insert(dbName, tableName, recordsJson);
+ EXPECT_TRUE(insertStatus.ok()) << insertStatus.message();
+ vectordb::engine::DenseVectorElement queryData[] = {0.35, 0.55, 0.47, 0.94};
+ struct TestCase {
+ std::string searchFieldName;
+ std::vector<std::string> expectedOrder;
+ };
+ std::vector<TestCase> testcases = {
+ {"EmbeddingEuclidean", {"Moscow", "Berlin", "Shanghai", "San Francisco", "London"}},
+ {"EmbeddingDotProduct", {"Moscow", "Berlin", "San Francisco", "London", "Shanghai"}},
+ {"EmbeddingCosine", {"Moscow", "Shanghai", "Berlin", "San Francisco", "London"}}};
+ auto rebuildOptions = {false, true};
+ for (auto rebuild : rebuildOptions) {
+ if (rebuild) {
+ auto rebuildStatus = database.Rebuild();
+ EXPECT_TRUE(rebuildStatus.ok()) << rebuildStatus.message();
+ }
+ for (auto &testcase : testcases) {
+ vectordb::Json result;
+ const auto limit = 6;
+ auto queryFields = std::vector<std::string>{"ID", "Doc", testcase.searchFieldName};
+ auto queryStatus = database.Search(dbName, tableName, testcase.searchFieldName, queryFields, queryDimension, queryData, limit, result, "", true);
+ EXPECT_TRUE(queryStatus.ok()) << queryStatus.message();
+ EXPECT_EQ(result.GetSize(), 5) << "duplicate insert should've been ignored";
+ for (int i = 0; i < result.GetSize(); i++) {
+ EXPECT_EQ(result.GetArrayElement(i).GetString("Doc"), testcase.expectedOrder[i])
+ << i << "th city mismatch when querying " << testcase.searchFieldName << std::endl
+ << result.DumpToString();
+ }
+ }
+ }
+}
+
+TEST(DbServer, SparseVector) {
+ std::string tempDir = std::filesystem::temp_directory_path() / std::filesystem::path("ut_db_server_sparse_vector");
+ vectordb::engine::DBServer database;
+ std::filesystem::remove_all(tempDir);
+ const auto dbName = "MyDb";
+ const auto tableName = "MyTable";
+ size_t queryDimension = 4;
+ database.LoadDB(dbName, tempDir, 150000, true);
+ size_t tableId = 0;
+
+ const std::string schema = R"_(
+{
+ "name": "MyTable",
+ "fields": [
+ {
+ "name": "ID",
+ "dataType": "INT",
+ "primaryKey": true
+ },
+ {
+ "name": "Doc",
+ "dataType": "STRING"
+ },
+ {
+ "name": "EmbeddingEuclidean",
+ "dataType": "SPARSE_VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "EUCLIDEAN"
+ },
+ {
+ "name": "EmbeddingDotProduct",
+ "dataType": "SPARSE_VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "DOT_PRODUCT"
+ },
+ {
+ "name": "EmbeddingCosine",
+ "dataType": "SPARSE_VECTOR_FLOAT",
+ "dimensions": 4,
+ "metricType": "COSINE"
+ }
+ ]
+}
+ )_";
+
+ const std::string records = R"_(
+[
+ {
+ "ID": 1,
+ "Doc": "Berlin",
+ "EmbeddingEuclidean": {
|
Add some UT cases where the sparse vector records only contain a subset of the indices within the dimension, and each record presents different indices.
|
vectordb
|
github_2023
|
cpp
| 33 |
epsilla-cloud
|
lganzzzo
|
@@ -531,6 +531,76 @@ class WebController : public oatpp::web::server::api::ApiController {
return createDtoResponse(Status::CODE_200, res_dto);
}
+ ADD_CORS(CalcDistance)
+
+ ENDPOINT("POST", "/api/{db_name}/data/distance", CalcDistance,
+ PATH(String, db_name, "db_name"),
+ BODY_STRING(String, body)) {
+
+ auto status_dto = StatusDto::createShared();
+
+ vectordb::Json parsedBody;
+ auto valid = parsedBody.LoadFromString(body);
+ if (!valid) {
+ status_dto->statusCode = Status::CODE_400.code;
+ status_dto->message = "Invalid payload.";
+ return createDtoResponse(Status::CODE_400, status_dto);
+ }
+
+ if (!parsedBody.HasMember("table")) {
+ status_dto->statusCode = Status::CODE_400.code;
+ status_dto->message = "table is missing in your payload.";
+ return createDtoResponse(Status::CODE_400, status_dto);
+ }
+
+ if (!parsedBody.HasMember("idList")) {
+ status_dto->statusCode = Status::CODE_400.code;
+ status_dto->message = "idList is missing in your payload.";
+ return createDtoResponse(Status::CODE_400, status_dto);
+ }
+
+ if (!parsedBody.HasMember("queryVector")) {
+ status_dto->statusCode = Status::CODE_400.code;
+ status_dto->message = "queryVector is missing in your payload.";
+ return createDtoResponse(Status::CODE_400, status_dto);
+ }
+
+ std::string table_name = parsedBody.GetString("table");
+
+ size_t vector_size = parsedBody.GetArraySize("queryVector");
+ float query_vector[vector_size];
+ for (size_t i = 0; i < vector_size; i++) {
+ auto vector = parsedBody.GetArrayElement("queryVector", i);
+ query_vector[i] = (float)vector.GetDouble();
+ }
+
+ size_t id_list_size = parsedBody.GetArraySize("idList");
+ int64_t id_list[id_list_size];
+ for (size_t i = 0; i < id_list_size; i++) {
+ auto id = parsedBody.GetArrayElement("idList", i);
+ id_list[i] = id.GetInt();
+ }
+
+ std::string field_name = parsedBody.GetString("queryField");
+
+ vectordb::Json result;
+ vectordb::Status calc_distance_status = db_server->CalcDistance(
+ db_name, table_name, field_name, vector_size, query_vector, id_list_size, id_list, result
+ );
+ if (!calc_distance_status.ok()) {
+ status_dto->statusCode = Status::CODE_500.code;
+ status_dto->message = calc_distance_status.message();
+ return createDtoResponse(Status::CODE_500, status_dto);
+ }
+
+ auto res_dto = SearchRespDto::createShared();
+ res_dto->statusCode = Status::CODE_200.code;
+ res_dto->message = "Calculate distance successfully.";
+ oatpp::parser::json::mapping::ObjectMapper mapper;
|
No need to create a new ObjectMapper - you already have one injected in the Controller - see the constructor.
You can access it via `getDefaultObjectMapper()`.
|
vectordb
|
github_2023
|
cpp
| 33 |
epsilla-cloud
|
lganzzzo
|
@@ -531,6 +531,76 @@ class WebController : public oatpp::web::server::api::ApiController {
return createDtoResponse(Status::CODE_200, res_dto);
}
+ ADD_CORS(CalcDistance)
+
+ ENDPOINT("POST", "/api/{db_name}/data/distance", CalcDistance,
+ PATH(String, db_name, "db_name"),
+ BODY_STRING(String, body)) {
+
+ auto status_dto = StatusDto::createShared();
+
+ vectordb::Json parsedBody;
+ auto valid = parsedBody.LoadFromString(body);
+ if (!valid) {
+ status_dto->statusCode = Status::CODE_400.code;
+ status_dto->message = "Invalid payload.";
+ return createDtoResponse(Status::CODE_400, status_dto);
+ }
+
+ if (!parsedBody.HasMember("table")) {
+ status_dto->statusCode = Status::CODE_400.code;
+ status_dto->message = "table is missing in your payload.";
+ return createDtoResponse(Status::CODE_400, status_dto);
+ }
+
+ if (!parsedBody.HasMember("idList")) {
+ status_dto->statusCode = Status::CODE_400.code;
+ status_dto->message = "idList is missing in your payload.";
+ return createDtoResponse(Status::CODE_400, status_dto);
+ }
+
+ if (!parsedBody.HasMember("queryVector")) {
+ status_dto->statusCode = Status::CODE_400.code;
+ status_dto->message = "queryVector is missing in your payload.";
+ return createDtoResponse(Status::CODE_400, status_dto);
+ }
+
+ std::string table_name = parsedBody.GetString("table");
+
+ size_t vector_size = parsedBody.GetArraySize("queryVector");
+ float query_vector[vector_size];
+ for (size_t i = 0; i < vector_size; i++) {
+ auto vector = parsedBody.GetArrayElement("queryVector", i);
+ query_vector[i] = (float)vector.GetDouble();
+ }
+
+ size_t id_list_size = parsedBody.GetArraySize("idList");
+ int64_t id_list[id_list_size];
+ for (size_t i = 0; i < id_list_size; i++) {
+ auto id = parsedBody.GetArrayElement("idList", i);
+ id_list[i] = id.GetInt();
+ }
+
+ std::string field_name = parsedBody.GetString("queryField");
+
+ vectordb::Json result;
+ vectordb::Status calc_distance_status = db_server->CalcDistance(
+ db_name, table_name, field_name, vector_size, query_vector, id_list_size, id_list, result
+ );
+ if (!calc_distance_status.ok()) {
+ status_dto->statusCode = Status::CODE_500.code;
+ status_dto->message = calc_distance_status.message();
+ return createDtoResponse(Status::CODE_500, status_dto);
+ }
+
+ auto res_dto = SearchRespDto::createShared();
+ res_dto->statusCode = Status::CODE_200.code;
+ res_dto->message = "Calculate distance successfully.";
+ oatpp::parser::json::mapping::ObjectMapper mapper;
+ res_dto->result = mapper.readFromString<oatpp::Any>(result.DumpToString());
|
Now I see what the deal is...
In this particular case it's more convenient to just construct the resultant JSON using nlohmann JSON.
Something like this:
```cpp
json data = {
{"statusCode", 200},
{"message", result}
};
return createResponse(Status::CODE_200, data.dump());
```
|
vectordb
|
github_2023
|
cpp
| 53 |
epsilla-cloud
|
richard-epsilla
|
@@ -181,7 +181,6 @@ class WebController : public oatpp::web::server::api::ApiController {
if (
field.field_type_ == vectordb::engine::meta::FieldType::VECTOR_DOUBLE ||
field.field_type_ == vectordb::engine::meta::FieldType::VECTOR_FLOAT) {
- // TODO: after figuring out metric type other than EUCLIDEAN, need to check metric type as well.
|
Where did we report an error if the metric type is unknown?
|
vectordb
|
github_2023
|
cpp
| 53 |
epsilla-cloud
|
richard-epsilla
|
@@ -312,7 +312,7 @@ static PyObject *query(PyObject *self, PyObject *args, PyObject *kwargs) {
if (!PyArg_ParseTupleAndKeywords(
args,
kwargs,
- "ssOOip",
+ "ssOOisp",
|
What is this?
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -484,21 +484,49 @@ class WebController : public oatpp::web::server::api::ApiController {
int64_t limit = parsedBody.GetInt("limit");
+ std::string filter;
|
Do we need to initialize the string as an empty string? I remember primitive types (int, float, etc.) need to be initialized inside a function as they could have a non-empty initial value; not sure about string.
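For what it's worth, a minimal illustration of the difference: a local std::string is default-constructed to empty, while built-in types are left uninitialized (the variable names are illustrative only):

```cpp
#include <cassert>
#include <string>

int main() {
  std::string filter;  // default constructor: guaranteed to be empty
  int count;           // uninitialized: reading it before assignment is undefined behavior
  assert(filter.empty());
  count = 0;           // must be assigned before use
  return count;
}
```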
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,100 @@
+#pragma once
+
+#include <memory>
+#include <string>
+#include <unordered_map>
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+enum class ArithmeticOperator {
+ Add,
|
Use indentation 2 instead of 4
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,100 @@
+#pragma once
+
+#include <memory>
+#include <string>
+#include <unordered_map>
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+enum class ArithmeticOperator {
+ Add,
+ Subtract,
+ Multiply,
+ Divide,
+ Module
+};
+
+enum class CompareOperator {
+ LT,
+ LTE,
+ EQ,
+ GT,
+ GTE,
+ NE,
+};
+
+enum class LogicalOperator {
+ INVALID,
+ AND,
+ OR,
+ NOT
+};
+
+enum class NodeType {
+ Invalid,
+ IntConst,
+ StringConst,
+ DoubleConst,
+ BoolConst,
+ IntAttr,
+ StringAttr,
+ DoubleAttr,
+ BoolAttr,
+ Add,
+ Subtract,
+ Multiply,
+ Divide,
+ Module,
+ LT,
+ LTE,
+ EQ,
+ GT,
+ GTE,
+ NE,
+ AND,
+ OR,
+ NOT
+};
+
+const std::unordered_map<std::string, NodeType> OperatorNodeTypeMap = {
+ {"+", NodeType::Add},
+ {"-", NodeType::Subtract},
+ {"*", NodeType::Multiply},
+ {"/", NodeType::Divide},
+ {"%", NodeType::Module},
+ {">", NodeType::GT},
+ {">=", NodeType::GTE},
+ {"=", NodeType::EQ},
+ {"<=", NodeType::LTE},
+ {"<", NodeType::LT},
+ {"<>", NodeType::NE},
+ {"AND", NodeType::AND},
+ {"OR", NodeType::OR},
+ {"NOT", NodeType::NOT}
+};
+
+enum class ValueType {
+ STRING,
+ INT,
+ DOUBLE,
+ BOOL
+};
+
+struct ExprNode {
+ ValueType value_type;
+ NodeType node_type;
+ std::string field_name; // Only attribute has it.
+ size_t left;
+ size_t right;
+ std::string str_value;
+ int int_value;
|
Use int64_t instead to handle long int
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,29 @@
+#include <any>
+#include <unordered_map>
+#include <vector>
+
+#include "expr_types.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+class ExprEvaluator {
+ public:
|
Use indentation 2 instead of 4
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -199,8 +202,16 @@ Status DBServer::Search(const std::string& db_name,
if (table == nullptr) {
return Status(DB_UNEXPECTED_ERROR, "Table not found: " + table_name);
}
+
+ // Filter validation
+ std::vector<query::expr::ExprNodePtr> expr_nodes;
+ Status expr = query::expr::Expr::ParseNodeFromStr(filter, expr_nodes, table->field_name_type_map_);
|
rename expr to expr_parse_status
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,29 @@
+#include <any>
+#include <unordered_map>
+#include <vector>
+
+#include "expr_types.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+class ExprEvaluator {
+ public:
+ explicit ExprEvaluator(std::vector<ExprNodePtr>& nodes);
+
+ ~ExprEvaluator();
+
+ bool LogicalEvaluate(const int& node_index, std::unordered_map<std::string, std::any>& field_value_map);
|
Improvement: can we directly use the reference to the table_segment for evaluation instead of converting the values into the field_value_map? This value copy could be slow.
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -174,8 +177,23 @@ class VecSearchExecutor {
boost::dynamic_bitset<>& is_visited,
const int64_t index_threshold);
- bool BruteForceSearch(const float* query_data, const int64_t start, const int64_t end, const ConcurrentBitset& deleted);
- Status Search(const float* query_data, const ConcurrentBitset& deleted, const size_t limit, const int64_t total, int64_t& result_size);
+ bool BruteForceSearch(
+ const float* query_data,
+ const int64_t start,
+ const int64_t end,
+ const ConcurrentBitset& deleted,
+ std::shared_ptr<vectordb::query::expr::ExprEvaluator>& expr_evaluator,
+ std::shared_ptr<vectordb::engine::TableSegmentMVP>& table_segment,
+ std::unordered_map<std::string, meta::FieldType>& field_name_type_map,
+ const int root_node_index);
|
Why does BruteForceSearch need the root_node_index but Search doesn't?
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -174,8 +177,23 @@ class VecSearchExecutor {
boost::dynamic_bitset<>& is_visited,
const int64_t index_threshold);
- bool BruteForceSearch(const float* query_data, const int64_t start, const int64_t end, const ConcurrentBitset& deleted);
- Status Search(const float* query_data, const ConcurrentBitset& deleted, const size_t limit, const int64_t total, int64_t& result_size);
+ bool BruteForceSearch(
+ const float* query_data,
+ const int64_t start,
+ const int64_t end,
+ const ConcurrentBitset& deleted,
+ std::shared_ptr<vectordb::query::expr::ExprEvaluator>& expr_evaluator,
|
Why pass it in as a shared_ptr?
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,23 @@
+#include <string>
+#include <unordered_map>
+
+#include "expr_types.hpp"
+#include "utils/json.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+class Expr {
+ public:
|
Use indentation 2 instead of 4
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -704,7 +789,8 @@ bool VecSearchExecutor::BruteForceSearch(const float *query_data, const int64_t
num_result = 0;
// remove the invalid entries
for (; iter < end - start; ++iter) {
- if (!deleted.test(iter)) {
+ auto field_value_map = GenFieldValueMap(table_segment, field_name_type_map, root_node_index, iter);
|
GenFieldValueMap is expensive; we should call it only if the filter expression is not empty. Or, better, don't convert into this map at all but directly use the value of an attribute during expression evaluation.
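A generic sketch of the first half of this suggestion (build the expensive map only when a filter is actually present); the types and names below are placeholders rather than the project's:

```cpp
#include <functional>
#include <string>
#include <unordered_map>

using FieldValueMap = std::unordered_map<std::string, double>;
using Filter = std::function<bool(const FieldValueMap &)>;

// Build the per-record map lazily: skip it entirely when there is no filter.
bool PassesFilter(const Filter *filter, const std::function<FieldValueMap()> &build_map) {
  if (filter == nullptr) {
    return true;  // no filter: every record passes, no map is built
  }
  return (*filter)(build_map());  // only now pay for constructing the map
}
```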
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -718,15 +804,26 @@ bool VecSearchExecutor::BruteForceSearch(const float *query_data, const int64_t
return true;
}
-Status VecSearchExecutor::Search(const float *query_data, const ConcurrentBitset &deleted, const size_t limit, const int64_t total_vector, int64_t &result_size) {
+Status VecSearchExecutor::Search(
+ const float *query_data,
+ const ConcurrentBitset &deleted,
+ const size_t limit,
+ std::vector<vectordb::query::expr::ExprNodePtr> &filter_nodes,
+ std::shared_ptr<vectordb::engine::TableSegmentMVP>& table_segment,
+ std::unordered_map<std::string, meta::FieldType>& field_name_type_map,
+ int64_t &result_size
+) {
+ int64_t total_vector = table_segment->record_number_;
+ auto expr_evaluator = std::make_shared<vectordb::query::expr::ExprEvaluator>(filter_nodes);
|
Why make a shared pointer?
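If the evaluator only lives for the duration of the call, a stack object passed by reference would avoid the shared_ptr allocation; a simplified sketch of that alternative, with the names trimmed down from the diff and the node fields elided:

```cpp
#include <vector>

struct ExprNode {};  // fields elided for this sketch

class ExprEvaluator {
 public:
  explicit ExprEvaluator(std::vector<ExprNode> &nodes) : nodes_(nodes) {}

 private:
  std::vector<ExprNode> &nodes_;
};

void RunSearch(std::vector<ExprNode> &filter_nodes) {
  ExprEvaluator evaluator(filter_nodes);  // no heap allocation, no reference counting
  // ... pass `evaluator` down by reference to the search routines ...
}
```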
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -765,7 +862,9 @@ Status VecSearchExecutor::Search(const float *query_data, const ConcurrentBitset
result_size = 0;
auto candidateNum = std::min({size_t(L_master_), size_t(total_vector)});
for (int64_t k_i = 0; k_i < candidateNum && result_size < searchLimit; ++k_i) {
- if (deleted.test(set_L_[k_i + master_queue_start].id_)) {
+ auto id = set_L_[k_i + master_queue_start].id_;
+ auto field_value_map = GenFieldValueMap(table_segment, field_name_type_map, filter_root_index, id);
|
Same here; try to avoid generating the map if there is no filter.
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -777,7 +876,9 @@ Status VecSearchExecutor::Search(const float *query_data, const ConcurrentBitset
auto candidateNum = std::min({size_t(L_master_), size_t(total_indexed_vector_)});
const int64_t master_queue_start = local_queues_starts_[num_threads_ - 1];
for (int64_t k_i = 0; k_i < candidateNum && result_size < searchLimit; ++k_i) {
- if (deleted.test(set_L_[k_i + master_queue_start].id_)) {
+ auto id = set_L_[k_i + master_queue_start].id_;
+ auto field_value_map = GenFieldValueMap(table_segment, field_name_type_map, filter_root_index, id);
|
Same here; try to avoid generating the map if there is no filter.
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,142 @@
+#include <cmath>
+
+#include "expr_evaluator.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+ExprEvaluator::ExprEvaluator(std::vector<ExprNodePtr>& nodes)
+ : nodes_(nodes) {
+}
+
+std::string ExprEvaluator::StrEvaluate(
+ const int& node_index,
+ std::unordered_map<std::string, std::any>& field_value_map
+) {
+ ExprNodePtr root = nodes_[node_index];
+ auto node_type = root->node_type;
+ if (node_type == NodeType::StringConst) {
+ return root->str_value;
+ } else if (node_type == NodeType::StringAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<std::string>(field_value_map.at(name));
+ } else {
+ return "";
|
Did we check that the field always exists?
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,142 @@
+#include <cmath>
+
+#include "expr_evaluator.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+ExprEvaluator::ExprEvaluator(std::vector<ExprNodePtr>& nodes)
+ : nodes_(nodes) {
+}
+
+std::string ExprEvaluator::StrEvaluate(
+ const int& node_index,
+ std::unordered_map<std::string, std::any>& field_value_map
+) {
+ ExprNodePtr root = nodes_[node_index];
+ auto node_type = root->node_type;
+ if (node_type == NodeType::StringConst) {
+ return root->str_value;
+ } else if (node_type == NodeType::StringAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<std::string>(field_value_map.at(name));
+ } else {
+ return "";
+ }
+ } else if (node_type == NodeType::Add) {
+ auto left = StrEvaluate(root->left, field_value_map);
+ auto right = StrEvaluate(root->right, field_value_map);
+ return left + right;
+ }
+ return "";
+}
+
+double ExprEvaluator::NumEvaluate(const int& node_index, std::unordered_map<std::string, std::any>& field_value_map) {
+ ExprNodePtr root = nodes_[node_index];
+ auto node_type = root->node_type;
+ if (node_type == NodeType::IntConst) {
+ return static_cast<double>(root->int_value);
+ } else if (node_type == NodeType::DoubleConst) {
+ return root->double_value;
+ } else if (node_type == NodeType::IntAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<int64_t>(field_value_map.at(name));
+ } else {
+ return 0.0;
+ }
+ } else if (node_type == NodeType::DoubleAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<double>(field_value_map.at(name));
+ } else {
+ return 0.0;
+ }
+ } else if (root->left != -1 && root->right != -1) {
+ auto left = NumEvaluate(root->left, field_value_map);
+ auto right = NumEvaluate(root->right, field_value_map);
+ switch (node_type) {
+ case NodeType::Add:
+ return left + right;
+ case NodeType::Subtract:
+ return left - right;
+ case NodeType::Multiply:
+ return left * right;
+ case NodeType::Divide:
+ return left / right;
|
Need to check that the right value is not 0. If it's 0, we should throw an invalid value error, and the top level should evaluate as false.
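A minimal sketch of the requested guard; raising std::invalid_argument and letting the top level treat it as a false filter result is one possible policy, not necessarily the project's:

```cpp
#include <cmath>
#include <stdexcept>

double SafeDivide(double left, double right) {
  if (right == 0.0) {
    throw std::invalid_argument("division by zero in filter expression");
  }
  return left / right;
}

double SafeModulo(double left, double right) {
  if (right == 0.0) {
    throw std::invalid_argument("modulo by zero in filter expression");
  }
  return std::fmod(left, right);
}
```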
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,142 @@
+#include <cmath>
+
+#include "expr_evaluator.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+ExprEvaluator::ExprEvaluator(std::vector<ExprNodePtr>& nodes)
+ : nodes_(nodes) {
+}
+
+std::string ExprEvaluator::StrEvaluate(
+ const int& node_index,
+ std::unordered_map<std::string, std::any>& field_value_map
+) {
+ ExprNodePtr root = nodes_[node_index];
+ auto node_type = root->node_type;
+ if (node_type == NodeType::StringConst) {
+ return root->str_value;
+ } else if (node_type == NodeType::StringAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<std::string>(field_value_map.at(name));
+ } else {
+ return "";
+ }
+ } else if (node_type == NodeType::Add) {
+ auto left = StrEvaluate(root->left, field_value_map);
+ auto right = StrEvaluate(root->right, field_value_map);
+ return left + right;
+ }
+ return "";
+}
+
+double ExprEvaluator::NumEvaluate(const int& node_index, std::unordered_map<std::string, std::any>& field_value_map) {
+ ExprNodePtr root = nodes_[node_index];
+ auto node_type = root->node_type;
+ if (node_type == NodeType::IntConst) {
+ return static_cast<double>(root->int_value);
+ } else if (node_type == NodeType::DoubleConst) {
+ return root->double_value;
+ } else if (node_type == NodeType::IntAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<int64_t>(field_value_map.at(name));
+ } else {
+ return 0.0;
+ }
+ } else if (node_type == NodeType::DoubleAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<double>(field_value_map.at(name));
+ } else {
+ return 0.0;
+ }
+ } else if (root->left != -1 && root->right != -1) {
+ auto left = NumEvaluate(root->left, field_value_map);
+ auto right = NumEvaluate(root->right, field_value_map);
+ switch (node_type) {
+ case NodeType::Add:
+ return left + right;
+ case NodeType::Subtract:
+ return left - right;
+ case NodeType::Multiply:
+ return left * right;
+ case NodeType::Divide:
+ return left / right;
+ case NodeType::Module:
+ return std::fmod(left, right);
|
Same here. right cannot be 0
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,142 @@
+#include <cmath>
+
+#include "expr_evaluator.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+ExprEvaluator::ExprEvaluator(std::vector<ExprNodePtr>& nodes)
+ : nodes_(nodes) {
+}
+
+std::string ExprEvaluator::StrEvaluate(
+ const int& node_index,
+ std::unordered_map<std::string, std::any>& field_value_map
+) {
+ ExprNodePtr root = nodes_[node_index];
+ auto node_type = root->node_type;
+ if (node_type == NodeType::StringConst) {
+ return root->str_value;
+ } else if (node_type == NodeType::StringAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<std::string>(field_value_map.at(name));
+ } else {
+ return "";
+ }
+ } else if (node_type == NodeType::Add) {
+ auto left = StrEvaluate(root->left, field_value_map);
+ auto right = StrEvaluate(root->right, field_value_map);
+ return left + right;
+ }
+ return "";
+}
+
+double ExprEvaluator::NumEvaluate(const int& node_index, std::unordered_map<std::string, std::any>& field_value_map) {
+ ExprNodePtr root = nodes_[node_index];
+ auto node_type = root->node_type;
+ if (node_type == NodeType::IntConst) {
+ return static_cast<double>(root->int_value);
+ } else if (node_type == NodeType::DoubleConst) {
+ return root->double_value;
+ } else if (node_type == NodeType::IntAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<int64_t>(field_value_map.at(name));
+ } else {
+ return 0.0;
+ }
+ } else if (node_type == NodeType::DoubleAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<double>(field_value_map.at(name));
+ } else {
+ return 0.0;
+ }
+ } else if (root->left != -1 && root->right != -1) {
+ auto left = NumEvaluate(root->left, field_value_map);
+ auto right = NumEvaluate(root->right, field_value_map);
+ switch (node_type) {
+ case NodeType::Add:
+ return left + right;
+ case NodeType::Subtract:
+ return left - right;
+ case NodeType::Multiply:
+ return left * right;
+ case NodeType::Divide:
+ return left / right;
+ case NodeType::Module:
+ return std::fmod(left, right);
+ }
+ }
+ return 0.0;
+}
+
+bool ExprEvaluator::LogicalEvaluate(const int& node_index, std::unordered_map<std::string, std::any>& field_value_map) {
+ if (node_index < 0) {
+ return true;
+ }
+
+ ExprNodePtr root = nodes_[node_index];
+ auto node_type = root->node_type;
+ if (node_type == NodeType::BoolConst) {
+ return root->bool_value;
+ } else if (node_type == NodeType::BoolAttr) {
+ auto name = root->field_name;
+ if (field_value_map.find(name) != field_value_map.end()) {
+ return std::any_cast<bool>(field_value_map.at(name));
+ } else {
+ return false;
+ }
+ } else if (node_type == NodeType::NOT) {
+ auto child_index = root->left;
+ auto left = LogicalEvaluate(child_index, field_value_map);
+ return !left;
+ } else if (root->left != -1 && root->right != -1) {
+ auto left_index = root->left;
+ auto right_index = root->right;
+
+ auto value_type = root->value_type;
+ if (node_type == NodeType::EQ || node_type == NodeType::NE) {
+ auto child_value_type = nodes_[left_index]->value_type;
+ if (child_value_type == ValueType::STRING) {
+ auto left = StrEvaluate(left_index, field_value_map);
+ auto right = StrEvaluate(right_index, field_value_map);
+ return node_type == NodeType::EQ ? left == right : left != right;
+ } else if (child_value_type == ValueType::BOOL) {
+ auto left = LogicalEvaluate(left_index, field_value_map);
+ auto right = LogicalEvaluate(right_index, field_value_map);
+ return node_type == NodeType::EQ ? left == right : left != right;
+ } else {
+ auto left = NumEvaluate(left_index, field_value_map);
+ auto right = NumEvaluate(right_index, field_value_map);
+ return node_type == NodeType::EQ ? left == right : left != right;
+ }
+ } else if (node_type == NodeType::AND || node_type == NodeType::OR) {
+ auto left = LogicalEvaluate(left_index, field_value_map);
+ auto right = LogicalEvaluate(right_index, field_value_map);
+ return node_type == NodeType::AND ? left && right : left || right;
|
To make it easier to read:
return node_type == NodeType::AND ? (left && right) : (left || right);
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,537 @@
+#include <iostream>
+#include <sstream>
+#include <algorithm>
+#include <regex>
+#include <stack>
+#include <queue>
+#include <unordered_map>
+#include <vector>
+#include <boost/algorithm/string/join.hpp>
+
+#include "db/catalog/meta_types.hpp"
+#include "logger/logger.hpp"
+#include "utils/status.hpp"
+#include "expr.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+enum class State {
+ Start,
+ Number,
+ String,
+ Attribute,
+ Operator,
+};
+
+bool isArithChar(char c) {
+ return c == '+' || c == '-' || c == '*' || c == '/' || c == '%';
+};
+
+bool isCompareChar(char c) {
+ return c == '>' || c == '<' || c == '=';
+};
+
+bool isArithStr(std::string str) {
+ return str == "+" || str == "-" || str == "*" || str == "/" || str == "%";
+};
+
+bool isCompareStr(std::string str) {
+ return str == ">" || str == ">=" || str == "=" || str == "<=" || str == "<" || str == "<>";
+};
+
+bool isLogicalStr(std::string str) {
+ std::transform(str.begin(), str.end(), str.begin(), [](unsigned char c) {
+ return std::toupper(c);
+ });
+ return str == "AND" || str == "OR" || str == "NOT";
+};
+
+bool isUnsupportedLogicalOp(std::string str) {
+ std::transform(str.begin(), str.end(), str.begin(), [](unsigned char c) {
+ return std::toupper(c);
+ });
+ return str == "ALL" || str == "ANY" || str == "BETWEEN" || str == "EXISTS" || str == "IN" ||
+ str == "LIKE" || str == "SOME";
+}
+
+bool isOperator(std::string str) {
+ return isArithStr(str) || isCompareStr(str) || isLogicalStr(str);
+};
+
+int getPrecedence(std::string& op) {
+ if (isLogicalStr(op))
+ return 1;
+ else if (isCompareStr(op))
+ return 2;
+ if (op == "+" || op == "-")
+ return 3;
+ else if (op == "*" || op == "/" || op == "%")
+ return 4;
+ return 0;
+};
+
+Status SplitTokens(std::string& expression, std::vector<std::string>& tokens) {
+ std::vector<std::string> token_list;
+ State state = State::Start;
+ std::string cur_token;
+
+ size_t last_index = expression.length() - 1;
+ for (size_t i = 0; i < expression.length(); ) {
+ char c = expression[i];
+ switch (state) {
+ case State::Start:
+ if (std::isspace(c)) {
+ i++;
+ continue;
+ } else if (std::isdigit(c)) {
+ state = State::Number;
+ } else if (std::isalpha(c) || c == '_') {
+ state = State::Attribute;
+ } else if (c == '(' || c == ')') {
+ token_list.push_back(std::string(1, c));
+ i++;
+ } else if (isArithChar(c) || isCompareChar(c)) {
+ if (c == '-' && i != last_index && std::isdigit(expression[i + 1])) {
+ if (!token_list.empty()) {
+ std::string ele = token_list.back();
+ if (!isOperator(ele) && ele != "(") {
+ state = State::Operator;
+ } else {
+ cur_token += c;
+ i++;
+ state = State::Number;
+ }
+ } else {
+ cur_token += c;
+ i++;
+ state = State::Number;
+ }
+ } else {
+ state = State::Operator;
+ }
+ } else if (c == '\'') {
+ state = State::String;
+ } else if (c == '&' || c == '|' || c == '^') {
+ return Status(NOT_IMPLEMENTED_ERROR, "Epsilla does not support bitwise operators yet.");
+ } else {
+ return Status(INVALID_EXPR, "Filter expression is not valid.");
+ }
+ break;
+ case State::String:
+ if (c == '\'') {
+ i++;
+ cur_token += c;
+ if (cur_token == "''") {
+ return Status(INVALID_EXPR, "String constant cannot be empty.");
|
Why can't a string constant be empty?
|
vectordb
|
github_2023
|
cpp
| 52 |
epsilla-cloud
|
richard-epsilla
|
@@ -0,0 +1,537 @@
+#include <iostream>
+#include <sstream>
+#include <algorithm>
+#include <regex>
+#include <stack>
+#include <queue>
+#include <unordered_map>
+#include <vector>
+#include <boost/algorithm/string/join.hpp>
+
+#include "db/catalog/meta_types.hpp"
+#include "logger/logger.hpp"
+#include "utils/status.hpp"
+#include "expr.hpp"
+
+namespace vectordb {
+namespace query {
+namespace expr {
+
+enum class State {
+ Start,
+ Number,
+ String,
+ Attribute,
+ Operator,
+};
+
+bool isArithChar(char c) {
+ return c == '+' || c == '-' || c == '*' || c == '/' || c == '%';
+};
+
+bool isCompareChar(char c) {
+ return c == '>' || c == '<' || c == '=';
+};
+
+bool isArithStr(std::string str) {
+ return str == "+" || str == "-" || str == "*" || str == "/" || str == "%";
+};
+
+bool isCompareStr(std::string str) {
+ return str == ">" || str == ">=" || str == "=" || str == "<=" || str == "<" || str == "<>";
+};
+
+bool isLogicalStr(std::string str) {
+ std::transform(str.begin(), str.end(), str.begin(), [](unsigned char c) {
+ return std::toupper(c);
+ });
+ return str == "AND" || str == "OR" || str == "NOT";
+};
+
+bool isUnsupportedLogicalOp(std::string str) {
+ std::transform(str.begin(), str.end(), str.begin(), [](unsigned char c) {
+ return std::toupper(c);
+ });
+ return str == "ALL" || str == "ANY" || str == "BETWEEN" || str == "EXISTS" || str == "IN" ||
+ str == "LIKE" || str == "SOME";
+}
+
+bool isOperator(std::string str) {
+ return isArithStr(str) || isCompareStr(str) || isLogicalStr(str);
+};
+
+int getPrecedence(std::string& op) {
+ if (isLogicalStr(op))
+ return 1;
+ else if (isCompareStr(op))
+ return 2;
+ if (op == "+" || op == "-")
+ return 3;
+ else if (op == "*" || op == "/" || op == "%")
+ return 4;
+ return 0;
+};
+
+Status SplitTokens(std::string& expression, std::vector<std::string>& tokens) {
+ std::vector<std::string> token_list;
+ State state = State::Start;
+ std::string cur_token;
+
+ size_t last_index = expression.length() - 1;
+ for (size_t i = 0; i < expression.length(); ) {
+ char c = expression[i];
+ switch (state) {
+ case State::Start:
+ if (std::isspace(c)) {
+ i++;
+ continue;
+ } else if (std::isdigit(c)) {
+ state = State::Number;
+ } else if (std::isalpha(c) || c == '_') {
+ state = State::Attribute;
+ } else if (c == '(' || c == ')') {
+ token_list.push_back(std::string(1, c));
+ i++;
+ } else if (isArithChar(c) || isCompareChar(c)) {
+ if (c == '-' && i != last_index && std::isdigit(expression[i + 1])) {
+ if (!token_list.empty()) {
+ std::string ele = token_list.back();
+ if (!isOperator(ele) && ele != "(") {
+ state = State::Operator;
+ } else {
+ cur_token += c;
+ i++;
+ state = State::Number;
+ }
+ } else {
+ cur_token += c;
+ i++;
+ state = State::Number;
+ }
+ } else {
+ state = State::Operator;
+ }
+ } else if (c == '\'') {
+ state = State::String;
+ } else if (c == '&' || c == '|' || c == '^') {
+ return Status(NOT_IMPLEMENTED_ERROR, "Epsilla does not support bitwise operators yet.");
+ } else {
+ return Status(INVALID_EXPR, "Filter expression is not valid.");
+ }
+ break;
+ case State::String:
+ if (c == '\'') {
+ i++;
+ cur_token += c;
+ if (cur_token == "''") {
+ return Status(INVALID_EXPR, "String constant cannot be empty.");
+ }
+ if (cur_token.size() > 2) {
+ token_list.push_back(cur_token);
+ cur_token.clear();
+ state = State::Start;
+ }
+ } else {
+ if (i == last_index) {
+ return Status(INVALID_EXPR, "Missing terminating '.");
+ } else {
+ cur_token += c;
+ i++;
+ }
+ }
+ break;
+ case State::Attribute:
+ if (std::isspace(c) || c == ')' || isArithChar(c) || isCompareChar(c)) {
+ token_list.push_back(cur_token);
+ cur_token.clear();
+ state = State::Start;
+ } else if (std::isalnum(c) || c == '_') {
+ cur_token += c;
+ i++;
+ } else {
+ return Status(INVALID_EXPR, "Invalid name: " + (cur_token += c));
+ }
+ break;
+ case State::Number:
+ if (std::isspace(c) || c == ')' || isArithChar(c) || isCompareChar(c)) {
+ if (std::count(cur_token.begin(), cur_token.end(), '.') > 1) {
+ return Status(INVALID_EXPR, cur_token + " is not a valid number.");
+ } else {
+ token_list.push_back(cur_token);
+ cur_token.clear();
+ i++;
+ state = State::Start;
+ }
+ } else if (std::isdigit(c)) {
+ cur_token += c;
+ i++;
+ } else if (c == '.' && i != last_index && std::isdigit(expression[i + 1])) {
+ cur_token += c;
+ i++;
+ } else {
+ return Status(INVALID_EXPR, "Filter expression is not valid.");
+ }
+ break;
+ case State::Operator:
+ if (isArithChar(c)) {
+ if (i != last_index && expression[i + 1] == '=') {
+ return Status(NOT_IMPLEMENTED_ERROR, "Epsilla does not support compound operators yet.");
+ }
+ token_list.push_back(std::string(1, c));
+ i++;
+ state = State::Start;
+ } else if (isCompareChar(c)) {
+ cur_token += c;
+ if (i != last_index && isCompareChar(expression[i + 1])) {
+ i++;
+ } else {
+ if (isCompareStr(cur_token)) {
+ token_list.push_back(cur_token);
+ cur_token.clear();
+ i++;
+ state = State::Start;
+ } else {
+ return Status(INVALID_EXPR, "'" + cur_token + "' is an invalid operator.");
+ }
+ }
+ }
+ break;
+ }
+ }
+ if (!cur_token.empty()) {
+ token_list.push_back(cur_token);
+ cur_token.clear();
+ }
+
+ tokens = token_list;
+ return Status::OK();
+};
+
+std::vector<std::string> ShuntingYard(const std::vector<std::string>& tokens) {
+ std::vector<std::string> res;
+ std::stack<std::string> operatorStack;
+
+ for (std::string str : tokens) {
+ if (str == "(") {
+ operatorStack.push(str);
+ } else if (str == ")") {
+ while (!operatorStack.empty() && operatorStack.top() != "(") {
+ res.push_back(operatorStack.top());
+ operatorStack.pop();
+ }
+ operatorStack.pop(); // Pop the '('
+ } else if (isOperator(str)) {
+ while (!operatorStack.empty() && getPrecedence(operatorStack.top()) >= getPrecedence(str)) {
+ res.push_back(operatorStack.top());
+ operatorStack.pop();
+ }
+ operatorStack.push(str);
+ } else {
+ res.push_back(str);
+ }
+ }
+
+ while (!operatorStack.empty()) {
+ res.push_back(operatorStack.top());
|
Wrong indentation
|