Some package audit fix #2497

Merged
merged 4 commits on Nov 13, 2023
46 changes: 12 additions & 34 deletions connectors/package-lock.json

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions connectors/src/connectors/slack/bot.ts
@@ -1,5 +1,5 @@
import { WebClient } from "@slack/web-api";
-import { Message } from "@slack/web-api/dist/response/ConversationsHistoryResponse";
+import { MessageElement } from "@slack/web-api/dist/response/ConversationsHistoryResponse";
import { ConversationsRepliesResponse } from "@slack/web-api/dist/response/ConversationsRepliesResponse";
import levenshtein from "fast-levenshtein";

@@ -620,7 +620,7 @@ async function makeContentFragment(
startingAtTs: string | null,
connector: Connector
) {
-let allMessages: Message[] = [];
+let allMessages: MessageElement[] = [];

let next_cursor = undefined;

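A note on the Slack type swap here (and in activities.ts below): the bumped @slack/web-api typings export the items of `ConversationsHistoryResponse.messages` as `MessageElement`, so the arrays that accumulate history and thread replies are typed with that element type instead of the old `Message` export. A minimal sketch of the pattern, not the PR's code; the function name, channel argument, and page size are illustrative:

```typescript
import { WebClient } from "@slack/web-api";
import { MessageElement } from "@slack/web-api/dist/response/ConversationsHistoryResponse";

// Sketch only: collect every message of a channel into MessageElement[].
async function fetchAllMessages(
  client: WebClient,
  channel: string // hypothetical channel id
): Promise<MessageElement[]> {
  const allMessages: MessageElement[] = [];
  let cursor: string | undefined = undefined;
  do {
    const res = await client.conversations.history({
      channel,
      cursor,
      limit: 100,
    });
    // `messages` is MessageElement[] | undefined in the generated response types.
    allMessages.push(...(res.messages ?? []));
    cursor = res.response_metadata?.next_cursor || undefined;
  } while (cursor);
  return allMessages;
}
```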
8 changes: 4 additions & 4 deletions connectors/src/connectors/slack/temporal/activities.ts
@@ -6,7 +6,7 @@ import {
} from "@slack/web-api";
import {
ConversationsHistoryResponse,
-Message,
+MessageElement,
} from "@slack/web-api/dist/response/ConversationsHistoryResponse";
import {
Channel,
@@ -361,7 +361,7 @@ export async function syncNonThreaded(
const dataSourceConfig = dataSourceConfigFromConnector(connector);
const client = await getSlackClient(connectorId);
const nextCursor: string | undefined = undefined;
-const messages: Message[] = [];
+const messages: MessageElement[] = [];

const startTsSec = Math.round(startTsMs / 1000);
const endTsSec = Math.round(endTsMs / 1000);
@@ -523,7 +523,7 @@ export async function syncThread(
const dataSourceConfig = dataSourceConfigFromConnector(connector);
const slackClient = await getSlackClient(connectorId);

-let allMessages: Message[] = [];
+let allMessages: MessageElement[] = [];

let next_cursor = undefined;

@@ -645,7 +645,7 @@ async function processMessageForMentions(

export async function formatMessagesForUpsert(
channelId: string,
-messages: Message[],
+messages: MessageElement[],
connectorId: ModelId,
slackClient: WebClient
) {
2 changes: 1 addition & 1 deletion core/src/providers/ai21.rs
@@ -210,7 +210,7 @@ impl LLM for AI21LLM {
) -> Result<LLMGeneration> {
let r = self
.generate(
-prompt.clone(),
+prompt,
1, // num_results
match max_tokens {
Some(f) => f,
4 changes: 2 additions & 2 deletions core/src/providers/azure_openai.rs
@@ -312,7 +312,7 @@ impl LLM for AzureOpenAILLM {
self.api_key.clone().unwrap(),
None,
None,
-prompt.clone(),
+prompt,
max_tokens,
temperature,
n,
@@ -351,7 +351,7 @@ impl LLM for AzureOpenAILLM {
self.api_key.clone().unwrap(),
None,
None,
-prompt.clone(),
+prompt,
max_tokens,
temperature,
n,
2 changes: 1 addition & 1 deletion core/src/providers/cohere.rs
@@ -328,7 +328,7 @@ impl LLM for CohereLLM {

let r = self
.generate(
-prompt.clone(),
+prompt,
max_tokens,
temperature,
n,
4 changes: 2 additions & 2 deletions core/src/providers/openai.rs
@@ -1221,7 +1221,7 @@ impl LLM for OpenAILLM {
None => None,
},
Some(self.id.clone()),
-prompt.clone(),
+prompt,
max_tokens,
temperature,
n,
@@ -1266,7 +1266,7 @@
None => None,
},
Some(self.id.clone()),
-prompt.clone(),
+prompt,
max_tokens,
temperature,
n,
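The four Rust provider edits above are the same cleanup: the `.clone()` on `prompt` is dropped where the value is only forwarded to the underlying generation call, so the extra copy (or reference clone) was unnecessary and removing it does not change behavior. That a redundant-clone lint prompted it is an assumption; the PR itself does not say.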
25 changes: 16 additions & 9 deletions front/package-lock.json

Some generated files are not rendered by default.

6 changes: 3 additions & 3 deletions front/pages/w/[wId]/builder/data-sources/managed.tsx
@@ -384,7 +384,7 @@ export default function DataSourcesView({
ds.connectorProvider as ConnectorProvider
] ||
!isAdmin;
-const onclick = async () => {
+const onClick = async () => {
let isDataSourceAllowedInPlan: boolean;

switch (ds.connectorProvider) {
@@ -439,7 +439,7 @@ export default function DataSourcesView({
variant="primary"
icon={CloudArrowLeftRightIcon}
disabled={disabled}
-onClick={onclick}
+onClick={onClick}
label={label}
/>
)}
@@ -503,7 +503,7 @@ export default function DataSourcesView({
variant="secondary"
icon={CloudArrowLeftRightIcon}
disabled={disabled}
-onClick={onclick}
+onClick={onClick}
label="Acknowledge and Connect"
/>
</DropdownMenu.Button>
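The front change above is a behavior-neutral rename of the local handler from `onclick` to camelCase `onClick`, matching the React prop it is fed into. A minimal sketch of the pattern using a plain `<button>`; the component and the `connectDataSource` action are hypothetical, not the PR's code:

```tsx
import * as React from "react";

// Hypothetical async action standing in for the real connection flow.
async function connectDataSource(): Promise<void> {
  // ...call the connector setup here
}

export function ConnectButton({ disabled }: { disabled: boolean }) {
  // camelCase handler name mirrors the React `onClick` prop it feeds.
  const onClick = async () => {
    await connectDataSource();
  };
  return (
    <button disabled={disabled} onClick={onClick}>
      Connect
    </button>
  );
}
```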