Skip to content

Commit

Permalink
ci(js): add linting (#19)
Browse files Browse the repository at this point in the history
  • Loading branch information
mikeldking authored Jan 2, 2024
1 parent bd7df70 commit 0bfee13
Show file tree
Hide file tree
Showing 9 changed files with 1,877 additions and 2,953 deletions.
19 changes: 14 additions & 5 deletions .github/workflows/typescript-CI.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,22 @@ jobs:
version: 8
- name: Install Dependencies
working-directory: ./js
run: pnpm install --frozen-lockfile
run: pnpm install --frozen-lockfile -r
- name: Pre-Build
working-directory: ./js
run: pnpm run -r prebuild
- name: Build
working-directory: ./js
run: pnpm run -r build
- name: Formatting
working-directory: ./js
run: |
pnpm run prettier:check
- name: Type Checking
working-directory: ./js
run: |
pnpm run type:check
working-directory: ./js
run: |
pnpm run type:check
- name: Linting
working-directory: ./js
run: |
pnpm run lint
33 changes: 33 additions & 0 deletions js/.eslintrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
// ESLint configuration for the js/ workspace.
// Consumed by the root `lint` script (`eslint . --ext .ts`), so the
// TypeScript parser and plugin are wired in below.
module.exports = {
  env: {
    browser: true,
    es2021: true,
  },
  // Base JS recommendations plus the @typescript-eslint recommended set.
  extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
  overrides: [
    {
      // The config files themselves run under Node as CommonJS scripts,
      // so give them a `node` env and `script` source type.
      env: {
        node: true,
      },
      files: [".eslintrc.{js,cjs}"],
      parserOptions: {
        sourceType: "script",
      },
    },
  ],
  // Parse .ts sources with the TypeScript-aware parser.
  parser: "@typescript-eslint/parser",
  parserOptions: {
    ecmaVersion: "latest",
    sourceType: "module",
  },
  plugins: ["@typescript-eslint"],
  rules: {
    "@typescript-eslint/no-unused-vars": [
      "error",
      {
        argsIgnorePattern: "^_",
        varsIgnorePattern: "^_",
      },
    ], // ignore unused variables starting with underscore
  },
};
3 changes: 2 additions & 1 deletion js/.prettierignore
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
pnpm-lock.yaml
pnpm-lock.yaml
dist
1 change: 0 additions & 1 deletion js/examples/openai/src/tracer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ import { OpenAIInstrumentation } from "@arizeai/openinference-instrumentation-op
import {
ConsoleSpanExporter,
SimpleSpanProcessor,
BatchSpanProcessor,
} from "@opentelemetry/sdk-trace-base";
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
import { Resource } from "@opentelemetry/resources";
Expand Down
6 changes: 5 additions & 1 deletion js/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"scripts": {
"prettier:check": "prettier --check .",
"prettier:write": "prettier --write .",
"type:check": "pnpm run -r type:check"
"type:check": "pnpm run -r type:check",
"lint": "eslint . --ext .ts"
},
"keywords": [
"telemetry",
Expand All @@ -17,6 +18,9 @@
"devDependencies": {
"@changesets/cli": "^2.27.1",
"@types/jest": "^29.5.11",
"@typescript-eslint/eslint-plugin": "^6.17.0",
"@typescript-eslint/parser": "^6.17.0",
"eslint": "^8.56.0",
"jest": "^29.7.0",
"prettier": "^3.1.1",
"rimraf": "^5.0.5",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ export class OpenAIInstrumentation extends InstrumentationBase<typeof openai> {
if (module?.openInferencePatched) {
return module;
}
// eslint-disable-next-line @typescript-eslint/no-this-alias
const instrumentation: OpenAIInstrumentation = this;
type CompletionCreateType =
typeof module.OpenAI.Chat.Completions.prototype.create;
Expand All @@ -66,6 +67,7 @@ export class OpenAIInstrumentation extends InstrumentationBase<typeof openai> {
this._wrap(
module.OpenAI.Chat.Completions.prototype,
"create",
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(original: CompletionCreateType): any => {
return function patchedCreate(
this: unknown,
Expand All @@ -74,7 +76,7 @@ export class OpenAIInstrumentation extends InstrumentationBase<typeof openai> {
>
) {
const body = args[0];
const { messages, ...invocationParameters } = body;
const { messages: _messages, ...invocationParameters } = body;
const span = instrumentation.tracer.startSpan(
`OpenAI Chat Completions`,
{
Expand Down Expand Up @@ -161,7 +163,7 @@ function getLLMInputMessagesAttributes(
function getUsageAttributes(
response: Stream<ChatCompletionChunk> | ChatCompletion,
) {
if (response.hasOwnProperty("usage")) {
if (Object.prototype.hasOwnProperty.call(response, "usage")) {
const completion = response as ChatCompletion;
if (completion.usage) {
return {
Expand All @@ -184,7 +186,7 @@ function getLLMOutputMessagesAttributes(
response: Stream<ChatCompletionChunk> | ChatCompletion,
): Attributes {
// Handle chat completion
if (response.hasOwnProperty("choices")) {
if (Object.prototype.hasOwnProperty.call(response, "choices")) {
const completion = response as ChatCompletion;
// Right now support just the first choice
const choice = completion.choices[0];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,29 +4,20 @@ import {
SimpleSpanProcessor,
} from "@opentelemetry/sdk-trace-base";
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
import { Resource } from "@opentelemetry/resources";
import { SemanticResourceAttributes } from "@opentelemetry/semantic-conventions";
const tracerProvider = new NodeTracerProvider();
tracerProvider.register();
const memoryExporter = new InMemorySpanExporter();
const tracer = tracerProvider.getTracer("default");
const resource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: "test-instrumentation-openai",
});

const instrumentation = new OpenAIInstrumentation();
instrumentation.disable();

import * as OpenAI from "openai";
import { ChatCompletion } from "openai/resources";
import { ChatCompletionCreateParamsBase } from "openai/resources/chat/completions";

describe("OpenAIInstrumentation", () => {
let openai: OpenAI.OpenAI;

const memoryExporter = new InMemorySpanExporter();
const provider = new NodeTracerProvider();
const tracer = provider.getTracer("default");
provider.getTracer("default");

instrumentation.setTracerProvider(tracerProvider);
tracerProvider.addSpanProcessor(new SimpleSpanProcessor(memoryExporter));
Expand All @@ -45,7 +36,9 @@ describe("OpenAIInstrumentation", () => {
jest.clearAllMocks();
});
it("is patched", () => {
expect((OpenAI as any).openInferencePatched).toBe(true);
expect(
(OpenAI as { openInferencePatched?: boolean }).openInferencePatched,
).toBe(true);
});
it("creates a span for chat completions", async () => {
const response = {
Expand All @@ -72,12 +65,12 @@ describe("OpenAIInstrumentation", () => {
};
// Mock out the chat completions endpoint
jest.spyOn(openai, "post").mockImplementation(
// @ts-expect-error
async (): Promise<any> => {
// @ts-expect-error the response type is not correct - this is just for testing
async (): Promise<unknown> => {
return response;
},
);
const chatCompletion = await openai.chat.completions.create({
await openai.chat.completions.create({
messages: [{ role: "user", content: "Say this is a test" }],
model: "gpt-3.5-turbo",
});
Expand Down
Loading

0 comments on commit 0bfee13

Please sign in to comment.