A simple Scala ZIO wrapper library for OpenAI's GPT-3.5+ APIs.
To use the library, add the openai-zio dependency to your build.sbt like so:
libraryDependencies += "com.raisondata.openai" %% "openai-zio" % "0.1.0"
or in your pom.xml file like so:
<dependency>
  <groupId>com.raisondata.openai</groupId>
  <artifactId>openai-zio</artifactId>
  <version>0.1.0</version>
</dependency>
Note that the library supports Scala 2.12 and 2.13 at the moment.
A quick example that transcribes an audio file with the Audio API:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai.OpenAI

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      // transcribe a local audio file with the Audio API
      response <- service.Audio.transcribe("/path/to/audio/audio.mp3")
    } yield response
  }
}
To start making calls to OpenAI, you will need to instantiate the OpenAI class with your OpenAI API key, i.e.
val service = new OpenAI("YOUR_API_KEY")
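If you prefer not to hard-code the key, you can read it from the environment with standard ZIO facilities. A minimal sketch, assuming the key is stored in an OPENAI_API_KEY environment variable (the variable name is only an example) and using the Models call described below:

import zio._
import com.raisondata.openai.OpenAI

object MainFromEnv extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] =
    for {
      // OPENAI_API_KEY is an assumed variable name, not something the library requires
      key <- zio.System.env("OPENAI_API_KEY")
               .someOrFail(new RuntimeException("OPENAI_API_KEY is not set"))
      service   = new OpenAI(key)
      response <- service.Models.listModels
    } yield response
}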
You can access the Models API through yourOpenAIInstance.Models._:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai.OpenAI

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <- service.Models.listModels
    } yield response
  }
}
You can access the Completions API through yourOpenAIInstance.Completions.createCompletion:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai.OpenAI
import com.raisondata.openai.Model

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <-
        service.Completions.createCompletion(
          model = Model.gpt_3_5_turbo,
          prompt = "Say this is a test",
          user = Some("end-user-id")
        )
      _ <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}
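Every call returns a ZIO effect, so you can layer standard ZIO error handling and retries on top of it. A minimal sketch that wraps createCompletion with logging and a retry schedule; the schedule values are arbitrary examples, not library defaults:

import zio._
import com.raisondata.openai.{Model, OpenAI}

object RetryExample extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    service.Completions
      .createCompletion(
        model = Model.gpt_3_5_turbo,
        prompt = "Say this is a test",
        user = Some("end-user-id")
      )
      .tapError(err => ZIO.logError(s"OpenAI call failed: $err"))
      // arbitrary example schedule: up to 3 retries with exponential backoff
      .retry(Schedule.exponential(1.second) && Schedule.recurs(3))
  }
}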
You can access the Chat API (non-streaming) through yourOpenAIInstance.Chat.createChat:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai._
import com.raisondata.openai.api.chat.CreateChat

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <-
        service.Chat.createChat(
          model = Model.gpt_3_5_turbo,
          messages = List(CreateChat.Message(Role.user, "Hello!")), // Role.user, Role.assistant, Role.system
          user = Some("end-user-id")
        )
      _ <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}
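Because messages is an ordinary List of CreateChat.Message values, you can seed the conversation with a system prompt and append further turns as the dialogue grows. A minimal sketch (the prompt text is only an example):

import zio._
import com.raisondata.openai._
import com.raisondata.openai.api.chat.CreateChat

object ChatWithSystemPrompt extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    // seed the conversation with a system prompt, then add the user's turn
    val conversation = List(
      CreateChat.Message(Role.system, "You are a helpful assistant."),
      CreateChat.Message(Role.user, "Hello!")
    )

    for {
      response <-
        service.Chat.createChat(
          model = Model.gpt_3_5_turbo,
          messages = conversation,
          user = Some("end-user-id")
        )
      _ <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}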
You can access the Edits API through yourOpenAIInstance.Edits.createEdit:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai.OpenAI
import com.raisondata.openai.Model

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <-
        service.Edits.createEdit(
          input = "What day of the wek is it?", // the misspelling is intentional; the instruction asks the model to fix it
          instruction = "Fix the spelling mistakes"
        )
      _ <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}
You can access the Images APIs through yourOpenAIInstance.Images._:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai._

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <-
        service.Images.generateImage(
          prompt = "A cute baby sea otter",
          user = Some("end-user-id"),
          size = `512x512` // defaults to 1024x1024
        )
      _ <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}
You can access the Embeddings API through yourOpenAIInstance.Embeddings.createEmbeddings:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai.OpenAI
import com.raisondata.openai.Model

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <-
        service.Embeddings.createEmbeddings(
          model = Model.text_embedding_ada_002,
          input = "The food was delicious and the waiter...",
          user = Some("end-user-id")
        )
      _ <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}
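Embedding vectors are typically compared with cosine similarity. The helper below is plain Scala and makes no assumptions about the library's response type; extracting the two vectors from the API responses as Seq[Double] is left to you:

object EmbeddingMath {
  // cosine similarity between two embedding vectors of equal length
  def cosineSimilarity(a: Seq[Double], b: Seq[Double]): Double = {
    require(a.nonEmpty && a.length == b.length, "vectors must be non-empty and of equal length")
    val dot   = a.zip(b).map { case (x, y) => x * y }.sum
    val normA = math.sqrt(a.map(x => x * x).sum)
    val normB = math.sqrt(b.map(x => x * x).sum)
    dot / (normA * normB)
  }
}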
You can access the File APIs through yourOpenAIInstance.Files._:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai.OpenAI

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <- service.Files.listFiles
      _        <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}
You can access the Moderation API through yourOpenAIInstance.Moderations.createModeration:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai.OpenAI

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <-
        service.Moderations.createModeration(
          input = "I want to kill them."
        )
      _ <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}
You can access the Fine-tunes API through yourOpenAIInstance.FineTunes.createFineTune:
import zio.{Scope, ZIO, ZIOAppArgs, ZIOAppDefault}
import com.raisondata.openai.OpenAI

object Main extends ZIOAppDefault {
  override def run: ZIO[Environment with ZIOAppArgs with Scope, Any, Any] = {
    val service = new OpenAI("YOUR_API_KEY")

    for {
      response <-
        service.FineTunes.createFineTune(
          trainingFile = "id_of_uploaded_file"
        )
      _ <- ZIO.logInfo(s"Got back a response: $response")
    } yield response
  }
}
Planned:
- Scala 3 support