feat: support fit() in GeminiTextGenerator #758
@@ -571,6 +571,8 @@ class GeminiTextGenerator(base.BaseEstimator):
             Connection to connect with remote service. str of the format <PROJECT_NUMBER/PROJECT_ID>.<LOCATION>.<CONNECTION_ID>.
             If None, use default connection in session context. BigQuery DataFrame will try to create the connection and attach
             permission if the connection isn't fully set up.
+        max_iterations (Optional[int], Default to 300):
+            The number of steps to run when performing supervised tuning.
     """

     def __init__(

@@ -581,9 +583,11 @@ def __init__(
         ] = "gemini-pro",
         session: Optional[bigframes.Session] = None,
         connection_name: Optional[str] = None,
+        max_iterations: int = 300,
     ):
         self.model_name = model_name
         self.session = session or bpd.get_global_session()
+        self.max_iterations = max_iterations
         self._bq_connection_manager = self.session.bqconnectionmanager

         connection_name = connection_name or self.session._bq_connection
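For context, the new argument only changes what a later fit() call sends to BQML; constructing the estimator is otherwise unchanged. A minimal usage sketch (the connection name below is a placeholder, not part of this change):

from bigframes.ml import llm

# max_iterations sets the number of supervised tuning steps BQML will run;
# it has no effect until fit() is called.
model = llm.GeminiTextGenerator(
    model_name="gemini-pro",
    connection_name="my-project.us.my-connection",  # placeholder connection
    max_iterations=500,
)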
@@ -647,6 +651,55 @@ def _from_bq(
         model._bqml_model = core.BqmlModel(session, bq_model)
         return model

+    @property
+    def _bqml_options(self) -> dict:
+        """The model options as they will be set for BQML"""
+        options = {
+            "max_iterations": self.max_iterations,
+            "data_split_method": "NO_SPLIT",
+        }
+        return options
+
+    def fit(
+        self,
+        X: Union[bpd.DataFrame, bpd.Series],
+        y: Union[bpd.DataFrame, bpd.Series],
+    ) -> GeminiTextGenerator:
+        """Fine tune GeminiTextGenerator model. Only supports the "gemini-pro" model for now.
+
+        .. note::
+
+            This product or feature is subject to the "Pre-GA Offerings Terms" in the General Service Terms section of the
+            Service Specific Terms (https://cloud.google.com/terms/service-terms#1). Pre-GA products and features are available "as is"
+            and might have limited support. For more information, see the launch stage descriptions
+            (https://cloud.google.com/products#product-launch-stages).
+
+        Args:
+            X (bigframes.dataframe.DataFrame or bigframes.series.Series):
+                DataFrame of shape (n_samples, n_features). Training data.
+            y (bigframes.dataframe.DataFrame or bigframes.series.Series):
+                Training labels.
+
+        Returns:
+            GeminiTextGenerator: Fitted estimator.
+        """
+        if self._bqml_model.model_name.startswith("gemini-1.5"):
+            raise NotImplementedError("Fit is not supported for gemini-1.5 model.")
+
+        X, y = utils.convert_to_dataframe(X, y)
+
+        options = self._bqml_options
+        options["endpoint"] = "gemini-1.0-pro-002"
Inline review thread on the line above:
  Comment: Spell out the model version to be used for tuning in the docs.
  Reply: Our model name is "gemini-pro-1.0", but their endpoint is called "gemini-1.0-pro-002". Any ideas for how to spell it out?
  Reply: Synced offline, no need to document the model version now: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/model-versioning#auto-updated-models
+        options["prompt_col"] = X.columns.tolist()[0]
+
+        self._bqml_model = self._bqml_model_factory.create_llm_remote_model(
+            X,
+            y,
+            options=options,
+            connection_name=self.connection_name,
+        )
+        return self
+
     def predict(
         self,
         X: Union[bpd.DataFrame, bpd.Series],
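End to end, the new method is meant to be used as in the sketch below; the DataFrame contents and column names are illustrative, and fit() takes the first column of X as the prompt column:

import bigframes.pandas as bpd
from bigframes.ml import llm

# Hypothetical tuning data: one prompt column and one label column.
train_df = bpd.DataFrame(
    {
        "prompt": ["Classify the sentiment: great movie", "Classify the sentiment: weak plot"],
        "label": ["positive", "negative"],
    }
)

# Uses the default session and connection; max_iterations controls the tuning steps.
model = llm.GeminiTextGenerator(model_name="gemini-pro", max_iterations=300)
model = model.fit(X=train_df[["prompt"]], y=train_df[["label"]])

# The tuned model is queried the same way as the base model.
result = model.predict(bpd.DataFrame({"prompt": ["Classify the sentiment: decent acting"]}))

fit() returns the estimator itself, now backed by the tuned BQML remote model.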
Review thread:
  Comment: Also mention supported model types in docs.
  Reply: Done.
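For reference, the full set of BQML options assembled by fit() in this change works out to roughly the following (a sketch assuming X has a single column named "prompt"; only "endpoint" and "prompt_col" are added on top of _bqml_options):

# Illustrative reconstruction of the options dict passed to create_llm_remote_model().
options = {
    "max_iterations": 300,             # self.max_iterations (constructor default)
    "data_split_method": "NO_SPLIT",   # use every row for tuning, no evaluation split
    "endpoint": "gemini-1.0-pro-002",  # tuning endpoint pinned by fit()
    "prompt_col": "prompt",            # first column of X
}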