open_ai_mock = <MagicMock name='process_chunks_concurrently' id='140106739988992'>
process_chunks_mock = <MagicMock name='invoke' id='140106739561904'>
client = <FlaskClient <Flask 'app'>>
tokens = {'access_token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJmcmVzaCI6ZmFsc2UsImlhdCI6MTcxNTc2MTQxMSwianRpIjoiZjA3NmNkYzg...rZjk5Y3lxMmJIcjlncDg5a2JwQmUwd3JMLW9maVVNUXFPVkRGLTFFMlJ6R192TklzeWNVPSJ9.TOfUGFX-Tj0Cy_rtBDZVnH_1iKLSu0MlIYgT0POx9hA'}
work_flow_cache = {'contract_type': '3299a386ce514e4986e1e82a2d50851c', 'conversation_ids': {'draft': '86a347ca-b29b-ijkl-8e16-95090de0f...ent', 'contract_type_id': '8a8f6e1f4b9344dfa348bb323916c93e', 'template_id': '44e14150f0b542c690315fb31167c440'}}, ...}
    @mock.patch("utils.LLM.model.ChatOpenAI.invoke")
    @mock.patch("utils.LLM.OpenAILLM.process_chunks_concurrently")
    def test_draft_generation(
        open_ai_mock,
        process_chunks_mock,
        client: FlaskClient,
        tokens: Dict[str, str],
        work_flow_cache,
    ):
        """
        This test function is used to generate a draft document. It first sets the conversation_id in the work_flow_cache.
        Then it makes a POST request to the /v1/generate_draft endpoint with the necessary data to generate a draft.
        The necessary data includes project_id, contract_type, doc_id, key_terms_specification, and chat_model.
        The function asserts that the status code of the response is 200, indicating a successful request.
        """
    
        with open(
            Path(__file__).parent / "test_data/test_langchain_data.json", encoding="utf-8"
        ) as lang_fp:
            openai_sample = json.load(lang_fp)
            ai_data_draft_formatted = openai_sample["generated_draft_formatted"]
            mock_open_ai = mock.MagicMock(name="__call__").return_value
            mock_open_ai.content = json.dumps(ai_data_draft_formatted)
            open_ai_mock.return_value = mock_open_ai
            ai_data_well_formatted_op = openai_sample["generated_well_formatted_data_op"]
            ai_data_well_formatted_op_retried = openai_sample[
                "generated_well_formatted_data_op_retried"
            ]
            mock_response1 = mock.MagicMock()
            mock_response1.content = json.dumps(ai_data_well_formatted_op)
    
            mock_response2 = mock.MagicMock()
            mock_response2.content = json.dumps(ai_data_well_formatted_op_retried)
    
            process_chunks_mock.side_effect = [mock_response1, mock_response2]
            project_id_request = work_flow_cache["projects"]["Document Drafting"]
    
            result_draft_formatted = client.post(
                path="/v1/generate_draft",
                json={
                    "project_id": project_id_request,
                    "contract_type": work_flow_cache["document_drafting_template_data"][
                        "draft_formatted"
                    ]["contract_type"],
                    "chat_model": {"query": "", "action": "", "conversation_id": "1234"},
                    "is_stream": True,
                },
                headers={"Authorization": f"Bearer {tokens['access_token']}"},
            )
>           assert result_draft_formatted.status_code == 200
E           assert 422 == 200
E            +  where 422 = <WrapperTestResponse streamed [422 UNPROCESSABLE ENTITY]>.status_code
tests/test_15_draft.py:1162: AssertionError
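A note on the captured arguments at the top of the failure: open_ai_mock holds the mock named 'process_chunks_concurrently' and process_chunks_mock holds the mock named 'invoke'. Stacked mock.patch decorators inject their mocks bottom-up (the patch closest to the def supplies the first parameter), and the test's parameter names are listed in the opposite order, so open_ai_mock.return_value actually configures process_chunks_concurrently while process_chunks_mock.side_effect actually configures ChatOpenAI.invoke. The snippet below is a minimal, self-contained illustration of that ordering; json.dumps and json.loads are stand-in patch targets, not anything from the project.

    import json
    from unittest import mock

    @mock.patch("json.loads")   # outer patch: its mock is injected last
    @mock.patch("json.dumps")   # inner patch: its mock is injected first
    def demo(dumps_mock, loads_mock):
        # Stacked mock.patch decorators hand their mocks to the function bottom-up,
        # so the decorator closest to the def supplies the first parameter.
        dumps_mock.return_value = "from dumps mock"
        loads_mock.return_value = "from loads mock"
        assert json.dumps({}) == "from dumps mock"
        assert json.loads("{}") == "from loads mock"

    demo()

Renaming the parameters (process_chunks_mock first, open_ai_mock second) or swapping the decorators would make the names match the mocks they receive.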
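The 422 UNPROCESSABLE ENTITY itself points at request validation rather than at the mocks, which are never reached if the schema check fails. One plausible cause, going only by the docstring, is the request body: the docstring lists doc_id and key_terms_specification among the necessary data, but the JSON sent to /v1/generate_draft contains neither. The payload below is a hypothetical sketch of what the docstring describes; every value is a placeholder and the endpoint's actual schema is an assumption, not taken from the project.

    # Hypothetical payload for /v1/generate_draft, assuming the docstring's field
    # list matches what the endpoint validates. All values are placeholders.
    payload = {
        "project_id": "<work_flow_cache['projects']['Document Drafting']>",
        "contract_type": "<contract_type from the draft_formatted template data>",
        "doc_id": "<doc id>",            # named in the docstring, absent from the request
        "key_terms_specification": {},   # named in the docstring, absent from the request
        "chat_model": {"query": "", "action": "", "conversation_id": "1234"},
        "is_stream": True,
    }

Comparing this against the real schema (or the 422 response body) should pin down which field triggers the error.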