我如何找到ASSISTANT_USERNAME和ASSISTANT_PASSWORD

时间:2018-12-12 20:35:22

标签: ibm-watson watson-conversation

我尝试了三个小时,但没有找到任何解决方案。我正在用Android Studio和类似的代码编写代码:

 Assistant assistantservice = new Assistant("2018-02-16");
        assistantservice.setUsernameAndPassword("<ASSISTANT_USERNAME>", "<ASSISTANT_PASSWORD>");

    InputData input = new InputData.Builder(inputmessage).build();
    //Workspaces are now Skills
    MessageOptions options = new MessageOptions.Builder("<SKILL_ID>").input(input).context(context).build();
    MessageResponse response = assistantservice.message(options).execute();

我需要在这里填写<ASSISTANT_USERNAME>、<ASSISTANT_PASSWORD>和<SKILL_ID>。但是当我搜索时,只能找到过时的资料。有人可以帮我吗?我所有的MainActivity代码:

// Chat screen: sends user text/voice input to Watson Assistant and plays
// replies via Watson Text-to-Speech; voice input uses Watson Speech-to-Text.
public class MainActivity extends AppCompatActivity {


// Chat list UI and its adapter.
private RecyclerView recyclerView;
private ChatAdapter mAdapter;
// NOTE(review): raw ArrayList — appears to hold Message items (see sendMessage);
// confirm and parameterize as ArrayList<Message>.
private ArrayList messageArrayList;
private EditText inputMessage;
private ImageButton btnSend;
private ImageButton btnRecord;
//private Map<String,Object> context = new HashMap<>();
// Watson Assistant conversation context, carried across message() calls.
com.ibm.watson.developer_cloud.assistant.v1.model.Context context = null;
// Plays synthesized audio returned by Text-to-Speech.
StreamPlayer streamPlayer;
// True until the first (greeting) request has been sent to Watson.
private boolean initialRequest;
private boolean permissionToRecordAccepted = false;
private static final int REQUEST_RECORD_AUDIO_PERMISSION = 200;
private static String TAG = "MainActivity";
private static final int RECORD_REQUEST_CODE = 101;
// True while a Speech-to-Text websocket session is active.
private boolean listening = false;
private SpeechToText speechService;
private MicrophoneInputStream capture;
private SpeakerLabelsDiarization.RecoTokens recoTokens;
private MicrophoneHelper microphoneHelper;

/**
 * Wires up the chat UI, configures the Text-to-Speech service, checks the
 * RECORD_AUDIO permission, and installs tap/long-press/send listeners.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    inputMessage = findViewById(R.id.message);
    btnSend = findViewById(R.id.btn_send);
    btnRecord = findViewById(R.id.btn_record);
    // Apply the bundled custom font to the input field.
    String customFont = "Montserrat-Regular.ttf";
    Typeface typeface = Typeface.createFromAsset(getAssets(), customFont);
    inputMessage.setTypeface(typeface);
    recyclerView = findViewById(R.id.recycler_view);

    messageArrayList = new ArrayList<>();
    mAdapter = new ChatAdapter(messageArrayList);
    microphoneHelper = new MicrophoneHelper(this);

    // Stack from the end so the newest message sits at the bottom, chat-style.
    LinearLayoutManager layoutManager = new LinearLayoutManager(this);
    layoutManager.setStackFromEnd(true);
    recyclerView.setLayoutManager(layoutManager);
    recyclerView.setItemAnimator(new DefaultItemAnimator());
    recyclerView.setAdapter(mAdapter);
    this.inputMessage.setText("");
    this.initialRequest = true;
    // Prime the conversation so Watson responds with its greeting.
    sendMessage();

    //Watson Text-to-Speech Service on IBM Cloud
    final TextToSpeech textService = new TextToSpeech();
    //Use "apikey" as username and apikey values as password
    // SECURITY NOTE(review): credential is hard-coded and now public — move it
    // to secure config (e.g. BuildConfig/gradle properties) and rotate the key.
    textService.setUsernameAndPassword("apikey", "qCE_kg4iN3BHiXJ1lbmhsh0UhLkwPgHHhh3dsfjvTvhjlUukH5F");
    textService.setEndPoint("https://gateway-syd.watsonplatform.net/text-to-speech/api");

    // Ask for the microphone permission up front if it is not yet granted.
    int permission = ContextCompat.checkSelfPermission(this,
            Manifest.permission.RECORD_AUDIO);
    if (permission != PackageManager.PERMISSION_GRANTED) {
        Log.i(TAG, "Permission to record denied");
        makeRequest();
    }

    // Tap a chat message: synthesize and play it on a background thread.
    recyclerView.addOnItemTouchListener(new RecyclerTouchListener(getApplicationContext(), recyclerView, new ClickListener() {
        @Override
        public void onClick(View view, final int position) {
            Thread thread = new Thread(new Runnable() {
                public void run() {
                    Message audioMessage;
                    try {
                        audioMessage = (Message) messageArrayList.get(position);
                        streamPlayer = new StreamPlayer();
                        if (audioMessage != null && !audioMessage.getMessage().isEmpty()) {
                            SynthesizeOptions synthesizeOptions = new SynthesizeOptions.Builder()
                                    .text(audioMessage.getMessage())
                                    .voice(SynthesizeOptions.Voice.EN_US_LISAVOICE)
                                    .accept(SynthesizeOptions.Accept.AUDIO_WAV)
                                    .build();
                            streamPlayer.playStream(textService.synthesize(synthesizeOptions).execute());
                        }
                    } catch (Exception e) {
                        // TODO(review): surface playback failures to the user.
                        e.printStackTrace();
                    }
                }
            });
            thread.start();
        }

        // Long-press a message: toggle voice recording.
        @Override
        public void onLongClick(View view, int position) {
            recordMessage();
        }
    }));

    btnSend.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Only attempt a network round-trip when connectivity exists.
            if (checkInternetConnection()) {
                sendMessage();
            }
        }
    });

    btnRecord.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            recordMessage();
        }
    });
} // fixed: removed the stray ';' that followed the method body

// Speech to Text Record Audio permission
/**
 * Handles results of runtime-permission requests (RECORD_AUDIO).
 *
 * Fix: the original indexed {@code grantResults[0]} for
 * {@code REQUEST_RECORD_AUDIO_PERMISSION} without a length check — Android
 * delivers an EMPTY array when the request is interrupted, which would throw
 * ArrayIndexOutOfBoundsException. All branches now guard the length first.
 */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    // Empty grantResults means the request was cancelled — treat as denied.
    boolean granted = grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED;
    switch (requestCode) {
        case REQUEST_RECORD_AUDIO_PERMISSION:
            permissionToRecordAccepted = granted;
            break;
        case RECORD_REQUEST_CODE:
            if (granted) {
                Log.i(TAG, "Permission has been granted by user");
            } else {
                Log.i(TAG, "Permission has been denied by user");
            }
            break;
        case MicrophoneHelper.REQUEST_PERMISSION:
            // Only toast on an explicit denial (a result was actually delivered).
            if (grantResults.length > 0 && !granted) {
                Toast.makeText(this, "Permission to record audio denied", Toast.LENGTH_SHORT).show();
            }
            break;
    }
    // if (!permissionToRecordAccepted ) finish();

}

// Requests the RECORD_AUDIO runtime permission; the outcome arrives in
// onRequestPermissionsResult under MicrophoneHelper.REQUEST_PERMISSION.
protected void makeRequest() {
    String[] requestedPermissions = {Manifest.permission.RECORD_AUDIO};
    ActivityCompat.requestPermissions(this, requestedPermissions,
            MicrophoneHelper.REQUEST_PERMISSION);
}


// Sending a message to Watson Conversation Service
/**
 * Sends the current input text to Watson Assistant on a background thread
 * and appends the first text reply to the chat list.
 *
 * Fix: the original dereferenced {@code response.getContext()} BEFORE its own
 * {@code response != null} check, so a null response caused an NPE that was
 * silently swallowed by the catch block. The guard now comes first.
 */
private void sendMessage() {

    final String inputmessage = this.inputMessage.getText().toString().trim();
    if (!this.initialRequest) {
        Message userMessage = new Message();
        userMessage.setMessage(inputmessage);
        userMessage.setId("1");
        messageArrayList.add(userMessage);
    } else {
        // First call only primes the conversation so Watson returns its
        // greeting; no user bubble is added. (The original built a Message
        // with id "100" here but never added it — dead code, removed.)
        this.initialRequest = false;
        Toast.makeText(getApplicationContext(),"Tap on the message for Voice",Toast.LENGTH_LONG).show();
    }

    this.inputMessage.setText("");
    mAdapter.notifyDataSetChanged();

    Thread thread = new Thread(new Runnable() {
        public void run() {
            try {
                Assistant assistantservice = new Assistant("2018-02-16");
                // SECURITY NOTE(review): credential is hard-coded and now
                // public — move to secure config and rotate this key.
                assistantservice.setUsernameAndPassword("apikey", "xNChLjpjnrmri9sXZcsbdfg3jaD1qdUA7FTZaj3jM0LfP5T");

                InputData input = new InputData.Builder(inputmessage).build();
                // Workspaces are now called Skills; this is the workspace/skill id.
                MessageOptions options = new MessageOptions.Builder("d516adfghs159-b63hs8-48b6-aac8-182bb47867e7")
                        .input(input).context(context).build();
                MessageResponse response = assistantservice.message(options).execute();

                if (response == null) {
                    return; // nothing to show; original behavior was an NPE here
                }

                // Carry the conversation context forward so Watson keeps state.
                if (response.getContext() != null) {
                    context = response.getContext();
                }

                Message outMessage = new Message();
                if (response.getOutput() != null && response.getOutput().containsKey("text")) {
                    ArrayList responseList = (ArrayList) response.getOutput().get("text");
                    if (responseList != null && responseList.size() > 0) {
                        outMessage.setMessage((String) responseList.get(0));
                        outMessage.setId("2");
                    }
                    messageArrayList.add(outMessage);
                }

                runOnUiThread(new Runnable() {
                    public void run() {
                        mAdapter.notifyDataSetChanged();
                        // Keep the newest message in view.
                        if (mAdapter.getItemCount() > 1) {
                            recyclerView.getLayoutManager()
                                    .smoothScrollToPosition(recyclerView, null, mAdapter.getItemCount() - 1);
                        }
                    }
                });
            } catch (Exception e) {
                // TODO(review): surface a user-visible error instead of swallowing.
                e.printStackTrace();
            }
        }
    });

    thread.start();

}
//Record a message via Watson Speech to Text
/**
 * Toggles streaming speech recognition: the first call opens a websocket
 * session with Watson Speech-to-Text and starts streaming the microphone;
 * the next call closes the microphone stream and stops listening.
 */
private void recordMessage() {
    speechService = new SpeechToText();
    //Use "apikey" as username and apikey as your password
    // SECURITY NOTE(review): credential is hard-coded and now public —
    // move to secure config and rotate this key.
    speechService.setUsernameAndPassword("apikey", "EHsB2mPrMJLmpPScS3EdfgbfOYePyLkC_1jAzCgDpzBlOtR");
    //Default: https://stream.watsonplatform.net/text-to-speech/api
    speechService.setEndPoint("https://gateway-syd.watsonplatform.net/speech-to-text/api");

    if (!listening) { // fixed: 'listening != true' -> idiomatic '!listening'
        capture = microphoneHelper.getInputStream(true);
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    speechService.recognizeUsingWebSocket(getRecognizeOptions(capture), new MicrophoneRecognizeDelegate());
                } catch (Exception e) {
                    showError(e);
                }
            }
        }).start();
        listening = true;
        Toast.makeText(MainActivity.this,"Listening....Click to Stop", Toast.LENGTH_LONG).show();
    } else {
        try {
            microphoneHelper.closeInputStream();
            listening = false;
            Toast.makeText(MainActivity.this,"Stopped Listening....Click to Start", Toast.LENGTH_LONG).show();
        } catch (Exception e) {
            // TODO(review): surface stream-close failures to the user.
            e.printStackTrace();
        }
    }
}

/**
 * Check Internet Connection
 * @return
 */
private boolean checkInternetConnection() {
    // get Connectivity Manager object to check connection
    ConnectivityManager cm =
            (ConnectivityManager)getSystemService(Context.CONNECTIVITY_SERVICE);

    NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
    boolean isConnected = activeNetwork != null &&
            activeNetwork.isConnectedOrConnecting();

    // Check for network connections
    if (isConnected){
        return true;
    }
   else {
        Toast.makeText(this, " No Internet Connection available ", Toast.LENGTH_LONG).show();
        return false;
    }

}

//Private Methods - Speech to Text
// Builds the streaming-recognition options used by the Speech-to-Text
// websocket session (OPUS audio, US-English broadband model, interim results).
private RecognizeOptions getRecognizeOptions(InputStream audioStream) {
    RecognizeOptions.Builder optionsBuilder = new RecognizeOptions.Builder()
            .audio(audioStream)
            .contentType(ContentType.OPUS.toString())
            .model("en-US_BroadbandModel")
            .interimResults(true)
            .inactivityTimeout(2000);
    //TODO: Uncomment this to enable Speaker Diarization
    //optionsBuilder.speakerLabels(true);
    return optionsBuilder.build();
}

// Receives Speech-to-Text websocket callbacks and pushes the best transcript
// of each result into the text input field.
private class MicrophoneRecognizeDelegate extends BaseRecognizeCallback {

    @Override
    public void onTranscription(SpeechRecognitionResults speechResults) {
        System.out.println(speechResults);
        //TODO: Uncomment this to enable Speaker Diarization
        /*SpeakerLabelsDiarization.RecoTokens recoTokens = new SpeakerLabelsDiarization.RecoTokens();
        if(speechResults.getSpeakerLabels() !=null)
        {
            recoTokens.add(speechResults);
            Log.i("SPEECHRESULTS",speechResults.getSpeakerLabels().get(0).toString());


        }*/
        if (speechResults.getResults() == null || speechResults.getResults().isEmpty()) {
            return; // nothing transcribed yet
        }
        String transcript = speechResults.getResults().get(0)
                .getAlternatives().get(0).getTranscript();
        showMicText(transcript);
    }

    @Override
    public void onConnected() {
        // No-op: nothing to do when the websocket connects.
    }

    @Override
    public void onError(Exception e) {
        showError(e);
        enableMicButton();
    }

    @Override
    public void onDisconnected() {
        enableMicButton();
    }

    @Override
    public void onInactivityTimeout(RuntimeException runtimeException) {
        // No-op: an inactivity timeout simply ends the session.
    }

    @Override
    public void onListening() {
        // No-op.
    }

    @Override
    public void onTranscriptionComplete() {
        // No-op.
    }
}

// Puts the recognized transcript into the input field on the UI thread.
private void showMicText(final String text) {
    Runnable updateInput = new Runnable() {
        @Override
        public void run() {
            inputMessage.setText(text);
        }
    };
    runOnUiThread(updateInput);
}

// Re-enables the record button on the UI thread (after errors/disconnects).
private void enableMicButton() {
    Runnable enableButton = new Runnable() {
        @Override
        public void run() {
            btnRecord.setEnabled(true);
        }
    };
    runOnUiThread(enableButton);
}

// Shows the exception message as a toast on the UI thread and logs the trace.
private void showError(final Exception e) {
    Runnable reportError = new Runnable() {
        @Override
        public void run() {
            Toast.makeText(MainActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
            e.printStackTrace();
        }
    };
    runOnUiThread(reportError);
}

已编辑的MainActivity代码。你能纠正我的错误吗?

1 个答案:

答案 0 :(得分:1)

您应该创建三个服务——Watson Assistant、语音转文本(Speech to Text)和文本转语音(Text to Speech)(见here链接),并将各自的凭据传入代码。

传递凭据的示例代码

textService.setUsernameAndPassword("apikey", "xNChLjpjnrmri9sXZcsb3jaD1qdUA7FTZaj3jM0LfP5T");

如果创建了Watson Assistant服务,则可以单击“管理”下的“启动工具”。

  • 点击技能
  • 通过单击“新建”来创建新技能。使用示例技能。
  • 点击技能名称旁边的三个垂直点,然后选择“查看API详细信息”(参见截图)。
  • 您的用户名将是“apikey”,并且可以在该页面上看到对应的密码(API密钥)。
  • 对于Watson Assistant V1,请使用WORKSPACE ID而不是SKILL ID。