如何将数据传递给UINavigationController堆栈上的第n个视图控制器?

时间:2018-04-07 04:18:28

标签: ios swift uinavigationcontroller delegates protocols

有两个应用程序 A 和 B。

App A有一个用于打开App B的网址。

App B 打开后,必须将 5 个视图控制器加载到导航堆栈中,这一步由以下代码完成:

let LandingVC = self.storyboard?.instantiateViewControllerWithIdentifier("LandingVC") as! LandingVC
let Dashboard = self.storyboard?.instantiateViewControllerWithIdentifier("Dashboard") as! Dashboard
let PlayerVC = self.storyboard?.instantiateViewControllerWithIdentifier("PlayerVC") as! PlayerVC
let PlayerDetailVC = self.storyboard?.instantiateViewControllerWithIdentifier("PlayerDetailVC") as! PlayerDetailVC
let ScoreReportVC = self.storyboard?.instantiateViewControllerWithIdentifier("ScoreReportVC") as! ScoreReportVC
let viewControllersList = [LandingVC, Dashboard, PlayerVC, PlayerDetailVC, ScoreReportVC]
self.navigationController?.setViewControllers(viewControllersList, animated: false)

在 ScoreReportVC 中,我需要能够在之前的各个 ViewController 上设置变量,这样即使用户是从另一个应用启动本应用的,也可以导航回之前的屏幕。

以下是我的尝试:我在堆栈中位于 ScoreReportVC 之前的各个视图控制器里分别定义了协议,然后在 ScoreReportVC 内部遍历导航堆栈来设置代理,代码如下:for viewcontroller in self.navigationController?.viewControllers { if viewcontroller is PlayerDetailVC { PlayerDetailVC.delegate = self } if viewcontroller is PlayerVC { PlayerVC = self } if viewcontroller is Dashboard { Dashboard.delegate = self } if viewcontroller is LandingVC { LandingVC.delegate = self } }

ViewController

但代表们没有被召唤。任何帮助如何正确地将数据传递到堆栈上的所有public class SpeechService extends Service { public interface Listener { /** * Called when a new piece of text was recognized by the Speech API. * * @param text The text. * @param isFinal {@code true} when the API finished processing audio. */ void onSpeechRecognized(String text, boolean isFinal); } private static final String TAG = "SpeechService"; private static final String PREFS = "SpeechService"; private static final String PREF_ACCESS_TOKEN_VALUE = "access_token_value"; private static final String PREF_ACCESS_TOKEN_EXPIRATION_TIME = "access_token_expiration_time"; /** We reuse an access token if its expiration time is longer than this. */ private static final int ACCESS_TOKEN_EXPIRATION_TOLERANCE = 30 * 60 * 1000; // thirty minutes /** We refresh the current access token before it expires. */ private static final int ACCESS_TOKEN_FETCH_MARGIN = 60 * 1000; // one minute public static final List<String> SCOPE = Collections.singletonList("https://www.googleapis.com/auth/cloud-platform"); private static final String HOSTNAME = "speech.googleapis.com"; private static final int PORT = 443; private final SpeechBinder mBinder = new SpeechBinder(); private final ArrayList<Listener> mListeners = new ArrayList<>(); private volatile AccessTokenTask mAccessTokenTask; private SpeechGrpc.SpeechStub mApi; private static Handler mHandler; private final StreamObserver<StreamingRecognizeResponse> mResponseObserver = new StreamObserver<StreamingRecognizeResponse>() { @Override public void onNext(StreamingRecognizeResponse response) { String text = null; boolean isFinal = false; if (response.getResultsCount() > 0) { final StreamingRecognitionResult result = response.getResults(0); isFinal = result.getIsFinal(); if (result.getAlternativesCount() > 0) { final SpeechRecognitionAlternative alternative = result.getAlternatives(0); text = alternative.getTranscript(); } } if (text != null) { for (Listener listener : mListeners) { 
listener.onSpeechRecognized(text, isFinal); } } } @Override public void onError(Throwable t) { Log.e(TAG, "Error calling the API.", t); } @Override public void onCompleted() { Log.i(TAG, "API completed."); } }; private final StreamObserver<RecognizeResponse> mFileResponseObserver = new StreamObserver<RecognizeResponse>() { @Override public void onNext(RecognizeResponse response) { String text = null; if (response.getResultsCount() > 0) { final SpeechRecognitionResult result = response.getResults(0); if (result.getAlternativesCount() > 0) { final SpeechRecognitionAlternative alternative = result.getAlternatives(0); text = alternative.getTranscript(); } } if (text != null) { for (Listener listener : mListeners) { listener.onSpeechRecognized(text, true); } } } @Override public void onError(Throwable t) { Log.e(TAG, "Error calling the API.", t); } @Override public void onCompleted() { Log.i(TAG, "API completed."); } }; private StreamObserver<StreamingRecognizeRequest> mRequestObserver; public static SpeechService from(IBinder binder) { return ((SpeechBinder) binder).getService(); } @Override public void onCreate() { super.onCreate(); mHandler = new Handler(); fetchAccessToken(); } @Override public void onDestroy() { super.onDestroy(); mHandler.removeCallbacks(mFetchAccessTokenRunnable); mHandler = null; // Release the gRPC channel. 
if (mApi != null) { final ManagedChannel channel = (ManagedChannel) mApi.getChannel(); if (channel != null && !channel.isShutdown()) { try { channel.shutdown().awaitTermination(1, TimeUnit.SECONDS); } catch (InterruptedException e) { Log.e(TAG, "Error shutting down the gRPC channel.", e); } } mApi = null; } } private void fetchAccessToken() { if (mAccessTokenTask != null) { return; } mAccessTokenTask = new AccessTokenTask(); mAccessTokenTask.execute(); } private String getDefaultLanguageCode() { final Locale locale = Locale.getDefault(); final StringBuilder language = new StringBuilder(locale.getLanguage()); final String country = locale.getCountry(); if (!TextUtils.isEmpty(country)) { language.append("-"); language.append(country); } return language.toString(); } @Nullable @Override public IBinder onBind(Intent intent) { return mBinder; } public void addListener(@NonNull Listener listener) { mListeners.add(listener); } public void removeListener(@NonNull Listener listener) { mListeners.remove(listener); } /** * Starts recognizing speech audio. * * @param sampleRate The sample rate of the audio. */ public void startRecognizing(int sampleRate) { if (mApi == null) { Log.w(TAG, "API not ready. Ignoring the request."); return; } // Configure the API mRequestObserver = mApi.streamingRecognize(mResponseObserver); mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder() .setStreamingConfig(StreamingRecognitionConfig.newBuilder() .setConfig(RecognitionConfig.newBuilder() .setLanguageCode(getDefaultLanguageCode()) .setEncoding(RecognitionConfig.AudioEncoding.LINEAR16) .setSampleRateHertz(sampleRate) .build()) .setInterimResults(true) .setSingleUtterance(true) .build()) .build()); } /** * Recognizes the speech audio. This method should be called every time a chunk of byte buffer * is ready. * * @param data The audio data. * @param size The number of elements that are actually relevant in the {@code data}. 
*/ public void recognize(byte[] data, int size) { if (mRequestObserver == null) { return; } // Call the streaming recognition API mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder() .setAudioContent(ByteString.copyFrom(data, 0, size)) .build()); } /** * Finishes recognizing speech audio. */ public void finishRecognizing() { if (mRequestObserver == null) { return; } mRequestObserver.onCompleted(); mRequestObserver = null; } /** * Recognize all data from the specified {@link InputStream}. * * @param stream The audio data. */ public void recognizeInputStream(InputStream stream) { try { mApi.recognize( RecognizeRequest.newBuilder() .setConfig(RecognitionConfig.newBuilder() .setEncoding(RecognitionConfig.AudioEncoding.LINEAR16) .setLanguageCode("en-US") .setSampleRateHertz(16000) .build()) .setAudio(RecognitionAudio.newBuilder() .setContent(ByteString.readFrom(stream)) .build()) .build(), mFileResponseObserver); } catch (IOException e) { Log.e(TAG, "Error loading the input", e); } } private class SpeechBinder extends Binder { SpeechService getService() { return SpeechService.this; } } private final Runnable mFetchAccessTokenRunnable = new Runnable() { @Override public void run() { fetchAccessToken(); } }; private class AccessTokenTask extends AsyncTask<Void, Void, AccessToken> { @Override protected AccessToken doInBackground(Void... voids) { final SharedPreferences prefs = getSharedPreferences(PREFS, Context.MODE_PRIVATE); String tokenValue = prefs.getString(PREF_ACCESS_TOKEN_VALUE, null); long expirationTime = prefs.getLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, -1); // Check if the current token is still valid for a while if (tokenValue != null && expirationTime > 0) { if (expirationTime > System.currentTimeMillis() + ACCESS_TOKEN_EXPIRATION_TOLERANCE) { return new AccessToken(tokenValue, new Date(expirationTime)); } } // ***** WARNING ***** // In this sample, we load the credential from a JSON file stored in a raw resource // folder of this client app. 
You should never do this in your app. Instead, store // the file in your server and obtain an access token from there. // ******************* final InputStream stream = getResources().openRawResource(R.raw.credential); try { final GoogleCredentials credentials = GoogleCredentials.fromStream(stream) .createScoped(SCOPE); final AccessToken token = credentials.refreshAccessToken(); prefs.edit() .putString(PREF_ACCESS_TOKEN_VALUE, token.getTokenValue()) .putLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, token.getExpirationTime().getTime()) .apply(); return token; } catch (IOException e) { Log.e(TAG, "Failed to obtain access token.", e); } return null; } @Override protected void onPostExecute(AccessToken accessToken) { mAccessTokenTask = null; final ManagedChannel channel = new OkHttpChannelProvider() .builderForAddress(HOSTNAME, PORT) .nameResolverFactory(new DnsNameResolverProvider()) .intercept(new GoogleCredentialsInterceptor(new GoogleCredentials(accessToken) .createScoped(SCOPE))) .build(); mApi = SpeechGrpc.newStub(channel); // Schedule access token refresh before it expires if (mHandler != null) { mHandler.postDelayed(mFetchAccessTokenRunnable, Math.max(accessToken.getExpirationTime().getTime() - System.currentTimeMillis() - ACCESS_TOKEN_FETCH_MARGIN, ACCESS_TOKEN_EXPIRATION_TOLERANCE)); } } } /** * Authenticates the gRPC channel using the specified {@link GoogleCredentials}. 
*/ private static class GoogleCredentialsInterceptor implements ClientInterceptor { private final Credentials mCredentials; private Metadata mCached; private Map<String, List<String>> mLastMetadata; GoogleCredentialsInterceptor(Credentials credentials) { mCredentials = credentials; } @Override public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall( final MethodDescriptor<ReqT, RespT> method, CallOptions callOptions, final Channel next) { return new ClientInterceptors.CheckedForwardingClientCall<ReqT, RespT>( next.newCall(method, callOptions)) { @Override protected void checkedStart(Listener<RespT> responseListener, Metadata headers) throws StatusException { Metadata cachedSaved; URI uri = serviceUri(next, method); synchronized (this) { Map<String, List<String>> latestMetadata = getRequestMetadata(uri); if (mLastMetadata == null || mLastMetadata != latestMetadata) { mLastMetadata = latestMetadata; mCached = toHeaders(mLastMetadata); } cachedSaved = mCached; } headers.merge(cachedSaved); delegate().start(responseListener, headers); } }; } /** * Generate a JWT-specific service URI. The URI is simply an identifier with enough * information for a service to know that the JWT was intended for it. The URI will * commonly be verified with a simple string equality check. */ private URI serviceUri(Channel channel, MethodDescriptor<?, ?> method) throws StatusException { String authority = channel.authority(); if (authority == null) { throw Status.UNAUTHENTICATED .withDescription("Channel has no authority") .asException(); } // Always use HTTPS, by definition. final String scheme = "https"; final int defaultPort = 443; String path = "/" + MethodDescriptor.extractFullServiceName(method.getFullMethodName()); URI uri; try { uri = new URI(scheme, authority, path, null, null); } catch (URISyntaxException e) { throw Status.UNAUTHENTICATED .withDescription("Unable to construct service URI for auth") .withCause(e).asException(); } // The default port must not be present. 
Alternative ports should be present. if (uri.getPort() == defaultPort) { uri = removePort(uri); } return uri; } private URI removePort(URI uri) throws StatusException { try { return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), -1 /* port */, uri.getPath(), uri.getQuery(), uri.getFragment()); } catch (URISyntaxException e) { throw Status.UNAUTHENTICATED .withDescription("Unable to construct service URI after removing port") .withCause(e).asException(); } } private Map<String, List<String>> getRequestMetadata(URI uri) throws StatusException { try { return mCredentials.getRequestMetadata(uri); } catch (IOException e) { throw Status.UNAUTHENTICATED.withCause(e).asException(); } } private static Metadata toHeaders(Map<String, List<String>> metadata) { Metadata headers = new Metadata(); if (metadata != null) { for (String key : metadata.keySet()) { Metadata.Key<String> headerKey = Metadata.Key.of( key, Metadata.ASCII_STRING_MARSHALLER); for (String value : metadata.get(key)) { headers.put(headerKey, value); } } } return headers; } } 将非常感激。

2 个答案:

答案 0 :(得分:0)

对于相互关联的类,我们使用委托(Delegation);而对于互不相关的类,我们使用通知(Notification)。在您的情况下,需要使用通知来把数据从一个 VC 传递到另一个 VC。

答案 1 :(得分:0)

更好的方法是在创建控制器时创建/设置变量。