Java source code examples: com.google.android.exoplayer.chunk.ChunkSampleSource

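All of the snippets below come from ExoPlayer 1.x renderer builders (DASH and SmoothStreaming) and rely on a handful of buffer and latency constants that the excerpts do not define. The sketch below shows plausible definitions, modelled on the values used in the ExoPlayer 1.x demo app; the exact numbers are an assumption and should be tuned to your own buffering needs.

// Constants assumed by the examples below (values modelled on the ExoPlayer 1.x demo app).
private static final int BUFFER_SEGMENT_SIZE = 64 * 1024;  // allocator segment size, in bytes
private static final int VIDEO_BUFFER_SEGMENTS = 200;      // 200 * 64 KiB = 12.5 MiB video buffer
private static final int AUDIO_BUFFER_SEGMENTS = 54;       // ~3.4 MiB audio buffer
private static final int TEXT_BUFFER_SEGMENTS = 2;         // subtitles need very little buffer
private static final int LIVE_EDGE_LATENCY_MS = 30000;     // distance to keep from the live edge
private static final int SECURITY_LEVEL_UNKNOWN = -1;
private static final int SECURITY_LEVEL_1 = 1;             // hardware-backed Widevine (L1)
private static final int SECURITY_LEVEL_3 = 3;             // software-only Widevine (L3)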
Example 1
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
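Several of the DASH examples, including this one, call a getWidevineSecurityLevel helper that is not part of the excerpt. A minimal sketch, assuming the SECURITY_LEVEL_* constants above and the getPropertyString method that StreamingDrmSessionManager exposes in ExoPlayer 1.x:

private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {
  // "securityLevel" is a Widevine DRM property: "L1" indicates a hardware-backed pipeline,
  // "L3" a software-only one. HD content is filtered out unless the device reports L1.
  String securityLevelProperty = sessionManager.getPropertyString("securityLevel");
  return "L1".equals(securityLevelProperty) ? SECURITY_LEVEL_1
      : "L3".equals(securityLevelProperty) ? SECURITY_LEVEL_3
      : SECURITY_LEVEL_UNKNOWN;
}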
 
Example 2
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Example 3
@Override
public void onSingleManifest(SmoothStreamingManifest manifest) {
  if (canceled) {
    return;
  }

  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  // Check drm support if necessary.
  DrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (manifest.protectionElement != null) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newFrameworkInstance(
          manifest.protectionElement.uuid, player.getPlaybackLooper(), drmCallback, null,
          player.getMainHandler(), player);
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newAudioInstance(),
      audioDataSource, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newTextInstance(),
      textDataSource, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Example 4
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true,
      mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Example 5
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, false),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      null, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Example 6
public DebugTrackRenderer(TextView textView, MediaCodecTrackRenderer renderer,
    ChunkSampleSource videoSampleSource) {
  this.textView = textView;
  this.renderer = renderer;
  this.videoSampleSource = videoSampleSource;
}
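This constructor shows only the field assignments; the fields it implies are sketched below (the private final modifiers are an assumption). A debug renderer of this kind typically refreshes the TextView periodically, for example from its doSomeWork() callback, with state taken from the wrapped MediaCodecTrackRenderer and ChunkSampleSource.

// Fields implied by the constructor above (modifiers are an assumption).
private final TextView textView;
private final MediaCodecTrackRenderer renderer;
private final ChunkSampleSource videoSampleSource;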
 
Example 7
private void buildRenderers() {
    Period period = manifest.getPeriod(0);
    Handler mainHandler = player.getMainHandler();
    LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(
            BUFFER_SEGMENT_SIZE));
    DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

    boolean hasContentProtection = false;
    for (int i = 0; i < period.adaptationSets.size(); i++) {
        AdaptationSet adaptationSet = period.adaptationSets.get(i);
        if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
            hasContentProtection |= adaptationSet.hasContentProtection();
        }
    }

    // Check drm support if necessary.
    boolean filterHdContent = false;
    StreamingDrmSessionManager drmSessionManager = null;
    if (hasContentProtection) {
        if (Util.SDK_INT < 18) {
            player.onRenderersError(
                    new UnsupportedDrmException(
                            UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
            return;
        }
        try {
            drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
                    player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(),
                    player);
            filterHdContent =
                    getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
        } catch (UnsupportedDrmException e) {
            player.onRenderersError(e);
            return;
        }
    }

    // Build the video renderer.
    DataSource videoDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
            DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
            videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
            elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
    ChunkSampleSource videoSampleSource =
            new ChunkSampleSource(videoChunkSource, loadControl,
                    VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
                    DemoPlayer.TYPE_VIDEO);
    TrackRenderer videoRenderer =
            new MediaCodecVideoTrackRenderer(context, videoSampleSource,
                    MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
                    5000, drmSessionManager, true, mainHandler, player, 50);

    // Build the audio renderer.
    DataSource audioDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
            DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null,
            LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
            DemoPlayer.TYPE_AUDIO);
    ChunkSampleSource audioSampleSource =
            new ChunkSampleSource(audioChunkSource, loadControl,
                    AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
                    DemoPlayer.TYPE_AUDIO);
    TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
            MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
            AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

    // Build the text renderer.
    DataSource textDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
            DefaultDashTrackSelector.newTextInstance(), textDataSource, null,
            LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player,
            DemoPlayer.TYPE_TEXT);
    ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
            TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
            DemoPlayer.TYPE_TEXT);
    TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
            mainHandler.getLooper());

    // Invoke the callback.
    TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
    renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
    renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
    renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
    player.onRenderers(renderers, bandwidthMeter);
}
 
Example 8
@Override
public void onSingleManifest(SmoothStreamingManifest manifest) {
    if (canceled) {
        return;
    }

    Handler mainHandler = player.getMainHandler();
    LoadControl loadControl =
            new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
    DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

    // Check drm support if necessary.
    DrmSessionManager drmSessionManager = null;
    if (manifest.protectionElement != null) {
        if (Util.SDK_INT < 18) {
            player.onRenderersError(new UnsupportedDrmException(
                    UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
            return;
        }
        try {
            drmSessionManager = StreamingDrmSessionManager.newFrameworkInstance(
                    manifest.protectionElement.uuid, player.getPlaybackLooper(),
                    drmCallback, null, player.getMainHandler(), player);
        } catch (UnsupportedDrmException e) {
            player.onRenderersError(e);
            return;
        }
    }

    // Build the video renderer.
    DataSource videoDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
            DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false),
            videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
    ChunkSampleSource videoSampleSource =
            new ChunkSampleSource(videoChunkSource, loadControl,
                    VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
                    DemoPlayer.TYPE_VIDEO);
    TrackRenderer videoRenderer =
            new MediaCodecVideoTrackRenderer(context, videoSampleSource,
                    MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
                    5000, drmSessionManager, true, mainHandler, player, 50);

    // Build the audio renderer.
    DataSource audioDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
            DefaultSmoothStreamingTrackSelector.newAudioInstance(),
            audioDataSource, null, LIVE_EDGE_LATENCY_MS);
    ChunkSampleSource audioSampleSource =
            new ChunkSampleSource(audioChunkSource, loadControl,
                    AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
                    DemoPlayer.TYPE_AUDIO);
    TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
            MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
            AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

    // Build the text renderer.
    DataSource textDataSource =
            new DefaultUriDataSource(context, bandwidthMeter, userAgent);
    ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
            DefaultSmoothStreamingTrackSelector.newTextInstance(),
            textDataSource, null, LIVE_EDGE_LATENCY_MS);
    ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
            TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
            DemoPlayer.TYPE_TEXT);
    TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
            mainHandler.getLooper());

    // Invoke the callback.
    TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
    renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
    renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
    renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
    player.onRenderers(renderers, bandwidthMeter);
}
 
Example 9
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
              new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
              player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
          DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
          videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
          elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
          VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
          ExoplayerWrapper.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
          MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true,
          mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
          DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
          elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
          AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
          ExoplayerWrapper.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
          MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
          AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
          DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
          elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
          TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
          ExoplayerWrapper.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
          mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[ExoplayerWrapper.RENDERER_COUNT];
  renderers[ExoplayerWrapper.TYPE_VIDEO] = videoRenderer;
  renderers[ExoplayerWrapper.TYPE_AUDIO] = audioRenderer;
  renderers[ExoplayerWrapper.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Example 10
public DebugTrackRenderer(TextView textView, MediaCodecTrackRenderer renderer,
                          ChunkSampleSource videoSampleSource) {
  this.textView = textView;
  this.renderer = renderer;
  this.videoSampleSource = videoSampleSource;
}
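Every builder above ends by handing the renderer array to player.onRenderers(renderers, bandwidthMeter). The receiving wrapper (DemoPlayer, ExoplayerWrapper, and so on) is not shown here; the following is only a sketch of what such a callback typically does with the array, using the core ExoPlayer 1.x API (ExoPlayer.Factory.newInstance, prepare, sendMessage). The exoPlayer, surface, videoRenderer and TYPE_* members are assumed fields and constants of the wrapper class.

// A sketch of the receiving side, assuming the wrapper holds an ExoPlayer instance created
// with ExoPlayer.Factory.newInstance(RENDERER_COUNT) and an output Surface for video.
public void onRenderers(TrackRenderer[] renderers, BandwidthMeter bandwidthMeter) {
  // Replace any missing renderer with a no-op so the array always has RENDERER_COUNT entries.
  for (int i = 0; i < renderers.length; i++) {
    if (renderers[i] == null) {
      renderers[i] = new DummyTrackRenderer();
    }
  }
  this.videoRenderer = renderers[TYPE_VIDEO];
  // The bandwidth meter can be kept around for debug overlays or adaptive logging.
  exoPlayer.prepare(renderers);
  // Route decoded video to the display surface, then start playback.
  exoPlayer.sendMessage(videoRenderer, MediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
  exoPlayer.setPlayWhenReady(true);
}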