commit 8a0a371d4c
Author: ayub
Date: 2025-09-06 18:25:28 +07:00
88 changed files with 5396 additions and 0 deletions

.DS_Store (binary file, not shown)

CHANGELOG.md
@@ -0,0 +1,44 @@
## 1.0.0
* Initial Release
## 1.0.1
* Added example code.
## 1.0.2
* Added description
## 1.0.2+1
* Fixed minor bug
## 1.0.2+2
* Made SimpleCache singleton thread safe
## 1.0.3
* Support iOS
## 2.0.0-dev.1
* Based on video_player 2.1.1
## 2.0.1
* Change back to `CachedVideoPlayer` namespace.
## 2.0.2
* Fix some issues reported on GitHub.
## 2.0.3
* Add example and increase pub score.
## 2.0.4
* Preserve headers when using KTVHTTPCache
* Skip caching in iOS if formatHint is `hls`
* Update exoplayer dependencies and replace deprecations

LICENSE
@@ -0,0 +1,25 @@
Copyright 2013 The Flutter Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

README.md
@@ -0,0 +1,19 @@
# Cached Video Player
A Flutter plugin forked from the official [video_player](https://pub.dev/packages/video_player) package, with the addition of video caching support on Android and iOS.
The web implementation behaves like the official [video_player](https://pub.dev/packages/video_player), i.e. without caching support.
## Installation
First, add `cached_video_player` as a [dependency in your pubspec.yaml file](https://flutter.io/platform-plugins/).
Then follow the Android and iOS configuration steps of the official [video_player](https://pub.dev/packages/video_player#installation) package. This plugin does not work on desktop platforms.
### Issues
* `maxFileSize` and `maxCacheSize` are hardcoded at 100 MiB and 1 GiB respectively on Android; see the excerpt below.
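For reference, these limits come from the Android implementation included in this commit (`CachedVideoPlayer.java`), which constructs its caching data source with fixed values:

```java
// Excerpt from the Android implementation in this commit; the limits cannot
// currently be configured from the Dart side.
CacheDataSourceFactory cacheDataSourceFactory =
    new CacheDataSourceFactory(
        context,
        1024 * 1024 * 1024, // maxCacheSize: 1 GiB
        1024 * 1024 * 100); // maxFileSize: 100 MiB
```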
### Contributors
* [Vikram Pratap Singh](https://github.com/vikram25897)
* [EnderTan](https://github.com/EnderTan)
* [Philipp Bauer](https://github.com/ciriousjoker)

analysis_optios.yaml
@@ -0,0 +1 @@
include: package:lints/core.yaml

android/build.gradle
@@ -0,0 +1,53 @@
group 'io.flutter.plugins.videoplayer'
version '1.0-SNAPSHOT'
def args = ["-Xlint:deprecation","-Xlint:unchecked","-Werror"]
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.5.0'
}
}
rootProject.allprojects {
repositories {
google()
jcenter()
}
}
project.getTasks().withType(JavaCompile){
options.compilerArgs.addAll(args)
}
apply plugin: 'com.android.library'
android {
namespace 'com.lazyarts.vikram.cached_video_player'
compileSdkVersion 33
defaultConfig {
minSdkVersion 16
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
lintOptions {
disable 'InvalidPackage'
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_11
targetCompatibility JavaVersion.VERSION_11
}
}
dependencies {
implementation 'com.google.android.exoplayer:exoplayer-core:2.17.1'
implementation 'com.google.android.exoplayer:exoplayer-hls:2.17.1'
implementation 'com.google.android.exoplayer:exoplayer-dash:2.17.1'
implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.17.1'
}

android/gradle.properties
@@ -0,0 +1 @@
org.gradle.jvmargs=-Xmx1536M

android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
#Wed Oct 17 09:04:56 PDT 2018
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.2-all.zip

android/settings.gradle
@@ -0,0 +1 @@
rootProject.name = 'cached_video_player'

android/src/main/AndroidManifest.xml
@@ -0,0 +1,3 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.lazyarts.vikram.cached_video_player">
</manifest>

CacheDataSourceFactory.java
@@ -0,0 +1,51 @@
package com.lazyarts.vikram.cached_video_player;
import android.content.Context;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.upstream.DefaultDataSource;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
import com.google.android.exoplayer2.upstream.FileDataSource;
import com.google.android.exoplayer2.upstream.cache.CacheDataSink;
import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
import com.google.android.exoplayer2.upstream.cache.SimpleCache;
import java.util.Map;
/** A DataSource.Factory that wraps the default HTTP data source with ExoPlayer's read-through cache. */
class CacheDataSourceFactory implements DataSource.Factory {
private final Context context;
private DefaultDataSource.Factory defaultDatasourceFactory;
private final long maxFileSize, maxCacheSize;
private DefaultHttpDataSource.Factory defaultHttpDataSourceFactory;
CacheDataSourceFactory(Context context, long maxCacheSize, long maxFileSize) {
super();
this.context = context;
this.maxCacheSize = maxCacheSize;
this.maxFileSize = maxFileSize;
defaultHttpDataSourceFactory = new DefaultHttpDataSource.Factory();
defaultHttpDataSourceFactory.setUserAgent("ExoPlayer");
defaultHttpDataSourceFactory.setAllowCrossProtocolRedirects(true);
}
void setHeaders(Map<String, String> httpHeaders) {
defaultHttpDataSourceFactory.setDefaultRequestProperties(httpHeaders);
}
@Override
public DataSource createDataSource() {
DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter.Builder(context).build();
defaultDatasourceFactory = new DefaultDataSource.Factory(this.context, defaultHttpDataSourceFactory);
defaultDatasourceFactory.setTransferListener(bandwidthMeter);
SimpleCache simpleCache = SimpleCacheSingleton.getInstance(context, maxCacheSize).simpleCache;
// Serve reads from the shared SimpleCache where possible, fall back to the upstream
// source otherwise, and write newly downloaded bytes back through the CacheDataSink.
return new CacheDataSource(simpleCache, defaultDatasourceFactory.createDataSource(),
new FileDataSource(), new CacheDataSink(simpleCache, maxFileSize),
CacheDataSource.FLAG_BLOCK_ON_CACHE | CacheDataSource.FLAG_IGNORE_CACHE_ON_ERROR, null);
}
}

CachedVideoPlayer.java
@@ -0,0 +1,306 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package com.lazyarts.vikram.cached_video_player;
import static com.google.android.exoplayer2.Player.REPEAT_MODE_ALL;
import static com.google.android.exoplayer2.Player.REPEAT_MODE_OFF;
import android.content.Context;
import android.net.Uri;
import android.view.Surface;
import androidx.annotation.NonNull;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.ProgressiveMediaSource;
import com.google.android.exoplayer2.source.dash.DashMediaSource;
import com.google.android.exoplayer2.source.dash.DefaultDashChunkSource;
import com.google.android.exoplayer2.source.hls.HlsMediaSource;
import com.google.android.exoplayer2.source.smoothstreaming.DefaultSsChunkSource;
import com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultDataSource;
import com.google.android.exoplayer2.util.Util;
import io.flutter.plugin.common.EventChannel;
import io.flutter.view.TextureRegistry;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
final class CachedVideoPlayer {
private static final String FORMAT_SS = "ss";
private static final String FORMAT_DASH = "dash";
private static final String FORMAT_HLS = "hls";
private static final String FORMAT_OTHER = "other";
private ExoPlayer exoPlayer;
private Surface surface;
private final TextureRegistry.SurfaceTextureEntry textureEntry;
private QueuingEventSink eventSink = new QueuingEventSink();
private final EventChannel eventChannel;
private boolean isInitialized = false;
private final VideoPlayerOptions options;
CachedVideoPlayer(
Context context,
EventChannel eventChannel,
TextureRegistry.SurfaceTextureEntry textureEntry,
String dataSource,
String formatHint,
Map<String, String> httpHeaders,
VideoPlayerOptions options) {
this.eventChannel = eventChannel;
this.textureEntry = textureEntry;
this.options = options;
exoPlayer = new ExoPlayer.Builder(context).build();
Uri uri = Uri.parse(dataSource);
DataSource.Factory dataSourceFactory;
if (isHTTP(uri)) {
CacheDataSourceFactory cacheDataSourceFactory =
new CacheDataSourceFactory(
context,
// TODO: need a way to set these programmatically. Maybe fork VideoPlayerPlatformInterface
1024 * 1024 * 1024, // maxCacheSize: 1 GiB
1024 * 1024 * 100); // maxFileSize: 100 MiB
if (httpHeaders != null && !httpHeaders.isEmpty()) {
cacheDataSourceFactory.setHeaders(httpHeaders);
}
dataSourceFactory = cacheDataSourceFactory;
} else {
dataSourceFactory = new DefaultDataSource.Factory(context);
}
MediaSource mediaSource = buildMediaSource(uri, dataSourceFactory, formatHint, context);
exoPlayer.setMediaSource(mediaSource);
exoPlayer.prepare();
setupVideoPlayer(eventChannel, textureEntry);
}
private static boolean isHTTP(Uri uri) {
if (uri == null || uri.getScheme() == null) {
return false;
}
String scheme = uri.getScheme();
return scheme.equals("http") || scheme.equals("https");
}
private MediaSource buildMediaSource(
Uri uri, DataSource.Factory mediaDataSourceFactory, String formatHint, Context context) {
int type;
if (formatHint == null) {
type = Util.inferContentType(uri.getLastPathSegment());
} else {
switch (formatHint) {
case FORMAT_SS:
type = C.TYPE_SS;
break;
case FORMAT_DASH:
type = C.TYPE_DASH;
break;
case FORMAT_HLS:
type = C.TYPE_HLS;
break;
case FORMAT_OTHER:
type = C.TYPE_OTHER;
break;
default:
type = -1;
break;
}
}
switch (type) {
case C.TYPE_SS:
return new SsMediaSource.Factory(
new DefaultSsChunkSource.Factory(mediaDataSourceFactory),
new DefaultDataSource.Factory(context, mediaDataSourceFactory))
.createMediaSource(MediaItem.fromUri(uri));
case C.TYPE_DASH:
return new DashMediaSource.Factory(
new DefaultDashChunkSource.Factory(mediaDataSourceFactory),
new DefaultDataSource.Factory(context, mediaDataSourceFactory))
.createMediaSource(MediaItem.fromUri(uri));
case C.TYPE_HLS:
return new HlsMediaSource.Factory(mediaDataSourceFactory)
.createMediaSource(MediaItem.fromUri(uri));
case C.TYPE_OTHER:
return new ProgressiveMediaSource.Factory(mediaDataSourceFactory)
.createMediaSource(MediaItem.fromUri(uri));
default: {
throw new IllegalStateException("Unsupported type: " + type);
}
}
}
private void setupVideoPlayer(
EventChannel eventChannel, TextureRegistry.SurfaceTextureEntry textureEntry) {
eventChannel.setStreamHandler(
new EventChannel.StreamHandler() {
@Override
public void onListen(Object o, EventChannel.EventSink sink) {
eventSink.setDelegate(sink);
}
@Override
public void onCancel(Object o) {
eventSink.setDelegate(null);
}
});
surface = new Surface(textureEntry.surfaceTexture());
exoPlayer.setVideoSurface(surface);
setAudioAttributes(exoPlayer, options.mixWithOthers);
exoPlayer.addListener(
new Player.Listener() {
private boolean isBuffering = false;
public void setBuffering(boolean buffering) {
if (isBuffering != buffering) {
isBuffering = buffering;
Map<String, Object> event = new HashMap<>();
event.put("event", isBuffering ? "bufferingStart" : "bufferingEnd");
eventSink.success(event);
}
}
@Override
public void onPlaybackStateChanged(final int playbackState) {
if (playbackState == Player.STATE_BUFFERING) {
setBuffering(true);
sendBufferingUpdate();
} else if (playbackState == Player.STATE_READY) {
if (!isInitialized) {
isInitialized = true;
sendInitialized();
}
} else if (playbackState == Player.STATE_ENDED) {
Map<String, Object> event = new HashMap<>();
event.put("event", "completed");
eventSink.success(event);
}
if (playbackState != Player.STATE_BUFFERING) {
setBuffering(false);
}
}
@Override
public void onPlayerError(@NonNull PlaybackException error) {
setBuffering(false);
if (eventSink != null) {
eventSink.error("VideoError", "Video player had error " + error, null);
}
}
});
}
void sendBufferingUpdate() {
Map<String, Object> event = new HashMap<>();
event.put("event", "bufferingUpdate");
List<? extends Number> range = Arrays.asList(0, exoPlayer.getBufferedPosition());
// iOS supports a list of buffered ranges, so here is a list with a single range.
event.put("values", Collections.singletonList(range));
eventSink.success(event);
}
private static void setAudioAttributes(ExoPlayer exoPlayer, boolean isMixMode) {
exoPlayer.setAudioAttributes(
new AudioAttributes.Builder().setContentType(C.CONTENT_TYPE_MOVIE).build(), !isMixMode);
}
void play() {
exoPlayer.setPlayWhenReady(true);
}
void pause() {
exoPlayer.setPlayWhenReady(false);
}
void setLooping(boolean value) {
exoPlayer.setRepeatMode(value ? REPEAT_MODE_ALL : REPEAT_MODE_OFF);
}
void setVolume(double value) {
float bracketedValue = (float) Math.max(0.0, Math.min(1.0, value));
exoPlayer.setVolume(bracketedValue);
}
void setPlaybackSpeed(double value) {
// We do not need to consider pitch and skipSilence for now as we do not handle them and
// therefore never diverge from the default values.
final PlaybackParameters playbackParameters = new PlaybackParameters(((float) value));
exoPlayer.setPlaybackParameters(playbackParameters);
}
void seekTo(int location) {
exoPlayer.seekTo(location);
}
long getPosition() {
return exoPlayer.getCurrentPosition();
}
@SuppressWarnings("SuspiciousNameCombination")
private void sendInitialized() {
if (isInitialized) {
Map<String, Object> event = new HashMap<>();
event.put("event", "initialized");
event.put("duration", exoPlayer.getDuration());
if (exoPlayer.getVideoFormat() != null) {
Format videoFormat = exoPlayer.getVideoFormat();
int width = videoFormat.width;
int height = videoFormat.height;
int rotationDegrees = videoFormat.rotationDegrees;
// Switch the width/height if video was taken in portrait mode
if (rotationDegrees == 90 || rotationDegrees == 270) {
width = exoPlayer.getVideoFormat().height;
height = exoPlayer.getVideoFormat().width;
}
event.put("width", width);
event.put("height", height);
}
eventSink.success(event);
}
}
void dispose() {
if (isInitialized) {
exoPlayer.stop();
}
textureEntry.release();
eventChannel.setStreamHandler(null);
if (surface != null) {
surface.release();
}
if (exoPlayer != null) {
exoPlayer.release();
}
}
}

CachedVideoPlayerPlugin.java
@@ -0,0 +1,205 @@
package com.lazyarts.vikram.cached_video_player;
import android.content.Context;
import android.os.Build;
import android.util.LongSparseArray;
import io.flutter.FlutterInjector;
import io.flutter.Log;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.EventChannel;
import com.lazyarts.vikram.cached_video_player.Messages.CreateMessage;
import com.lazyarts.vikram.cached_video_player.Messages.LoopingMessage;
import com.lazyarts.vikram.cached_video_player.Messages.MixWithOthersMessage;
import com.lazyarts.vikram.cached_video_player.Messages.PlaybackSpeedMessage;
import com.lazyarts.vikram.cached_video_player.Messages.PositionMessage;
import com.lazyarts.vikram.cached_video_player.Messages.TextureMessage;
import com.lazyarts.vikram.cached_video_player.Messages.VideoPlayerApi;
import com.lazyarts.vikram.cached_video_player.Messages.VolumeMessage;
import io.flutter.view.TextureRegistry;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
import javax.net.ssl.HttpsURLConnection;
public class CachedVideoPlayerPlugin implements FlutterPlugin, VideoPlayerApi {
private static final String TAG = "VideoPlayerPlugin";
private final LongSparseArray<CachedVideoPlayer> videoPlayers = new LongSparseArray<>();
private FlutterState flutterState;
private VideoPlayerOptions options = new VideoPlayerOptions();
public CachedVideoPlayerPlugin() {}
@Override
public void onAttachedToEngine(FlutterPluginBinding binding) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
try {
HttpsURLConnection.setDefaultSSLSocketFactory(new CustomSSLSocketFactory());
} catch (KeyManagementException | NoSuchAlgorithmException e) {
Log.w(TAG, "Failed to enable TLS protocols on older Android versions.", e);
}
}
final FlutterInjector injector = FlutterInjector.instance();
this.flutterState = new FlutterState(
binding.getApplicationContext(),
binding.getBinaryMessenger(),
injector.flutterLoader()::getLookupKeyForAsset,
injector.flutterLoader()::getLookupKeyForAsset,
binding.getTextureRegistry()
);
flutterState.startListening(this, binding.getBinaryMessenger());
}
@Override
public void onDetachedFromEngine(FlutterPluginBinding binding) {
if (flutterState == null) {
Log.wtf(TAG, "Detached from engine before registration.");
return;
}
flutterState.stopListening(binding.getBinaryMessenger());
flutterState = null;
initialize();
}
private void disposeAllPlayers() {
for (int i = 0; i < videoPlayers.size(); i++) {
videoPlayers.valueAt(i).dispose();
}
videoPlayers.clear();
}
public void initialize() {
disposeAllPlayers();
}
public TextureMessage create(CreateMessage arg) {
TextureRegistry.SurfaceTextureEntry handle = flutterState.textureRegistry.createSurfaceTexture();
EventChannel eventChannel = new EventChannel(
flutterState.binaryMessenger,
"flutter.io/videoPlayer/videoEvents" + handle.id()
);
CachedVideoPlayer player;
if (arg.getAsset() != null) {
String assetLookupKey = (arg.getPackageName() != null)
? flutterState.keyForAssetAndPackageName.get(arg.getAsset(), arg.getPackageName())
: flutterState.keyForAsset.get(arg.getAsset());
player = new CachedVideoPlayer(
flutterState.applicationContext,
eventChannel,
handle,
"asset:///" + assetLookupKey,
null,
null,
options
);
} else {
@SuppressWarnings("unchecked")
Map<String, String> httpHeaders = arg.getHttpHeaders();
player = new CachedVideoPlayer(
flutterState.applicationContext,
eventChannel,
handle,
arg.getUri(),
arg.getFormatHint(),
httpHeaders,
options
);
}
videoPlayers.put(handle.id(), player);
TextureMessage result = new TextureMessage();
result.setTextureId(handle.id());
return result;
}
public void dispose(TextureMessage arg) {
CachedVideoPlayer player = videoPlayers.get(arg.getTextureId());
if (player != null) {
player.dispose();
videoPlayers.remove(arg.getTextureId());
}
}
public void setLooping(LoopingMessage arg) {
videoPlayers.get(arg.getTextureId()).setLooping(arg.getIsLooping());
}
public void setVolume(VolumeMessage arg) {
videoPlayers.get(arg.getTextureId()).setVolume(arg.getVolume());
}
public void setPlaybackSpeed(PlaybackSpeedMessage arg) {
videoPlayers.get(arg.getTextureId()).setPlaybackSpeed(arg.getSpeed());
}
public void play(TextureMessage arg) {
videoPlayers.get(arg.getTextureId()).play();
}
public PositionMessage position(TextureMessage arg) {
CachedVideoPlayer player = videoPlayers.get(arg.getTextureId());
PositionMessage result = new PositionMessage();
result.setPosition(player.getPosition());
player.sendBufferingUpdate();
return result;
}
public void seekTo(PositionMessage arg) {
videoPlayers.get(arg.getTextureId()).seekTo(arg.getPosition().intValue());
}
public void pause(TextureMessage arg) {
videoPlayers.get(arg.getTextureId()).pause();
}
@Override
public void setMixWithOthers(MixWithOthersMessage arg) {
options.mixWithOthers = arg.getMixWithOthers();
}
private interface KeyForAssetFn {
String get(String asset);
}
private interface KeyForAssetAndPackageName {
String get(String asset, String packageName);
}
private static final class FlutterState {
private final Context applicationContext;
private final BinaryMessenger binaryMessenger;
private final KeyForAssetFn keyForAsset;
private final KeyForAssetAndPackageName keyForAssetAndPackageName;
private final TextureRegistry textureRegistry;
FlutterState(
Context applicationContext,
BinaryMessenger messenger,
KeyForAssetFn keyForAsset,
KeyForAssetAndPackageName keyForAssetAndPackageName,
TextureRegistry textureRegistry) {
this.applicationContext = applicationContext;
this.binaryMessenger = messenger;
this.keyForAsset = keyForAsset;
this.keyForAssetAndPackageName = keyForAssetAndPackageName;
this.textureRegistry = textureRegistry;
}
void startListening(CachedVideoPlayerPlugin handler, BinaryMessenger messenger) {
VideoPlayerApi.setup(messenger, handler);
}
void stopListening(BinaryMessenger messenger) {
VideoPlayerApi.setup(messenger, null);
}
}
}

CustomSSLSocketFactory.java
@@ -0,0 +1,74 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package com.lazyarts.vikram.cached_video_player;
import java.io.IOException;
import java.net.InetAddress;
import java.net.Socket;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
/** An SSLSocketFactory that enables TLS 1.1 and 1.2 on the sockets it creates, used on pre-Lollipop devices where these protocols are not enabled by default. */
public class CustomSSLSocketFactory extends SSLSocketFactory {
private SSLSocketFactory sslSocketFactory;
public CustomSSLSocketFactory() throws KeyManagementException, NoSuchAlgorithmException {
SSLContext context = SSLContext.getInstance("TLS");
context.init(null, null, null);
sslSocketFactory = context.getSocketFactory();
}
@Override
public String[] getDefaultCipherSuites() {
return sslSocketFactory.getDefaultCipherSuites();
}
@Override
public String[] getSupportedCipherSuites() {
return sslSocketFactory.getSupportedCipherSuites();
}
@Override
public Socket createSocket() throws IOException {
return enableProtocols(sslSocketFactory.createSocket());
}
@Override
public Socket createSocket(Socket s, String host, int port, boolean autoClose)
throws IOException {
return enableProtocols(sslSocketFactory.createSocket(s, host, port, autoClose));
}
@Override
public Socket createSocket(String host, int port) throws IOException {
return enableProtocols(sslSocketFactory.createSocket(host, port));
}
@Override
public Socket createSocket(String host, int port, InetAddress localHost, int localPort)
throws IOException {
return enableProtocols(sslSocketFactory.createSocket(host, port, localHost, localPort));
}
@Override
public Socket createSocket(InetAddress host, int port) throws IOException {
return enableProtocols(sslSocketFactory.createSocket(host, port));
}
@Override
public Socket createSocket(InetAddress address, int port, InetAddress localAddress, int localPort)
throws IOException {
return enableProtocols(sslSocketFactory.createSocket(address, port, localAddress, localPort));
}
private Socket enableProtocols(Socket socket) {
if (socket instanceof SSLSocket) {
((SSLSocket) socket).setEnabledProtocols(new String[] {"TLSv1.1", "TLSv1.2"});
}
return socket;
}
}

Messages.java
@@ -0,0 +1,621 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Autogenerated from Pigeon (v0.1.21), do not edit directly.
// See also: https://pub.dev/packages/pigeon
package com.lazyarts.vikram.cached_video_player;
import io.flutter.plugin.common.BasicMessageChannel;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.StandardMessageCodec;
import java.util.HashMap;
/** Generated class from Pigeon. */
@SuppressWarnings("unused")
public class Messages {
/** Generated class from Pigeon that represents data sent in messages. */
public static class TextureMessage {
private Long textureId;
public Long getTextureId() {
return textureId;
}
public void setTextureId(Long setterArg) {
this.textureId = setterArg;
}
HashMap toMap() {
HashMap<String, Object> toMapResult = new HashMap<>();
toMapResult.put("textureId", textureId);
return toMapResult;
}
static TextureMessage fromMap(HashMap map) {
TextureMessage fromMapResult = new TextureMessage();
Object textureId = map.get("textureId");
fromMapResult.textureId =
(textureId == null)
? null
: ((textureId instanceof Integer) ? (Integer) textureId : (Long) textureId);
return fromMapResult;
}
}
/** Generated class from Pigeon that represents data sent in messages. */
public static class CreateMessage {
private String asset;
public String getAsset() {
return asset;
}
public void setAsset(String setterArg) {
this.asset = setterArg;
}
private String uri;
public String getUri() {
return uri;
}
public void setUri(String setterArg) {
this.uri = setterArg;
}
private String packageName;
public String getPackageName() {
return packageName;
}
public void setPackageName(String setterArg) {
this.packageName = setterArg;
}
private String formatHint;
public String getFormatHint() {
return formatHint;
}
public void setFormatHint(String setterArg) {
this.formatHint = setterArg;
}
private HashMap httpHeaders;
public HashMap getHttpHeaders() {
return httpHeaders;
}
public void setHttpHeaders(HashMap setterArg) {
this.httpHeaders = setterArg;
}
HashMap toMap() {
HashMap<String, Object> toMapResult = new HashMap<>();
toMapResult.put("asset", asset);
toMapResult.put("uri", uri);
toMapResult.put("packageName", packageName);
toMapResult.put("formatHint", formatHint);
toMapResult.put("httpHeaders", httpHeaders);
return toMapResult;
}
static CreateMessage fromMap(HashMap map) {
CreateMessage fromMapResult = new CreateMessage();
Object asset = map.get("asset");
fromMapResult.asset = (String) asset;
Object uri = map.get("uri");
fromMapResult.uri = (String) uri;
Object packageName = map.get("packageName");
fromMapResult.packageName = (String) packageName;
Object formatHint = map.get("formatHint");
fromMapResult.formatHint = (String) formatHint;
Object httpHeaders = map.get("httpHeaders");
fromMapResult.httpHeaders = (HashMap) httpHeaders;
return fromMapResult;
}
}
/** Generated class from Pigeon that represents data sent in messages. */
public static class LoopingMessage {
private Long textureId;
public Long getTextureId() {
return textureId;
}
public void setTextureId(Long setterArg) {
this.textureId = setterArg;
}
private Boolean isLooping;
public Boolean getIsLooping() {
return isLooping;
}
public void setIsLooping(Boolean setterArg) {
this.isLooping = setterArg;
}
HashMap toMap() {
HashMap<String, Object> toMapResult = new HashMap<>();
toMapResult.put("textureId", textureId);
toMapResult.put("isLooping", isLooping);
return toMapResult;
}
static LoopingMessage fromMap(HashMap map) {
LoopingMessage fromMapResult = new LoopingMessage();
Object textureId = map.get("textureId");
fromMapResult.textureId =
(textureId == null)
? null
: ((textureId instanceof Integer) ? (Integer) textureId : (Long) textureId);
Object isLooping = map.get("isLooping");
fromMapResult.isLooping = (Boolean) isLooping;
return fromMapResult;
}
}
/** Generated class from Pigeon that represents data sent in messages. */
public static class VolumeMessage {
private Long textureId;
public Long getTextureId() {
return textureId;
}
public void setTextureId(Long setterArg) {
this.textureId = setterArg;
}
private Double volume;
public Double getVolume() {
return volume;
}
public void setVolume(Double setterArg) {
this.volume = setterArg;
}
HashMap toMap() {
HashMap<String, Object> toMapResult = new HashMap<>();
toMapResult.put("textureId", textureId);
toMapResult.put("volume", volume);
return toMapResult;
}
static VolumeMessage fromMap(HashMap map) {
VolumeMessage fromMapResult = new VolumeMessage();
Object textureId = map.get("textureId");
fromMapResult.textureId =
(textureId == null)
? null
: ((textureId instanceof Integer) ? (Integer) textureId : (Long) textureId);
Object volume = map.get("volume");
fromMapResult.volume = (Double) volume;
return fromMapResult;
}
}
/** Generated class from Pigeon that represents data sent in messages. */
public static class PlaybackSpeedMessage {
private Long textureId;
public Long getTextureId() {
return textureId;
}
public void setTextureId(Long setterArg) {
this.textureId = setterArg;
}
private Double speed;
public Double getSpeed() {
return speed;
}
public void setSpeed(Double setterArg) {
this.speed = setterArg;
}
HashMap toMap() {
HashMap<String, Object> toMapResult = new HashMap<>();
toMapResult.put("textureId", textureId);
toMapResult.put("speed", speed);
return toMapResult;
}
static PlaybackSpeedMessage fromMap(HashMap map) {
PlaybackSpeedMessage fromMapResult = new PlaybackSpeedMessage();
Object textureId = map.get("textureId");
fromMapResult.textureId =
(textureId == null)
? null
: ((textureId instanceof Integer) ? (Integer) textureId : (Long) textureId);
Object speed = map.get("speed");
fromMapResult.speed = (Double) speed;
return fromMapResult;
}
}
/** Generated class from Pigeon that represents data sent in messages. */
public static class PositionMessage {
private Long textureId;
public Long getTextureId() {
return textureId;
}
public void setTextureId(Long setterArg) {
this.textureId = setterArg;
}
private Long position;
public Long getPosition() {
return position;
}
public void setPosition(Long setterArg) {
this.position = setterArg;
}
HashMap toMap() {
HashMap<String, Object> toMapResult = new HashMap<>();
toMapResult.put("textureId", textureId);
toMapResult.put("position", position);
return toMapResult;
}
static PositionMessage fromMap(HashMap map) {
PositionMessage fromMapResult = new PositionMessage();
Object textureId = map.get("textureId");
fromMapResult.textureId =
(textureId == null)
? null
: ((textureId instanceof Integer) ? (Integer) textureId : (Long) textureId);
Object position = map.get("position");
fromMapResult.position =
(position == null)
? null
: ((position instanceof Integer) ? (Integer) position : (Long) position);
return fromMapResult;
}
}
/** Generated class from Pigeon that represents data sent in messages. */
public static class MixWithOthersMessage {
private Boolean mixWithOthers;
public Boolean getMixWithOthers() {
return mixWithOthers;
}
public void setMixWithOthers(Boolean setterArg) {
this.mixWithOthers = setterArg;
}
HashMap toMap() {
HashMap<String, Object> toMapResult = new HashMap<>();
toMapResult.put("mixWithOthers", mixWithOthers);
return toMapResult;
}
static MixWithOthersMessage fromMap(HashMap map) {
MixWithOthersMessage fromMapResult = new MixWithOthersMessage();
Object mixWithOthers = map.get("mixWithOthers");
fromMapResult.mixWithOthers = (Boolean) mixWithOthers;
return fromMapResult;
}
}
/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
public interface VideoPlayerApi {
void initialize();
TextureMessage create(CreateMessage arg);
void dispose(TextureMessage arg);
void setLooping(LoopingMessage arg);
void setVolume(VolumeMessage arg);
void setPlaybackSpeed(PlaybackSpeedMessage arg);
void play(TextureMessage arg);
PositionMessage position(TextureMessage arg);
void seekTo(PositionMessage arg);
void pause(TextureMessage arg);
void setMixWithOthers(MixWithOthersMessage arg);
/** Sets up an instance of `VideoPlayerApi` to handle messages through the `binaryMessenger` */
static void setup(BinaryMessenger binaryMessenger, VideoPlayerApi api) {
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.initialize",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
api.initialize();
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.create",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
CreateMessage input = CreateMessage.fromMap((HashMap) message);
TextureMessage output = api.create(input);
wrapped.put("result", output.toMap());
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.dispose",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
TextureMessage input = TextureMessage.fromMap((HashMap) message);
api.dispose(input);
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.setLooping",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
LoopingMessage input = LoopingMessage.fromMap((HashMap) message);
api.setLooping(input);
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.setVolume",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
VolumeMessage input = VolumeMessage.fromMap((HashMap) message);
api.setVolume(input);
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.setPlaybackSpeed",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
PlaybackSpeedMessage input = PlaybackSpeedMessage.fromMap((HashMap) message);
api.setPlaybackSpeed(input);
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.play",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
TextureMessage input = TextureMessage.fromMap((HashMap) message);
api.play(input);
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.position",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
TextureMessage input = TextureMessage.fromMap((HashMap) message);
PositionMessage output = api.position(input);
wrapped.put("result", output.toMap());
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.seekTo",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
PositionMessage input = PositionMessage.fromMap((HashMap) message);
api.seekTo(input);
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.pause",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
TextureMessage input = TextureMessage.fromMap((HashMap) message);
api.pause(input);
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
{
BasicMessageChannel<Object> channel =
new BasicMessageChannel<>(
binaryMessenger,
"dev.flutter.pigeon.VideoPlayerApi.setMixWithOthers",
new StandardMessageCodec());
if (api != null) {
channel.setMessageHandler(
(message, reply) -> {
HashMap<String, HashMap> wrapped = new HashMap<>();
try {
@SuppressWarnings("ConstantConditions")
MixWithOthersMessage input = MixWithOthersMessage.fromMap((HashMap) message);
api.setMixWithOthers(input);
wrapped.put("result", null);
} catch (Exception exception) {
wrapped.put("error", wrapError(exception));
}
reply.reply(wrapped);
});
} else {
channel.setMessageHandler(null);
}
}
}
}
private static HashMap wrapError(Exception exception) {
HashMap<String, Object> errorMap = new HashMap<>();
errorMap.put("message", exception.toString());
errorMap.put("code", exception.getClass().getSimpleName());
errorMap.put("details", null);
return errorMap;
}
}

QueuingEventSink.java
@@ -0,0 +1,85 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package com.lazyarts.vikram.cached_video_player;
import io.flutter.plugin.common.EventChannel;
import java.util.ArrayList;
/**
* An implementation of {@link EventChannel.EventSink} which can wrap an underlying sink.
*
* <p>It delivers messages immediately when downstream is available, but it queues messages before
* the delegate event sink is set with setDelegate.
*
* <p>This class is not thread-safe. All calls must be done on the same thread or synchronized
* externally.
*/
final class QueuingEventSink implements EventChannel.EventSink {
private EventChannel.EventSink delegate;
private ArrayList<Object> eventQueue = new ArrayList<>();
private boolean done = false;
public void setDelegate(EventChannel.EventSink delegate) {
this.delegate = delegate;
maybeFlush();
}
@Override
public void endOfStream() {
enqueue(new EndOfStreamEvent());
maybeFlush();
done = true;
}
@Override
public void error(String code, String message, Object details) {
enqueue(new ErrorEvent(code, message, details));
maybeFlush();
}
@Override
public void success(Object event) {
enqueue(event);
maybeFlush();
}
private void enqueue(Object event) {
if (done) {
return;
}
eventQueue.add(event);
}
private void maybeFlush() {
if (delegate == null) {
return;
}
for (Object event : eventQueue) {
if (event instanceof EndOfStreamEvent) {
delegate.endOfStream();
} else if (event instanceof ErrorEvent) {
ErrorEvent errorEvent = (ErrorEvent) event;
delegate.error(errorEvent.code, errorEvent.message, errorEvent.details);
} else {
delegate.success(event);
}
}
eventQueue.clear();
}
private static class EndOfStreamEvent {}
private static class ErrorEvent {
String code;
String message;
Object details;
ErrorEvent(String code, String message, Object details) {
this.code = code;
this.message = message;
this.details = details;
}
}
}
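As a rough illustration of the queue-then-flush behaviour described in the class comment, the hypothetical snippet below (not part of this commit; it assumes it sits in the same package and compiles against the Flutter embedding classes) buffers events until a delegate is attached:

```java
package com.lazyarts.vikram.cached_video_player;

import io.flutter.plugin.common.EventChannel;
import java.util.HashMap;
import java.util.Map;

// Hypothetical demo: events sent before setDelegate are queued, not dropped.
class QueuingEventSinkDemo {
  public static void main(String[] args) {
    QueuingEventSink sink = new QueuingEventSink();

    // No delegate yet: both events are buffered in order.
    Map<String, Object> event = new HashMap<>();
    event.put("event", "bufferingStart");
    sink.success(event);
    sink.error("VideoError", "example error", null);

    // Attaching a delegate flushes the queued events in order;
    // later events are delivered straight through.
    sink.setDelegate(new EventChannel.EventSink() {
      @Override
      public void success(Object e) {
        System.out.println("success: " + e);
      }

      @Override
      public void error(String code, String message, Object details) {
        System.out.println("error: " + code + " - " + message);
      }

      @Override
      public void endOfStream() {
        System.out.println("end of stream");
      }
    });
  }
}
```

This mirrors how CachedVideoPlayer can start emitting buffering and initialization events before the Dart side has subscribed to the EventChannel.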

SimpleCacheSingleton.java
@@ -0,0 +1,31 @@
package com.lazyarts.vikram.cached_video_player;
import android.content.Context;
import com.google.android.exoplayer2.database.StandaloneDatabaseProvider;
import com.google.android.exoplayer2.upstream.cache.LeastRecentlyUsedCacheEvictor;
import com.google.android.exoplayer2.upstream.cache.SimpleCache;
import java.io.File;
/** Holds a single shared SimpleCache, since ExoPlayer does not allow two SimpleCache instances to use the same cache directory. */
public class SimpleCacheSingleton {
LeastRecentlyUsedCacheEvictor evictor;
SimpleCache simpleCache;
private static SimpleCacheSingleton instance;
private SimpleCacheSingleton(Context context, long maxCacheSize) {
evictor = new LeastRecentlyUsedCacheEvictor(maxCacheSize);
simpleCache = new SimpleCache(new File(context.getCacheDir(), "media"), evictor, new StandaloneDatabaseProvider(context));
}
public synchronized static SimpleCacheSingleton getInstance(Context context, long maxCacheSize) {
if (instance == null) {
synchronized (SimpleCacheSingleton.class) {
if (instance == null)
instance = new SimpleCacheSingleton(context, maxCacheSize);
}
}
return instance;
}
}

VideoPlayerOptions.java
@@ -0,0 +1,9 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package com.lazyarts.vikram.cached_video_player;
class VideoPlayerOptions {
public boolean mixWithOthers;
}

example/README.md
@@ -0,0 +1,16 @@
# example
A new Flutter project.
## Getting Started
This project is a starting point for a Flutter application.
A few resources to get you started if this is your first Flutter project:
- [Lab: Write your first Flutter app](https://flutter.dev/docs/get-started/codelab)
- [Cookbook: Useful Flutter samples](https://flutter.dev/docs/cookbook)
For help getting started with Flutter, view our
[online documentation](https://flutter.dev/docs), which offers tutorials,
samples, guidance on mobile development, and a full API reference.

example/analysis_options.yaml
@@ -0,0 +1,29 @@
# This file configures the analyzer, which statically analyzes Dart code to
# check for errors, warnings, and lints.
#
# The issues identified by the analyzer are surfaced in the UI of Dart-enabled
# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be
# invoked from the command line by running `flutter analyze`.
# The following line activates a set of recommended lints for Flutter apps,
# packages, and plugins designed to encourage good coding practices.
include: package:flutter_lints/flutter.yaml
linter:
# The lint rules applied to this project can be customized in the
# section below to disable rules from the `package:flutter_lints/flutter.yaml`
# included above or to enable additional rules. A list of all available lints
# and their documentation is published at
# https://dart-lang.github.io/linter/lints/index.html.
#
# Instead of disabling a lint rule for the entire project in the
# section below, it can also be suppressed for a single line of code
# or a specific dart file by using the `// ignore: name_of_lint` and
# `// ignore_for_file: name_of_lint` syntax on the line or in the file
# producing the lint.
rules:
# avoid_print: false # Uncomment to disable the `avoid_print` rule
# prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule
# Additional information about this file can be found at
# https://dart.dev/guides/language/analysis-options

example/android/app/build.gradle
@@ -0,0 +1,69 @@
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
localPropertiesFile.withReader('UTF-8') { reader ->
localProperties.load(reader)
}
}
def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}
def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
if (flutterVersionCode == null) {
flutterVersionCode = '1'
}
def flutterVersionName = localProperties.getProperty('flutter.versionName')
if (flutterVersionName == null) {
flutterVersionName = '1.0'
}
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
android {
compileSdkVersion flutter.compileSdkVersion
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
sourceSets {
main.java.srcDirs += 'src/main/kotlin'
}
defaultConfig {
// TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
applicationId "com.example.example"
minSdkVersion flutter.minSdkVersion
targetSdkVersion flutter.targetSdkVersion
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
}
buildTypes {
release {
// TODO: Add your own signing config for the release build.
// Signing with the debug keys for now, so `flutter run --release` works.
signingConfig signingConfigs.debug
}
}
namespace 'com.example.example'
}
flutter {
source '../..'
}
dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
}

example/android/app/src/debug/AndroidManifest.xml
@@ -0,0 +1,6 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

example/android/app/src/main/AndroidManifest.xml
@@ -0,0 +1,34 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<application
android:label="example"
android:name="${applicationName}"
android:usesCleartextTraffic="true"
android:icon="@mipmap/ic_launcher">
<activity
android:name=".MainActivity"
android:exported="true"
android:launchMode="singleTop"
android:theme="@style/LaunchTheme"
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize">
<!-- Specifies an Android theme to apply to this Activity as soon as
the Android process has started. This theme is visible to the user
while the Flutter UI initializes. After that, this theme continues
to determine the Window background behind the Flutter UI. -->
<meta-data
android:name="io.flutter.embedding.android.NormalTheme"
android:resource="@style/NormalTheme"
/>
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
<meta-data
android:name="flutterEmbedding"
android:value="2" />
</application>
</manifest>

MainActivity.kt
@@ -0,0 +1,6 @@
package com.example.example
import io.flutter.embedding.android.FlutterActivity
class MainActivity: FlutterActivity() {
}

example/android/app/src/main/res/drawable-v21/launch_background.xml
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="?android:colorBackground" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

example/android/app/src/main/res/drawable/launch_background.xml
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@android:color/white" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

Five binary launcher-icon images (mipmap PNGs) not shown: 544 B, 442 B, 721 B, 1.0 KiB, 1.4 KiB.

example/android/app/src/main/res/values-night/styles.xml
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is on -->
<style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
Flutter draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while it's
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Black.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

example/android/app/src/main/res/values/styles.xml
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is off -->
<style name="LaunchTheme" parent="@android:style/Theme.Light.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
Flutter draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

example/android/app/src/profile/AndroidManifest.xml
@@ -0,0 +1,6 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

example/android/build.gradle
@@ -0,0 +1,31 @@
buildscript {
ext.kotlin_version = '1.6.21'
repositories {
google()
mavenCentral()
}
dependencies {
classpath 'com.android.tools.build:gradle:7.4.1'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
allprojects {
repositories {
google()
mavenCentral()
}
}
rootProject.buildDir = '../build'
subprojects {
project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
project.evaluationDependsOn(':app')
}
task clean(type: Delete) {
delete rootProject.buildDir
}

example/android/gradle.properties
@@ -0,0 +1,3 @@
org.gradle.jvmargs=-Xmx1536M
android.useAndroidX=true
android.enableJetifier=true

example/android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
#Fri Jun 23 08:50:38 CEST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-all.zip

example/android/settings.gradle
@@ -0,0 +1,11 @@
include ':app'
def localPropertiesFile = new File(rootProject.projectDir, "local.properties")
def properties = new Properties()
assert localPropertiesFile.exists()
localPropertiesFile.withReader("UTF-8") { reader -> properties.load(reader) }
def flutterSdkPath = properties.getProperty("flutter.sdk")
assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
apply from: "$flutterSdkPath/packages/flutter_tools/gradle/app_plugin_loader.gradle"

example/ios/Flutter/AppFrameworkInfo.plist
@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>App</string>
<key>CFBundleIdentifier</key>
<string>io.flutter.flutter.app</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>App</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>MinimumOSVersion</key>
<string>9.0</string>
</dict>
</plist>

example/ios/Flutter/Debug.xcconfig
@@ -0,0 +1,2 @@
#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"
#include "Generated.xcconfig"

example/ios/Flutter/Release.xcconfig
@@ -0,0 +1,2 @@
#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"
#include "Generated.xcconfig"

example/ios/Podfile
@@ -0,0 +1,41 @@
# Uncomment this line to define a global platform for your project
# platform :ios, '9.0'
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
project 'Runner', {
'Debug' => :debug,
'Profile' => :release,
'Release' => :release,
}
def flutter_root
generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__)
unless File.exist?(generated_xcode_build_settings_path)
raise "#{generated_xcode_build_settings_path} must exist. If you're running pod install manually, make sure flutter pub get is executed first"
end
File.foreach(generated_xcode_build_settings_path) do |line|
matches = line.match(/FLUTTER_ROOT\=(.*)/)
return matches[1].strip if matches
end
raise "FLUTTER_ROOT not found in #{generated_xcode_build_settings_path}. Try deleting Generated.xcconfig, then run flutter pub get"
end
require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root)
flutter_ios_podfile_setup
target 'Runner' do
use_frameworks!
use_modular_headers!
flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
end
post_install do |installer|
installer.pods_project.targets.each do |target|
flutter_additional_ios_build_settings(target)
end
end

example/ios/Runner.xcodeproj/project.pbxproj
@@ -0,0 +1,549 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 51;
objects = {
/* Begin PBXBuildFile section */
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; };
888AFB2CD0B669AF438FB2FE /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A0D554DCCC870A04D6BD1B40 /* Pods_Runner.framework */; };
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
9705A1C41CF9048500538489 /* Embed Frameworks */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 10;
files = (
);
name = "Embed Frameworks";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
111695B2BF834E9096C40712 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = "<group>"; };
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = "<group>"; };
74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
A0D554DCCC870A04D6BD1B40 /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; };
C1F2C88E43A906DF18C9FC06 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
DB37647AFBE68EF062E7119E /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
97C146EB1CF9000F007C117D /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
888AFB2CD0B669AF438FB2FE /* Pods_Runner.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
3A6F0AEA3C7B3AD1DF739CC7 /* Pods */ = {
isa = PBXGroup;
children = (
C1F2C88E43A906DF18C9FC06 /* Pods-Runner.debug.xcconfig */,
111695B2BF834E9096C40712 /* Pods-Runner.release.xcconfig */,
DB37647AFBE68EF062E7119E /* Pods-Runner.profile.xcconfig */,
);
path = Pods;
sourceTree = "<group>";
};
3F60B8DE7547BC9AC4577094 /* Frameworks */ = {
isa = PBXGroup;
children = (
A0D554DCCC870A04D6BD1B40 /* Pods_Runner.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
9740EEB11CF90186004384FC /* Flutter */ = {
isa = PBXGroup;
children = (
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */,
9740EEB21CF90195004384FC /* Debug.xcconfig */,
7AFA3C8E1D35360C0083082E /* Release.xcconfig */,
9740EEB31CF90195004384FC /* Generated.xcconfig */,
);
name = Flutter;
sourceTree = "<group>";
};
97C146E51CF9000F007C117D = {
isa = PBXGroup;
children = (
9740EEB11CF90186004384FC /* Flutter */,
97C146F01CF9000F007C117D /* Runner */,
97C146EF1CF9000F007C117D /* Products */,
3A6F0AEA3C7B3AD1DF739CC7 /* Pods */,
3F60B8DE7547BC9AC4577094 /* Frameworks */,
);
sourceTree = "<group>";
};
97C146EF1CF9000F007C117D /* Products */ = {
isa = PBXGroup;
children = (
97C146EE1CF9000F007C117D /* Runner.app */,
);
name = Products;
sourceTree = "<group>";
};
97C146F01CF9000F007C117D /* Runner */ = {
isa = PBXGroup;
children = (
97C146FA1CF9000F007C117D /* Main.storyboard */,
97C146FD1CF9000F007C117D /* Assets.xcassets */,
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
97C147021CF9000F007C117D /* Info.plist */,
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
74858FAE1ED2DC5600515810 /* AppDelegate.swift */,
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */,
);
path = Runner;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
97C146ED1CF9000F007C117D /* Runner */ = {
isa = PBXNativeTarget;
buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */;
buildPhases = (
297D811D574365C683633399 /* [CP] Check Pods Manifest.lock */,
9740EEB61CF901F6004384FC /* Run Script */,
97C146EA1CF9000F007C117D /* Sources */,
97C146EB1CF9000F007C117D /* Frameworks */,
97C146EC1CF9000F007C117D /* Resources */,
9705A1C41CF9048500538489 /* Embed Frameworks */,
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
64D7A799A1ACBD3B9FDB57C4 /* [CP] Embed Pods Frameworks */,
);
buildRules = (
);
dependencies = (
);
name = Runner;
productName = Runner;
productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
97C146E61CF9000F007C117D /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1300;
ORGANIZATIONNAME = "";
TargetAttributes = {
97C146ED1CF9000F007C117D = {
CreatedOnToolsVersion = 7.3.1;
LastSwiftMigration = 1100;
};
};
};
buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */;
compatibilityVersion = "Xcode 9.3";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 97C146E51CF9000F007C117D;
productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
97C146ED1CF9000F007C117D /* Runner */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
97C146EC1CF9000F007C117D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
297D811D574365C683633399 /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
"${PODS_ROOT}/Manifest.lock",
);
name = "[CP] Check Pods Manifest.lock";
outputFileListPaths = (
);
outputPaths = (
"$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
3B06AD1E1E4923F5004D2608 /* Thin Binary */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Thin Binary";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
};
64D7A799A1ACBD3B9FDB57C4 /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
showEnvVarsInLog = 0;
};
9740EEB61CF901F6004384FC /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Run Script";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
};
/* End PBXShellScriptBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
97C146EA1CF9000F007C117D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */,
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
97C146FA1CF9000F007C117D /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
97C146FB1CF9000F007C117D /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
97C147001CF9000F007C117D /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
249021D3217E4FDB00AE95B9 /* Profile */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SUPPORTED_PLATFORMS = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Profile;
};
249021D4217E4FDB00AE95B9 /* Profile */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.example.example;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Profile;
};
97C147031CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
97C147041CF9000F007C117D /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
SUPPORTED_PLATFORMS = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OPTIMIZATION_LEVEL = "-O";
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
97C147061CF9000F007C117D /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
DEVELOPMENT_TEAM = "";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.example.example;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Debug;
};
97C147071CF9000F007C117D /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
PRODUCT_BUNDLE_IDENTIFIER = com.example.example;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
VERSIONING_SYSTEM = "apple-generic";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
97C147031CF9000F007C117D /* Debug */,
97C147041CF9000F007C117D /* Release */,
249021D3217E4FDB00AE95B9 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
97C147061CF9000F007C117D /* Debug */,
97C147071CF9000F007C117D /* Release */,
249021D4217E4FDB00AE95B9 /* Profile */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 97C146E61CF9000F007C117D /* Project object */;
}


@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>


@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>


@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PreviewsEnabled</key>
<false/>
</dict>
</plist>


@ -0,0 +1,87 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1300"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</MacroExpansion>
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction
buildConfiguration = "Profile"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>


@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>


@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>


@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PreviewsEnabled</key>
<false/>
</dict>
</plist>


@ -0,0 +1,13 @@
import UIKit
import Flutter
@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {
override func application(
_ application: UIApplication,
didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
) -> Bool {
GeneratedPluginRegistrant.register(with: self)
return super.application(application, didFinishLaunchingWithOptions: launchOptions)
}
}


@ -0,0 +1,122 @@
{
"images" : [
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@3x.png",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@3x.png",
"scale" : "3x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@3x.png",
"scale" : "3x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@3x.png",
"scale" : "3x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@1x.png",
"scale" : "1x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@1x.png",
"scale" : "1x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@1x.png",
"scale" : "1x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@2x.png",
"scale" : "2x"
},
{
"size" : "83.5x83.5",
"idiom" : "ipad",
"filename" : "Icon-App-83.5x83.5@2x.png",
"scale" : "2x"
},
{
"size" : "1024x1024",
"idiom" : "ios-marketing",
"filename" : "Icon-App-1024x1024@1x.png",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary files not shown (15 app-icon PNGs matching the Icon-App-* filenames listed in the asset catalog above; 564 B–11 KiB).


@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "LaunchImage.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "LaunchImage@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "LaunchImage@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary files not shown (LaunchImage.png, LaunchImage@2x.png and LaunchImage@3x.png placeholders, 68 B each).


@ -0,0 +1,5 @@
# Launch Screen Assets
You can customize the launch screen with your own desired assets by replacing the image files in this directory.
You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images.


@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Ydg-fD-yQy"/>
<viewControllerLayoutGuide type="bottom" id="xbc-2k-c8Z"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<imageView opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" image="LaunchImage" translatesAutoresizingMaskIntoConstraints="NO" id="YRO-k0-Ey4">
</imageView>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="YRO-k0-Ey4" firstAttribute="centerX" secondItem="Ze5-6b-2t3" secondAttribute="centerX" id="1a2-6s-vTC"/>
<constraint firstItem="YRO-k0-Ey4" firstAttribute="centerY" secondItem="Ze5-6b-2t3" secondAttribute="centerY" id="4X2-HB-R7a"/>
</constraints>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
<resources>
<image name="LaunchImage" width="168" height="185"/>
</resources>
</document>


@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
</dependencies>
<scenes>
<!--Flutter View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="FlutterViewController" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
</document>


@ -0,0 +1,54 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>$(DEVELOPMENT_LANGUAGE)</string>
<key>CFBundleDisplayName</key>
<string>Example</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>example</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>$(FLUTTER_BUILD_NAME)</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>$(FLUTTER_BUILD_NUMBER)</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
<key>CADisableMinimumFrameDurationOnPhone</key>
<true/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
</dict>
</plist>


@ -0,0 +1 @@
#import "GeneratedPluginRegistrant.h"

86
example/lib/main.dart Normal file

@ -0,0 +1,86 @@
import 'package:flutter/material.dart';
import 'package:cached_video_player/cached_video_player.dart';
void main() {
runApp(const MyApp());
}
class MyApp extends StatelessWidget {
const MyApp({Key? key}) : super(key: key);
// This widget is the root of your application.
@override
Widget build(BuildContext context) {
return MaterialApp(
title: 'Flutter Demo',
theme: ThemeData(
// This is the theme of your application.
//
// Try running your application with "flutter run". You'll see the
// application has a blue toolbar. Then, without quitting the app, try
// changing the primarySwatch below to Colors.green and then invoke
// "hot reload" (press "r" in the console where you ran "flutter run",
// or simply save your changes to "hot reload" in a Flutter IDE).
        // Notice that the app keeps its state (the video keeps playing); the
        // application is not restarted.
primarySwatch: Colors.blue,
),
home: const MyHomePage(title: 'Flutter Demo Home Page'),
);
}
}
class MyHomePage extends StatefulWidget {
const MyHomePage({Key? key, required this.title}) : super(key: key);
// This widget is the home page of your application. It is stateful, meaning
// that it has a State object (defined below) that contains fields that affect
// how it looks.
// This class is the configuration for the state. It holds the values (in this
// case the title) provided by the parent (in this case the App widget) and
// used by the build method of the State. Fields in a Widget subclass are
// always marked "final".
final String title;
@override
State<MyHomePage> createState() => _MyHomePageState();
}
class _MyHomePageState extends State<MyHomePage> {
late CachedVideoPlayerController controller;
@override
void initState() {
controller = CachedVideoPlayerController.network(
"http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4");
controller.initialize().then((value) {
controller.play();
setState(() {});
});
    super.initState();
  }

  @override
  void dispose() {
    // The controller holds a native platform player; release it when this
    // State is removed from the tree.
    controller.dispose();
    super.dispose();
  }
@override
Widget build(BuildContext context) {
    // This method is rerun every time setState is called, for instance once the
    // video controller reports that it has finished initializing.
//
// The Flutter framework has been optimized to make rerunning build methods
// fast, so that you can just rebuild anything that needs updating rather
// than having to individually change instances of widgets.
return Scaffold(
appBar: AppBar(
// Here we take the value from the MyHomePage object that was created by
// the App.build method, and use it to set our appbar title.
title: Text(widget.title),
),
body: Center(
child: controller.value.isInitialized
? AspectRatio(
aspectRatio: controller.value.aspectRatio,
child: CachedVideoPlayer(controller))
: const CircularProgressIndicator()), // This trailing comma makes auto-formatting nicer for build methods.
);
}
}

90
example/pubspec.yaml Normal file

@ -0,0 +1,90 @@
name: example
description: A new Flutter project.
# The following line prevents the package from being accidentally published to
# pub.dev using `flutter pub publish`. This is preferred for private packages.
publish_to: 'none' # Remove this line if you wish to publish to pub.dev
# The following defines the version and build number for your application.
# A version number is three numbers separated by dots, like 1.2.43
# followed by an optional build number separated by a +.
# Both the version and the build number may be overridden in flutter
# build by specifying --build-name and --build-number, respectively.
# In Android, build-name is used as versionName while build-number is used as versionCode.
# Read more about Android versioning at https://developer.android.com/studio/publish/versioning
# In iOS, build-name is used as CFBundleShortVersionString while build-number is used as CFBundleVersion.
# Read more about iOS versioning at
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
version: 1.0.0+1
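# As a concrete (hypothetical) example of the flags described above, the version
# and build number can be overridden at build time without editing this file:
#   flutter build apk --build-name=1.0.1 --build-number=2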
environment:
sdk: ">=2.16.2 <3.0.0"
# Dependencies specify other packages that your package needs in order to work.
# To automatically upgrade your package dependencies to the latest versions
# consider running `flutter pub upgrade --major-versions`. Alternatively,
# dependencies can be manually updated by changing the version numbers below to
# the latest version available on pub.dev. To see which dependencies have newer
# versions available, run `flutter pub outdated`.
dependencies:
flutter:
sdk: flutter
cached_video_player:
path: ../
# The following adds the Cupertino Icons font to your application.
# Use with the CupertinoIcons class for iOS style icons.
cupertino_icons: ^1.0.2
dev_dependencies:
flutter_test:
sdk: flutter
# The "flutter_lints" package below contains a set of recommended lints to
# encourage good coding practices. The lint set provided by the package is
# activated in the `analysis_options.yaml` file located at the root of your
# package. See that file for information about deactivating specific lint
# rules and activating additional ones.
flutter_lints: ^1.0.0
# For information on the generic Dart part of this file, see the
# following page: https://dart.dev/tools/pub/pubspec
# The following section is specific to Flutter.
flutter:
# The following line ensures that the Material Icons font is
# included with your application, so that you can use the icons in
# the material Icons class.
uses-material-design: true
# To add assets to your application, add an assets section, like this:
# assets:
# - images/a_dot_burr.jpeg
# - images/a_dot_ham.jpeg
# An image asset can refer to one or more resolution-specific "variants", see
# https://flutter.dev/assets-and-images/#resolution-aware.
# For details regarding adding assets from package dependencies, see
# https://flutter.dev/assets-and-images/#from-packages
# To add custom fonts to your application, add a fonts section here,
# in this "flutter" section. Each entry in this list should have a
# "family" key with the font family name, and a "fonts" key with a
# list giving the asset and other descriptors for the font. For
# example:
# fonts:
# - family: Schyler
# fonts:
# - asset: fonts/Schyler-Regular.ttf
# - asset: fonts/Schyler-Italic.ttf
# style: italic
# - family: Trajan Pro
# fonts:
# - asset: fonts/TrajanPro.ttf
# - asset: fonts/TrajanPro_Bold.ttf
# weight: 700
#
# For details regarding fonts from package dependencies,
# see https://flutter.dev/custom-fonts/#from-packages


@ -0,0 +1,8 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import <Flutter/Flutter.h>
@interface CachedVideoPlayerPlugin : NSObject <FlutterPlugin>
@end


@ -0,0 +1,622 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "CachedVideoPlayerPlugin.h"
#import <AVFoundation/AVFoundation.h>
#import <GLKit/GLKit.h>
#import "messages.h"
#import <KTVHTTPCache/KTVHTTPCache.h>
#if !__has_feature(objc_arc)
#error Code Requires ARC.
#endif
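// Converts a CMTime value to integer milliseconds, returning 0 when the
// timescale is invalid (zero).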
int64_t CachedCMTimeToMillis(CMTime time) {
if (time.timescale == 0) return 0;
return time.value * 1000 / time.timescale;
}
@interface CachedFrameUpdater : NSObject
@property(nonatomic) int64_t textureId;
@property(nonatomic, weak, readonly) NSObject<FlutterTextureRegistry>* registry;
- (void)onDisplayLink:(CADisplayLink*)link;
@end
@implementation CachedFrameUpdater
- (CachedFrameUpdater*)initWithRegistry:(NSObject<FlutterTextureRegistry>*)registry {
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  if (self == nil) return nil;
_registry = registry;
return self;
}
- (void)onDisplayLink:(CADisplayLink*)link {
[_registry textureFrameAvailable:_textureId];
}
@end
@interface CachedVideoPlayer : NSObject <FlutterTexture, FlutterStreamHandler>
@property(readonly, nonatomic) AVPlayer* player;
@property(readonly, nonatomic) AVPlayerItemVideoOutput* videoOutput;
@property(readonly, nonatomic) CADisplayLink* displayLink;
@property(nonatomic) FlutterEventChannel* eventChannel;
@property(nonatomic) FlutterEventSink eventSink;
@property(nonatomic) CGAffineTransform preferredTransform;
@property(nonatomic, readonly) bool disposed;
@property(nonatomic, readonly) bool isPlaying;
@property(nonatomic) bool isLooping;
@property(nonatomic, readonly) bool isInitialized;
- (instancetype)initWithURL:(NSURL*)url
frameUpdater:(CachedFrameUpdater*)frameUpdater
httpHeaders:(NSDictionary<NSString*, NSString*>*)headers;
- (void)play;
- (void)pause;
- (void)setIsLooping:(bool)isLooping;
- (void)updatePlayingState;
@end
static void* timeRangeContext = &timeRangeContext;
static void* statusContext = &statusContext;
static void* playbackLikelyToKeepUpContext = &playbackLikelyToKeepUpContext;
static void* playbackBufferEmptyContext = &playbackBufferEmptyContext;
static void* playbackBufferFullContext = &playbackBufferFullContext;
@implementation CachedVideoPlayer
- (instancetype)initWithAsset:(NSString*)asset frameUpdater:(CachedFrameUpdater*)frameUpdater {
NSString* path = [[NSBundle mainBundle] pathForResource:asset ofType:nil];
return [self initWithURL:[NSURL fileURLWithPath:path] frameUpdater:frameUpdater httpHeaders:nil];
}
- (void)addObservers:(AVPlayerItem*)item {
[item addObserver:self
forKeyPath:@"loadedTimeRanges"
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:timeRangeContext];
[item addObserver:self
forKeyPath:@"status"
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:statusContext];
[item addObserver:self
forKeyPath:@"playbackLikelyToKeepUp"
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:playbackLikelyToKeepUpContext];
[item addObserver:self
forKeyPath:@"playbackBufferEmpty"
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:playbackBufferEmptyContext];
[item addObserver:self
forKeyPath:@"playbackBufferFull"
options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
context:playbackBufferFullContext];
// Add an observer that will respond to itemDidPlayToEndTime
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(itemDidPlayToEndTime:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:item];
}
- (void)itemDidPlayToEndTime:(NSNotification*)notification {
if (_isLooping) {
AVPlayerItem* p = [notification object];
[p seekToTime:kCMTimeZero completionHandler:nil];
} else {
if (_eventSink) {
_eventSink(@{@"event" : @"completed"});
}
}
}
static inline CGFloat radiansToDegrees(CGFloat radians) {
// Input range [-pi, pi] or [-180, 180]
CGFloat degrees = GLKMathRadiansToDegrees((float)radians);
if (degrees < 0) {
// Convert -90 to 270 and -180 to 180
return degrees + 360;
}
// Output degrees in between [0, 360[
return degrees;
}
- (AVMutableVideoComposition*)getVideoCompositionWithTransform:(CGAffineTransform)transform
withAsset:(AVAsset*)asset
withVideoTrack:(AVAssetTrack*)videoTrack {
AVMutableVideoCompositionInstruction* instruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVMutableVideoCompositionLayerInstruction* layerInstruction =
[AVMutableVideoCompositionLayerInstruction
videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInstruction setTransform:_preferredTransform atTime:kCMTimeZero];
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
instruction.layerInstructions = @[ layerInstruction ];
videoComposition.instructions = @[ instruction ];
// If in portrait mode, switch the width and height of the video
CGFloat width = videoTrack.naturalSize.width;
CGFloat height = videoTrack.naturalSize.height;
NSInteger rotationDegrees =
(NSInteger)round(radiansToDegrees(atan2(_preferredTransform.b, _preferredTransform.a)));
if (rotationDegrees == 90 || rotationDegrees == 270) {
width = videoTrack.naturalSize.height;
height = videoTrack.naturalSize.width;
}
videoComposition.renderSize = CGSizeMake(width, height);
// TODO(@recastrodiaz): should we use videoTrack.nominalFrameRate ?
// Currently set at a constant 30 FPS
videoComposition.frameDuration = CMTimeMake(1, 30);
return videoComposition;
}
- (void)createVideoOutputAndDisplayLink:(CachedFrameUpdater*)frameUpdater {
NSDictionary* pixBuffAttributes = @{
(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferIOSurfacePropertiesKey : @{}
};
_videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
_displayLink = [CADisplayLink displayLinkWithTarget:frameUpdater
selector:@selector(onDisplayLink:)];
[_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
_displayLink.paused = YES;
}
- (instancetype)initWithURL:(NSURL*)url
frameUpdater:(CachedFrameUpdater*)frameUpdater
httpHeaders:(NSDictionary<NSString*, NSString*>*)headers {
NSDictionary<NSString*, id>* options = nil;
if (headers != nil && [headers count] != 0) {
options = @{@"AVURLAssetHTTPHeaderFieldsKey" : headers};
}
AVURLAsset* urlAsset = [AVURLAsset URLAssetWithURL:url options:options];
AVPlayerItem* item = [AVPlayerItem playerItemWithAsset:urlAsset];
return [self initWithPlayerItem:item frameUpdater:frameUpdater];
}
- (CGAffineTransform)fixTransform:(AVAssetTrack*)videoTrack {
CGAffineTransform transform = videoTrack.preferredTransform;
// TODO(@recastrodiaz): why do we need to do this? Why is the preferredTransform incorrect?
  // At least 2 user videos show a black screen when in portrait mode if we directly use the
  // videoTrack.preferredTransform. Setting tx to the height of the video instead of 0 properly
  // displays the video: https://github.com/flutter/flutter/issues/17606#issuecomment-413473181
if (transform.tx == 0 && transform.ty == 0) {
NSInteger rotationDegrees = (NSInteger)round(radiansToDegrees(atan2(transform.b, transform.a)));
NSLog(@"TX and TY are 0. Rotation: %ld. Natural width,height: %f, %f", (long)rotationDegrees,
videoTrack.naturalSize.width, videoTrack.naturalSize.height);
if (rotationDegrees == 90) {
NSLog(@"Setting transform tx");
transform.tx = videoTrack.naturalSize.height;
transform.ty = 0;
} else if (rotationDegrees == 270) {
NSLog(@"Setting transform ty");
transform.tx = 0;
transform.ty = videoTrack.naturalSize.width;
}
}
return transform;
}
- (instancetype)initWithPlayerItem:(AVPlayerItem*)item frameUpdater:(CachedFrameUpdater*)frameUpdater {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_isInitialized = false;
_isPlaying = false;
_disposed = false;
AVAsset* asset = [item asset];
void (^assetCompletionHandler)(void) = ^{
if ([asset statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
NSArray* tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if ([tracks count] > 0) {
AVAssetTrack* videoTrack = tracks[0];
void (^trackCompletionHandler)(void) = ^{
if (self->_disposed) return;
if ([videoTrack statusOfValueForKey:@"preferredTransform"
error:nil] == AVKeyValueStatusLoaded) {
// Rotate the video by using a videoComposition and the preferredTransform
self->_preferredTransform = [self fixTransform:videoTrack];
// Note:
// https://developer.apple.com/documentation/avfoundation/avplayeritem/1388818-videocomposition
// Video composition can only be used with file-based media and is not supported for
// use with media served using HTTP Live Streaming.
AVMutableVideoComposition* videoComposition =
[self getVideoCompositionWithTransform:self->_preferredTransform
withAsset:asset
withVideoTrack:videoTrack];
item.videoComposition = videoComposition;
}
};
[videoTrack loadValuesAsynchronouslyForKeys:@[ @"preferredTransform" ]
completionHandler:trackCompletionHandler];
}
}
};
_player = [AVPlayer playerWithPlayerItem:item];
_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
[self createVideoOutputAndDisplayLink:frameUpdater];
[self addObservers:item];
[asset loadValuesAsynchronouslyForKeys:@[ @"tracks" ] completionHandler:assetCompletionHandler];
return self;
}
- (void)observeValueForKeyPath:(NSString*)path
ofObject:(id)object
change:(NSDictionary*)change
context:(void*)context {
if (context == timeRangeContext) {
if (_eventSink != nil) {
NSMutableArray<NSArray<NSNumber*>*>* values = [[NSMutableArray alloc] init];
for (NSValue* rangeValue in [object loadedTimeRanges]) {
CMTimeRange range = [rangeValue CMTimeRangeValue];
int64_t start = CachedCMTimeToMillis(range.start);
[values addObject:@[ @(start), @(start + CachedCMTimeToMillis(range.duration)) ]];
}
_eventSink(@{@"event" : @"bufferingUpdate", @"values" : values});
}
} else if (context == statusContext) {
AVPlayerItem* item = (AVPlayerItem*)object;
switch (item.status) {
case AVPlayerItemStatusFailed:
if (_eventSink != nil) {
_eventSink([FlutterError
errorWithCode:@"VideoError"
message:[@"Failed to load video: "
stringByAppendingString:[item.error localizedDescription]]
details:nil]);
}
break;
case AVPlayerItemStatusUnknown:
break;
case AVPlayerItemStatusReadyToPlay:
[item addOutput:_videoOutput];
[self sendInitialized];
[self updatePlayingState];
break;
}
} else if (context == playbackLikelyToKeepUpContext) {
if ([[_player currentItem] isPlaybackLikelyToKeepUp]) {
[self updatePlayingState];
if (_eventSink != nil) {
_eventSink(@{@"event" : @"bufferingEnd"});
}
}
} else if (context == playbackBufferEmptyContext) {
if (_eventSink != nil) {
_eventSink(@{@"event" : @"bufferingStart"});
}
} else if (context == playbackBufferFullContext) {
if (_eventSink != nil) {
_eventSink(@{@"event" : @"bufferingEnd"});
}
}
}
- (void)updatePlayingState {
if (!_isInitialized) {
return;
}
if (_isPlaying) {
[_player play];
} else {
[_player pause];
}
_displayLink.paused = !_isPlaying;
}
- (void)sendInitialized {
if (_eventSink && !_isInitialized) {
CGSize size = [self.player currentItem].presentationSize;
CGFloat width = size.width;
CGFloat height = size.height;
// The player has not yet initialized.
if (height == CGSizeZero.height && width == CGSizeZero.width) {
return;
}
// The player may be initialized but still needs to determine the duration.
if ([self duration] == 0) {
return;
}
_isInitialized = true;
_eventSink(@{
@"event" : @"initialized",
@"duration" : @([self duration]),
@"width" : @(width),
@"height" : @(height)
});
}
}
- (void)play {
_isPlaying = true;
[self updatePlayingState];
}
- (void)pause {
_isPlaying = false;
[self updatePlayingState];
}
- (int64_t)position {
return CachedCMTimeToMillis([_player currentTime]);
}
- (int64_t)duration {
return CachedCMTimeToMillis([[_player currentItem] duration]);
}
- (void)seekTo:(int)location {
[_player seekToTime:CMTimeMake(location, 1000)
toleranceBefore:kCMTimeZero
toleranceAfter:kCMTimeZero];
}
- (void)setIsLooping:(bool)isLooping {
_isLooping = isLooping;
}
- (void)setVolume:(double)volume {
_player.volume = (float)((volume < 0.0) ? 0.0 : ((volume > 1.0) ? 1.0 : volume));
}
- (void)setPlaybackSpeed:(double)speed {
// See https://developer.apple.com/library/archive/qa/qa1772/_index.html for an explanation of
// these checks.
if (speed > 2.0 && !_player.currentItem.canPlayFastForward) {
if (_eventSink != nil) {
_eventSink([FlutterError errorWithCode:@"VideoError"
message:@"Video cannot be fast-forwarded beyond 2.0x"
details:nil]);
}
return;
}
if (speed < 1.0 && !_player.currentItem.canPlaySlowForward) {
if (_eventSink != nil) {
_eventSink([FlutterError errorWithCode:@"VideoError"
message:@"Video cannot be slow-forwarded"
details:nil]);
}
return;
}
_player.rate = speed;
}
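// FlutterTexture callback: returns the pixel buffer for the current item time
// when a new frame is available, otherwise NULL.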
- (CVPixelBufferRef)copyPixelBuffer {
CMTime outputItemTime = [_videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if ([_videoOutput hasNewPixelBufferForItemTime:outputItemTime]) {
return [_videoOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
} else {
return NULL;
}
}
- (void)onTextureUnregistered:(NSObject<FlutterTexture>*)texture {
dispatch_async(dispatch_get_main_queue(), ^{
[self dispose];
});
}
- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments {
_eventSink = nil;
return nil;
}
- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments
eventSink:(nonnull FlutterEventSink)events {
_eventSink = events;
// TODO(@recastrodiaz): remove the line below when the race condition is resolved:
// https://github.com/flutter/flutter/issues/21483
  // This line ensures the 'initialized' event is sent when the
  // 'AVPlayerItemStatusReadyToPlay' event fires before _eventSink is set (i.e. before
  // this onListenWithArguments function is called).
[self sendInitialized];
return nil;
}
/// This method allows you to dispose without touching the event channel. This
/// is useful for the case where the Engine is in the process of deconstruction
/// so the channel is going to die or is already dead.
- (void)disposeSansEventChannel {
_disposed = true;
[_displayLink invalidate];
[[_player currentItem] removeObserver:self forKeyPath:@"status" context:statusContext];
[[_player currentItem] removeObserver:self
forKeyPath:@"loadedTimeRanges"
context:timeRangeContext];
[[_player currentItem] removeObserver:self
forKeyPath:@"playbackLikelyToKeepUp"
context:playbackLikelyToKeepUpContext];
[[_player currentItem] removeObserver:self
forKeyPath:@"playbackBufferEmpty"
context:playbackBufferEmptyContext];
[[_player currentItem] removeObserver:self
forKeyPath:@"playbackBufferFull"
context:playbackBufferFullContext];
[_player replaceCurrentItemWithPlayerItem:nil];
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
- (void)dispose {
[self disposeSansEventChannel];
[_eventChannel setStreamHandler:nil];
}
@end
@interface CachedVideoPlayerPlugin () <CachedVideoPlayerApi>
@property(readonly, weak, nonatomic) NSObject<FlutterTextureRegistry>* registry;
@property(readonly, weak, nonatomic) NSObject<FlutterBinaryMessenger>* messenger;
@property(readonly, strong, nonatomic) NSMutableDictionary* players;
@property(readonly, strong, nonatomic) NSObject<FlutterPluginRegistrar>* registrar;
@end
@implementation CachedVideoPlayerPlugin
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
CachedVideoPlayerPlugin* instance = [[CachedVideoPlayerPlugin alloc] initWithRegistrar:registrar];
[registrar publish:instance];
CachedVideoPlayerApiSetup(registrar.messenger, instance);
}
- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
self = [super init];
NSAssert(self, @"super init cannot be nil");
[KTVHTTPCache proxyStart:nil];
_registry = [registrar textures];
_messenger = [registrar messenger];
_registrar = registrar;
_players = [NSMutableDictionary dictionaryWithCapacity:1];
return self;
}
- (void)detachFromEngineForRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
for (NSNumber* textureId in _players.allKeys) {
CachedVideoPlayer* player = _players[textureId];
[player disposeSansEventChannel];
}
[_players removeAllObjects];
// TODO(57151): This should be commented out when 57151's fix lands on stable.
  // This is the correct behavior, but we never did it in the past and the engine
  // doesn't currently support it.
// CachedVideoPlayerApiSetup(registrar.messenger, nil);
}
- (CachedTextureMessage*)onPlayerSetup:(CachedVideoPlayer*)player
frameUpdater:(CachedFrameUpdater*)frameUpdater {
int64_t textureId = [_registry registerTexture:player];
frameUpdater.textureId = textureId;
FlutterEventChannel* eventChannel = [FlutterEventChannel
eventChannelWithName:[NSString stringWithFormat:@"flutter.io/videoPlayer/videoEvents%lld",
textureId]
binaryMessenger:_messenger];
[eventChannel setStreamHandler:player];
player.eventChannel = eventChannel;
_players[@(textureId)] = player;
CachedTextureMessage* result = [[CachedTextureMessage alloc] init];
result.textureId = @(textureId);
return result;
}
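// Pigeon `initialize` call from the Dart side: configures the shared audio
// session for playback and disposes any players left over from a previous run
// (for example after a hot restart).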
- (void)initialize:(FlutterError* __autoreleasing*)error {
// Allow audio playback when the Ring/Silent switch is set to silent
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
for (NSNumber* textureId in _players) {
[_registry unregisterTexture:[textureId unsignedIntegerValue]];
[_players[textureId] dispose];
}
[_players removeAllObjects];
}
- (CachedTextureMessage*)create:(CachedCreateMessage*)input error:(FlutterError**)error {
CachedFrameUpdater* frameUpdater = [[CachedFrameUpdater alloc] initWithRegistry:_registry];
CachedVideoPlayer* player;
if (input.asset) {
NSString* assetPath;
if (input.packageName) {
assetPath = [_registrar lookupKeyForAsset:input.asset fromPackage:input.packageName];
} else {
assetPath = [_registrar lookupKeyForAsset:input.asset];
}
player = [[CachedVideoPlayer alloc] initWithAsset:assetPath frameUpdater:frameUpdater];
return [self onPlayerSetup:player frameUpdater:frameUpdater];
} else if (input.uri) {
// player = [[CachedVideoPlayer alloc] initWithURL:[NSURL URLWithString:input.uri]
NSURL *usedURL = [NSURL URLWithString:input.uri];
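    // Non-HLS URLs are rewritten to point at the local KTVHTTPCache proxy so the
    // downloaded bytes are cached on disk; when formatHint is "hls" the original
    // URL is used unchanged and caching is skipped.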
    if (![input.formatHint isEqual:@"hls"]) {
usedURL = [KTVHTTPCache proxyURLWithOriginalURL:usedURL];
[KTVHTTPCache downloadSetAdditionalHeaders:input.httpHeaders];
}
player = [[CachedVideoPlayer alloc] initWithURL:usedURL
frameUpdater:frameUpdater
httpHeaders:input.httpHeaders];
return [self onPlayerSetup:player frameUpdater:frameUpdater];
} else {
*error = [FlutterError errorWithCode:@"video_player" message:@"not implemented" details:nil];
return nil;
}
}
- (void)dispose:(CachedTextureMessage*)input error:(FlutterError**)error {
CachedVideoPlayer* player = _players[input.textureId];
[_registry unregisterTexture:input.textureId.intValue];
[_players removeObjectForKey:input.textureId];
// If the Flutter engine contains https://github.com/flutter/engine/pull/12695,
// the `player` is disposed via `onTextureUnregistered` at the right time.
// Without https://github.com/flutter/engine/pull/12695, there is no guarantee that the
// texture has completed unregistration. It may lead to a crash if we dispose the
// `player` before the texture is unregistered. We add a dispatch_after hack to make sure the
// texture is unregistered before we dispose the `player`.
//
// TODO(cyanglaz): Remove this dispatch block when
// https://github.com/flutter/flutter/commit/8159a9906095efc9af8b223f5e232cb63542ad0b is in
// stable, and update the minimum Flutter version of the plugin to that stable version.
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC)),
dispatch_get_main_queue(), ^{
if (!player.disposed) {
[player dispose];
}
});
}
- (void)setLooping:(CachedLoopingMessage*)input error:(FlutterError**)error {
CachedVideoPlayer* player = _players[input.textureId];
[player setIsLooping:[input.isLooping boolValue]];
}
- (void)setVolume:(CachedVolumeMessage*)input error:(FlutterError**)error {
CachedVideoPlayer* player = _players[input.textureId];
[player setVolume:[input.volume doubleValue]];
}
- (void)setPlaybackSpeed:(CachedPlaybackSpeedMessage*)input error:(FlutterError**)error {
CachedVideoPlayer* player = _players[input.textureId];
[player setPlaybackSpeed:[input.speed doubleValue]];
}
- (void)play:(CachedTextureMessage*)input error:(FlutterError**)error {
CachedVideoPlayer* player = _players[input.textureId];
[player play];
}
- (CachedPositionMessage*)position:(CachedTextureMessage*)input error:(FlutterError**)error {
CachedVideoPlayer* player = _players[input.textureId];
CachedPositionMessage* result = [[CachedPositionMessage alloc] init];
result.position = @([player position]);
return result;
}
- (void)seekTo:(CachedPositionMessage*)input error:(FlutterError**)error {
CachedVideoPlayer* player = _players[input.textureId];
[player seekTo:[input.position intValue]];
}
- (void)pause:(CachedTextureMessage*)input error:(FlutterError**)error {
CachedVideoPlayer* player = _players[input.textureId];
[player pause];
}
- (void)setMixWithOthers:(CachedMixWithOthersMessage*)input
error:(FlutterError* _Nullable __autoreleasing*)error {
if ([input.mixWithOthers boolValue]) {
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback
withOptions:AVAudioSessionCategoryOptionMixWithOthers
error:nil];
} else {
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
}
}
@end

79
ios/Classes/messages.h Normal file
View File

@ -0,0 +1,79 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Autogenerated from Pigeon (v0.1.21), do not edit directly.
// See also: https://pub.dev/packages/pigeon
#import <Foundation/Foundation.h>
@protocol FlutterBinaryMessenger;
@class FlutterError;
@class FlutterStandardTypedData;
NS_ASSUME_NONNULL_BEGIN
@class CachedTextureMessage;
@class CachedCreateMessage;
@class CachedLoopingMessage;
@class CachedVolumeMessage;
@class CachedPlaybackSpeedMessage;
@class CachedPositionMessage;
@class CachedMixWithOthersMessage;
@interface CachedTextureMessage : NSObject
@property(nonatomic, strong, nullable) NSNumber *textureId;
@end
@interface CachedCreateMessage : NSObject
@property(nonatomic, copy, nullable) NSString *asset;
@property(nonatomic, copy, nullable) NSString *uri;
@property(nonatomic, copy, nullable) NSString *packageName;
@property(nonatomic, copy, nullable) NSString *formatHint;
@property(nonatomic, strong, nullable) NSDictionary *httpHeaders;
@end
@interface CachedLoopingMessage : NSObject
@property(nonatomic, strong, nullable) NSNumber *textureId;
@property(nonatomic, strong, nullable) NSNumber *isLooping;
@end
@interface CachedVolumeMessage : NSObject
@property(nonatomic, strong, nullable) NSNumber *textureId;
@property(nonatomic, strong, nullable) NSNumber *volume;
@end
@interface CachedPlaybackSpeedMessage : NSObject
@property(nonatomic, strong, nullable) NSNumber *textureId;
@property(nonatomic, strong, nullable) NSNumber *speed;
@end
@interface CachedPositionMessage : NSObject
@property(nonatomic, strong, nullable) NSNumber *textureId;
@property(nonatomic, strong, nullable) NSNumber *position;
@end
@interface CachedMixWithOthersMessage : NSObject
@property(nonatomic, strong, nullable) NSNumber *mixWithOthers;
@end
@protocol CachedVideoPlayerApi
- (void)initialize:(FlutterError *_Nullable *_Nonnull)error;
- (nullable CachedTextureMessage *)create:(CachedCreateMessage *)input
error:(FlutterError *_Nullable *_Nonnull)error;
- (void)dispose:(CachedTextureMessage *)input error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setLooping:(CachedLoopingMessage *)input error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setVolume:(CachedVolumeMessage *)input error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setPlaybackSpeed:(CachedPlaybackSpeedMessage *)input
error:(FlutterError *_Nullable *_Nonnull)error;
- (void)play:(CachedTextureMessage *)input error:(FlutterError *_Nullable *_Nonnull)error;
- (nullable CachedPositionMessage *)position:(CachedTextureMessage *)input
error:(FlutterError *_Nullable *_Nonnull)error;
- (void)seekTo:(CachedPositionMessage *)input error:(FlutterError *_Nullable *_Nonnull)error;
- (void)pause:(CachedTextureMessage *)input error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setMixWithOthers:(CachedMixWithOthersMessage *)input
error:(FlutterError *_Nullable *_Nonnull)error;
@end
extern void CachedVideoPlayerApiSetup(id<FlutterBinaryMessenger> binaryMessenger,
id<CachedVideoPlayerApi> _Nullable api);
NS_ASSUME_NONNULL_END

375
ios/Classes/messages.m Normal file
View File

@ -0,0 +1,375 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Autogenerated from Pigeon (v0.1.21), do not edit directly.
// See also: https://pub.dev/packages/pigeon
#import "messages.h"
#import <Flutter/Flutter.h>
#if !__has_feature(objc_arc)
#error File requires ARC to be enabled.
#endif
static NSDictionary<NSString *, id> *wrapResult(NSDictionary *result, FlutterError *error) {
NSDictionary *errorDict = (NSDictionary *)[NSNull null];
if (error) {
errorDict = @{
@"code" : (error.code ? error.code : [NSNull null]),
@"message" : (error.message ? error.message : [NSNull null]),
@"details" : (error.details ? error.details : [NSNull null]),
};
}
return @{
@"result" : (result ? result : [NSNull null]),
@"error" : errorDict,
};
}
@interface CachedTextureMessage ()
+ (CachedTextureMessage *)fromMap:(NSDictionary *)dict;
- (NSDictionary *)toMap;
@end
@interface CachedCreateMessage ()
+ (CachedCreateMessage *)fromMap:(NSDictionary *)dict;
- (NSDictionary *)toMap;
@end
@interface CachedLoopingMessage ()
+ (CachedLoopingMessage *)fromMap:(NSDictionary *)dict;
- (NSDictionary *)toMap;
@end
@interface CachedVolumeMessage ()
+ (CachedVolumeMessage *)fromMap:(NSDictionary *)dict;
- (NSDictionary *)toMap;
@end
@interface CachedPlaybackSpeedMessage ()
+ (CachedPlaybackSpeedMessage *)fromMap:(NSDictionary *)dict;
- (NSDictionary *)toMap;
@end
@interface CachedPositionMessage ()
+ (CachedPositionMessage *)fromMap:(NSDictionary *)dict;
- (NSDictionary *)toMap;
@end
@interface CachedMixWithOthersMessage ()
+ (CachedMixWithOthersMessage *)fromMap:(NSDictionary *)dict;
- (NSDictionary *)toMap;
@end
@implementation CachedTextureMessage
+ (CachedTextureMessage *)fromMap:(NSDictionary *)dict {
CachedTextureMessage *result = [[CachedTextureMessage alloc] init];
result.textureId = dict[@"textureId"];
if ((NSNull *)result.textureId == [NSNull null]) {
result.textureId = nil;
}
return result;
}
- (NSDictionary *)toMap {
return
[NSDictionary dictionaryWithObjectsAndKeys:(self.textureId ? self.textureId : [NSNull null]),
@"textureId", nil];
}
@end
@implementation CachedCreateMessage
+ (CachedCreateMessage *)fromMap:(NSDictionary *)dict {
CachedCreateMessage *result = [[CachedCreateMessage alloc] init];
result.asset = dict[@"asset"];
if ((NSNull *)result.asset == [NSNull null]) {
result.asset = nil;
}
result.uri = dict[@"uri"];
if ((NSNull *)result.uri == [NSNull null]) {
result.uri = nil;
}
result.packageName = dict[@"packageName"];
if ((NSNull *)result.packageName == [NSNull null]) {
result.packageName = nil;
}
result.formatHint = dict[@"formatHint"];
if ((NSNull *)result.formatHint == [NSNull null]) {
result.formatHint = nil;
}
result.httpHeaders = dict[@"httpHeaders"];
if ((NSNull *)result.httpHeaders == [NSNull null]) {
result.httpHeaders = nil;
}
return result;
}
- (NSDictionary *)toMap {
return [NSDictionary
dictionaryWithObjectsAndKeys:(self.asset ? self.asset : [NSNull null]), @"asset",
(self.uri ? self.uri : [NSNull null]), @"uri",
(self.packageName ? self.packageName : [NSNull null]),
@"packageName",
(self.formatHint ? self.formatHint : [NSNull null]),
@"formatHint",
(self.httpHeaders ? self.httpHeaders : [NSNull null]),
@"httpHeaders", nil];
}
@end
@implementation CachedLoopingMessage
+ (CachedLoopingMessage *)fromMap:(NSDictionary *)dict {
CachedLoopingMessage *result = [[CachedLoopingMessage alloc] init];
result.textureId = dict[@"textureId"];
if ((NSNull *)result.textureId == [NSNull null]) {
result.textureId = nil;
}
result.isLooping = dict[@"isLooping"];
if ((NSNull *)result.isLooping == [NSNull null]) {
result.isLooping = nil;
}
return result;
}
- (NSDictionary *)toMap {
return [NSDictionary
dictionaryWithObjectsAndKeys:(self.textureId ? self.textureId : [NSNull null]), @"textureId",
(self.isLooping ? self.isLooping : [NSNull null]), @"isLooping",
nil];
}
@end
@implementation CachedVolumeMessage
+ (CachedVolumeMessage *)fromMap:(NSDictionary *)dict {
CachedVolumeMessage *result = [[CachedVolumeMessage alloc] init];
result.textureId = dict[@"textureId"];
if ((NSNull *)result.textureId == [NSNull null]) {
result.textureId = nil;
}
result.volume = dict[@"volume"];
if ((NSNull *)result.volume == [NSNull null]) {
result.volume = nil;
}
return result;
}
- (NSDictionary *)toMap {
return [NSDictionary
dictionaryWithObjectsAndKeys:(self.textureId ? self.textureId : [NSNull null]), @"textureId",
(self.volume ? self.volume : [NSNull null]), @"volume", nil];
}
@end
@implementation CachedPlaybackSpeedMessage
+ (CachedPlaybackSpeedMessage *)fromMap:(NSDictionary *)dict {
CachedPlaybackSpeedMessage *result = [[CachedPlaybackSpeedMessage alloc] init];
result.textureId = dict[@"textureId"];
if ((NSNull *)result.textureId == [NSNull null]) {
result.textureId = nil;
}
result.speed = dict[@"speed"];
if ((NSNull *)result.speed == [NSNull null]) {
result.speed = nil;
}
return result;
}
- (NSDictionary *)toMap {
return [NSDictionary
dictionaryWithObjectsAndKeys:(self.textureId ? self.textureId : [NSNull null]), @"textureId",
(self.speed ? self.speed : [NSNull null]), @"speed", nil];
}
@end
@implementation CachedPositionMessage
+ (CachedPositionMessage *)fromMap:(NSDictionary *)dict {
CachedPositionMessage *result = [[CachedPositionMessage alloc] init];
result.textureId = dict[@"textureId"];
if ((NSNull *)result.textureId == [NSNull null]) {
result.textureId = nil;
}
result.position = dict[@"position"];
if ((NSNull *)result.position == [NSNull null]) {
result.position = nil;
}
return result;
}
- (NSDictionary *)toMap {
return [NSDictionary
dictionaryWithObjectsAndKeys:(self.textureId ? self.textureId : [NSNull null]), @"textureId",
(self.position ? self.position : [NSNull null]), @"position",
nil];
}
@end
@implementation CachedMixWithOthersMessage
+ (CachedMixWithOthersMessage *)fromMap:(NSDictionary *)dict {
CachedMixWithOthersMessage *result = [[CachedMixWithOthersMessage alloc] init];
result.mixWithOthers = dict[@"mixWithOthers"];
if ((NSNull *)result.mixWithOthers == [NSNull null]) {
result.mixWithOthers = nil;
}
return result;
}
- (NSDictionary *)toMap {
return [NSDictionary
dictionaryWithObjectsAndKeys:(self.mixWithOthers ? self.mixWithOthers : [NSNull null]),
@"mixWithOthers", nil];
}
@end
void CachedVideoPlayerApiSetup(id<FlutterBinaryMessenger> binaryMessenger, id<CachedVideoPlayerApi> api) {
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.initialize"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
FlutterError *error;
[api initialize:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.create"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedCreateMessage *input = [CachedCreateMessage fromMap:message];
FlutterError *error;
CachedTextureMessage *output = [api create:input error:&error];
callback(wrapResult([output toMap], error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.dispose"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedTextureMessage *input = [CachedTextureMessage fromMap:message];
FlutterError *error;
[api dispose:input error:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.setLooping"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedLoopingMessage *input = [CachedLoopingMessage fromMap:message];
FlutterError *error;
[api setLooping:input error:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.setVolume"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedVolumeMessage *input = [CachedVolumeMessage fromMap:message];
FlutterError *error;
[api setVolume:input error:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.setPlaybackSpeed"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedPlaybackSpeedMessage *input = [CachedPlaybackSpeedMessage fromMap:message];
FlutterError *error;
[api setPlaybackSpeed:input error:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel =
[FlutterBasicMessageChannel messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.play"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedTextureMessage *input = [CachedTextureMessage fromMap:message];
FlutterError *error;
[api play:input error:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.position"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedTextureMessage *input = [CachedTextureMessage fromMap:message];
FlutterError *error;
CachedPositionMessage *output = [api position:input error:&error];
callback(wrapResult([output toMap], error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.seekTo"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedPositionMessage *input = [CachedPositionMessage fromMap:message];
FlutterError *error;
[api seekTo:input error:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.pause"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedTextureMessage *input = [CachedTextureMessage fromMap:message];
FlutterError *error;
[api pause:input error:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
{
FlutterBasicMessageChannel *channel = [FlutterBasicMessageChannel
messageChannelWithName:@"dev.flutter.pigeon.VideoPlayerApi.setMixWithOthers"
binaryMessenger:binaryMessenger];
if (api) {
[channel setMessageHandler:^(id _Nullable message, FlutterReply callback) {
CachedMixWithOthersMessage *input = [CachedMixWithOthersMessage fromMap:message];
FlutterError *error;
[api setMixWithOthers:input error:&error];
callback(wrapResult(nil, error));
}];
} else {
[channel setMessageHandler:nil];
}
}
}

26
ios/cached_video_player.podspec Normal file
View File

@ -0,0 +1,26 @@
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'cached_video_player'
s.version = '0.0.2'
s.summary = 'Cached Video Player'
s.description = <<-DESC
Cached Video Player
DESC
s.homepage = 'https://github.com/vikram25897/flutter_cached_video_player'
s.license = { :type => 'BSD', :file => '../LICENSE' }
s.author = { 'Vikram Pratap Singh' => 'vikram@lazyarts.me' }
s.source = { :http => 'https://github.com/vikram25897/flutter_cached_video_player' }
s.documentation_url = 'https://pub.dev/packages/cached_video_player'
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
# KTVHTTPCache
s.dependency 'KTVHTTPCache', '~> 2.0.0'
s.platform = :ios, '10.0'
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
end

959
lib/cached_video_player.dart Normal file
View File

@ -0,0 +1,959 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:video_player_platform_interface/video_player_platform_interface.dart';
export 'package:video_player_platform_interface/video_player_platform_interface.dart'
show DurationRange, DataSourceType, VideoFormat, VideoPlayerOptions;
import 'src/closed_caption_file.dart';
export 'src/closed_caption_file.dart';
final VideoPlayerPlatform _videoPlayerPlatform = VideoPlayerPlatform.instance
// This will clear all open videos on the platform when a full restart is
// performed.
..init();
/// The duration, current position, buffering state, error state and settings
/// of a [CachedVideoPlayerController].
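///
/// These values are typically read from a listener on the controller, e.g.
/// (a minimal sketch, assuming `controller` is a [CachedVideoPlayerController]):
///
/// ```dart
/// controller.addListener(() {
///   final CachedVideoPlayerValue value = controller.value;
///   debugPrint('position: ${value.position} / ${value.duration}');
/// });
/// ```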
class CachedVideoPlayerValue {
/// Constructs a video with the given values. Only [duration] is required. The
/// rest will initialize with default values when unset.
CachedVideoPlayerValue({
required this.duration,
this.size = Size.zero,
this.position = Duration.zero,
this.caption = Caption.none,
this.buffered = const <DurationRange>[],
this.isInitialized = false,
this.isPlaying = false,
this.isLooping = false,
this.isBuffering = false,
this.volume = 1.0,
this.playbackSpeed = 1.0,
this.errorDescription,
});
/// Returns an instance for a video that hasn't been loaded.
CachedVideoPlayerValue.uninitialized()
: this(duration: Duration.zero, isInitialized: false);
/// Returns an instance with the given [errorDescription].
CachedVideoPlayerValue.erroneous(String errorDescription)
: this(
duration: Duration.zero,
isInitialized: false,
errorDescription: errorDescription);
/// The total duration of the video.
///
/// The duration is [Duration.zero] if the video hasn't been initialized.
final Duration duration;
/// The current playback position.
final Duration position;
/// The [Caption] that should be displayed based on the current [position].
///
/// This field will never be null. If there is no caption for the current
/// [position], this will be a [Caption.none] object.
final Caption caption;
/// The currently buffered ranges.
final List<DurationRange> buffered;
/// True if the video is playing. False if it's paused.
final bool isPlaying;
/// True if the video is looping.
final bool isLooping;
/// True if the video is currently buffering.
final bool isBuffering;
/// The current volume of the playback.
final double volume;
/// The current speed of the playback.
final double playbackSpeed;
/// A description of the error if present.
///
/// If [hasError] is false this is `null`.
final String? errorDescription;
/// The [size] of the currently loaded video.
final Size size;
/// Indicates whether or not the video has been loaded and is ready to play.
final bool isInitialized;
/// Indicates whether or not the video is in an error state. If this is true
/// [errorDescription] should have information about the problem.
bool get hasError => errorDescription != null;
/// Returns [size.width] / [size.height].
///
/// Will return `1.0` if:
/// * [isInitialized] is `false`
/// * [size.width], or [size.height] is equal to `0.0`
/// * aspect ratio would be less than or equal to `0.0`
double get aspectRatio {
if (!isInitialized || size.width == 0 || size.height == 0) {
return 1.0;
}
final double aspectRatio = size.width / size.height;
if (aspectRatio <= 0) {
return 1.0;
}
return aspectRatio;
}
/// Returns a new instance that has the same values as this current instance,
/// except for any overrides passed in as arguments to [copyWith].
CachedVideoPlayerValue copyWith({
Duration? duration,
Size? size,
Duration? position,
Caption? caption,
List<DurationRange>? buffered,
bool? isInitialized,
bool? isPlaying,
bool? isLooping,
bool? isBuffering,
double? volume,
double? playbackSpeed,
String? errorDescription,
}) {
return CachedVideoPlayerValue(
duration: duration ?? this.duration,
size: size ?? this.size,
position: position ?? this.position,
caption: caption ?? this.caption,
buffered: buffered ?? this.buffered,
isInitialized: isInitialized ?? this.isInitialized,
isPlaying: isPlaying ?? this.isPlaying,
isLooping: isLooping ?? this.isLooping,
isBuffering: isBuffering ?? this.isBuffering,
volume: volume ?? this.volume,
playbackSpeed: playbackSpeed ?? this.playbackSpeed,
errorDescription: errorDescription ?? this.errorDescription,
);
}
@override
String toString() {
return '$runtimeType('
'duration: $duration, '
'size: $size, '
'position: $position, '
'caption: $caption, '
'buffered: [${buffered.join(', ')}], '
'isInitialized: $isInitialized, '
'isPlaying: $isPlaying, '
'isLooping: $isLooping, '
'isBuffering: $isBuffering, '
'volume: $volume, '
'playbackSpeed: $playbackSpeed, '
'errorDescription: $errorDescription)';
}
}
/// Controls a platform video player, and provides updates when the state is
/// changing.
///
/// Instances must be initialized with initialize.
///
/// The video is displayed in a Flutter app by creating a [CachedVideoPlayer] widget.
///
/// To reclaim the resources used by the player call [dispose].
///
/// After [dispose] all further calls are ignored.
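///
/// A minimal usage sketch (the URL is illustrative):
///
/// ```dart
/// final controller = CachedVideoPlayerController.network(
///   'https://example.com/video.mp4',
/// );
/// await controller.initialize();
/// await controller.play();
/// // ...
/// await controller.dispose();
/// ```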
class CachedVideoPlayerController
extends ValueNotifier<CachedVideoPlayerValue> {
/// Constructs a [CachedVideoPlayerController] playing a video from an asset.
///
/// The name of the asset is given by the [dataSource] argument and must not be
/// null. The [package] argument must be non-null when the asset comes from a
/// package and null otherwise.
CachedVideoPlayerController.asset(this.dataSource,
{this.package, this.closedCaptionFile, this.videoPlayerOptions})
: dataSourceType = DataSourceType.asset,
formatHint = null,
httpHeaders = const {},
super(CachedVideoPlayerValue(duration: Duration.zero));
/// Constructs a [CachedVideoPlayerController] playing a video obtained from
/// the network.
///
/// The URI for the video is given by the [dataSource] argument and must not be
/// null.
/// The [formatHint] option allows the caller to override the video format
/// detection code on Android; on iOS it is only used to skip caching when set
/// to [VideoFormat.hls].
/// The [httpHeaders] option allows specifying HTTP headers for the request to
/// the [dataSource].
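///
/// A minimal sketch (the URL and header value are illustrative):
///
/// ```dart
/// final controller = CachedVideoPlayerController.network(
///   'https://example.com/stream.m3u8',
///   formatHint: VideoFormat.hls,
///   httpHeaders: <String, String>{'Authorization': 'Bearer <token>'},
/// );
/// ```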
CachedVideoPlayerController.network(
this.dataSource, {
this.formatHint,
this.closedCaptionFile,
this.videoPlayerOptions,
this.httpHeaders = const {},
}) : dataSourceType = DataSourceType.network,
package = null,
super(CachedVideoPlayerValue(duration: Duration.zero));
/// Constructs a [CachedVideoPlayerController] playing a video from a file.
///
/// This will load the file from the file-URI given by:
/// `'file://${file.path}'`.
CachedVideoPlayerController.file(File file,
{this.closedCaptionFile, this.videoPlayerOptions})
: dataSource = 'file://${file.path}',
dataSourceType = DataSourceType.file,
package = null,
formatHint = null,
httpHeaders = const {},
super(CachedVideoPlayerValue(duration: Duration.zero));
/// The URI to the video file. This will be in different formats depending on
/// the [DataSourceType] of the original video.
final String dataSource;
/// HTTP headers used for the request to the [dataSource].
/// Only for [CachedVideoPlayerController.network].
/// Always empty for other video types.
final Map<String, String> httpHeaders;
/// Overrides the platform's generic file format detection on Android with
/// whatever is set here; on iOS it is only used to skip caching when set to
/// [VideoFormat.hls].
final VideoFormat? formatHint;
/// Describes the type of data source this [CachedVideoPlayerController]
/// is constructed with.
final DataSourceType dataSourceType;
/// Provides additional configuration options (optional), such as setting the audio mode to mix with other sources.
final VideoPlayerOptions? videoPlayerOptions;
/// Only set for [asset] videos. The package that the asset was loaded from.
final String? package;
/// Optional field to specify a file containing the closed
/// captioning.
///
/// This future will be awaited and the file will be loaded when
/// [initialize()] is called.
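///
/// For example (a minimal sketch; the caption asset path is illustrative):
///
/// ```dart
/// final controller = CachedVideoPlayerController.network(
///   'https://example.com/video.mp4',
///   closedCaptionFile: rootBundle
///       .loadString('assets/captions.srt')
///       .then((String data) => SubRipCaptionFile(data)),
/// );
/// ```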
final Future<ClosedCaptionFile>? closedCaptionFile;
ClosedCaptionFile? _closedCaptionFile;
Timer? _timer;
bool _isDisposed = false;
Completer<void>? _creatingCompleter;
StreamSubscription<dynamic>? _eventSubscription;
late _CachedVideoAppLifeCycleObserver _lifeCycleObserver;
/// The id of a texture that hasn't been initialized.
@visibleForTesting
static const int kUninitializedTextureId = -1;
int _textureId = kUninitializedTextureId;
/// This is just exposed for testing. It shouldn't be used by anyone depending
/// on the plugin.
@visibleForTesting
int get textureId => _textureId;
/// Attempts to open the given [dataSource] and load metadata about the video.
Future<void> initialize() async {
_lifeCycleObserver = _CachedVideoAppLifeCycleObserver(this);
_lifeCycleObserver.initialize();
_creatingCompleter = Completer<void>();
late DataSource dataSourceDescription;
switch (dataSourceType) {
case DataSourceType.asset:
dataSourceDescription = DataSource(
sourceType: DataSourceType.asset,
asset: dataSource,
package: package,
);
break;
case DataSourceType.network:
dataSourceDescription = DataSource(
sourceType: DataSourceType.network,
uri: dataSource,
formatHint: formatHint,
httpHeaders: httpHeaders,
);
break;
case DataSourceType.file:
dataSourceDescription = DataSource(
sourceType: DataSourceType.file,
uri: dataSource,
);
break;
case DataSourceType.contentUri:
dataSourceDescription = DataSource(
sourceType: DataSourceType.contentUri,
uri: dataSource,
);
break;
}
if (videoPlayerOptions?.mixWithOthers != null) {
await _videoPlayerPlatform
.setMixWithOthers(videoPlayerOptions!.mixWithOthers);
}
_textureId = (await _videoPlayerPlatform.create(dataSourceDescription)) ??
kUninitializedTextureId;
_creatingCompleter!.complete(null);
final Completer<void> initializingCompleter = Completer<void>();
void eventListener(VideoEvent event) {
if (_isDisposed) {
return;
}
switch (event.eventType) {
case VideoEventType.initialized:
value = value.copyWith(
duration: event.duration,
size: event.size,
isInitialized: event.duration != null,
);
initializingCompleter.complete(null);
_applyLooping();
_applyVolume();
_applyPlayPause();
break;
case VideoEventType.completed:
value = value.copyWith(isPlaying: false, position: value.duration);
_timer?.cancel();
break;
case VideoEventType.bufferingUpdate:
value = value.copyWith(buffered: event.buffered);
break;
case VideoEventType.bufferingStart:
value = value.copyWith(isBuffering: true);
break;
case VideoEventType.bufferingEnd:
value = value.copyWith(isBuffering: false);
break;
case VideoEventType.unknown:
break;
}
}
if (closedCaptionFile != null) {
if (_closedCaptionFile == null) {
_closedCaptionFile = await closedCaptionFile;
}
value = value.copyWith(caption: _getCaptionAt(value.position));
}
void errorListener(Object obj) {
final PlatformException e = obj as PlatformException;
value = CachedVideoPlayerValue.erroneous(e.message!);
_timer?.cancel();
if (!initializingCompleter.isCompleted) {
initializingCompleter.completeError(obj);
}
}
_eventSubscription = _videoPlayerPlatform
.videoEventsFor(_textureId)
.listen(eventListener, onError: errorListener);
return initializingCompleter.future;
}
@override
Future<void> dispose() async {
if (_creatingCompleter != null) {
await _creatingCompleter!.future;
if (!_isDisposed) {
_isDisposed = true;
_timer?.cancel();
await _eventSubscription?.cancel();
await _videoPlayerPlatform.dispose(_textureId);
}
_lifeCycleObserver.dispose();
}
_isDisposed = true;
super.dispose();
}
/// Starts playing the video.
///
/// This method returns a future that completes as soon as the "play" command
/// has been sent to the platform, not when playback itself is totally
/// finished.
Future<void> play() async {
value = value.copyWith(isPlaying: true);
await _applyPlayPause();
}
/// Sets whether or not the video should loop after playing once. See also
/// [CachedVideoPlayerValue.isLooping].
Future<void> setLooping(bool looping) async {
value = value.copyWith(isLooping: looping);
await _applyLooping();
}
/// Pauses the video.
Future<void> pause() async {
value = value.copyWith(isPlaying: false);
await _applyPlayPause();
}
Future<void> _applyLooping() async {
if (!value.isInitialized || _isDisposed) {
return;
}
await _videoPlayerPlatform.setLooping(_textureId, value.isLooping);
}
Future<void> _applyPlayPause() async {
if (!value.isInitialized || _isDisposed) {
return;
}
if (value.isPlaying) {
await _videoPlayerPlatform.play(_textureId);
// Cancel previous timer.
_timer?.cancel();
_timer = Timer.periodic(
const Duration(milliseconds: 500),
(Timer timer) async {
if (_isDisposed) {
return;
}
final Duration? newPosition = await position;
if (newPosition == null) {
return;
}
_updatePosition(newPosition);
},
);
// This ensures that the correct playback speed is always applied when
// playing back. This is necessary because we do not set playback speed
// when paused.
await _applyPlaybackSpeed();
} else {
_timer?.cancel();
await _videoPlayerPlatform.pause(_textureId);
}
}
Future<void> _applyVolume() async {
if (!value.isInitialized || _isDisposed) {
return;
}
await _videoPlayerPlatform.setVolume(_textureId, value.volume);
}
Future<void> _applyPlaybackSpeed() async {
if (!value.isInitialized || _isDisposed) {
return;
}
// Setting the playback speed on iOS will trigger the video to play. We
// prevent this from happening by not applying the playback speed until
// the video is manually played from Flutter.
if (!value.isPlaying) return;
await _videoPlayerPlatform.setPlaybackSpeed(
_textureId,
value.playbackSpeed,
);
}
/// The position in the current video.
Future<Duration?> get position async {
if (_isDisposed) {
return null;
}
return await _videoPlayerPlatform.getPosition(_textureId);
}
/// Sets the video's current timestamp to be at [position]. The next
/// time the video is played it will resume from the given [position].
///
/// If [position] is outside of the video's full range it will be automatically
/// and silently clamped.
Future<void> seekTo(Duration position) async {
if (_isDisposed) {
return;
}
if (position > value.duration) {
position = value.duration;
} else if (position < const Duration()) {
position = const Duration();
}
await _videoPlayerPlatform.seekTo(_textureId, position);
_updatePosition(position);
}
/// Sets the audio volume of [this].
///
/// [volume] indicates a value between 0.0 (silent) and 1.0 (full volume) on a
/// linear scale.
Future<void> setVolume(double volume) async {
value = value.copyWith(volume: volume.clamp(0.0, 1.0));
await _applyVolume();
}
/// Sets the playback speed of [this].
///
/// [speed] indicates a speed value with different platforms accepting
/// different ranges for speed values. The [speed] must be greater than 0.
///
/// The values will be handled as follows:
/// * On web, the audio will be muted at some speed when the browser
/// determines that the sound would not be useful anymore. For example,
/// "Gecko mutes the sound outside the range `0.25` to `5.0`" (see https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/playbackRate).
/// * On Android, some very extreme speeds will not be played back accurately.
/// Instead, your video will still be played back, but the speed will be
/// clamped by ExoPlayer (but the values are allowed by the player, like on
/// web).
/// * On iOS, you can sometimes not go above `2.0` playback speed on a video.
/// An error will be thrown if the option is unsupported. It is also
/// possible that your specific video cannot be slowed down, in which case
/// the plugin also reports errors.
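///
/// For example, to play at one-and-a-half speed (a minimal sketch):
///
/// ```dart
/// await controller.setPlaybackSpeed(1.5);
/// ```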
Future<void> setPlaybackSpeed(double speed) async {
if (speed < 0) {
throw ArgumentError.value(
speed,
'Negative playback speeds are generally unsupported.',
);
} else if (speed == 0) {
throw ArgumentError.value(
speed,
'Zero playback speed is generally unsupported. Consider using [pause].',
);
}
value = value.copyWith(playbackSpeed: speed);
await _applyPlaybackSpeed();
}
/// The closed caption based on the current [position] in the video.
///
/// If there are no closed captions at the current [position], this will
/// return an empty [Caption].
///
/// If no [closedCaptionFile] was specified, this will always return an empty
/// [Caption].
Caption _getCaptionAt(Duration position) {
if (_closedCaptionFile == null) {
return Caption.none;
}
// TODO: This would be more efficient as a binary search.
for (final caption in _closedCaptionFile!.captions) {
if (caption.start <= position && caption.end >= position) {
return caption;
}
}
return Caption.none;
}
void _updatePosition(Duration position) {
value = value.copyWith(position: position);
value = value.copyWith(caption: _getCaptionAt(position));
}
}
class _CachedVideoAppLifeCycleObserver extends Object
with WidgetsBindingObserver {
_CachedVideoAppLifeCycleObserver(this._controller);
bool _wasPlayingBeforePause = false;
final CachedVideoPlayerController _controller;
void initialize() {
WidgetsBinding.instance.addObserver(this);
}
@override
void didChangeAppLifecycleState(AppLifecycleState state) {
switch (state) {
case AppLifecycleState.paused:
_wasPlayingBeforePause = _controller.value.isPlaying;
_controller.pause();
break;
case AppLifecycleState.resumed:
if (_wasPlayingBeforePause) {
_controller.play();
}
break;
default:
}
}
void dispose() {
WidgetsBinding.instance.removeObserver(this);
}
}
/// Widget that displays the video controlled by [controller].
class CachedVideoPlayer extends StatefulWidget {
/// Uses the given [controller] for all video rendered in this widget.
CachedVideoPlayer(this.controller, {Key? key}) : super(key: key);
/// The [CachedVideoPlayerController] responsible for the video being rendered in
/// this widget.
final CachedVideoPlayerController controller;
@override
_CachedVideoPlayerState createState() => _CachedVideoPlayerState();
}
class _CachedVideoPlayerState extends State<CachedVideoPlayer> {
_CachedVideoPlayerState() {
_listener = () {
final int newTextureId = widget.controller.textureId;
if (newTextureId != _textureId) {
setState(() {
_textureId = newTextureId;
});
}
};
}
late VoidCallback _listener;
late int _textureId;
@override
void initState() {
super.initState();
_textureId = widget.controller.textureId;
// Need to listen for initialization events since the actual texture ID
// becomes available after asynchronous initialization finishes.
widget.controller.addListener(_listener);
}
@override
void didUpdateWidget(CachedVideoPlayer oldWidget) {
super.didUpdateWidget(oldWidget);
if (oldWidget.controller._isDisposed == false) {
oldWidget.controller.removeListener(_listener);
}
_textureId = widget.controller.textureId;
widget.controller.addListener(_listener);
}
@override
void deactivate() {
super.deactivate();
widget.controller.removeListener(_listener);
}
@override
Widget build(BuildContext context) {
return _textureId == CachedVideoPlayerController.kUninitializedTextureId
? Container()
: _videoPlayerPlatform.buildView(_textureId);
}
}
/// Used to configure the [VideoProgressIndicator] widget's colors for how it
/// describes the video's status.
///
/// The widget uses default colors that are customizable through this class.
class VideoProgressColors {
/// Any property can be set to any color. They each have defaults.
///
/// [playedColor] defaults to red at 70% opacity. This fills up a portion of
/// the [VideoProgressIndicator] to represent how much of the video has played
/// so far.
///
/// [bufferedColor] defaults to blue at 20% opacity. This fills up a portion
/// of [VideoProgressIndicator] to represent how much of the video has
/// buffered so far.
///
/// [backgroundColor] defaults to gray at 50% opacity. This is the background
/// color behind both [playedColor] and [bufferedColor] to denote the total
/// size of the video compared to either of those values.
const VideoProgressColors({
this.playedColor = const Color.fromRGBO(255, 0, 0, 0.7),
this.bufferedColor = const Color.fromRGBO(50, 50, 200, 0.2),
this.backgroundColor = const Color.fromRGBO(200, 200, 200, 0.5),
});
/// [playedColor] defaults to red at 70% opacity. This fills up a portion of
/// the [VideoProgressIndicator] to represent how much of the video has played
/// so far.
final Color playedColor;
/// [bufferedColor] defaults to blue at 20% opacity. This fills up a portion
/// of [VideoProgressIndicator] to represent how much of the video has
/// buffered so far.
final Color bufferedColor;
/// [backgroundColor] defaults to gray at 50% opacity. This is the background
/// color behind both [playedColor] and [bufferedColor] to denote the total
/// size of the video compared to either of those values.
final Color backgroundColor;
}
class _VideoScrubber extends StatefulWidget {
_VideoScrubber({
required this.child,
required this.controller,
});
final Widget child;
final CachedVideoPlayerController controller;
@override
_VideoScrubberState createState() => _VideoScrubberState();
}
class _VideoScrubberState extends State<_VideoScrubber> {
bool _controllerWasPlaying = false;
CachedVideoPlayerController get controller => widget.controller;
@override
Widget build(BuildContext context) {
void seekToRelativePosition(Offset globalPosition) {
final RenderBox box = context.findRenderObject() as RenderBox;
final Offset tapPos = box.globalToLocal(globalPosition);
final double relative = tapPos.dx / box.size.width;
final Duration position = controller.value.duration * relative;
controller.seekTo(position);
}
return GestureDetector(
behavior: HitTestBehavior.opaque,
child: widget.child,
onHorizontalDragStart: (DragStartDetails details) {
if (!controller.value.isInitialized) {
return;
}
_controllerWasPlaying = controller.value.isPlaying;
if (_controllerWasPlaying) {
controller.pause();
}
},
onHorizontalDragUpdate: (DragUpdateDetails details) {
if (!controller.value.isInitialized) {
return;
}
seekToRelativePosition(details.globalPosition);
},
onHorizontalDragEnd: (DragEndDetails details) {
if (_controllerWasPlaying) {
controller.play();
}
},
onTapDown: (TapDownDetails details) {
if (!controller.value.isInitialized) {
return;
}
seekToRelativePosition(details.globalPosition);
},
);
}
}
/// Displays the play/buffering status of the video controlled by [controller].
///
/// If [allowScrubbing] is true, this widget will detect taps and drags and
/// seek the video accordingly.
///
/// The [padding] argument allows specifying some extra padding around the
/// progress indicator that will also detect the gestures.
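///
/// A minimal sketch, assuming `controller` is an initialized
/// [CachedVideoPlayerController]:
///
/// ```dart
/// Column(children: <Widget>[
///   AspectRatio(
///     aspectRatio: controller.value.aspectRatio,
///     child: CachedVideoPlayer(controller),
///   ),
///   VideoProgressIndicator(controller, allowScrubbing: true),
/// ]),
/// ```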
class VideoProgressIndicator extends StatefulWidget {
/// Construct an instance that displays the play/buffering status of the video
/// controlled by [controller].
///
/// Defaults will be used for everything except [controller] if they're not
/// provided. [allowScrubbing] defaults to false, and [padding] will default
/// to `top: 5.0`.
VideoProgressIndicator(
this.controller, {
this.colors = const VideoProgressColors(),
required this.allowScrubbing,
this.padding = const EdgeInsets.only(top: 5.0),
});
/// The [CachedVideoPlayerController] that actually associates a video with this
/// widget.
final CachedVideoPlayerController controller;
/// The default colors used throughout the indicator.
///
/// See [VideoProgressColors] for default values.
final VideoProgressColors colors;
/// When true, the widget will detect touch input and try to seek the video
/// accordingly. The widget ignores such input when false.
///
/// Defaults to false.
final bool allowScrubbing;
/// This allows for visual padding around the progress indicator that can
/// still detect gestures via [allowScrubbing].
///
/// Defaults to `top: 5.0`.
final EdgeInsets padding;
@override
_VideoProgressIndicatorState createState() => _VideoProgressIndicatorState();
}
class _VideoProgressIndicatorState extends State<VideoProgressIndicator> {
_VideoProgressIndicatorState() {
listener = () {
if (!mounted) {
return;
}
setState(() {});
};
}
late VoidCallback listener;
CachedVideoPlayerController get controller => widget.controller;
VideoProgressColors get colors => widget.colors;
@override
void initState() {
super.initState();
controller.addListener(listener);
}
@override
void deactivate() {
controller.removeListener(listener);
super.deactivate();
}
@override
Widget build(BuildContext context) {
Widget progressIndicator;
if (controller.value.isInitialized) {
final int duration = controller.value.duration.inMilliseconds;
final int position = controller.value.position.inMilliseconds;
int maxBuffering = 0;
for (DurationRange range in controller.value.buffered) {
final int end = range.end.inMilliseconds;
if (end > maxBuffering) {
maxBuffering = end;
}
}
progressIndicator = Stack(
fit: StackFit.passthrough,
children: <Widget>[
LinearProgressIndicator(
value: maxBuffering / duration,
valueColor: AlwaysStoppedAnimation<Color>(colors.bufferedColor),
backgroundColor: colors.backgroundColor,
),
LinearProgressIndicator(
value: position / duration,
valueColor: AlwaysStoppedAnimation<Color>(colors.playedColor),
backgroundColor: Colors.transparent,
),
],
);
} else {
progressIndicator = LinearProgressIndicator(
value: null,
valueColor: AlwaysStoppedAnimation<Color>(colors.playedColor),
backgroundColor: colors.backgroundColor,
);
}
final Widget paddedProgressIndicator = Padding(
padding: widget.padding,
child: progressIndicator,
);
if (widget.allowScrubbing) {
return _VideoScrubber(
child: paddedProgressIndicator,
controller: controller,
);
} else {
return paddedProgressIndicator;
}
}
}
/// Widget for displaying closed captions on top of a video.
///
/// If [text] is null, this widget will not display anything.
///
/// If [textStyle] is supplied, it will be used to style the text in the closed
/// caption.
///
/// Note: in order to have closed captions, you need to specify a
/// [CachedVideoPlayerController.closedCaptionFile].
///
/// Usage:
///
/// ```dart
/// Stack(children: <Widget>[
/// VideoPlayer(_controller),
/// ClosedCaption(text: _controller.value.caption.text),
/// ]),
/// ```
class ClosedCaption extends StatelessWidget {
/// Creates a new closed caption, designed to be used with
/// [CachedVideoPlayerValue.caption].
///
/// If [text] is null, nothing will be displayed.
const ClosedCaption({Key? key, this.text, this.textStyle}) : super(key: key);
/// The text that will be shown in the closed caption, or null if no caption
/// should be shown.
final String? text;
/// Specifies how the text in the closed caption should look.
///
/// If null, defaults to [DefaultTextStyle.of(context).style] with size 36
/// font colored white.
final TextStyle? textStyle;
@override
Widget build(BuildContext context) {
final TextStyle effectiveTextStyle = textStyle ??
DefaultTextStyle.of(context).style.copyWith(
fontSize: 36.0,
color: Colors.white,
);
if (text == null) {
return SizedBox.shrink();
}
return Align(
alignment: Alignment.bottomCenter,
child: Padding(
padding: EdgeInsets.only(bottom: 24.0),
child: DecoratedBox(
decoration: BoxDecoration(
color: Color(0xB8000000),
borderRadius: BorderRadius.circular(2.0),
),
child: Padding(
padding: EdgeInsets.symmetric(horizontal: 2.0),
child: Text(text!, style: effectiveTextStyle),
),
),
),
);
}
}

71
lib/src/closed_caption_file.dart Normal file
View File

@ -0,0 +1,71 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'sub_rip.dart';
export 'sub_rip.dart' show SubRipCaptionFile;
/// A structured representation of a parsed closed caption file.
///
/// A closed caption file includes a list of captions, each with a start and end
/// time for when the given closed caption should be displayed.
///
/// The [captions] are a list of all captions in a file, in the order that they
/// appeared in the file.
///
/// See:
/// * [SubRipCaptionFile].
abstract class ClosedCaptionFile {
/// The full list of captions from a given file.
///
/// The [captions] will be in the order that they appear in the given file.
List<Caption> get captions;
}
/// A representation of a single caption.
///
/// A typical closed captioning file will include several [Caption]s, each
/// linked to a start and end time.
class Caption {
/// Creates a new [Caption] object.
///
/// This is not recommended for direct use unless you are writing a parser for
/// a new closed captioning file type.
const Caption({
required this.number,
required this.start,
required this.end,
required this.text,
});
/// The number that this caption was assigned.
final int number;
/// When in the given video should this [Caption] begin displaying.
final Duration start;
/// When in the given video should this [Caption] be dismissed.
final Duration end;
/// The actual text that should appear on screen to be read between [start]
/// and [end].
final String text;
/// A no caption object. This is a caption with [start] and [end] durations of zero,
/// and an empty [text] string.
static const Caption none = Caption(
number: 0,
start: Duration.zero,
end: Duration.zero,
text: '',
);
@override
String toString() {
return '$runtimeType('
'number: $number, '
'start: $start, '
'end: $end, '
'text: $text)';
}
}

131
lib/src/sub_rip.dart Normal file
View File

@ -0,0 +1,131 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:convert';
import 'closed_caption_file.dart';
/// Represents a [ClosedCaptionFile], parsed from the SubRip file format.
/// See: https://en.wikipedia.org/wiki/SubRip
class SubRipCaptionFile extends ClosedCaptionFile {
/// Parses a string into a [ClosedCaptionFile], assuming [fileContents] is in
/// the SubRip file format.
/// * See: https://en.wikipedia.org/wiki/SubRip
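///
/// For example (a minimal sketch with an inline SubRip snippet):
///
/// ```dart
/// final SubRipCaptionFile file = SubRipCaptionFile(
///   '1\n00:00:01,000 --> 00:00:02,500\nHello, caption!\n',
/// );
/// print(file.captions.first.text); // Hello, caption!
/// ```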
SubRipCaptionFile(this.fileContents)
: _captions = _parseCaptionsFromSubRipString(fileContents);
/// The entire body of the SubRip file.
final String fileContents;
@override
List<Caption> get captions => _captions;
final List<Caption> _captions;
}
List<Caption> _parseCaptionsFromSubRipString(String file) {
final List<Caption> captions = <Caption>[];
for (List<String> captionLines in _readSubRipFile(file)) {
if (captionLines.length < 3) break;
final int captionNumber = int.parse(captionLines[0]);
final _StartAndEnd startAndEnd =
_StartAndEnd.fromSubRipString(captionLines[1]);
final String text = captionLines.sublist(2).join('\n');
final Caption newCaption = Caption(
number: captionNumber,
start: startAndEnd.start,
end: startAndEnd.end,
text: text,
);
if (newCaption.start != newCaption.end) {
captions.add(newCaption);
}
}
return captions;
}
class _StartAndEnd {
final Duration start;
final Duration end;
_StartAndEnd(this.start, this.end);
// Assumes the timestamp format used in a SubRip file.
// For example:
// 00:01:54,724 --> 00:01:56,760
static _StartAndEnd fromSubRipString(String line) {
final RegExp format =
RegExp(_subRipTimeStamp + _subRipArrow + _subRipTimeStamp);
if (!format.hasMatch(line)) {
return _StartAndEnd(Duration.zero, Duration.zero);
}
final List<String> times = line.split(_subRipArrow);
final Duration start = _parseSubRipTimestamp(times[0]);
final Duration end = _parseSubRipTimestamp(times[1]);
return _StartAndEnd(start, end);
}
}
// Parses a time stamp in a SubRip file into a Duration.
// For example:
//
// _parseSubRipTimestamp('00:01:59,084')
// returns
// Duration(hours: 0, minutes: 1, seconds: 59, milliseconds: 084)
Duration _parseSubRipTimestamp(String timestampString) {
if (!RegExp(_subRipTimeStamp).hasMatch(timestampString)) {
return Duration.zero;
}
final List<String> commaSections = timestampString.split(',');
final List<String> hoursMinutesSeconds = commaSections[0].split(':');
final int hours = int.parse(hoursMinutesSeconds[0]);
final int minutes = int.parse(hoursMinutesSeconds[1]);
final int seconds = int.parse(hoursMinutesSeconds[2]);
final int milliseconds = int.parse(commaSections[1]);
return Duration(
hours: hours,
minutes: minutes,
seconds: seconds,
milliseconds: milliseconds,
);
}
// Reads a SubRip file and splits it into lists of strings, where each list is one
// caption.
List<List<String>> _readSubRipFile(String file) {
final List<String> lines = LineSplitter.split(file).toList();
final List<List<String>> captionStrings = <List<String>>[];
List<String> currentCaption = <String>[];
int lineIndex = 0;
for (final String line in lines) {
final bool isLineBlank = line.trim().isEmpty;
if (!isLineBlank) {
currentCaption.add(line);
}
if (isLineBlank || lineIndex == lines.length - 1) {
captionStrings.add(currentCaption);
currentCaption = <String>[];
}
lineIndex += 1;
}
return captionStrings;
}
const String _subRipTimeStamp = r'\d\d:\d\d:\d\d,\d\d\d';
const String _subRipArrow = r' --> ';

58
pigeons/messages.dart Normal file
View File

@ -0,0 +1,58 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// @dart = 2.9
import 'package:pigeon/pigeon_lib.dart';
class TextureMessage {
int textureId;
}
class LoopingMessage {
int textureId;
bool isLooping;
}
class VolumeMessage {
int textureId;
double volume;
}
class PlaybackSpeedMessage {
int textureId;
double speed;
}
class PositionMessage {
int textureId;
int position;
}
class CreateMessage {
String asset;
String uri;
String packageName;
String formatHint;
Map<String, String> httpHeaders;
}
class MixWithOthersMessage {
bool mixWithOthers;
}
@HostApi(dartHostTestHandler: 'TestHostVideoPlayerApi')
abstract class VideoPlayerApi {
void initialize();
TextureMessage create(CreateMessage msg);
void dispose(TextureMessage msg);
void setLooping(LoopingMessage msg);
void setVolume(VolumeMessage msg);
void setPlaybackSpeed(PlaybackSpeedMessage msg);
void play(TextureMessage msg);
PositionMessage position(TextureMessage msg);
void seekTo(PositionMessage msg);
void pause(TextureMessage msg);
void setMixWithOthers(MixWithOthersMessage msg);
}

42
pubspec.yaml Normal file
View File

@ -0,0 +1,42 @@
name: cached_video_player
description: A flutter plugin that is virtually a clone of the official video_player plugin, except that it supports caching (Android and iOS)
version: 2.0.4
homepage: https://github.com/vikram25897/flutter_cached_video_player
flutter:
plugin:
platforms:
android:
package: com.lazyarts.vikram.cached_video_player
pluginClass: CachedVideoPlayerPlugin
ios:
pluginClass: CachedVideoPlayerPlugin
web:
default_package: video_player_web
dependencies:
meta: ^1.7.0
video_player_platform_interface: ^5.1.2
# The design on https://flutter.dev/go/federated-plugins was to leave
# this constraint as "any". We cannot do it right now as it fails pub publish
# validation, so we set a ^ constraint. The exact value doesn't matter since
# the constraints on the interface pins it.
# TODO(amirh): Revisit this (either update this part in the design or the pub tool).
# https://github.com/flutter/flutter/issues/46264
video_player_web: ^2.0.10
flutter:
sdk: flutter
flutter_test:
sdk: flutter
dev_dependencies:
lints: ^1.0.1
pedantic: ^1.11.1
pigeon: ^3.1.0
environment:
sdk: ">=2.14.0 <3.0.0"
flutter: ">=2.10.0"