mirror of https://github.com/sgoudham/Enso-Bot.git

Trying to get music bot working

parent 12405bec1e
commit 969f3a07af

@@ -1,11 +1,593 @@
import asyncio
import functools
import itertools
import math
import random

import discord
import youtube_dl
from async_timeout import timeout
from discord.ext import commands

# Silence useless bug report messages
youtube_dl.utils.bug_reports_message = lambda: ''


class VoiceError(Exception):
    pass


class YTDLError(Exception):
    pass


class YTDLSource(discord.PCMVolumeTransformer):
    YTDL_OPTIONS = {
        'format': 'bestaudio/best',
        'extractaudio': True,
        'audioformat': 'mp3',
        'outtmpl': '%(extractor)s-%(id)s-%(title)s.%(ext)s',
        'restrictfilenames': True,
        'noplaylist': True,
        'nocheckcertificate': True,
        'ignoreerrors': False,
        'logtostderr': False,
        'quiet': True,
        'no_warnings': True,
        'default_search': 'auto',
        'source_address': '0.0.0.0',
    }

    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }

    ytdl = youtube_dl.YoutubeDL(YTDL_OPTIONS)

    def __init__(self, ctx: commands.Context, source: discord.FFmpegPCMAudio, *, data: dict, volume: float = 0.5):
        super().__init__(source, volume)

        self.requester = ctx.author
        self.channel = ctx.channel
        self.data = data

        self.uploader = data.get('uploader')
        self.uploader_url = data.get('uploader_url')
        date = data.get('upload_date')
        self.upload_date = date[6:8] + '.' + date[4:6] + '.' + date[0:4]
        self.title = data.get('title')
        self.thumbnail = data.get('thumbnail')
        self.description = data.get('description')
        self.duration = self.parse_duration(int(data.get('duration')))
        self.tags = data.get('tags')
        self.url = data.get('webpage_url')
        self.views = data.get('view_count')
        self.likes = data.get('like_count')
        self.dislikes = data.get('dislike_count')
        self.stream_url = data.get('url')

    def __str__(self):
        return '**{0.title}** by **{0.uploader}**'.format(self)

    @classmethod
    async def create_source(cls, ctx: commands.Context, search: str, *, loop: asyncio.BaseEventLoop = None):
        loop = loop or asyncio.get_event_loop()

        partial = functools.partial(cls.ytdl.extract_info, search, download=False, process=False)
        data = await loop.run_in_executor(None, partial)

        if data is None:
            raise YTDLError('Couldn\'t find anything that matches `{}`'.format(search))

        if 'entries' not in data:
            process_info = data
        else:
            process_info = None
            for entry in data['entries']:
                if entry:
                    process_info = entry
                    break

            if process_info is None:
                raise YTDLError('Couldn\'t find anything that matches `{}`'.format(search))

        webpage_url = process_info['webpage_url']
        partial = functools.partial(cls.ytdl.extract_info, webpage_url, download=False)
        processed_info = await loop.run_in_executor(None, partial)

        if processed_info is None:
            raise YTDLError('Couldn\'t fetch `{}`'.format(webpage_url))

        if 'entries' not in processed_info:
            info = processed_info
        else:
            info = None
            while info is None:
                try:
                    info = processed_info['entries'].pop(0)
                except IndexError:
                    raise YTDLError('Couldn\'t retrieve any matches for `{}`'.format(webpage_url))

        return cls(ctx, discord.FFmpegPCMAudio(info['url'], **cls.FFMPEG_OPTIONS), data=info)

    @staticmethod
    def parse_duration(duration: int):
        minutes, seconds = divmod(duration, 60)
        hours, minutes = divmod(minutes, 60)
        days, hours = divmod(hours, 24)

        duration = []
        if days > 0:
            duration.append('{} days'.format(days))
        if hours > 0:
            duration.append('{} hours'.format(hours))
        if minutes > 0:
            duration.append('{} minutes'.format(minutes))
        if seconds > 0:
            duration.append('{} seconds'.format(seconds))

        return ', '.join(duration)


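# Example of the helper above: parse_duration(3725) returns '1 hours, 2 minutes, 5 seconds';
# zero-valued components are skipped entirely, so parse_duration(0) returns an empty string.
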
class Song:
    __slots__ = ('source', 'requester')

    def __init__(self, source: YTDLSource):
        self.source = source
        self.requester = source.requester

    def create_embed(self):
        embed = (discord.Embed(title='Now playing',
                               description='```css\n{0.source.title}\n```'.format(self),
                               color=discord.Color.blurple())
                 .add_field(name='Duration', value=self.source.duration)
                 .add_field(name='Requested by', value=self.requester.mention)
                 .add_field(name='Uploader', value='[{0.source.uploader}]({0.source.uploader_url})'.format(self))
                 .add_field(name='URL', value='[Click]({0.source.url})'.format(self))
                 .set_thumbnail(url=self.source.thumbnail))

        return embed


class SongQueue(asyncio.Queue):
    def __getitem__(self, item):
        if isinstance(item, slice):
            return list(itertools.islice(self._queue, item.start, item.stop, item.step))
        else:
            return self._queue[item]

    def __iter__(self):
        return self._queue.__iter__()

    def __len__(self):
        return self.qsize()

    def clear(self):
        self._queue.clear()

    def shuffle(self):
        random.shuffle(self._queue)

    def remove(self, index: int):
        del self._queue[index]


class VoiceState:
    def __init__(self, bot: commands.Bot, ctx: commands.Context):
        self.bot = bot
        self._ctx = ctx

        self.current = None
        self.voice = None
        self.next = asyncio.Event()
        self.songs = SongQueue()

        self._loop = False
        self._volume = 0.5
        self.skip_votes = set()

        self.audio_player = bot.loop.create_task(self.audio_player_task())

    def __del__(self):
        self.audio_player.cancel()

    @property
    def loop(self):
        return self._loop

    @loop.setter
    def loop(self, value: bool):
        self._loop = value

    @property
    def volume(self):
        return self._volume

    @volume.setter
    def volume(self, value: float):
        self._volume = value

    @property
    def is_playing(self):
        return self.voice and self.current

    async def audio_player_task(self):
        while True:
            self.next.clear()

            if not self.loop:
                # Try to get the next song within 3 minutes.
                # If no song will be added to the queue in time,
                # the player will disconnect due to performance
                # reasons.
                try:
                    async with timeout(180):  # 3 minutes
                        self.current = await self.songs.get()
                except asyncio.TimeoutError:
                    self.bot.loop.create_task(self.stop())
                    return

            self.current.source.volume = self._volume
            self.voice.play(self.current.source, after=self.play_next_song)
            await self.current.source.channel.send(embed=self.current.create_embed())

            await self.next.wait()

    def play_next_song(self, error=None):
        if error:
            raise VoiceError(str(error))

        self.next.set()

    def skip(self):
        self.skip_votes.clear()

        if self.is_playing:
            self.voice.stop()

    async def stop(self):
        self.songs.clear()

        if self.voice:
            await self.voice.disconnect()
            self.voice = None


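# How playback advances: each guild gets its own VoiceState. audio_player_task() waits up
# to 180 seconds for the next Song from the queue, plays it via voice.play() with
# play_next_song as the after-callback, and that callback sets the asyncio.Event so the
# loop can move on; when loop is enabled the queue fetch is skipped and the current source
# is played again.
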
class Music(commands.Cog):
    def __init__(self, bot: commands.Bot):
        self.bot = bot
        self.voice_states = {}

    def get_voice_state(self, ctx: commands.Context):
        state = self.voice_states.get(ctx.guild.id)
        if not state:
            state = VoiceState(self.bot, ctx)
            self.voice_states[ctx.guild.id] = state

        return state

    def cog_unload(self):
        for state in self.voice_states.values():
            self.bot.loop.create_task(state.stop())

    def cog_check(self, ctx: commands.Context):
        if not ctx.guild:
            raise commands.NoPrivateMessage('This command can\'t be used in DM channels.')

        return True

    async def cog_before_invoke(self, ctx: commands.Context):
        ctx.voice_state = self.get_voice_state(ctx)

    async def cog_command_error(self, ctx: commands.Context, error: commands.CommandError):
        await ctx.send('An error occurred: {}'.format(str(error)))

    @commands.command(name='join', invoke_without_subcommand=True)
    async def _join(self, ctx: commands.Context):
        """Joins a voice channel."""

        destination = ctx.author.voice.channel
        if ctx.voice_state.voice:
            await ctx.voice_state.voice.move_to(destination)
            return

        ctx.voice_state.voice = await destination.connect()

    @commands.command(name='summon')
    @commands.has_permissions(manage_guild=True)
    async def _summon(self, ctx: commands.Context, *, channel: discord.VoiceChannel = None):
        """Summons the bot to a voice channel.
        If no channel was specified, it joins your channel.
        """

        if not channel and not ctx.author.voice:
            raise VoiceError('You are neither connected to a voice channel nor specified a channel to join.')

        destination = channel or ctx.author.voice.channel
        if ctx.voice_state.voice:
            await ctx.voice_state.voice.move_to(destination)
            return

        ctx.voice_state.voice = await destination.connect()

    @commands.command(name='leave', aliases=['disconnect'])
    @commands.has_permissions(manage_guild=True)
    async def _leave(self, ctx: commands.Context):
        """Clears the queue and leaves the voice channel."""

        if not ctx.voice_state.voice:
            return await ctx.send('Not connected to any voice channel.')

        await ctx.voice_state.stop()
        del self.voice_states[ctx.guild.id]

    @commands.command(name='volume')
    async def _volume(self, ctx: commands.Context, *, volume: int):
        """Sets the volume of the player."""

        if not ctx.voice_state.is_playing:
            return await ctx.send('Nothing being played at the moment.')

        if volume < 0 or volume > 100:
            return await ctx.send('Volume must be between 0 and 100')

        ctx.voice_state.volume = volume / 100
        await ctx.send('Volume of the player set to {}%'.format(volume))

    @commands.command(name='now', aliases=['current', 'playing'])
    async def _now(self, ctx: commands.Context):
        """Displays the currently playing song."""

        await ctx.send(embed=ctx.voice_state.current.create_embed())

    @commands.command(name='pause')
    @commands.has_permissions(manage_guild=True)
    async def _pause(self, ctx: commands.Context):
        """Pauses the currently playing song."""

        if ctx.voice_state.is_playing and ctx.voice_state.voice.is_playing():
            ctx.voice_state.voice.pause()
            await ctx.message.add_reaction('⏯')

    @commands.command(name='resume')
    @commands.has_permissions(manage_guild=True)
    async def _resume(self, ctx: commands.Context):
        """Resumes a currently paused song."""

        if ctx.voice_state.is_playing and ctx.voice_state.voice.is_paused():
            ctx.voice_state.voice.resume()
            await ctx.message.add_reaction('⏯')

    @commands.command(name='stop')
    @commands.has_permissions(manage_guild=True)
    async def _stop(self, ctx: commands.Context):
        """Stops playing song and clears the queue."""

        ctx.voice_state.songs.clear()

        if ctx.voice_state.is_playing:
            ctx.voice_state.voice.stop()
            await ctx.message.add_reaction('⏹')

    @commands.command(name='skip')
    async def _skip(self, ctx: commands.Context):
        """Vote to skip a song. The requester can automatically skip.
        3 skip votes are needed for the song to be skipped.
        """

        if not ctx.voice_state.is_playing:
            return await ctx.send('Not playing any music right now...')

        voter = ctx.message.author
        if voter == ctx.voice_state.current.requester:
            await ctx.message.add_reaction('⏭')
            ctx.voice_state.skip()

        elif voter.id not in ctx.voice_state.skip_votes:
            ctx.voice_state.skip_votes.add(voter.id)
            total_votes = len(ctx.voice_state.skip_votes)

            if total_votes >= 3:
                await ctx.message.add_reaction('⏭')
                ctx.voice_state.skip()
            else:
                await ctx.send('Skip vote added, currently at **{}/3**'.format(total_votes))

        else:
            await ctx.send('You have already voted to skip this song.')

    @commands.command(name='queue')
    async def _queue(self, ctx: commands.Context, *, page: int = 1):
        """Shows the player's queue.
        You can optionally specify the page to show. Each page contains 10 elements.
        """

        if len(ctx.voice_state.songs) == 0:
            return await ctx.send('Empty queue.')

        items_per_page = 10
        pages = math.ceil(len(ctx.voice_state.songs) / items_per_page)

        start = (page - 1) * items_per_page
        end = start + items_per_page

        queue = ''
        for i, song in enumerate(ctx.voice_state.songs[start:end], start=start):
            queue += '`{0}.` [**{1.source.title}**]({1.source.url})\n'.format(i + 1, song)

        embed = (discord.Embed(description='**{} tracks:**\n\n{}'.format(len(ctx.voice_state.songs), queue))
                 .set_footer(text='Viewing page {}/{}'.format(page, pages)))
        await ctx.send(embed=embed)

    @commands.command(name='shuffle')
    async def _shuffle(self, ctx: commands.Context):
        """Shuffles the queue."""

        if len(ctx.voice_state.songs) == 0:
            return await ctx.send('Empty queue.')

        ctx.voice_state.songs.shuffle()
        await ctx.message.add_reaction('✅')

    @commands.command(name='remove')
    async def _remove(self, ctx: commands.Context, index: int):
        """Removes a song from the queue at a given index."""

        if len(ctx.voice_state.songs) == 0:
            return await ctx.send('Empty queue.')

        ctx.voice_state.songs.remove(index - 1)
        await ctx.message.add_reaction('✅')

    @commands.command(name='loop')
    async def _loop(self, ctx: commands.Context):
        """Loops the currently playing song.
        Invoke this command again to unloop the song.
        """

        if not ctx.voice_state.is_playing:
            return await ctx.send('Nothing being played at the moment.')

        # Invert the boolean value to loop and unloop.
        ctx.voice_state.loop = not ctx.voice_state.loop
        await ctx.message.add_reaction('✅')

    @commands.command(name='play')
    async def _play(self, ctx: commands.Context, *, search: str):
        """Plays a song.
        If there are songs in the queue, this will be queued until the
        other songs have finished playing.
        This command automatically searches various sites if no URL is provided.
        A list of these sites can be found here: https://rg3.github.io/youtube-dl/supportedsites.html
        """

        if not ctx.voice_state.voice:
            await ctx.invoke(self._join)

        async with ctx.typing():
            try:
                source = await YTDLSource.create_source(ctx, search, loop=self.bot.loop)
            except YTDLError as e:
                await ctx.send('An error occurred while processing this request: {}'.format(str(e)))
            else:
                song = Song(source)

                await ctx.voice_state.songs.put(song)
                await ctx.send('Enqueued {}'.format(str(source)))

    @_join.before_invoke
    @_play.before_invoke
    async def ensure_voice_state(self, ctx: commands.Context):
        if not ctx.author.voice or not ctx.author.voice.channel:
            raise commands.CommandError('You are not connected to any voice channel.')

        if ctx.voice_client:
            if ctx.voice_client.channel != ctx.author.voice.channel:
                raise commands.CommandError('Bot is already in a voice channel.')


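# Note: discord.FFmpegPCMAudio shells out to the ffmpeg executable, so FFmpeg must be
# installed and on PATH (and PyNaCl installed for voice support) for playback to work.
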
def setup(bot):
    bot.add_cog(Music(bot))


"""
import asyncio

import ffmpeg
import discord
import youtube_dl

from discord.ext import commands

# Suppress noise about console usage from errors
youtube_dl.utils.bug_reports_message = lambda: ''

ytdl_format_options = {
    'format': 'bestaudio/best',
    'outtmpl': '%(extractor)s-%(id)s-%(title)s.%(ext)s',
    'restrictfilenames': True,
    'noplaylist': True,
    'nocheckcertificate': True,
    'ignoreerrors': False,
    'logtostderr': False,
    'quiet': True,
    'no_warnings': True,
    'default_search': 'auto',
    'source_address': '0.0.0.0'  # bind to ipv4 since ipv6 addresses cause issues sometimes
}

ffmpeg_options = {
    'options': '-vn'
}

ytdl = youtube_dl.YoutubeDL(ytdl_format_options)


class YTDLSource(discord.PCMVolumeTransformer):
    def __init__(self, source, *, data, volume=0.5):
        super().__init__(source, volume)

        self.data = data

        self.title = data.get('title')
        self.url = data.get('url')

    @classmethod
    async def from_url(cls, url, *, loop=None, stream=False):
        loop = loop or asyncio.get_event_loop()
        data = await loop.run_in_executor(None, lambda: ytdl.extract_info(url, download=not stream))

        if 'entries' in data:
            # take first item from a playlist
            data = data['entries'][0]

        filename = data['url'] if stream else ytdl.prepare_filename(data)
        return cls(discord.FFmpegPCMAudio(filename, **ffmpeg_options), data=data)


players = {}


class Music(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.command(aliases=['j', 'joi'])
    async def join(self, ctx):
        if ctx.message.author.voice:
            channel = ctx.message.author.voice.channel
            await channel.connect()

    @commands.command(aliases=['l', 'Leave'])
    async def leave(self, ctx):
        try:
            server = ctx.voice_client

            await server.disconnect()
            await ctx.send(f"Left the voice channel")
        except Exception as e:
            print(e)

    @commands.command()
    async def yt(self, ctx, *, url):
        # Plays from a url (almost anything youtube_dl supports)
        try:
            async with ctx.typing():
                player = await YTDLSource.from_url(url, loop=self.bot.loop)
                ctx.voice_client.play(player, after=lambda e: print('Player error: %s' % e) if e else None)

            await ctx.send('Now playing: {}'.format(player.title))
        except Exception as e:
            print(e)

    async def play(self, url, ctx):
        try:
            guild = ctx.message.guild
            voice_client = guild.voice_client

            player = await voice_client.create_ytdl_player(url)
            players[guild.id] = player
            player.start()
        except Exception as e:
            print(e)
"""

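For reference, a minimal sketch of how a cog like this is typically loaded from the bot's
entry point. The module path 'cogs.music', the command prefix and the token placeholder are
illustrative assumptions, not taken from this commit:

    # hypothetical main.py for a discord.py 1.x bot
    from discord.ext import commands

    bot = commands.Bot(command_prefix='!')
    bot.load_extension('cogs.music')  # assumes the file above is saved as cogs/music.py
    bot.run('YOUR_BOT_TOKEN')  # placeholder; use the real bot token
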
@@ -0,0 +1 @@
pip

@@ -0,0 +1,27 @@
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

    1. Redistributions of source code must retain the above copyright notice,
       this list of conditions and the following disclaimer.

    2. Redistributions in binary form must reproduce the above copyright
       notice, this list of conditions and the following disclaimer in the
       documentation and/or other materials provided with the distribution.

    3. Neither the name of Django nor the names of its contributors may be used
       to endorse or promote products derived from this software without
       specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -0,0 +1,266 @@
|
||||
Django is licensed under the three-clause BSD license; see the file
|
||||
LICENSE for details.
|
||||
|
||||
Django includes code from the Python standard library, which is licensed under
|
||||
the Python license, a permissive open source license. The copyright and license
|
||||
is included below for compliance with Python's terms.
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
Copyright (c) 2001-present Python Software Foundation; All Rights Reserved
|
||||
|
||||
A. HISTORY OF THE SOFTWARE
|
||||
==========================
|
||||
|
||||
Python was created in the early 1990s by Guido van Rossum at Stichting
|
||||
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
|
||||
as a successor of a language called ABC. Guido remains Python's
|
||||
principal author, although it includes many contributions from others.
|
||||
|
||||
In 1995, Guido continued his work on Python at the Corporation for
|
||||
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
|
||||
in Reston, Virginia where he released several versions of the
|
||||
software.
|
||||
|
||||
In May 2000, Guido and the Python core development team moved to
|
||||
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
||||
year, the PythonLabs team moved to Digital Creations (now Zope
|
||||
Corporation, see http://www.zope.com). In 2001, the Python Software
|
||||
Foundation (PSF, see http://www.python.org/psf/) was formed, a
|
||||
non-profit organization created specifically to own Python-related
|
||||
Intellectual Property. Zope Corporation is a sponsoring member of
|
||||
the PSF.
|
||||
|
||||
All Python releases are Open Source (see http://www.opensource.org for
|
||||
the Open Source Definition). Historically, most, but not all, Python
|
||||
releases have also been GPL-compatible; the table below summarizes
|
||||
the various releases.
|
||||
|
||||
    Release         Derived     Year        Owner       GPL-
                    from                                compatible? (1)

    0.9.0 thru 1.2              1991-1995   CWI         yes
    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
    1.6             1.5.2       2000        CNRI        no
    2.0             1.6         2000        BeOpen.com  no
    1.6.1           1.6         2001        CNRI        yes (2)
    2.1             2.0+1.6.1   2001        PSF         no
    2.0.1           2.0+1.6.1   2001        PSF         yes
    2.1.1           2.1+2.0.1   2001        PSF         yes
    2.1.2           2.1.1       2002        PSF         yes
    2.1.3           2.1.2       2002        PSF         yes
    2.2 and above   2.1.1       2001-now    PSF         yes
|
||||
|
||||
Footnotes:
|
||||
|
||||
(1) GPL-compatible doesn't mean that we're distributing Python under
|
||||
the GPL. All Python licenses, unlike the GPL, let you distribute
|
||||
a modified version without making your changes open source. The
|
||||
GPL-compatible licenses make it possible to combine Python with
|
||||
other software that is released under the GPL; the others don't.
|
||||
|
||||
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
||||
because its license has a choice of law clause. According to
|
||||
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
||||
is "not incompatible" with the GPL.
|
||||
|
||||
Thanks to the many outside volunteers who have worked under Guido's
|
||||
direction to make these releases possible.
|
||||
|
||||
|
||||
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
||||
===============================================================
|
||||
|
||||
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
||||
--------------------------------------------
|
||||
|
||||
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
||||
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
||||
otherwise using this software ("Python") in source or binary form and
|
||||
its associated documentation.
|
||||
|
||||
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
||||
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
||||
analyze, test, perform and/or display publicly, prepare derivative works,
|
||||
distribute, and otherwise use Python alone or in any derivative version,
|
||||
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
||||
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018 Python Software Foundation; All
|
||||
Rights Reserved" are retained in Python alone or in any derivative version
|
||||
prepared by Licensee.
|
||||
|
||||
3. In the event Licensee prepares a derivative work that is based on
|
||||
or incorporates Python or any part thereof, and wants to make
|
||||
the derivative work available to others as provided herein, then
|
||||
Licensee hereby agrees to include in any such work a brief summary of
|
||||
the changes made to Python.
|
||||
|
||||
4. PSF is making Python available to Licensee on an "AS IS"
|
||||
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
6. This License Agreement will automatically terminate upon a material
|
||||
breach of its terms and conditions.
|
||||
|
||||
7. Nothing in this License Agreement shall be deemed to create any
|
||||
relationship of agency, partnership, or joint venture between PSF and
|
||||
Licensee. This License Agreement does not grant permission to use PSF
|
||||
trademarks or trade name in a trademark sense to endorse or promote
|
||||
products or services of Licensee, or any third party.
|
||||
|
||||
8. By copying, installing or otherwise using Python, Licensee
|
||||
agrees to be bound by the terms and conditions of this License
|
||||
Agreement.
|
||||
|
||||
|
||||
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
||||
-------------------------------------------
|
||||
|
||||
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
||||
|
||||
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
||||
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
||||
Individual or Organization ("Licensee") accessing and otherwise using
|
||||
this software in source or binary form and its associated
|
||||
documentation ("the Software").
|
||||
|
||||
2. Subject to the terms and conditions of this BeOpen Python License
|
||||
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
||||
royalty-free, world-wide license to reproduce, analyze, test, perform
|
||||
and/or display publicly, prepare derivative works, distribute, and
|
||||
otherwise use the Software alone or in any derivative version,
|
||||
provided, however, that the BeOpen Python License is retained in the
|
||||
Software, alone or in any derivative version prepared by Licensee.
|
||||
|
||||
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
||||
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
||||
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
||||
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
||||
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
5. This License Agreement will automatically terminate upon a material
|
||||
breach of its terms and conditions.
|
||||
|
||||
6. This License Agreement shall be governed by and interpreted in all
|
||||
respects by the law of the State of California, excluding conflict of
|
||||
law provisions. Nothing in this License Agreement shall be deemed to
|
||||
create any relationship of agency, partnership, or joint venture
|
||||
between BeOpen and Licensee. This License Agreement does not grant
|
||||
permission to use BeOpen trademarks or trade names in a trademark
|
||||
sense to endorse or promote products or services of Licensee, or any
|
||||
third party. As an exception, the "BeOpen Python" logos available at
|
||||
http://www.pythonlabs.com/logos.html may be used according to the
|
||||
permissions granted on that web page.
|
||||
|
||||
7. By copying, installing or otherwise using the software, Licensee
|
||||
agrees to be bound by the terms and conditions of this License
|
||||
Agreement.
|
||||
|
||||
|
||||
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
||||
---------------------------------------
|
||||
|
||||
1. This LICENSE AGREEMENT is between the Corporation for National
|
||||
Research Initiatives, having an office at 1895 Preston White Drive,
|
||||
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
||||
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
||||
source or binary form and its associated documentation.
|
||||
|
||||
2. Subject to the terms and conditions of this License Agreement, CNRI
|
||||
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
||||
license to reproduce, analyze, test, perform and/or display publicly,
|
||||
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
||||
alone or in any derivative version, provided, however, that CNRI's
|
||||
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
||||
1995-2001 Corporation for National Research Initiatives; All Rights
|
||||
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
||||
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
||||
Agreement, Licensee may substitute the following text (omitting the
|
||||
quotes): "Python 1.6.1 is made available subject to the terms and
|
||||
conditions in CNRI's License Agreement. This Agreement together with
|
||||
Python 1.6.1 may be located on the Internet using the following
|
||||
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
||||
Agreement may also be obtained from a proxy server on the Internet
|
||||
using the following URL: http://hdl.handle.net/1895.22/1013".
|
||||
|
||||
3. In the event Licensee prepares a derivative work that is based on
|
||||
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
||||
the derivative work available to others as provided herein, then
|
||||
Licensee hereby agrees to include in any such work a brief summary of
|
||||
the changes made to Python 1.6.1.
|
||||
|
||||
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
||||
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
6. This License Agreement will automatically terminate upon a material
|
||||
breach of its terms and conditions.
|
||||
|
||||
7. This License Agreement shall be governed by the federal
|
||||
intellectual property law of the United States, including without
|
||||
limitation the federal copyright law, and, to the extent such
|
||||
U.S. federal law does not apply, by the law of the Commonwealth of
|
||||
Virginia, excluding Virginia's conflict of law provisions.
|
||||
Notwithstanding the foregoing, with regard to derivative works based
|
||||
on Python 1.6.1 that incorporate non-separable material that was
|
||||
previously distributed under the GNU General Public License (GPL), the
|
||||
law of the Commonwealth of Virginia shall govern this License
|
||||
Agreement only as to issues arising under or with respect to
|
||||
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
||||
License Agreement shall be deemed to create any relationship of
|
||||
agency, partnership, or joint venture between CNRI and Licensee. This
|
||||
License Agreement does not grant permission to use CNRI trademarks or
|
||||
trade name in a trademark sense to endorse or promote products or
|
||||
services of Licensee, or any third party.
|
||||
|
||||
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
||||
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
||||
bound by the terms and conditions of this License Agreement.
|
||||
|
||||
ACCEPT
|
||||
|
||||
|
||||
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
||||
--------------------------------------------------
|
||||
|
||||
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
||||
The Netherlands. All rights reserved.
|
||||
|
||||
Permission to use, copy, modify, and distribute this software and its
|
||||
documentation for any purpose and without fee is hereby granted,
|
||||
provided that the above copyright notice appear in all copies and that
|
||||
both that copyright notice and this permission notice appear in
|
||||
supporting documentation, and that the name of Stichting Mathematisch
|
||||
Centrum or CWI not be used in advertising or publicity pertaining to
|
||||
distribution of the software without specific, written prior
|
||||
permission.
|
||||
|
||||
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
||||
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
||||
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
@@ -0,0 +1,89 @@
Metadata-Version: 2.1
Name: Django
Version: 3.0.7
Summary: A high-level Python Web framework that encourages rapid development and clean, pragmatic design.
Home-page: https://www.djangoproject.com/
Author: Django Software Foundation
Author-email: foundation@djangoproject.com
License: BSD
Project-URL: Documentation, https://docs.djangoproject.com/
Project-URL: Funding, https://www.djangoproject.com/fundraising/
Project-URL: Source, https://github.com/django/django
Project-URL: Tracker, https://code.djangoproject.com/
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Environment :: Web Environment
Classifier: Framework :: Django
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Requires-Python: >=3.6
Requires-Dist: pytz
Requires-Dist: sqlparse (>=0.2.2)
Requires-Dist: asgiref (~=3.2)
Provides-Extra: argon2
Requires-Dist: argon2-cffi (>=16.1.0) ; extra == 'argon2'
Provides-Extra: bcrypt
Requires-Dist: bcrypt ; extra == 'bcrypt'

======
Django
======

Django is a high-level Python Web framework that encourages rapid development
and clean, pragmatic design. Thanks for checking it out.

All documentation is in the "``docs``" directory and online at
https://docs.djangoproject.com/en/stable/. If you're just getting started,
here's how we recommend you read the docs:

* First, read ``docs/intro/install.txt`` for instructions on installing Django.

* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``,
  ``docs/intro/tutorial02.txt``, etc.).

* If you want to set up an actual deployment server, read
  ``docs/howto/deployment/index.txt`` for instructions.

* You'll probably want to read through the topical guides (in ``docs/topics``)
  next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific
  problems, and check out the reference (``docs/ref``) for gory details.

* See ``docs/README`` for instructions on building an HTML version of the docs.

Docs are updated rigorously. If you find any problems in the docs, or think
they should be clarified in any way, please take 30 seconds to fill out a
ticket here: https://code.djangoproject.com/newticket

To get more help:

* Join the ``#django`` channel on irc.freenode.net. Lots of helpful people hang
  out there. See https://en.wikipedia.org/wiki/Wikipedia:IRC/Tutorial if you're
  new to IRC.

* Join the django-users mailing list, or read the archives, at
  https://groups.google.com/group/django-users.

To contribute to Django:

* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for
  information about getting involved.

To run Django's test suite:

* Follow the instructions in the "Unit tests" section of
  ``docs/internals/contributing/writing-code/unit-tests.txt``, published online at
  https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests

File diff suppressed because it is too large
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.34.2)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -0,0 +1,3 @@
[console_scripts]
django-admin = django.core.management:execute_from_command_line

@@ -0,0 +1 @@
django

@@ -0,0 +1,22 @@
Metadata-Version: 1.1
Name: South
Version: 1.0.2
Summary: South: Migrations for Django
Home-page: http://south.aeracode.org/
Author: Andrew Godwin & Andy McCurdy
Author-email: south@aeracode.org
License: UNKNOWN
Download-URL: http://south.aeracode.org/wiki/Download
Description: South is an intelligent database migrations library for the Django web framework. It is database-independent and DVCS-friendly, as well as a whole host of other features.
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Framework :: Django
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: System Administrators
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: OS Independent
Classifier: Topic :: Software Development
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7

@@ -0,0 +1,124 @@
|
||||
README
|
||||
setup.cfg
|
||||
setup.py
|
||||
South.egg-info/PKG-INFO
|
||||
South.egg-info/SOURCES.txt
|
||||
South.egg-info/dependency_links.txt
|
||||
South.egg-info/top_level.txt
|
||||
south/__init__.py
|
||||
south/exceptions.py
|
||||
south/logger.py
|
||||
south/models.py
|
||||
south/modelsinspector.py
|
||||
south/orm.py
|
||||
south/signals.py
|
||||
south/test_shim.py
|
||||
south/v2.py
|
||||
south/creator/__init__.py
|
||||
south/creator/actions.py
|
||||
south/creator/changes.py
|
||||
south/creator/freezer.py
|
||||
south/db/__init__.py
|
||||
south/db/firebird.py
|
||||
south/db/generic.py
|
||||
south/db/mysql.py
|
||||
south/db/oracle.py
|
||||
south/db/postgresql_psycopg2.py
|
||||
south/db/sqlite3.py
|
||||
south/db/sql_server/__init__.py
|
||||
south/db/sql_server/pyodbc.py
|
||||
south/hacks/__init__.py
|
||||
south/hacks/django_1_0.py
|
||||
south/introspection_plugins/__init__.py
|
||||
south/introspection_plugins/annoying_autoonetoone.py
|
||||
south/introspection_plugins/django_audit_log.py
|
||||
south/introspection_plugins/django_objectpermissions.py
|
||||
south/introspection_plugins/django_tagging.py
|
||||
south/introspection_plugins/django_taggit.py
|
||||
south/introspection_plugins/django_timezones.py
|
||||
south/introspection_plugins/geodjango.py
|
||||
south/management/__init__.py
|
||||
south/management/commands/__init__.py
|
||||
south/management/commands/convert_to_south.py
|
||||
south/management/commands/datamigration.py
|
||||
south/management/commands/graphmigrations.py
|
||||
south/management/commands/migrate.py
|
||||
south/management/commands/migrationcheck.py
|
||||
south/management/commands/schemamigration.py
|
||||
south/management/commands/startmigration.py
|
||||
south/management/commands/syncdb.py
|
||||
south/management/commands/test.py
|
||||
south/management/commands/testserver.py
|
||||
south/migration/__init__.py
|
||||
south/migration/base.py
|
||||
south/migration/migrators.py
|
||||
south/migration/utils.py
|
||||
south/tests/__init__.py
|
||||
south/tests/autodetection.py
|
||||
south/tests/db.py
|
||||
south/tests/db_firebird.py
|
||||
south/tests/db_mysql.py
|
||||
south/tests/freezer.py
|
||||
south/tests/inspector.py
|
||||
south/tests/logger.py
|
||||
south/tests/logic.py
|
||||
south/tests/brokenapp/__init__.py
|
||||
south/tests/brokenapp/models.py
|
||||
south/tests/brokenapp/migrations/0001_depends_on_unmigrated.py
|
||||
south/tests/brokenapp/migrations/0002_depends_on_unknown.py
|
||||
south/tests/brokenapp/migrations/0003_depends_on_higher.py
|
||||
south/tests/brokenapp/migrations/0004_higher.py
|
||||
south/tests/brokenapp/migrations/__init__.py
|
||||
south/tests/circular_a/__init__.py
|
||||
south/tests/circular_a/models.py
|
||||
south/tests/circular_a/migrations/0001_first.py
|
||||
south/tests/circular_a/migrations/__init__.py
|
||||
south/tests/circular_b/__init__.py
|
||||
south/tests/circular_b/models.py
|
||||
south/tests/circular_b/migrations/0001_first.py
|
||||
south/tests/circular_b/migrations/__init__.py
|
||||
south/tests/deps_a/__init__.py
|
||||
south/tests/deps_a/models.py
|
||||
south/tests/deps_a/migrations/0001_a.py
|
||||
south/tests/deps_a/migrations/0002_a.py
|
||||
south/tests/deps_a/migrations/0003_a.py
|
||||
south/tests/deps_a/migrations/0004_a.py
|
||||
south/tests/deps_a/migrations/0005_a.py
|
||||
south/tests/deps_a/migrations/__init__.py
|
||||
south/tests/deps_b/__init__.py
|
||||
south/tests/deps_b/models.py
|
||||
south/tests/deps_b/migrations/0001_b.py
|
||||
south/tests/deps_b/migrations/0002_b.py
|
||||
south/tests/deps_b/migrations/0003_b.py
|
||||
south/tests/deps_b/migrations/0004_b.py
|
||||
south/tests/deps_b/migrations/0005_b.py
|
||||
south/tests/deps_b/migrations/__init__.py
|
||||
south/tests/deps_c/__init__.py
|
||||
south/tests/deps_c/models.py
|
||||
south/tests/deps_c/migrations/0001_c.py
|
||||
south/tests/deps_c/migrations/0002_c.py
|
||||
south/tests/deps_c/migrations/0003_c.py
|
||||
south/tests/deps_c/migrations/0004_c.py
|
||||
south/tests/deps_c/migrations/0005_c.py
|
||||
south/tests/deps_c/migrations/__init__.py
|
||||
south/tests/emptyapp/__init__.py
|
||||
south/tests/emptyapp/models.py
|
||||
south/tests/emptyapp/migrations/__init__.py
|
||||
south/tests/fakeapp/__init__.py
|
||||
south/tests/fakeapp/models.py
|
||||
south/tests/fakeapp/migrations/0001_spam.py
|
||||
south/tests/fakeapp/migrations/0002_eggs.py
|
||||
south/tests/fakeapp/migrations/0003_alter_spam.py
|
||||
south/tests/fakeapp/migrations/__init__.py
|
||||
south/tests/non_managed/__init__.py
|
||||
south/tests/non_managed/models.py
|
||||
south/tests/non_managed/migrations/__init__.py
|
||||
south/tests/otherfakeapp/__init__.py
|
||||
south/tests/otherfakeapp/models.py
|
||||
south/tests/otherfakeapp/migrations/0001_first.py
|
||||
south/tests/otherfakeapp/migrations/0002_second.py
|
||||
south/tests/otherfakeapp/migrations/0003_third.py
|
||||
south/tests/otherfakeapp/migrations/__init__.py
|
||||
south/utils/__init__.py
|
||||
south/utils/datetime_utils.py
|
||||
south/utils/py3.py
|
@@ -0,0 +1 @@

@@ -0,0 +1,238 @@
|
||||
..\south\__init__.py
|
||||
..\south\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\__pycache__\exceptions.cpython-36.pyc
|
||||
..\south\__pycache__\logger.cpython-36.pyc
|
||||
..\south\__pycache__\models.cpython-36.pyc
|
||||
..\south\__pycache__\modelsinspector.cpython-36.pyc
|
||||
..\south\__pycache__\orm.cpython-36.pyc
|
||||
..\south\__pycache__\signals.cpython-36.pyc
|
||||
..\south\__pycache__\test_shim.cpython-36.pyc
|
||||
..\south\__pycache__\v2.cpython-36.pyc
|
||||
..\south\creator\__init__.py
|
||||
..\south\creator\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\creator\__pycache__\actions.cpython-36.pyc
|
||||
..\south\creator\__pycache__\changes.cpython-36.pyc
|
||||
..\south\creator\__pycache__\freezer.cpython-36.pyc
|
||||
..\south\creator\actions.py
|
||||
..\south\creator\changes.py
|
||||
..\south\creator\freezer.py
|
||||
..\south\db\__init__.py
|
||||
..\south\db\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\db\__pycache__\firebird.cpython-36.pyc
|
||||
..\south\db\__pycache__\generic.cpython-36.pyc
|
||||
..\south\db\__pycache__\mysql.cpython-36.pyc
|
||||
..\south\db\__pycache__\oracle.cpython-36.pyc
|
||||
..\south\db\__pycache__\postgresql_psycopg2.cpython-36.pyc
|
||||
..\south\db\__pycache__\sqlite3.cpython-36.pyc
|
||||
..\south\db\firebird.py
|
||||
..\south\db\generic.py
|
||||
..\south\db\mysql.py
|
||||
..\south\db\oracle.py
|
||||
..\south\db\postgresql_psycopg2.py
|
||||
..\south\db\sql_server\__init__.py
|
||||
..\south\db\sql_server\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\db\sql_server\__pycache__\pyodbc.cpython-36.pyc
|
||||
..\south\db\sql_server\pyodbc.py
|
||||
..\south\db\sqlite3.py
|
||||
..\south\exceptions.py
|
||||
..\south\hacks\__init__.py
|
||||
..\south\hacks\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\hacks\__pycache__\django_1_0.cpython-36.pyc
|
||||
..\south\hacks\django_1_0.py
|
||||
..\south\introspection_plugins\__init__.py
|
||||
..\south\introspection_plugins\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\introspection_plugins\__pycache__\annoying_autoonetoone.cpython-36.pyc
|
||||
..\south\introspection_plugins\__pycache__\django_audit_log.cpython-36.pyc
|
||||
..\south\introspection_plugins\__pycache__\django_objectpermissions.cpython-36.pyc
|
||||
..\south\introspection_plugins\__pycache__\django_tagging.cpython-36.pyc
|
||||
..\south\introspection_plugins\__pycache__\django_taggit.cpython-36.pyc
|
||||
..\south\introspection_plugins\__pycache__\django_timezones.cpython-36.pyc
|
||||
..\south\introspection_plugins\__pycache__\geodjango.cpython-36.pyc
|
||||
..\south\introspection_plugins\annoying_autoonetoone.py
|
||||
..\south\introspection_plugins\django_audit_log.py
|
||||
..\south\introspection_plugins\django_objectpermissions.py
|
||||
..\south\introspection_plugins\django_tagging.py
|
||||
..\south\introspection_plugins\django_taggit.py
|
||||
..\south\introspection_plugins\django_timezones.py
|
||||
..\south\introspection_plugins\geodjango.py
|
||||
..\south\logger.py
|
||||
..\south\management\__init__.py
|
||||
..\south\management\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\management\commands\__init__.py
|
||||
..\south\management\commands\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\convert_to_south.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\datamigration.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\graphmigrations.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\migrate.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\migrationcheck.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\schemamigration.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\startmigration.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\syncdb.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\test.cpython-36.pyc
|
||||
..\south\management\commands\__pycache__\testserver.cpython-36.pyc
|
||||
..\south\management\commands\convert_to_south.py
|
||||
..\south\management\commands\datamigration.py
|
||||
..\south\management\commands\graphmigrations.py
|
||||
..\south\management\commands\migrate.py
|
||||
..\south\management\commands\migrationcheck.py
|
||||
..\south\management\commands\schemamigration.py
|
||||
..\south\management\commands\startmigration.py
|
||||
..\south\management\commands\syncdb.py
|
||||
..\south\management\commands\test.py
|
||||
..\south\management\commands\testserver.py
|
||||
..\south\migration\__init__.py
|
||||
..\south\migration\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\migration\__pycache__\base.cpython-36.pyc
|
||||
..\south\migration\__pycache__\migrators.cpython-36.pyc
|
||||
..\south\migration\__pycache__\utils.cpython-36.pyc
|
||||
..\south\migration\base.py
|
||||
..\south\migration\migrators.py
|
||||
..\south\migration\utils.py
|
||||
..\south\models.py
|
||||
..\south\modelsinspector.py
|
||||
..\south\orm.py
|
||||
..\south\signals.py
|
||||
..\south\test_shim.py
|
||||
..\south\tests\__init__.py
|
||||
..\south\tests\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\__pycache__\autodetection.cpython-36.pyc
|
||||
..\south\tests\__pycache__\db.cpython-36.pyc
|
||||
..\south\tests\__pycache__\db_firebird.cpython-36.pyc
|
||||
..\south\tests\__pycache__\db_mysql.cpython-36.pyc
|
||||
..\south\tests\__pycache__\freezer.cpython-36.pyc
|
||||
..\south\tests\__pycache__\inspector.cpython-36.pyc
|
||||
..\south\tests\__pycache__\logger.cpython-36.pyc
|
||||
..\south\tests\__pycache__\logic.cpython-36.pyc
|
||||
..\south\tests\autodetection.py
|
||||
..\south\tests\brokenapp\__init__.py
|
||||
..\south\tests\brokenapp\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\brokenapp\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\brokenapp\migrations\0001_depends_on_unmigrated.py
|
||||
..\south\tests\brokenapp\migrations\0002_depends_on_unknown.py
|
||||
..\south\tests\brokenapp\migrations\0003_depends_on_higher.py
|
||||
..\south\tests\brokenapp\migrations\0004_higher.py
|
||||
..\south\tests\brokenapp\migrations\__init__.py
|
||||
..\south\tests\brokenapp\migrations\__pycache__\0001_depends_on_unmigrated.cpython-36.pyc
|
||||
..\south\tests\brokenapp\migrations\__pycache__\0002_depends_on_unknown.cpython-36.pyc
|
||||
..\south\tests\brokenapp\migrations\__pycache__\0003_depends_on_higher.cpython-36.pyc
|
||||
..\south\tests\brokenapp\migrations\__pycache__\0004_higher.cpython-36.pyc
|
||||
..\south\tests\brokenapp\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\brokenapp\models.py
|
||||
..\south\tests\circular_a\__init__.py
|
||||
..\south\tests\circular_a\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\circular_a\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\circular_a\migrations\0001_first.py
|
||||
..\south\tests\circular_a\migrations\__init__.py
|
||||
..\south\tests\circular_a\migrations\__pycache__\0001_first.cpython-36.pyc
|
||||
..\south\tests\circular_a\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\circular_a\models.py
|
||||
..\south\tests\circular_b\__init__.py
|
||||
..\south\tests\circular_b\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\circular_b\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\circular_b\migrations\0001_first.py
|
||||
..\south\tests\circular_b\migrations\__init__.py
|
||||
..\south\tests\circular_b\migrations\__pycache__\0001_first.cpython-36.pyc
|
||||
..\south\tests\circular_b\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\circular_b\models.py
|
||||
..\south\tests\db.py
|
||||
..\south\tests\db_firebird.py
|
||||
..\south\tests\db_mysql.py
|
||||
..\south\tests\deps_a\__init__.py
|
||||
..\south\tests\deps_a\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\deps_a\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\deps_a\migrations\0001_a.py
|
||||
..\south\tests\deps_a\migrations\0002_a.py
|
||||
..\south\tests\deps_a\migrations\0003_a.py
|
||||
..\south\tests\deps_a\migrations\0004_a.py
|
||||
..\south\tests\deps_a\migrations\0005_a.py
|
||||
..\south\tests\deps_a\migrations\__init__.py
|
||||
..\south\tests\deps_a\migrations\__pycache__\0001_a.cpython-36.pyc
|
||||
..\south\tests\deps_a\migrations\__pycache__\0002_a.cpython-36.pyc
|
||||
..\south\tests\deps_a\migrations\__pycache__\0003_a.cpython-36.pyc
|
||||
..\south\tests\deps_a\migrations\__pycache__\0004_a.cpython-36.pyc
|
||||
..\south\tests\deps_a\migrations\__pycache__\0005_a.cpython-36.pyc
|
||||
..\south\tests\deps_a\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\deps_a\models.py
|
||||
..\south\tests\deps_b\__init__.py
|
||||
..\south\tests\deps_b\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\deps_b\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\deps_b\migrations\0001_b.py
|
||||
..\south\tests\deps_b\migrations\0002_b.py
|
||||
..\south\tests\deps_b\migrations\0003_b.py
|
||||
..\south\tests\deps_b\migrations\0004_b.py
|
||||
..\south\tests\deps_b\migrations\0005_b.py
|
||||
..\south\tests\deps_b\migrations\__init__.py
|
||||
..\south\tests\deps_b\migrations\__pycache__\0001_b.cpython-36.pyc
|
||||
..\south\tests\deps_b\migrations\__pycache__\0002_b.cpython-36.pyc
|
||||
..\south\tests\deps_b\migrations\__pycache__\0003_b.cpython-36.pyc
|
||||
..\south\tests\deps_b\migrations\__pycache__\0004_b.cpython-36.pyc
|
||||
..\south\tests\deps_b\migrations\__pycache__\0005_b.cpython-36.pyc
|
||||
..\south\tests\deps_b\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\deps_b\models.py
|
||||
..\south\tests\deps_c\__init__.py
|
||||
..\south\tests\deps_c\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\deps_c\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\deps_c\migrations\0001_c.py
|
||||
..\south\tests\deps_c\migrations\0002_c.py
|
||||
..\south\tests\deps_c\migrations\0003_c.py
|
||||
..\south\tests\deps_c\migrations\0004_c.py
|
||||
..\south\tests\deps_c\migrations\0005_c.py
|
||||
..\south\tests\deps_c\migrations\__init__.py
|
||||
..\south\tests\deps_c\migrations\__pycache__\0001_c.cpython-36.pyc
|
||||
..\south\tests\deps_c\migrations\__pycache__\0002_c.cpython-36.pyc
|
||||
..\south\tests\deps_c\migrations\__pycache__\0003_c.cpython-36.pyc
|
||||
..\south\tests\deps_c\migrations\__pycache__\0004_c.cpython-36.pyc
|
||||
..\south\tests\deps_c\migrations\__pycache__\0005_c.cpython-36.pyc
|
||||
..\south\tests\deps_c\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\deps_c\models.py
|
||||
..\south\tests\emptyapp\__init__.py
|
||||
..\south\tests\emptyapp\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\emptyapp\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\emptyapp\migrations\__init__.py
|
||||
..\south\tests\emptyapp\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\emptyapp\models.py
|
||||
..\south\tests\fakeapp\__init__.py
|
||||
..\south\tests\fakeapp\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\fakeapp\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\fakeapp\migrations\0001_spam.py
|
||||
..\south\tests\fakeapp\migrations\0002_eggs.py
|
||||
..\south\tests\fakeapp\migrations\0003_alter_spam.py
|
||||
..\south\tests\fakeapp\migrations\__init__.py
|
||||
..\south\tests\fakeapp\migrations\__pycache__\0001_spam.cpython-36.pyc
|
||||
..\south\tests\fakeapp\migrations\__pycache__\0002_eggs.cpython-36.pyc
|
||||
..\south\tests\fakeapp\migrations\__pycache__\0003_alter_spam.cpython-36.pyc
|
||||
..\south\tests\fakeapp\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\fakeapp\models.py
|
||||
..\south\tests\freezer.py
|
||||
..\south\tests\inspector.py
|
||||
..\south\tests\logger.py
|
||||
..\south\tests\logic.py
|
||||
..\south\tests\non_managed\__init__.py
|
||||
..\south\tests\non_managed\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\non_managed\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\non_managed\migrations\__init__.py
|
||||
..\south\tests\non_managed\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\non_managed\models.py
|
||||
..\south\tests\otherfakeapp\__init__.py
|
||||
..\south\tests\otherfakeapp\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\otherfakeapp\__pycache__\models.cpython-36.pyc
|
||||
..\south\tests\otherfakeapp\migrations\0001_first.py
|
||||
..\south\tests\otherfakeapp\migrations\0002_second.py
|
||||
..\south\tests\otherfakeapp\migrations\0003_third.py
|
||||
..\south\tests\otherfakeapp\migrations\__init__.py
|
||||
..\south\tests\otherfakeapp\migrations\__pycache__\0001_first.cpython-36.pyc
|
||||
..\south\tests\otherfakeapp\migrations\__pycache__\0002_second.cpython-36.pyc
|
||||
..\south\tests\otherfakeapp\migrations\__pycache__\0003_third.cpython-36.pyc
|
||||
..\south\tests\otherfakeapp\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\tests\otherfakeapp\models.py
|
||||
..\south\utils\__init__.py
|
||||
..\south\utils\__pycache__\__init__.cpython-36.pyc
|
||||
..\south\utils\__pycache__\datetime_utils.cpython-36.pyc
|
||||
..\south\utils\__pycache__\py3.cpython-36.pyc
|
||||
..\south\utils\datetime_utils.py
|
||||
..\south\utils\py3.py
|
||||
..\south\v2.py
|
||||
PKG-INFO
|
||||
SOURCES.txt
|
||||
dependency_links.txt
|
||||
top_level.txt
|
@ -0,0 +1 @@
|
||||
south
|
@ -0,0 +1,15 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: Voice
|
||||
Version: 0.1.0
|
||||
Summary: A voting app for letting users request features.
|
||||
Home-page: http://github.com/jarodl/voice
|
||||
Author: Jarod Luebbert
|
||||
Author-email: jarodluebbert@gmail.com
|
||||
License: UNKNOWN
|
||||
Description: UNKNOWN
|
||||
Platform: UNKNOWN
|
||||
Classifier: Framework :: Django
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Intended Audience :: End Users/Desktop
|
||||
Classifier: Operating System :: OS Independent
|
||||
Provides-Extra: test
|
@ -0,0 +1,26 @@
|
||||
setup.cfg
|
||||
setup.py
|
||||
Voice.egg-info/PKG-INFO
|
||||
Voice.egg-info/SOURCES.txt
|
||||
Voice.egg-info/dependency_links.txt
|
||||
Voice.egg-info/not-zip-safe
|
||||
Voice.egg-info/requires.txt
|
||||
Voice.egg-info/top_level.txt
|
||||
example_project/__init__.py
|
||||
example_project/manage.py
|
||||
example_project/settings.py
|
||||
example_project/urls.py
|
||||
voice/__init__.py
|
||||
voice/admin.py
|
||||
voice/forms.py
|
||||
voice/models.py
|
||||
voice/settings.py
|
||||
voice/tests.py
|
||||
voice/urls.py
|
||||
voice/views.py
|
||||
voice/migrations/0001_initial.py
|
||||
voice/migrations/0002_auto__add_unique_vote_voter_request.py
|
||||
voice/migrations/0003_auto__del_request__add_feature__del_field_vote_request__add_field_vote.py
|
||||
voice/migrations/__init__.py
|
||||
voice/templatetags/__init__.py
|
||||
voice/templatetags/feature_extras.py
|
@ -0,0 +1 @@
|
||||
|
@ -0,0 +1,34 @@
|
||||
..\voice\__init__.py
|
||||
..\voice\__pycache__\__init__.cpython-36.pyc
|
||||
..\voice\__pycache__\admin.cpython-36.pyc
|
||||
..\voice\__pycache__\forms.cpython-36.pyc
|
||||
..\voice\__pycache__\models.cpython-36.pyc
|
||||
..\voice\__pycache__\settings.cpython-36.pyc
|
||||
..\voice\__pycache__\tests.cpython-36.pyc
|
||||
..\voice\__pycache__\urls.cpython-36.pyc
|
||||
..\voice\__pycache__\views.cpython-36.pyc
|
||||
..\voice\admin.py
|
||||
..\voice\forms.py
|
||||
..\voice\migrations\0001_initial.py
|
||||
..\voice\migrations\0002_auto__add_unique_vote_voter_request.py
|
||||
..\voice\migrations\0003_auto__del_request__add_feature__del_field_vote_request__add_field_vote.py
|
||||
..\voice\migrations\__init__.py
|
||||
..\voice\migrations\__pycache__\0001_initial.cpython-36.pyc
|
||||
..\voice\migrations\__pycache__\0002_auto__add_unique_vote_voter_request.cpython-36.pyc
|
||||
..\voice\migrations\__pycache__\0003_auto__del_request__add_feature__del_field_vote_request__add_field_vote.cpython-36.pyc
|
||||
..\voice\migrations\__pycache__\__init__.cpython-36.pyc
|
||||
..\voice\models.py
|
||||
..\voice\settings.py
|
||||
..\voice\templatetags\__init__.py
|
||||
..\voice\templatetags\__pycache__\__init__.cpython-36.pyc
|
||||
..\voice\templatetags\__pycache__\feature_extras.cpython-36.pyc
|
||||
..\voice\templatetags\feature_extras.py
|
||||
..\voice\tests.py
|
||||
..\voice\urls.py
|
||||
..\voice\views.py
|
||||
PKG-INFO
|
||||
SOURCES.txt
|
||||
dependency_links.txt
|
||||
not-zip-safe
|
||||
requires.txt
|
||||
top_level.txt
|
@ -0,0 +1 @@
|
||||
|
@ -0,0 +1,5 @@
|
||||
Django>=1.3.1
|
||||
South
|
||||
|
||||
[test]
|
||||
Django>=1.3.1
|
@ -0,0 +1 @@
|
||||
voice
|
@ -0,0 +1 @@
|
||||
pip
|
@ -0,0 +1,27 @@
|
||||
Copyright (c) Django Software Foundation and individual contributors.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of Django nor the names of its contributors may be used
|
||||
to endorse or promote products derived from this software without
|
||||
specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@ -0,0 +1,226 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: asgiref
|
||||
Version: 3.2.7
|
||||
Summary: ASGI specs, helper code, and adapters
|
||||
Home-page: http://github.com/django/asgiref/
|
||||
Author: Django Software Foundation
|
||||
Author-email: foundation@djangoproject.com
|
||||
License: BSD
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Topic :: Internet :: WWW/HTTP
|
||||
Requires-Python: >=3.5
|
||||
Description-Content-Type: text/x-rst
|
||||
Provides-Extra: tests
|
||||
Requires-Dist: pytest (~=4.3.0) ; extra == 'tests'
|
||||
Requires-Dist: pytest-asyncio (~=0.10.0) ; extra == 'tests'
|
||||
|
||||
asgiref
|
||||
=======
|
||||
|
||||
.. image:: https://api.travis-ci.org/django/asgiref.svg
|
||||
:target: https://travis-ci.org/django/asgiref
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/asgiref.svg
|
||||
:target: https://pypi.python.org/pypi/asgiref
|
||||
|
||||
ASGI is a standard for Python asynchronous web apps and servers to communicate
|
||||
with each other, and positioned as an asynchronous successor to WSGI. You can
|
||||
read more at https://asgi.readthedocs.io/en/latest/
|
||||
|
||||
This package includes ASGI base libraries, such as:
|
||||
|
||||
* Sync-to-async and async-to-sync function wrappers, ``asgiref.sync``
|
||||
* Server base classes, ``asgiref.server``
|
||||
* A WSGI-to-ASGI adapter, in ``asgiref.wsgi``
|
||||
|
||||
|
||||
Function wrappers
|
||||
-----------------
|
||||
|
||||
These allow you to wrap or decorate async or sync functions to call them from
|
||||
the other style (so you can call async functions from a synchronous thread,
|
||||
or vice-versa).
|
||||
|
||||
In particular:
|
||||
|
||||
* AsyncToSync lets a synchronous subthread stop and wait while the async
|
||||
function is called on the main thread's event loop, and then control is
|
||||
returned to the thread when the async function is finished.
|
||||
|
||||
* SyncToAsync lets async code call a synchronous function, which is run in
|
||||
a threadpool and control returned to the async coroutine when the synchronous
|
||||
function completes.
|
||||
|
||||
The idea is to make it easier to call synchronous APIs from async code and
|
||||
asynchronous APIs from synchronous code so it's easier to transition code from
|
||||
one style to the other. In the case of Channels, we wrap the (synchronous)
|
||||
Django view system with SyncToAsync to allow it to run inside the (asynchronous)
|
||||
ASGI server.
|
||||
|
||||
Note that exactly what threads things run in is very specific, and aimed to
|
||||
keep maximum compatibility with old synchronous code. See
|
||||
"Synchronous code & Threads" below for a full explanation.
|
||||
|
||||
|
||||
Threadlocal replacement
|
||||
-----------------------
|
||||
|
||||
This is a drop-in replacement for ``threading.local`` that works with both
|
||||
threads and asyncio Tasks. Even better, it will proxy values through from a
|
||||
task-local context to a thread-local context when you use ``sync_to_async``
|
||||
to run things in a threadpool, and vice-versa for ``async_to_sync``.
|
||||
|
||||
If you instead want true thread- and task-safety, you can set
|
||||
``thread_critical`` on the Local object to ensure this instead.
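A minimal sketch of how this might be used (``request_state`` is a made-up name for the example, not something asgiref provides)::

    from asgiref.local import Local
    from asgiref.sync import async_to_sync, sync_to_async

    request_state = Local()  # Local(thread_critical=True) for true thread-safety

    def read_user():
        # Runs in a worker thread, but still sees the value set by the parent
        # coroutine, because Local proxies values through sync_to_async.
        return request_state.user

    async def handler():
        request_state.user = "andrew"
        return await sync_to_async(read_user)()

    print(async_to_sync(handler)())  # prints "andrew"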
|
||||
|
||||
|
||||
Server base classes
|
||||
-------------------
|
||||
|
||||
Includes a ``StatelessServer`` class which provides all the hard work of
|
||||
writing a stateless server (as in, does not handle direct incoming sockets
|
||||
but instead consumes external streams or sockets to work out what is happening).
|
||||
|
||||
An example of such a server would be a chatbot server that connects out to
|
||||
a central chat server and provides a "connection scope" per user chatting to
|
||||
it. There's only one actual connection, but the server has to separate things
|
||||
into several scopes for easier writing of the code.
|
||||
|
||||
You can see an example of this being used in `frequensgi <https://github.com/andrewgodwin/frequensgi>`_.
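A minimal sketch of what a subclass might look like, assuming a hypothetical ``get_next_event()`` source of incoming messages (everything here other than the ``StatelessServer`` API itself is illustrative)::

    import asyncio

    from asgiref.server import StatelessServer

    class MyChatServer(StatelessServer):

        async def handle(self):
            # Pull events from some external source (hypothetical helper)
            # and feed them into per-user application instances.
            while True:
                user_id, message = await self.get_next_event()
                input_queue = self.get_or_create_application_instance(
                    user_id,
                    {"type": "testprotocol", "user_id": user_id},
                )
                input_queue.put_nowait(message)

        async def application_send(self, scope, message):
            # Deliver outbound messages back to the external chat service.
            print("Would send to %s: %r" % (scope["user_id"], message))

        async def get_next_event(self):
            # Placeholder event source for the sketch.
            await asyncio.sleep(1)
            return "123456", {"type": "testprotocol.message", "text": "hi"}

It could then be started with ``MyChatServer(my_asgi_app).run()``, where ``my_asgi_app`` is any ASGI application.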
|
||||
|
||||
|
||||
WSGI-to-ASGI adapter
|
||||
--------------------
|
||||
|
||||
Allows you to wrap a WSGI application so it appears as a valid ASGI application.
|
||||
|
||||
Simply wrap it around your WSGI application like so::
|
||||
|
||||
asgi_application = WsgiToAsgi(wsgi_application)
|
||||
|
||||
The WSGI application will be run in a synchronous threadpool, and the wrapped
|
||||
ASGI application will be one that accepts ``http`` class messages.
|
||||
|
||||
Please note that not all extended features of WSGI may be supported (such as
|
||||
file handles for incoming POST bodies).
|
||||
|
||||
|
||||
Dependencies
|
||||
------------
|
||||
|
||||
``asgiref`` requires Python 3.5 or higher.
|
||||
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
Please refer to the
|
||||
`main Channels contributing docs <https://github.com/django/channels/blob/master/CONTRIBUTING.rst>`_.
|
||||
|
||||
|
||||
Testing
|
||||
'''''''
|
||||
|
||||
To run tests, make sure you have installed the ``tests`` extra with the package::
|
||||
|
||||
cd asgiref/
|
||||
pip install -e .[tests]
|
||||
pytest
|
||||
|
||||
|
||||
Building the documentation
|
||||
''''''''''''''''''''''''''
|
||||
|
||||
The documentation uses `Sphinx <http://www.sphinx-doc.org>`_::
|
||||
|
||||
cd asgiref/docs/
|
||||
pip install sphinx
|
||||
|
||||
To build the docs, you can use the default tools::
|
||||
|
||||
sphinx-build -b html . _build/html # or `make html`, if you've got make set up
|
||||
cd _build/html
|
||||
python -m http.server
|
||||
|
||||
...or you can use ``sphinx-autobuild`` to run a server and rebuild/reload
|
||||
your documentation changes automatically::
|
||||
|
||||
pip install sphinx-autobuild
|
||||
sphinx-autobuild . _build/html
|
||||
|
||||
|
||||
Implementation Details
|
||||
----------------------
|
||||
|
||||
Synchronous code & threads
|
||||
''''''''''''''''''''''''''
|
||||
|
||||
The ``asgiref.sync`` module provides two wrappers that let you go between
|
||||
asynchronous and synchronous code at will, while taking care of the rough edges
|
||||
for you.
|
||||
|
||||
Unfortunately, the rough edges are numerous, and the code has to work especially
|
||||
hard to keep things in the same thread as much as possible. Notably, the
|
||||
restrictions we are working with are:
|
||||
|
||||
* All synchronous code called through ``SyncToAsync`` and marked with
|
||||
``thread_sensitive`` should run in the same thread as each other (and if the
|
||||
outer layer of the program is synchronous, the main thread)
|
||||
|
||||
* If a thread already has a running async loop, ``AsyncToSync`` can't run things
|
||||
on that loop if it's blocked on synchronous code that is above you in the
|
||||
call stack.
|
||||
|
||||
The first compromise you get to might be that ``thread_sensitive`` code should
|
||||
just run in the same thread and not spawn in a sub-thread, fulfilling the first
|
||||
restriction, but that immediately runs you into the second restriction.
|
||||
|
||||
The only real solution is to essentially have a variant of ThreadPoolExecutor
|
||||
that executes any ``thread_sensitive`` code on the outermost synchronous
|
||||
thread - either the main thread, or a single spawned subthread.
|
||||
|
||||
This means you now have two basic states:
|
||||
|
||||
* If the outermost layer of your program is synchronous, then all async code
|
||||
run through ``AsyncToSync`` will run in a per-call event loop in arbitrary
|
||||
sub-threads, while all ``thread_sensitive`` code will run in the main thread.
|
||||
|
||||
* If the outermost layer of your program is asynchronous, then all async code
|
||||
runs on the main thread's event loop, and all ``thread_sensitive`` synchronous
|
||||
code will run in a single shared sub-thread.
|
||||
|
||||
Crucially, this means that in both cases there is a thread which is a shared
|
||||
resource that all ``thread_sensitive`` code must run on, and there is a chance
|
||||
that this thread is currently blocked on its own ``AsyncToSync`` call. Thus,
|
||||
``AsyncToSync`` needs to act as an executor for thread code while it's blocking.
|
||||
|
||||
The ``CurrentThreadExecutor`` class provides this functionality; rather than
|
||||
simply waiting on a Future, you can call its ``run_until_future`` method and
|
||||
it will run submitted code until that Future is done. This means that code
|
||||
inside the call can then run code on your thread.
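For reference, a bare-bones sketch of that executor pattern outside of asgiref's own machinery (illustrative only)::

    import threading
    from concurrent.futures import Future

    from asgiref.current_thread_executor import CurrentThreadExecutor

    executor = CurrentThreadExecutor()
    finished = Future()

    def background():
        # Work submitted from another thread runs back on the thread
        # that created the executor.
        f = executor.submit(print, "runs on the main thread")
        f.result()
        finished.set_result(None)

    threading.Thread(target=background).start()
    # Blocks here, but keeps executing submitted work until `finished` is done.
    executor.run_until_future(finished)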
|
||||
|
||||
|
||||
Maintenance and Security
|
||||
------------------------
|
||||
|
||||
To report security issues, please contact security@djangoproject.com. For GPG
|
||||
signatures and more security process information, see
|
||||
https://docs.djangoproject.com/en/dev/internals/security/.
|
||||
|
||||
To report bugs or request new features, please open a new GitHub issue.
|
||||
|
||||
This repository is part of the Channels project. For the shepherd and maintenance team, please see the
|
||||
`main Channels readme <https://github.com/django/channels/blob/master/README.rst>`_.
|
||||
|
||||
|
@ -0,0 +1,24 @@
|
||||
asgiref-3.2.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
asgiref-3.2.7.dist-info/LICENSE,sha256=uEZBXRtRTpwd_xSiLeuQbXlLxUbKYSn5UKGM0JHipmk,1552
|
||||
asgiref-3.2.7.dist-info/METADATA,sha256=18_8GupjB9c-tnY7PgZQIO49z5XpFrT3453WbVj2Qvo,8227
|
||||
asgiref-3.2.7.dist-info/RECORD,,
|
||||
asgiref-3.2.7.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
|
||||
asgiref-3.2.7.dist-info/top_level.txt,sha256=bokQjCzwwERhdBiPdvYEZa4cHxT4NCeAffQNUqJ8ssg,8
|
||||
asgiref/__init__.py,sha256=IELkDI6KAaCvfMjius3Of-YvCdsjwTVZkdHxMo4NHTU,22
|
||||
asgiref/__pycache__/__init__.cpython-36.pyc,,
|
||||
asgiref/__pycache__/compatibility.cpython-36.pyc,,
|
||||
asgiref/__pycache__/current_thread_executor.cpython-36.pyc,,
|
||||
asgiref/__pycache__/local.cpython-36.pyc,,
|
||||
asgiref/__pycache__/server.cpython-36.pyc,,
|
||||
asgiref/__pycache__/sync.cpython-36.pyc,,
|
||||
asgiref/__pycache__/testing.cpython-36.pyc,,
|
||||
asgiref/__pycache__/timeout.cpython-36.pyc,,
|
||||
asgiref/__pycache__/wsgi.cpython-36.pyc,,
|
||||
asgiref/compatibility.py,sha256=MVH2bEdiCMMVTLbE-1V6KiU7q4LwqzP7PIufeXa-njM,1598
|
||||
asgiref/current_thread_executor.py,sha256=3dRFt3jAl_x1wr9prZZMut071pmdHdIwbTnUAYVejj4,2974
|
||||
asgiref/local.py,sha256=jKNZ4SVcdeYIJIVm9Ru5x5FOQa9EL-bs6S3fQOSieyc,4718
|
||||
asgiref/server.py,sha256=iFJn_uD-poeHWgLOuSnKCVMS1HqqV-IOTOOC85fKr00,5915
|
||||
asgiref/sync.py,sha256=sO9nYylm40T-mD4_1FKOJTTnnuncrm18BG_Vh5FSHlo,13004
|
||||
asgiref/testing.py,sha256=3byNRV7Oto_Fg8Z-fErQJ3yGf7OQlcUexbN_cDQugzQ,3119
|
||||
asgiref/timeout.py,sha256=Emw-Oop1pRfSc5YSMEYHgEz1802mP6JdA6bxH37bby8,3914
|
||||
asgiref/wsgi.py,sha256=rxGUxQG4FsSJYXJekClLuAGM_rovnxfH1qrNt95CNaI,5606
|
@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.34.2)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py2-none-any
|
||||
Tag: py3-none-any
|
||||
|
@ -0,0 +1 @@
|
||||
asgiref
|
@ -0,0 +1 @@
|
||||
__version__ = "3.2.7"
|
@ -0,0 +1,47 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
|
||||
|
||||
def is_double_callable(application):
|
||||
"""
|
||||
Tests to see if an application is a legacy-style (double-callable) application.
|
||||
"""
|
||||
# Look for a hint on the object first
|
||||
if getattr(application, "_asgi_single_callable", False):
|
||||
return False
|
||||
if getattr(application, "_asgi_double_callable", False):
|
||||
return True
|
||||
# Uninstantiated classes are double-callable
|
||||
if inspect.isclass(application):
|
||||
return True
|
||||
# Instantiated classes depend on their __call__
|
||||
if hasattr(application, "__call__"):
|
||||
# We only check to see if its __call__ is a coroutine function -
|
||||
# if it's not, it still might be a coroutine function itself.
|
||||
if asyncio.iscoroutinefunction(application.__call__):
|
||||
return False
|
||||
# Non-classes we just check directly
|
||||
return not asyncio.iscoroutinefunction(application)
|
||||
|
||||
|
||||
def double_to_single_callable(application):
|
||||
"""
|
||||
Transforms a double-callable ASGI application into a single-callable one.
|
||||
"""
|
||||
|
||||
async def new_application(scope, receive, send):
|
||||
instance = application(scope)
|
||||
return await instance(receive, send)
|
||||
|
||||
return new_application
|
||||
|
||||
|
||||
def guarantee_single_callable(application):
|
||||
"""
|
||||
Takes either a single- or double-callable application and always returns it
|
||||
in single-callable style. Use this to add backwards compatibility for ASGI
|
||||
2.0 applications to your server/test harness/etc.
|
||||
"""
|
||||
if is_double_callable(application):
|
||||
application = double_to_single_callable(application)
|
||||
return application
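A short illustration of the shim above (a sketch; ``LegacyApp`` is a made-up ASGI 2.0 style application)::

    import asyncio

    from asgiref.compatibility import guarantee_single_callable

    class LegacyApp:
        # ASGI 2.0 "double callable": instantiated with the scope, then
        # called with (receive, send).
        def __init__(self, scope):
            self.scope = scope

        async def __call__(self, receive, send):
            await send({"type": "demo", "scope_type": self.scope["type"]})

    app = guarantee_single_callable(LegacyApp)

    async def main():
        async def receive():
            return {}

        async def send(message):
            print(message)

        # Now callable in the single-callable (ASGI 3.0) style.
        await app({"type": "demo"}, receive, send)

    asyncio.run(main())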
|
@ -0,0 +1,86 @@
|
||||
import queue
|
||||
import threading
|
||||
import time
|
||||
from concurrent.futures import Executor, Future
|
||||
|
||||
|
||||
class _WorkItem(object):
|
||||
"""
|
||||
Represents an item needing to be run in the executor.
|
||||
Copied from ThreadPoolExecutor (but it's private, so we're not going to rely on importing it)
|
||||
"""
|
||||
|
||||
def __init__(self, future, fn, args, kwargs):
|
||||
self.future = future
|
||||
self.fn = fn
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
|
||||
def run(self):
|
||||
if not self.future.set_running_or_notify_cancel():
|
||||
return
|
||||
try:
|
||||
result = self.fn(*self.args, **self.kwargs)
|
||||
except BaseException as exc:
|
||||
self.future.set_exception(exc)
|
||||
# Break a reference cycle with the exception 'exc'
|
||||
self = None
|
||||
else:
|
||||
self.future.set_result(result)
|
||||
|
||||
|
||||
class CurrentThreadExecutor(Executor):
|
||||
"""
|
||||
An Executor that actually runs code in the thread it is instantiated in.
|
||||
Passed to other threads running async code, so they can run sync code in
|
||||
the thread they came from.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self._work_thread = threading.current_thread()
|
||||
self._work_queue = queue.Queue()
|
||||
self._broken = False
|
||||
|
||||
def run_until_future(self, future):
|
||||
"""
|
||||
Runs the code in the work queue until a result is available from the future.
|
||||
Should be run from the thread the executor is initialised in.
|
||||
"""
|
||||
# Check we're in the right thread
|
||||
if threading.current_thread() != self._work_thread:
|
||||
raise RuntimeError(
|
||||
"You cannot run CurrentThreadExecutor from a different thread"
|
||||
)
|
||||
# Keep getting work items and checking the future
|
||||
try:
|
||||
while True:
|
||||
# Get a work item and run it
|
||||
try:
|
||||
work_item = self._work_queue.get(block=False)
|
||||
except queue.Empty:
|
||||
# See if the future is done (we only exit if the work queue is empty)
|
||||
if future.done():
|
||||
return
|
||||
# Prevent hot-looping on nothing
|
||||
time.sleep(0.001)
|
||||
else:
|
||||
work_item.run()
|
||||
del work_item
|
||||
finally:
|
||||
self._broken = True
|
||||
|
||||
def submit(self, fn, *args, **kwargs):
|
||||
# Check they're not submitting from the same thread
|
||||
if threading.current_thread() == self._work_thread:
|
||||
raise RuntimeError(
|
||||
"You cannot submit onto CurrentThreadExecutor from its own thread"
|
||||
)
|
||||
# Check they're not too late or the executor errored
|
||||
if self._broken:
|
||||
raise RuntimeError("CurrentThreadExecutor already quit or is broken")
|
||||
# Add to work queue
|
||||
f = Future()
|
||||
work_item = _WorkItem(f, fn, args, kwargs)
|
||||
self._work_queue.put(work_item)
|
||||
# Return the future
|
||||
return f
|
@ -0,0 +1,119 @@
|
||||
import random
|
||||
import string
|
||||
import sys
|
||||
import threading
|
||||
import weakref
|
||||
|
||||
|
||||
class Local:
|
||||
"""
|
||||
A drop-in replacement for threading.local that also works with asyncio
|
||||
Tasks (via the current_task asyncio method), and passes locals through
|
||||
sync_to_async and async_to_sync.
|
||||
|
||||
Specifically:
|
||||
- Locals work per-coroutine on any thread not spawned using asgiref
|
||||
- Locals work per-thread on any thread not spawned using asgiref
|
||||
- Locals are shared with the parent coroutine when using sync_to_async
|
||||
- Locals are shared with the parent thread when using async_to_sync
|
||||
(and if that thread was launched using sync_to_async, with its parent
|
||||
coroutine as well, with this working for indefinite levels of nesting)
|
||||
|
||||
Set thread_critical to True to not allow locals to pass from an async Task
|
||||
to a thread it spawns. This is needed for code that truly needs
|
||||
thread-safety, as opposed to things used for helpful context (e.g. sqlite
|
||||
does not like being called from a different thread to the one it is from).
|
||||
Thread-critical code will still be differentiated per-Task within a thread
|
||||
as it is expected it does not like concurrent access.
|
||||
|
||||
This doesn't use contextvars as it needs to support 3.6. Once it can support
|
||||
3.7 only, we can then reimplement the storage more nicely.
|
||||
"""
|
||||
|
||||
CLEANUP_INTERVAL = 60 # seconds
|
||||
|
||||
def __init__(self, thread_critical=False):
|
||||
self._thread_critical = thread_critical
|
||||
self._thread_lock = threading.RLock()
|
||||
self._context_refs = []
|
||||
# Random suffixes stop accidental reuse between different Locals,
|
||||
# though we try to force deletion as well.
|
||||
self._attr_name = "_asgiref_local_impl_%s_%s" % (
|
||||
id(self),
|
||||
"".join(random.choice(string.ascii_letters) for i in range(8)),
|
||||
)
|
||||
|
||||
def _get_context_id(self):
|
||||
"""
|
||||
Get the ID we should use for looking up variables
|
||||
"""
|
||||
# Prevent a circular reference
|
||||
from .sync import AsyncToSync, SyncToAsync
|
||||
|
||||
# First, pull the current task if we can
|
||||
context_id = SyncToAsync.get_current_task()
|
||||
context_is_async = True
|
||||
# OK, let's try for a thread ID
|
||||
if context_id is None:
|
||||
context_id = threading.current_thread()
|
||||
context_is_async = False
|
||||
# If we're thread-critical, we stop here, as we can't share contexts.
|
||||
if self._thread_critical:
|
||||
return context_id
|
||||
# Now, take those and see if we can resolve them through the launch maps
|
||||
for i in range(sys.getrecursionlimit()):
|
||||
try:
|
||||
if context_is_async:
|
||||
# Tasks have a source thread in AsyncToSync
|
||||
context_id = AsyncToSync.launch_map[context_id]
|
||||
context_is_async = False
|
||||
else:
|
||||
# Threads have a source task in SyncToAsync
|
||||
context_id = SyncToAsync.launch_map[context_id]
|
||||
context_is_async = True
|
||||
except KeyError:
|
||||
break
|
||||
else:
|
||||
# Catch infinite loops (they happen if you are screwing around
|
||||
# with AsyncToSync implementations)
|
||||
raise RuntimeError("Infinite launch_map loops")
|
||||
return context_id
|
||||
|
||||
def _get_storage(self):
|
||||
context_obj = self._get_context_id()
|
||||
if not hasattr(context_obj, self._attr_name):
|
||||
setattr(context_obj, self._attr_name, {})
|
||||
self._context_refs.append(weakref.ref(context_obj))
|
||||
return getattr(context_obj, self._attr_name)
|
||||
|
||||
def __del__(self):
|
||||
for ref in self._context_refs:
|
||||
context_obj = ref()
|
||||
if context_obj:
|
||||
try:
|
||||
delattr(context_obj, self._attr_name)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
def __getattr__(self, key):
|
||||
with self._thread_lock:
|
||||
storage = self._get_storage()
|
||||
if key in storage:
|
||||
return storage[key]
|
||||
else:
|
||||
raise AttributeError("%r object has no attribute %r" % (self, key))
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
if key in ("_context_refs", "_thread_critical", "_thread_lock", "_attr_name"):
|
||||
return super().__setattr__(key, value)
|
||||
with self._thread_lock:
|
||||
storage = self._get_storage()
|
||||
storage[key] = value
|
||||
|
||||
def __delattr__(self, key):
|
||||
with self._thread_lock:
|
||||
storage = self._get_storage()
|
||||
if key in storage:
|
||||
del storage[key]
|
||||
else:
|
||||
raise AttributeError("%r object has no attribute %r" % (self, key))
|
@ -0,0 +1,154 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
import traceback
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class StatelessServer:
|
||||
"""
|
||||
Base server class that handles basic concepts like application instance
|
||||
creation/pooling, exception handling, and similar, for stateless protocols
|
||||
(i.e. ones without actual incoming connections to the process)
|
||||
|
||||
Your code should override the handle() method, doing whatever it needs to,
|
||||
and calling get_or_create_application_instance with a unique `scope_id`
|
||||
and `scope` for the scope it wants to get.
|
||||
|
||||
If an application instance is found with the same `scope_id`, you are
|
||||
given its input queue, otherwise one is made for you with the scope provided
|
||||
and you are given that fresh new input queue. Either way, you should do
|
||||
something like:
|
||||
|
||||
input_queue = self.get_or_create_application_instance(
|
||||
"user-123456",
|
||||
{"type": "testprotocol", "user_id": "123456", "username": "andrew"},
|
||||
)
|
||||
input_queue.put_nowait(message)
|
||||
|
||||
If you try and create an application instance and there are already
|
||||
`max_applications` instances, the oldest/least recently used one will be
|
||||
reclaimed and shut down to make space.
|
||||
|
||||
Application coroutines that error will be found periodically (every 100ms
|
||||
by default) and have their exceptions printed to the console. Override
|
||||
application_exception() if you want to do more when this happens.
|
||||
|
||||
If you override run(), make sure you handle things like launching the
|
||||
application checker.
|
||||
"""
|
||||
|
||||
application_checker_interval = 0.1
|
||||
|
||||
def __init__(self, application, max_applications=1000):
|
||||
# Parameters
|
||||
self.application = application
|
||||
self.max_applications = max_applications
|
||||
# Initialisation
|
||||
self.application_instances = {}
|
||||
|
||||
### Mainloop and handling
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Runs the asyncio event loop with our handler loop.
|
||||
"""
|
||||
event_loop = asyncio.get_event_loop()
|
||||
asyncio.ensure_future(self.application_checker())
|
||||
try:
|
||||
event_loop.run_until_complete(self.handle())
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Exiting due to Ctrl-C/interrupt")
|
||||
|
||||
async def handle(self):
|
||||
raise NotImplementedError("You must implement handle()")
|
||||
|
||||
async def application_send(self, scope, message):
|
||||
"""
|
||||
Receives outbound sends from applications and handles them.
|
||||
"""
|
||||
raise NotImplementedError("You must implement application_send()")
|
||||
|
||||
### Application instance management
|
||||
|
||||
def get_or_create_application_instance(self, scope_id, scope):
|
||||
"""
|
||||
Creates an application instance and returns its queue.
|
||||
"""
|
||||
if scope_id in self.application_instances:
|
||||
self.application_instances[scope_id]["last_used"] = time.time()
|
||||
return self.application_instances[scope_id]["input_queue"]
|
||||
# See if we need to delete an old one
|
||||
while len(self.application_instances) > self.max_applications:
|
||||
self.delete_oldest_application_instance()
|
||||
# Make an instance of the application
|
||||
input_queue = asyncio.Queue()
|
||||
application_instance = self.application(scope=scope)
|
||||
# Run it, and stash the future for later checking
|
||||
future = asyncio.ensure_future(
|
||||
application_instance(
|
||||
receive=input_queue.get,
|
||||
send=lambda message: self.application_send(scope, message),
|
||||
)
|
||||
)
|
||||
self.application_instances[scope_id] = {
|
||||
"input_queue": input_queue,
|
||||
"future": future,
|
||||
"scope": scope,
|
||||
"last_used": time.time(),
|
||||
}
|
||||
return input_queue
|
||||
|
||||
def delete_oldest_application_instance(self):
|
||||
"""
|
||||
Finds and deletes the oldest application instance
|
||||
"""
|
||||
oldest_time = min(
|
||||
details["last_used"] for details in self.application_instances.values()
|
||||
)
|
||||
for scope_id, details in self.application_instances.items():
|
||||
if details["last_used"] == oldest_time:
|
||||
self.delete_application_instance(scope_id)
|
||||
# Return to make sure we only delete one in case two have
|
||||
# the same oldest time
|
||||
return
|
||||
|
||||
def delete_application_instance(self, scope_id):
|
||||
"""
|
||||
Removes an application instance (makes sure its task is stopped,
|
||||
then removes it from the current set)
|
||||
"""
|
||||
details = self.application_instances[scope_id]
|
||||
del self.application_instances[scope_id]
|
||||
if not details["future"].done():
|
||||
details["future"].cancel()
|
||||
|
||||
async def application_checker(self):
|
||||
"""
|
||||
Goes through the set of current application instance Futures and cleans up
|
||||
any that are done/prints exceptions for any that errored.
|
||||
"""
|
||||
while True:
|
||||
await asyncio.sleep(self.application_checker_interval)
|
||||
for scope_id, details in list(self.application_instances.items()):
|
||||
if details["future"].done():
|
||||
exception = details["future"].exception()
|
||||
if exception:
|
||||
await self.application_exception(exception, details)
|
||||
try:
|
||||
del self.application_instances[scope_id]
|
||||
except KeyError:
|
||||
# Exception handling might have already got here before us. That's fine.
|
||||
pass
|
||||
|
||||
async def application_exception(self, exception, application_details):
|
||||
"""
|
||||
Called whenever an application coroutine has an exception.
|
||||
"""
|
||||
logging.error(
|
||||
"Exception inside application: %s\n%s%s",
|
||||
exception,
|
||||
"".join(traceback.format_tb(exception.__traceback__)),
|
||||
" {}".format(exception),
|
||||
)
|
@ -0,0 +1,341 @@
|
||||
import asyncio
|
||||
import asyncio.coroutines
|
||||
import functools
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
from concurrent.futures import Future, ThreadPoolExecutor
|
||||
|
||||
from .current_thread_executor import CurrentThreadExecutor
|
||||
from .local import Local
|
||||
|
||||
try:
|
||||
import contextvars # Python 3.7+ only.
|
||||
except ImportError:
|
||||
contextvars = None
|
||||
|
||||
|
||||
class AsyncToSync:
|
||||
"""
|
||||
Utility class which turns an awaitable that only works on the thread with
|
||||
the event loop into a synchronous callable that works in a subthread.
|
||||
|
||||
If the call stack contains an async loop, the code runs there.
|
||||
Otherwise, the code runs in a new loop in a new thread.
|
||||
|
||||
Either way, this thread then pauses and waits to run any thread_sensitive
|
||||
code called from further down the call stack using SyncToAsync, before
|
||||
finally exiting once the async task returns.
|
||||
"""
|
||||
|
||||
# Maps launched Tasks to the threads that launched them (for locals impl)
|
||||
launch_map = {}
|
||||
|
||||
# Keeps track of which CurrentThreadExecutor to use. This uses an asgiref
|
||||
# Local, not a threadlocal, so that tasks can work out what their parent used.
|
||||
executors = Local()
|
||||
|
||||
def __init__(self, awaitable, force_new_loop=False):
|
||||
self.awaitable = awaitable
|
||||
try:
|
||||
self.__self__ = self.awaitable.__self__
|
||||
except AttributeError:
|
||||
pass
|
||||
if force_new_loop:
|
||||
# They have asked that we always run in a new sub-loop.
|
||||
self.main_event_loop = None
|
||||
else:
|
||||
try:
|
||||
self.main_event_loop = asyncio.get_event_loop()
|
||||
except RuntimeError:
|
||||
# There's no event loop in this thread. Look for the threadlocal if
|
||||
# we're inside SyncToAsync
|
||||
self.main_event_loop = getattr(
|
||||
SyncToAsync.threadlocal, "main_event_loop", None
|
||||
)
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
# You can't call AsyncToSync from a thread with a running event loop
|
||||
try:
|
||||
event_loop = asyncio.get_event_loop()
|
||||
except RuntimeError:
|
||||
pass
|
||||
else:
|
||||
if event_loop.is_running():
|
||||
raise RuntimeError(
|
||||
"You cannot use AsyncToSync in the same thread as an async event loop - "
|
||||
"just await the async function directly."
|
||||
)
|
||||
# Make a future for the return information
|
||||
call_result = Future()
|
||||
# Get the source thread
|
||||
source_thread = threading.current_thread()
|
||||
# Make a CurrentThreadExecutor we'll use to idle in this thread - we
|
||||
# need one for every sync frame, even if there's one above us in the
|
||||
# same thread.
|
||||
if hasattr(self.executors, "current"):
|
||||
old_current_executor = self.executors.current
|
||||
else:
|
||||
old_current_executor = None
|
||||
current_executor = CurrentThreadExecutor()
|
||||
self.executors.current = current_executor
|
||||
# Use call_soon_threadsafe to schedule a synchronous callback on the
|
||||
# main event loop's thread if it's there, otherwise make a new loop
|
||||
# in this thread.
|
||||
try:
|
||||
if not (self.main_event_loop and self.main_event_loop.is_running()):
|
||||
# Make our own event loop - in a new thread - and run inside that.
|
||||
loop = asyncio.new_event_loop()
|
||||
loop_executor = ThreadPoolExecutor(max_workers=1)
|
||||
loop_future = loop_executor.submit(
|
||||
self._run_event_loop,
|
||||
loop,
|
||||
self.main_wrap(
|
||||
args, kwargs, call_result, source_thread, sys.exc_info()
|
||||
),
|
||||
)
|
||||
if current_executor:
|
||||
# Run the CurrentThreadExecutor until the future is done
|
||||
current_executor.run_until_future(loop_future)
|
||||
# Wait for future and/or allow for exception propagation
|
||||
loop_future.result()
|
||||
else:
|
||||
# Call it inside the existing loop
|
||||
self.main_event_loop.call_soon_threadsafe(
|
||||
self.main_event_loop.create_task,
|
||||
self.main_wrap(
|
||||
args, kwargs, call_result, source_thread, sys.exc_info()
|
||||
),
|
||||
)
|
||||
if current_executor:
|
||||
# Run the CurrentThreadExecutor until the future is done
|
||||
current_executor.run_until_future(call_result)
|
||||
finally:
|
||||
# Clean up any executor we were running
|
||||
if hasattr(self.executors, "current"):
|
||||
del self.executors.current
|
||||
if old_current_executor:
|
||||
self.executors.current = old_current_executor
|
||||
# Wait for results from the future.
|
||||
return call_result.result()
|
||||
|
||||
def _run_event_loop(self, loop, coro):
|
||||
"""
|
||||
Runs the given event loop (designed to be called in a thread).
|
||||
"""
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
loop.run_until_complete(coro)
|
||||
finally:
|
||||
try:
|
||||
# mimic asyncio.run() behavior
|
||||
# cancel unexhausted async generators
|
||||
if sys.version_info >= (3, 7, 0):
|
||||
tasks = asyncio.all_tasks(loop)
|
||||
else:
|
||||
tasks = asyncio.Task.all_tasks(loop)
|
||||
for task in tasks:
|
||||
task.cancel()
|
||||
loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
|
||||
for task in tasks:
|
||||
if task.cancelled():
|
||||
continue
|
||||
if task.exception() is not None:
|
||||
loop.call_exception_handler(
|
||||
{
|
||||
"message": "unhandled exception during loop shutdown",
|
||||
"exception": task.exception(),
|
||||
"task": task,
|
||||
}
|
||||
)
|
||||
if hasattr(loop, "shutdown_asyncgens"):
|
||||
loop.run_until_complete(loop.shutdown_asyncgens())
|
||||
finally:
|
||||
loop.close()
|
||||
asyncio.set_event_loop(self.main_event_loop)
|
||||
|
||||
def __get__(self, parent, objtype):
|
||||
"""
|
||||
Include self for methods
|
||||
"""
|
||||
func = functools.partial(self.__call__, parent)
|
||||
return functools.update_wrapper(func, self.awaitable)
|
||||
|
||||
async def main_wrap(self, args, kwargs, call_result, source_thread, exc_info):
|
||||
"""
|
||||
Wraps the awaitable with something that puts the result into the
|
||||
result/exception future.
|
||||
"""
|
||||
current_task = SyncToAsync.get_current_task()
|
||||
self.launch_map[current_task] = source_thread
|
||||
try:
|
||||
# If we have an exception, run the function inside the except block
|
||||
# after raising it so exc_info is correctly populated.
|
||||
if exc_info[1]:
|
||||
try:
|
||||
raise exc_info[1]
|
||||
except:
|
||||
result = await self.awaitable(*args, **kwargs)
|
||||
else:
|
||||
result = await self.awaitable(*args, **kwargs)
|
||||
except Exception as e:
|
||||
call_result.set_exception(e)
|
||||
else:
|
||||
call_result.set_result(result)
|
||||
finally:
|
||||
del self.launch_map[current_task]
|
||||
|
||||
|
||||
class SyncToAsync:
|
||||
"""
|
||||
Utility class which turns a synchronous callable into an awaitable that
|
||||
runs in a threadpool. It also sets a threadlocal inside the thread so
|
||||
calls to AsyncToSync can escape it.
|
||||
|
||||
If thread_sensitive is passed, the code will run in the same thread as any
|
||||
outer code. This is needed for underlying Python code that is not
|
||||
threadsafe (for example, code which handles SQLite database connections).
|
||||
|
||||
If the outermost program is async (i.e. SyncToAsync is outermost), then
|
||||
this will be a dedicated single sub-thread that all sync code runs in,
|
||||
one after the other. If the outermost program is sync (i.e. AsyncToSync is
|
||||
outermost), this will just be the main thread. This is achieved by idling
|
||||
with a CurrentThreadExecutor while AsyncToSync is blocking its sync parent,
|
||||
rather than just blocking.
|
||||
"""
|
||||
|
||||
# If they've set ASGI_THREADS, update the default asyncio executor for now
|
||||
if "ASGI_THREADS" in os.environ:
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.set_default_executor(
|
||||
ThreadPoolExecutor(max_workers=int(os.environ["ASGI_THREADS"]))
|
||||
)
|
||||
|
||||
# Maps launched threads to the coroutines that spawned them
|
||||
launch_map = {}
|
||||
|
||||
# Storage for main event loop references
|
||||
threadlocal = threading.local()
|
||||
|
||||
# Single-thread executor for thread-sensitive code
|
||||
single_thread_executor = ThreadPoolExecutor(max_workers=1)
|
||||
|
||||
def __init__(self, func, thread_sensitive=False):
|
||||
self.func = func
|
||||
functools.update_wrapper(self, func)
|
||||
self._thread_sensitive = thread_sensitive
|
||||
self._is_coroutine = asyncio.coroutines._is_coroutine
|
||||
try:
|
||||
self.__self__ = func.__self__
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
async def __call__(self, *args, **kwargs):
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
# Work out what thread to run the code in
|
||||
if self._thread_sensitive:
|
||||
if hasattr(AsyncToSync.executors, "current"):
|
||||
# If we have a parent sync thread above somewhere, use that
|
||||
executor = AsyncToSync.executors.current
|
||||
else:
|
||||
# Otherwise, we run it in a fixed single thread
|
||||
executor = self.single_thread_executor
|
||||
else:
|
||||
executor = None # Use default
|
||||
|
||||
if contextvars is not None:
|
||||
context = contextvars.copy_context()
|
||||
child = functools.partial(self.func, *args, **kwargs)
|
||||
func = context.run
|
||||
args = (child,)
|
||||
kwargs = {}
|
||||
else:
|
||||
func = self.func
|
||||
|
||||
# Run the code in the right thread
|
||||
future = loop.run_in_executor(
|
||||
executor,
|
||||
functools.partial(
|
||||
self.thread_handler,
|
||||
loop,
|
||||
self.get_current_task(),
|
||||
sys.exc_info(),
|
||||
func,
|
||||
*args,
|
||||
**kwargs
|
||||
),
|
||||
)
|
||||
ret = await asyncio.wait_for(future, timeout=None)
|
||||
|
||||
if contextvars is not None:
|
||||
# Check for changes in contextvars, and set them to the current
|
||||
# context for downstream consumers
|
||||
for cvar in context:
|
||||
try:
|
||||
if cvar.get() != context.get(cvar):
|
||||
cvar.set(context.get(cvar))
|
||||
except LookupError:
|
||||
cvar.set(context.get(cvar))
|
||||
|
||||
return ret
|
||||
|
||||
def __get__(self, parent, objtype):
|
||||
"""
|
||||
Include self for methods
|
||||
"""
|
||||
return functools.partial(self.__call__, parent)
|
||||
|
||||
def thread_handler(self, loop, source_task, exc_info, func, *args, **kwargs):
|
||||
"""
|
||||
Wraps the sync application with exception handling.
|
||||
"""
|
||||
# Set the threadlocal for AsyncToSync
|
||||
self.threadlocal.main_event_loop = loop
|
||||
# Set the task mapping (used for the locals module)
|
||||
current_thread = threading.current_thread()
|
||||
if AsyncToSync.launch_map.get(source_task) == current_thread:
|
||||
# Our parent task was launched from this same thread, so don't make
|
||||
# a launch map entry - let it shortcut over us! (and stop infinite loops)
|
||||
parent_set = False
|
||||
else:
|
||||
self.launch_map[current_thread] = source_task
|
||||
parent_set = True
|
||||
# Run the function
|
||||
try:
|
||||
# If we have an exception, run the function inside the except block
|
||||
# after raising it so exc_info is correctly populated.
|
||||
if exc_info[1]:
|
||||
try:
|
||||
raise exc_info[1]
|
||||
except:
|
||||
return func(*args, **kwargs)
|
||||
else:
|
||||
return func(*args, **kwargs)
|
||||
finally:
|
||||
# Only delete the launch_map parent if we set it, otherwise it is
|
||||
# from someone else.
|
||||
if parent_set:
|
||||
del self.launch_map[current_thread]
|
||||
|
||||
@staticmethod
|
||||
def get_current_task():
|
||||
"""
|
||||
Cross-version implementation of asyncio.current_task()
|
||||
|
||||
Returns None if there is no task.
|
||||
"""
|
||||
try:
|
||||
if hasattr(asyncio, "current_task"):
|
||||
# Python 3.7 and up
|
||||
return asyncio.current_task()
|
||||
else:
|
||||
# Python 3.6
|
||||
return asyncio.Task.current_task()
|
||||
except RuntimeError:
|
||||
return None
|
||||
|
||||
|
||||
# Lowercase is more sensible for most things
|
||||
sync_to_async = SyncToAsync
|
||||
async_to_sync = AsyncToSync
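As an aside, the lowercase aliases work as decorators too; a minimal sketch of the ``thread_sensitive`` behaviour described in the docstring above (``load_row`` is a hypothetical function standing in for something non-threadsafe such as SQLite access)::

    import threading

    from asgiref.sync import async_to_sync, sync_to_async

    @sync_to_async
    def where_am_i():
        # Runs in the default executor's worker thread.
        return threading.current_thread().name

    def load_row():
        # Pretend this touches a non-threadsafe resource such as SQLite.
        return threading.current_thread().name

    async def main():
        default = await where_am_i()
        sensitive = await sync_to_async(load_row, thread_sensitive=True)()
        return default, sensitive

    # With a synchronous outermost layer, the thread_sensitive call runs
    # back on this (main) thread while the default one uses a worker thread.
    print(async_to_sync(main)())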
|
@ -0,0 +1,97 @@
|
||||
import asyncio
|
||||
import time
|
||||
|
||||
from .compatibility import guarantee_single_callable
|
||||
from .timeout import timeout as async_timeout
|
||||
|
||||
|
||||
class ApplicationCommunicator:
|
||||
"""
|
||||
Runs an ASGI application in a test mode, allowing sending of
|
||||
messages to it and retrieval of messages it sends.
|
||||
"""
|
||||
|
||||
def __init__(self, application, scope):
|
||||
self.application = guarantee_single_callable(application)
|
||||
self.scope = scope
|
||||
self.input_queue = asyncio.Queue()
|
||||
self.output_queue = asyncio.Queue()
|
||||
self.future = asyncio.ensure_future(
|
||||
self.application(scope, self.input_queue.get, self.output_queue.put)
|
||||
)
|
||||
|
||||
async def wait(self, timeout=1):
|
||||
"""
|
||||
Waits for the application to stop itself and returns any exceptions.
|
||||
"""
|
||||
try:
|
||||
async with async_timeout(timeout):
|
||||
try:
|
||||
await self.future
|
||||
self.future.result()
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
finally:
|
||||
if not self.future.done():
|
||||
self.future.cancel()
|
||||
try:
|
||||
await self.future
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
def stop(self, exceptions=True):
|
||||
if not self.future.done():
|
||||
self.future.cancel()
|
||||
elif exceptions:
|
||||
# Give a chance to raise any exceptions
|
||||
self.future.result()
|
||||
|
||||
def __del__(self):
|
||||
# Clean up on deletion
|
||||
try:
|
||||
self.stop(exceptions=False)
|
||||
except RuntimeError:
|
||||
# Event loop already stopped
|
||||
pass
|
||||
|
||||
async def send_input(self, message):
|
||||
"""
|
||||
Sends a single message to the application
|
||||
"""
|
||||
# Give it the message
|
||||
await self.input_queue.put(message)
|
||||
|
||||
async def receive_output(self, timeout=1):
|
||||
"""
|
||||
Receives a single message from the application, with optional timeout.
|
||||
"""
|
||||
# Make sure there's not an exception to raise from the task
|
||||
if self.future.done():
|
||||
self.future.result()
|
||||
# Wait and receive the message
|
||||
try:
|
||||
async with async_timeout(timeout):
|
||||
return await self.output_queue.get()
|
||||
except asyncio.TimeoutError as e:
|
||||
# See if we have another error to raise inside
|
||||
if self.future.done():
|
||||
self.future.result()
|
||||
else:
|
||||
self.future.cancel()
|
||||
try:
|
||||
await self.future
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
raise e
|
||||
|
||||
async def receive_nothing(self, timeout=0.1, interval=0.01):
|
||||
"""
|
||||
Checks that there is no message to receive in the given time.
|
||||
"""
|
||||
# `interval` has precedence over `timeout`
|
||||
start = time.monotonic()
|
||||
while time.monotonic() - start < timeout:
|
||||
if not self.output_queue.empty():
|
||||
return False
|
||||
await asyncio.sleep(interval)
|
||||
return self.output_queue.empty()
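A small sketch of driving an application with this helper (``echo_app`` is a made-up ASGI application for the example)::

    import asyncio

    from asgiref.testing import ApplicationCommunicator

    async def echo_app(scope, receive, send):
        message = await receive()
        await send({"type": "echo", "text": message["text"]})

    async def main():
        communicator = ApplicationCommunicator(echo_app, {"type": "test"})
        await communicator.send_input({"type": "echo.send", "text": "hello"})
        reply = await communicator.receive_output(timeout=1)
        assert reply == {"type": "echo", "text": "hello"}
        await communicator.wait()

    asyncio.run(main())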
|
@ -0,0 +1,128 @@
|
||||
# This code is originally sourced from the aio-libs project "async_timeout",
|
||||
# under the Apache 2.0 license. You may see the original project at
|
||||
# https://github.com/aio-libs/async-timeout
|
||||
|
||||
# It is vendored here to reduce chain-dependencies on this library, and
|
||||
# modified slightly to remove some features we don't use.
|
||||
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
from types import TracebackType
|
||||
from typing import Any, Optional, Type # noqa
|
||||
|
||||
PY_37 = sys.version_info >= (3, 7)
|
||||
|
||||
|
||||
class timeout:
|
||||
"""timeout context manager.
|
||||
|
||||
Useful in cases when you want to apply timeout logic around a block
|
||||
of code or in cases when asyncio.wait_for is not suitable. For example:
|
||||
|
||||
>>> with timeout(0.001):
|
||||
... async with aiohttp.get('https://github.com') as r:
|
||||
... await r.text()
|
||||
|
||||
|
||||
timeout - value in seconds or None to disable timeout logic
|
||||
loop - asyncio compatible event loop
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
timeout: Optional[float],
|
||||
*,
|
||||
loop: Optional[asyncio.AbstractEventLoop] = None
|
||||
) -> None:
|
||||
self._timeout = timeout
|
||||
if loop is None:
|
||||
loop = asyncio.get_event_loop()
|
||||
self._loop = loop
|
||||
self._task = None # type: Optional[asyncio.Task[Any]]
|
||||
self._cancelled = False
|
||||
self._cancel_handler = None # type: Optional[asyncio.Handle]
|
||||
self._cancel_at = None # type: Optional[float]
|
||||
|
||||
def __enter__(self) -> "timeout":
|
||||
return self._do_enter()
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: Type[BaseException],
|
||||
exc_val: BaseException,
|
||||
exc_tb: TracebackType,
|
||||
) -> Optional[bool]:
|
||||
self._do_exit(exc_type)
|
||||
return None
|
||||
|
||||
async def __aenter__(self) -> "timeout":
|
||||
return self._do_enter()
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: Type[BaseException],
|
||||
exc_val: BaseException,
|
||||
exc_tb: TracebackType,
|
||||
) -> None:
|
||||
self._do_exit(exc_type)
|
||||
|
||||
@property
|
||||
def expired(self) -> bool:
|
||||
return self._cancelled
|
||||
|
||||
@property
|
||||
def remaining(self) -> Optional[float]:
|
||||
if self._cancel_at is not None:
|
||||
return max(self._cancel_at - self._loop.time(), 0.0)
|
||||
else:
|
||||
return None
|
||||
|
||||
def _do_enter(self) -> "timeout":
|
||||
# Support Tornado 5- without timeout
|
||||
# Details: https://github.com/python/asyncio/issues/392
|
||||
if self._timeout is None:
|
||||
return self
|
||||
|
||||
self._task = current_task(self._loop)
|
||||
if self._task is None:
|
||||
raise RuntimeError(
|
||||
"Timeout context manager should be used " "inside a task"
|
||||
)
|
||||
|
||||
if self._timeout <= 0:
|
||||
self._loop.call_soon(self._cancel_task)
|
||||
return self
|
||||
|
||||
self._cancel_at = self._loop.time() + self._timeout
|
||||
self._cancel_handler = self._loop.call_at(self._cancel_at, self._cancel_task)
|
||||
return self
|
||||
|
||||
def _do_exit(self, exc_type: Type[BaseException]) -> None:
|
||||
if exc_type is asyncio.CancelledError and self._cancelled:
|
||||
self._cancel_handler = None
|
||||
self._task = None
|
||||
raise asyncio.TimeoutError
|
||||
if self._timeout is not None and self._cancel_handler is not None:
|
||||
self._cancel_handler.cancel()
|
||||
self._cancel_handler = None
|
||||
self._task = None
|
||||
return None
|
||||
|
||||
def _cancel_task(self) -> None:
|
||||
if self._task is not None:
|
||||
self._task.cancel()
|
||||
self._cancelled = True
|
||||
|
||||
|
||||
def current_task(loop: asyncio.AbstractEventLoop) -> "asyncio.Task[Any]":
|
||||
if PY_37:
|
||||
task = asyncio.current_task(loop=loop) # type: ignore
|
||||
else:
|
||||
task = asyncio.Task.current_task(loop=loop)
|
||||
if task is None:
|
||||
# this should be removed, tokio must use register_task and family API
|
||||
if hasattr(loop, "current_task"):
|
||||
task = loop.current_task() # type: ignore
|
||||
|
||||
return task
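The vendored class is used elsewhere in this package as an async context manager; a tiny sketch of that usage::

    import asyncio

    from asgiref.timeout import timeout

    async def main():
        try:
            async with timeout(0.1):
                await asyncio.sleep(1)  # takes too long, gets cancelled
        except asyncio.TimeoutError:
            print("timed out")

    asyncio.run(main())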
|
@ -0,0 +1,145 @@
from io import BytesIO
from tempfile import SpooledTemporaryFile

from asgiref.sync import AsyncToSync, sync_to_async


class WsgiToAsgi:
    """
    Wraps a WSGI application to make it into an ASGI application.
    """

    def __init__(self, wsgi_application):
        self.wsgi_application = wsgi_application

    async def __call__(self, scope, receive, send):
        """
        ASGI application instantiation point.
        We return a new WsgiToAsgiInstance here with the WSGI app
        and the scope, ready to respond when it is __call__ed.
        """
        await WsgiToAsgiInstance(self.wsgi_application)(scope, receive, send)


class WsgiToAsgiInstance:
    """
    Per-socket instance of a wrapped WSGI application
    """

    def __init__(self, wsgi_application):
        self.wsgi_application = wsgi_application
        self.response_started = False

    async def __call__(self, scope, receive, send):
        if scope["type"] != "http":
            raise ValueError("WSGI wrapper received a non-HTTP scope")
        self.scope = scope
        with SpooledTemporaryFile(max_size=65536) as body:
            # Alright, wait for the http.request messages
            while True:
                message = await receive()
                if message["type"] != "http.request":
                    raise ValueError("WSGI wrapper received a non-HTTP-request message")
                body.write(message.get("body", b""))
                if not message.get("more_body"):
                    break
            body.seek(0)
            # Wrap send so it can be called from the subthread
            self.sync_send = AsyncToSync(send)
            # Call the WSGI app
            await self.run_wsgi_app(body)

    def build_environ(self, scope, body):
        """
        Builds a scope and request body into a WSGI environ object.
        """
        environ = {
            "REQUEST_METHOD": scope["method"],
            "SCRIPT_NAME": scope.get("root_path", ""),
            "PATH_INFO": scope["path"],
            "QUERY_STRING": scope["query_string"].decode("ascii"),
            "SERVER_PROTOCOL": "HTTP/%s" % scope["http_version"],
            "wsgi.version": (1, 0),
            "wsgi.url_scheme": scope.get("scheme", "http"),
            "wsgi.input": body,
            "wsgi.errors": BytesIO(),
            "wsgi.multithread": True,
            "wsgi.multiprocess": True,
            "wsgi.run_once": False,
        }
        # Get server name and port - required in WSGI, not in ASGI
        if "server" in scope:
            environ["SERVER_NAME"] = scope["server"][0]
            environ["SERVER_PORT"] = str(scope["server"][1])
        else:
            environ["SERVER_NAME"] = "localhost"
            environ["SERVER_PORT"] = "80"

        if "client" in scope:
            environ["REMOTE_ADDR"] = scope["client"][0]

        # Go through headers and make them into environ entries
        for name, value in self.scope.get("headers", []):
            name = name.decode("latin1")
            if name == "content-length":
                corrected_name = "CONTENT_LENGTH"
            elif name == "content-type":
                corrected_name = "CONTENT_TYPE"
            else:
                corrected_name = "HTTP_%s" % name.upper().replace("-", "_")
            # HTTPbis say only ASCII chars are allowed in headers, but we latin1 just in case
            value = value.decode("latin1")
            if corrected_name in environ:
                value = environ[corrected_name] + "," + value
            environ[corrected_name] = value
        return environ

    def start_response(self, status, response_headers, exc_info=None):
        """
        WSGI start_response callable.
        """
        # Don't allow re-calling once response has begun
        if self.response_started:
            raise exc_info[1].with_traceback(exc_info[2])
        # Don't allow re-calling without exc_info
        if hasattr(self, "response_start") and exc_info is None:
            raise ValueError(
                "You cannot call start_response a second time without exc_info"
            )
        # Extract status code
        status_code, _ = status.split(" ", 1)
        status_code = int(status_code)
        # Extract headers
        headers = [
            (name.lower().encode("ascii"), value.encode("ascii"))
            for name, value in response_headers
        ]
        # Build and send response start message.
        self.response_start = {
            "type": "http.response.start",
            "status": status_code,
            "headers": headers,
        }

    @sync_to_async
    def run_wsgi_app(self, body):
        """
        Called in a subthread to run the WSGI app. We encapsulate like
        this so that the start_response callable is called in the same thread.
        """
        # Translate the scope and incoming request body into a WSGI environ
        environ = self.build_environ(self.scope, body)
        # Run the WSGI app
        for output in self.wsgi_application(environ, self.start_response):
            # If this is the first response, include the response headers
            if not self.response_started:
                self.response_started = True
                self.sync_send(self.response_start)
            self.sync_send(
                {"type": "http.response.body", "body": output, "more_body": True}
            )
        # Close connection
        if not self.response_started:
            self.response_started = True
            self.sync_send(self.response_start)
        self.sync_send({"type": "http.response.body"})
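A hedged usage sketch for the wrapper above; hello_wsgi is a hypothetical WSGI callable, and the resulting application object can be handed to any ASGI server.

def hello_wsgi(environ, start_response):
    # Plain WSGI app used only for illustration.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"Hello from WSGI"]

application = WsgiToAsgi(hello_wsgi)  # serve with an ASGI server, e.g. uvicorn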
@ -0,0 +1,24 @@
from django.utils.version import get_version

VERSION = (3, 0, 7, 'final', 0)

__version__ = get_version(VERSION)


def setup(set_prefix=True):
    """
    Configure the settings (this happens as a side effect of accessing the
    first setting), configure logging and populate the app registry.
    Set the thread-local urlresolvers script prefix if `set_prefix` is True.
    """
    from django.apps import apps
    from django.conf import settings
    from django.urls import set_script_prefix
    from django.utils.log import configure_logging

    configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
    if set_prefix:
        set_script_prefix(
            '/' if settings.FORCE_SCRIPT_NAME is None else settings.FORCE_SCRIPT_NAME
        )
    apps.populate(settings.INSTALLED_APPS)
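As an illustration of the setup() flow above, a standalone-script sketch; the settings module name is hypothetical.

import os
import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mybot.settings")  # hypothetical module
django.setup()  # configures logging and populates the app registry, as defined above

from django.apps import apps
print([app_config.label for app_config in apps.get_app_configs()])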
@ -0,0 +1,9 @@
"""
Invokes django-admin when the django module is run as a script.

Example: python -m django check
"""
from django.core import management

if __name__ == "__main__":
    management.execute_from_command_line()
@ -0,0 +1,4 @@
from .config import AppConfig
from .registry import apps

__all__ = ['AppConfig', 'apps']
@ -0,0 +1,216 @@
|
||||
import os
|
||||
from importlib import import_module
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.utils.module_loading import module_has_submodule
|
||||
|
||||
MODELS_MODULE_NAME = 'models'
|
||||
|
||||
|
||||
class AppConfig:
|
||||
"""Class representing a Django application and its configuration."""
|
||||
|
||||
def __init__(self, app_name, app_module):
|
||||
# Full Python path to the application e.g. 'django.contrib.admin'.
|
||||
self.name = app_name
|
||||
|
||||
# Root module for the application e.g. <module 'django.contrib.admin'
|
||||
# from 'django/contrib/admin/__init__.py'>.
|
||||
self.module = app_module
|
||||
|
||||
# Reference to the Apps registry that holds this AppConfig. Set by the
|
||||
# registry when it registers the AppConfig instance.
|
||||
self.apps = None
|
||||
|
||||
# The following attributes could be defined at the class level in a
|
||||
# subclass, hence the test-and-set pattern.
|
||||
|
||||
# Last component of the Python path to the application e.g. 'admin'.
|
||||
# This value must be unique across a Django project.
|
||||
if not hasattr(self, 'label'):
|
||||
self.label = app_name.rpartition(".")[2]
|
||||
|
||||
# Human-readable name for the application e.g. "Admin".
|
||||
if not hasattr(self, 'verbose_name'):
|
||||
self.verbose_name = self.label.title()
|
||||
|
||||
# Filesystem path to the application directory e.g.
|
||||
# '/path/to/django/contrib/admin'.
|
||||
if not hasattr(self, 'path'):
|
||||
self.path = self._path_from_module(app_module)
|
||||
|
||||
# Module containing models e.g. <module 'django.contrib.admin.models'
|
||||
# from 'django/contrib/admin/models.py'>. Set by import_models().
|
||||
# None if the application doesn't have a models module.
|
||||
self.models_module = None
|
||||
|
||||
# Mapping of lowercase model names to model classes. Initially set to
|
||||
# None to prevent accidental access before import_models() runs.
|
||||
self.models = None
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s: %s>' % (self.__class__.__name__, self.label)
|
||||
|
||||
def _path_from_module(self, module):
|
||||
"""Attempt to determine app's filesystem path from its module."""
|
||||
# See #21874 for extended discussion of the behavior of this method in
|
||||
# various cases.
|
||||
# Convert paths to list because Python's _NamespacePath doesn't support
|
||||
# indexing.
|
||||
paths = list(getattr(module, '__path__', []))
|
||||
if len(paths) != 1:
|
||||
filename = getattr(module, '__file__', None)
|
||||
if filename is not None:
|
||||
paths = [os.path.dirname(filename)]
|
||||
else:
|
||||
# For unknown reasons, sometimes the list returned by __path__
|
||||
# contains duplicates that must be removed (#25246).
|
||||
paths = list(set(paths))
|
||||
if len(paths) > 1:
|
||||
raise ImproperlyConfigured(
|
||||
"The app module %r has multiple filesystem locations (%r); "
|
||||
"you must configure this app with an AppConfig subclass "
|
||||
"with a 'path' class attribute." % (module, paths))
|
||||
elif not paths:
|
||||
raise ImproperlyConfigured(
|
||||
"The app module %r has no filesystem location, "
|
||||
"you must configure this app with an AppConfig subclass "
|
||||
"with a 'path' class attribute." % (module,))
|
||||
return paths[0]
|
||||
|
||||
@classmethod
|
||||
def create(cls, entry):
|
||||
"""
|
||||
Factory that creates an app config from an entry in INSTALLED_APPS.
|
||||
"""
|
||||
try:
|
||||
# If import_module succeeds, entry is a path to an app module,
|
||||
# which may specify an app config class with default_app_config.
|
||||
# Otherwise, entry is a path to an app config class or an error.
|
||||
module = import_module(entry)
|
||||
|
||||
except ImportError:
|
||||
# Track that importing as an app module failed. If importing as an
|
||||
# app config class fails too, we'll trigger the ImportError again.
|
||||
module = None
|
||||
|
||||
mod_path, _, cls_name = entry.rpartition('.')
|
||||
|
||||
# Raise the original exception when entry cannot be a path to an
|
||||
# app config class.
|
||||
if not mod_path:
|
||||
raise
|
||||
|
||||
else:
|
||||
try:
|
||||
# If this works, the app module specifies an app config class.
|
||||
entry = module.default_app_config
|
||||
except AttributeError:
|
||||
# Otherwise, it simply uses the default app config class.
|
||||
return cls(entry, module)
|
||||
else:
|
||||
mod_path, _, cls_name = entry.rpartition('.')
|
||||
|
||||
# If we're reaching this point, we must attempt to load the app config
|
||||
# class located at <mod_path>.<cls_name>
|
||||
mod = import_module(mod_path)
|
||||
try:
|
||||
cls = getattr(mod, cls_name)
|
||||
except AttributeError:
|
||||
if module is None:
|
||||
# If importing as an app module failed, check if the module
|
||||
# contains any valid AppConfigs and show them as choices.
|
||||
# Otherwise, that error probably contains the most informative
|
||||
# traceback, so trigger it again.
|
||||
candidates = sorted(
|
||||
repr(name) for name, candidate in mod.__dict__.items()
|
||||
if isinstance(candidate, type) and
|
||||
issubclass(candidate, AppConfig) and
|
||||
candidate is not AppConfig
|
||||
)
|
||||
if candidates:
|
||||
raise ImproperlyConfigured(
|
||||
"'%s' does not contain a class '%s'. Choices are: %s."
|
||||
% (mod_path, cls_name, ', '.join(candidates))
|
||||
)
|
||||
import_module(entry)
|
||||
else:
|
||||
raise
|
||||
|
||||
# Check for obvious errors. (This check prevents duck typing, but
|
||||
# it could be removed if it became a problem in practice.)
|
||||
if not issubclass(cls, AppConfig):
|
||||
raise ImproperlyConfigured(
|
||||
"'%s' isn't a subclass of AppConfig." % entry)
|
||||
|
||||
# Obtain app name here rather than in AppClass.__init__ to keep
|
||||
# all error checking for entries in INSTALLED_APPS in one place.
|
||||
try:
|
||||
app_name = cls.name
|
||||
except AttributeError:
|
||||
raise ImproperlyConfigured(
|
||||
"'%s' must supply a name attribute." % entry)
|
||||
|
||||
# Ensure app_name points to a valid module.
|
||||
try:
|
||||
app_module = import_module(app_name)
|
||||
except ImportError:
|
||||
raise ImproperlyConfigured(
|
||||
"Cannot import '%s'. Check that '%s.%s.name' is correct." % (
|
||||
app_name, mod_path, cls_name,
|
||||
)
|
||||
)
|
||||
|
||||
# Entry is a path to an app config class.
|
||||
return cls(app_name, app_module)
|
||||
|
||||
def get_model(self, model_name, require_ready=True):
|
||||
"""
|
||||
Return the model with the given case-insensitive model_name.
|
||||
|
||||
Raise LookupError if no model exists with this name.
|
||||
"""
|
||||
if require_ready:
|
||||
self.apps.check_models_ready()
|
||||
else:
|
||||
self.apps.check_apps_ready()
|
||||
try:
|
||||
return self.models[model_name.lower()]
|
||||
except KeyError:
|
||||
raise LookupError(
|
||||
"App '%s' doesn't have a '%s' model." % (self.label, model_name))
|
||||
|
||||
def get_models(self, include_auto_created=False, include_swapped=False):
|
||||
"""
|
||||
Return an iterable of models.
|
||||
|
||||
By default, the following models aren't included:
|
||||
|
||||
- auto-created models for many-to-many relations without
|
||||
an explicit intermediate table,
|
||||
- models that have been swapped out.
|
||||
|
||||
Set the corresponding keyword argument to True to include such models.
|
||||
Keyword arguments aren't documented; they're a private API.
|
||||
"""
|
||||
self.apps.check_models_ready()
|
||||
for model in self.models.values():
|
||||
if model._meta.auto_created and not include_auto_created:
|
||||
continue
|
||||
if model._meta.swapped and not include_swapped:
|
||||
continue
|
||||
yield model
|
||||
|
||||
def import_models(self):
|
||||
# Dictionary of models for this app, primarily maintained in the
|
||||
# 'all_models' attribute of the Apps this AppConfig is attached to.
|
||||
self.models = self.apps.all_models[self.label]
|
||||
|
||||
if module_has_submodule(self.module, MODELS_MODULE_NAME):
|
||||
models_module_name = '%s.%s' % (self.name, MODELS_MODULE_NAME)
|
||||
self.models_module = import_module(models_module_name)
|
||||
|
||||
def ready(self):
|
||||
"""
|
||||
Override this method in subclasses to run code when Django starts.
|
||||
"""
|
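A sketch of the AppConfig subclass pattern that AppConfig.create() above resolves; the app, class, and signal module names are hypothetical.

# myapp/apps.py (hypothetical app)
from django.apps import AppConfig

class MyAppConfig(AppConfig):
    name = 'myapp'
    verbose_name = 'My App'

    def ready(self):
        # Startup hook: e.g. connect signal handlers once the registry is populated.
        from . import signals  # noqa: F401  (hypothetical module)

# myapp/__init__.py can then opt in via:
# default_app_config = 'myapp.apps.MyAppConfig'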
@ -0,0 +1,427 @@
|
||||
import functools
|
||||
import sys
|
||||
import threading
|
||||
import warnings
|
||||
from collections import Counter, defaultdict
|
||||
from functools import partial
|
||||
|
||||
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
|
||||
|
||||
from .config import AppConfig
|
||||
|
||||
|
||||
class Apps:
|
||||
"""
|
||||
A registry that stores the configuration of installed applications.
|
||||
|
||||
It also keeps track of models, e.g. to provide reverse relations.
|
||||
"""
|
||||
|
||||
def __init__(self, installed_apps=()):
|
||||
# installed_apps is set to None when creating the master registry
|
||||
# because it cannot be populated at that point. Other registries must
|
||||
# provide a list of installed apps and are populated immediately.
|
||||
if installed_apps is None and hasattr(sys.modules[__name__], 'apps'):
|
||||
raise RuntimeError("You must supply an installed_apps argument.")
|
||||
|
||||
# Mapping of app labels => model names => model classes. Every time a
|
||||
# model is imported, ModelBase.__new__ calls apps.register_model which
|
||||
# creates an entry in all_models. All imported models are registered,
|
||||
# regardless of whether they're defined in an installed application
|
||||
# and whether the registry has been populated. Since it isn't possible
|
||||
# to reimport a module safely (it could reexecute initialization code)
|
||||
# all_models is never overridden or reset.
|
||||
self.all_models = defaultdict(dict)
|
||||
|
||||
# Mapping of labels to AppConfig instances for installed apps.
|
||||
self.app_configs = {}
|
||||
|
||||
# Stack of app_configs. Used to store the current state in
|
||||
# set_available_apps and set_installed_apps.
|
||||
self.stored_app_configs = []
|
||||
|
||||
# Whether the registry is populated.
|
||||
self.apps_ready = self.models_ready = self.ready = False
|
||||
# For the autoreloader.
|
||||
self.ready_event = threading.Event()
|
||||
|
||||
# Lock for thread-safe population.
|
||||
self._lock = threading.RLock()
|
||||
self.loading = False
|
||||
|
||||
# Maps ("app_label", "modelname") tuples to lists of functions to be
|
||||
# called when the corresponding model is ready. Used by this class's
|
||||
# `lazy_model_operation()` and `do_pending_operations()` methods.
|
||||
self._pending_operations = defaultdict(list)
|
||||
|
||||
# Populate apps and models, unless it's the master registry.
|
||||
if installed_apps is not None:
|
||||
self.populate(installed_apps)
|
||||
|
||||
def populate(self, installed_apps=None):
|
||||
"""
|
||||
Load application configurations and models.
|
||||
|
||||
Import each application module and then each model module.
|
||||
|
||||
It is thread-safe and idempotent, but not reentrant.
|
||||
"""
|
||||
if self.ready:
|
||||
return
|
||||
|
||||
# populate() might be called by two threads in parallel on servers
|
||||
# that create threads before initializing the WSGI callable.
|
||||
with self._lock:
|
||||
if self.ready:
|
||||
return
|
||||
|
||||
# An RLock prevents other threads from entering this section. The
|
||||
# compare and set operation below is atomic.
|
||||
if self.loading:
|
||||
# Prevent reentrant calls to avoid running AppConfig.ready()
|
||||
# methods twice.
|
||||
raise RuntimeError("populate() isn't reentrant")
|
||||
self.loading = True
|
||||
|
||||
# Phase 1: initialize app configs and import app modules.
|
||||
for entry in installed_apps:
|
||||
if isinstance(entry, AppConfig):
|
||||
app_config = entry
|
||||
else:
|
||||
app_config = AppConfig.create(entry)
|
||||
if app_config.label in self.app_configs:
|
||||
raise ImproperlyConfigured(
|
||||
"Application labels aren't unique, "
|
||||
"duplicates: %s" % app_config.label)
|
||||
|
||||
self.app_configs[app_config.label] = app_config
|
||||
app_config.apps = self
|
||||
|
||||
# Check for duplicate app names.
|
||||
counts = Counter(
|
||||
app_config.name for app_config in self.app_configs.values())
|
||||
duplicates = [
|
||||
name for name, count in counts.most_common() if count > 1]
|
||||
if duplicates:
|
||||
raise ImproperlyConfigured(
|
||||
"Application names aren't unique, "
|
||||
"duplicates: %s" % ", ".join(duplicates))
|
||||
|
||||
self.apps_ready = True
|
||||
|
||||
# Phase 2: import models modules.
|
||||
for app_config in self.app_configs.values():
|
||||
app_config.import_models()
|
||||
|
||||
self.clear_cache()
|
||||
|
||||
self.models_ready = True
|
||||
|
||||
# Phase 3: run ready() methods of app configs.
|
||||
for app_config in self.get_app_configs():
|
||||
app_config.ready()
|
||||
|
||||
self.ready = True
|
||||
self.ready_event.set()
|
||||
|
||||
def check_apps_ready(self):
|
||||
"""Raise an exception if all apps haven't been imported yet."""
|
||||
if not self.apps_ready:
|
||||
from django.conf import settings
|
||||
# If "not ready" is due to unconfigured settings, accessing
|
||||
# INSTALLED_APPS raises a more helpful ImproperlyConfigured
|
||||
# exception.
|
||||
settings.INSTALLED_APPS
|
||||
raise AppRegistryNotReady("Apps aren't loaded yet.")
|
||||
|
||||
def check_models_ready(self):
|
||||
"""Raise an exception if all models haven't been imported yet."""
|
||||
if not self.models_ready:
|
||||
raise AppRegistryNotReady("Models aren't loaded yet.")
|
||||
|
||||
def get_app_configs(self):
|
||||
"""Import applications and return an iterable of app configs."""
|
||||
self.check_apps_ready()
|
||||
return self.app_configs.values()
|
||||
|
||||
def get_app_config(self, app_label):
|
||||
"""
|
||||
Import applications and return an app config for the given label.
|
||||
|
||||
Raise LookupError if no application exists with this label.
|
||||
"""
|
||||
self.check_apps_ready()
|
||||
try:
|
||||
return self.app_configs[app_label]
|
||||
except KeyError:
|
||||
message = "No installed app with label '%s'." % app_label
|
||||
for app_config in self.get_app_configs():
|
||||
if app_config.name == app_label:
|
||||
message += " Did you mean '%s'?" % app_config.label
|
||||
break
|
||||
raise LookupError(message)
|
||||
|
||||
# This method is performance-critical at least for Django's test suite.
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def get_models(self, include_auto_created=False, include_swapped=False):
|
||||
"""
|
||||
Return a list of all installed models.
|
||||
|
||||
By default, the following models aren't included:
|
||||
|
||||
- auto-created models for many-to-many relations without
|
||||
an explicit intermediate table,
|
||||
- models that have been swapped out.
|
||||
|
||||
Set the corresponding keyword argument to True to include such models.
|
||||
"""
|
||||
self.check_models_ready()
|
||||
|
||||
result = []
|
||||
for app_config in self.app_configs.values():
|
||||
result.extend(app_config.get_models(include_auto_created, include_swapped))
|
||||
return result
|
||||
|
||||
def get_model(self, app_label, model_name=None, require_ready=True):
|
||||
"""
|
||||
Return the model matching the given app_label and model_name.
|
||||
|
||||
As a shortcut, app_label may be in the form <app_label>.<model_name>.
|
||||
|
||||
model_name is case-insensitive.
|
||||
|
||||
Raise LookupError if no application exists with this label, or no
|
||||
model exists with this name in the application. Raise ValueError if
|
||||
called with a single argument that doesn't contain exactly one dot.
|
||||
"""
|
||||
if require_ready:
|
||||
self.check_models_ready()
|
||||
else:
|
||||
self.check_apps_ready()
|
||||
|
||||
if model_name is None:
|
||||
app_label, model_name = app_label.split('.')
|
||||
|
||||
app_config = self.get_app_config(app_label)
|
||||
|
||||
if not require_ready and app_config.models is None:
|
||||
app_config.import_models()
|
||||
|
||||
return app_config.get_model(model_name, require_ready=require_ready)
|
||||
|
||||
def register_model(self, app_label, model):
|
||||
# Since this method is called when models are imported, it cannot
|
||||
# perform imports because of the risk of import loops. It mustn't
|
||||
# call get_app_config().
|
||||
model_name = model._meta.model_name
|
||||
app_models = self.all_models[app_label]
|
||||
if model_name in app_models:
|
||||
if (model.__name__ == app_models[model_name].__name__ and
|
||||
model.__module__ == app_models[model_name].__module__):
|
||||
warnings.warn(
|
||||
"Model '%s.%s' was already registered. "
|
||||
"Reloading models is not advised as it can lead to inconsistencies, "
|
||||
"most notably with related models." % (app_label, model_name),
|
||||
RuntimeWarning, stacklevel=2)
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"Conflicting '%s' models in application '%s': %s and %s." %
|
||||
(model_name, app_label, app_models[model_name], model))
|
||||
app_models[model_name] = model
|
||||
self.do_pending_operations(model)
|
||||
self.clear_cache()
|
||||
|
||||
def is_installed(self, app_name):
|
||||
"""
|
||||
Check whether an application with this name exists in the registry.
|
||||
|
||||
app_name is the full name of the app e.g. 'django.contrib.admin'.
|
||||
"""
|
||||
self.check_apps_ready()
|
||||
return any(ac.name == app_name for ac in self.app_configs.values())
|
||||
|
||||
def get_containing_app_config(self, object_name):
|
||||
"""
|
||||
Look for an app config containing a given object.
|
||||
|
||||
object_name is the dotted Python path to the object.
|
||||
|
||||
Return the app config for the inner application in case of nesting.
|
||||
Return None if the object isn't in any registered app config.
|
||||
"""
|
||||
self.check_apps_ready()
|
||||
candidates = []
|
||||
for app_config in self.app_configs.values():
|
||||
if object_name.startswith(app_config.name):
|
||||
subpath = object_name[len(app_config.name):]
|
||||
if subpath == '' or subpath[0] == '.':
|
||||
candidates.append(app_config)
|
||||
if candidates:
|
||||
return sorted(candidates, key=lambda ac: -len(ac.name))[0]
|
||||
|
||||
def get_registered_model(self, app_label, model_name):
|
||||
"""
|
||||
Similar to get_model(), but doesn't require that an app exists with
|
||||
the given app_label.
|
||||
|
||||
It's safe to call this method at import time, even while the registry
|
||||
is being populated.
|
||||
"""
|
||||
model = self.all_models[app_label].get(model_name.lower())
|
||||
if model is None:
|
||||
raise LookupError(
|
||||
"Model '%s.%s' not registered." % (app_label, model_name))
|
||||
return model
|
||||
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def get_swappable_settings_name(self, to_string):
|
||||
"""
|
||||
For a given model string (e.g. "auth.User"), return the name of the
|
||||
corresponding settings name if it refers to a swappable model. If the
|
||||
referred model is not swappable, return None.
|
||||
|
||||
This method is decorated with lru_cache because it's performance
|
||||
critical when it comes to migrations. Since the swappable settings don't
|
||||
change after Django has loaded the settings, there is no reason to get
|
||||
the respective settings attribute over and over again.
|
||||
"""
|
||||
for model in self.get_models(include_swapped=True):
|
||||
swapped = model._meta.swapped
|
||||
# Is this model swapped out for the model given by to_string?
|
||||
if swapped and swapped == to_string:
|
||||
return model._meta.swappable
|
||||
# Is this model swappable and the one given by to_string?
|
||||
if model._meta.swappable and model._meta.label == to_string:
|
||||
return model._meta.swappable
|
||||
return None
|
||||
|
||||
def set_available_apps(self, available):
|
||||
"""
|
||||
Restrict the set of installed apps used by get_app_config[s].
|
||||
|
||||
available must be an iterable of application names.
|
||||
|
||||
set_available_apps() must be balanced with unset_available_apps().
|
||||
|
||||
Primarily used for performance optimization in TransactionTestCase.
|
||||
|
||||
This method is safe in the sense that it doesn't trigger any imports.
|
||||
"""
|
||||
available = set(available)
|
||||
installed = {app_config.name for app_config in self.get_app_configs()}
|
||||
if not available.issubset(installed):
|
||||
raise ValueError(
|
||||
"Available apps isn't a subset of installed apps, extra apps: %s"
|
||||
% ", ".join(available - installed)
|
||||
)
|
||||
|
||||
self.stored_app_configs.append(self.app_configs)
|
||||
self.app_configs = {
|
||||
label: app_config
|
||||
for label, app_config in self.app_configs.items()
|
||||
if app_config.name in available
|
||||
}
|
||||
self.clear_cache()
|
||||
|
||||
def unset_available_apps(self):
|
||||
"""Cancel a previous call to set_available_apps()."""
|
||||
self.app_configs = self.stored_app_configs.pop()
|
||||
self.clear_cache()
|
||||
|
||||
def set_installed_apps(self, installed):
|
||||
"""
|
||||
Enable a different set of installed apps for get_app_config[s].
|
||||
|
||||
installed must be an iterable in the same format as INSTALLED_APPS.
|
||||
|
||||
set_installed_apps() must be balanced with unset_installed_apps(),
|
||||
even if it exits with an exception.
|
||||
|
||||
Primarily used as a receiver of the setting_changed signal in tests.
|
||||
|
||||
This method may trigger new imports, which may add new models to the
|
||||
registry of all imported models. They will stay in the registry even
|
||||
after unset_installed_apps(). Since it isn't possible to replay
|
||||
imports safely (e.g. that could lead to registering listeners twice),
|
||||
models are registered when they're imported and never removed.
|
||||
"""
|
||||
if not self.ready:
|
||||
raise AppRegistryNotReady("App registry isn't ready yet.")
|
||||
self.stored_app_configs.append(self.app_configs)
|
||||
self.app_configs = {}
|
||||
self.apps_ready = self.models_ready = self.loading = self.ready = False
|
||||
self.clear_cache()
|
||||
self.populate(installed)
|
||||
|
||||
def unset_installed_apps(self):
|
||||
"""Cancel a previous call to set_installed_apps()."""
|
||||
self.app_configs = self.stored_app_configs.pop()
|
||||
self.apps_ready = self.models_ready = self.ready = True
|
||||
self.clear_cache()
|
||||
|
||||
def clear_cache(self):
|
||||
"""
|
||||
Clear all internal caches, for methods that alter the app registry.
|
||||
|
||||
This is mostly used in tests.
|
||||
"""
|
||||
# Call expire cache on each model. This will purge
|
||||
# the relation tree and the fields cache.
|
||||
self.get_models.cache_clear()
|
||||
if self.ready:
|
||||
# Circumvent self.get_models() to prevent that the cache is refilled.
|
||||
# This particularly prevents that an empty value is cached while cloning.
|
||||
for app_config in self.app_configs.values():
|
||||
for model in app_config.get_models(include_auto_created=True):
|
||||
model._meta._expire_cache()
|
||||
|
||||
def lazy_model_operation(self, function, *model_keys):
|
||||
"""
|
||||
Take a function and a number of ("app_label", "modelname") tuples, and
|
||||
when all the corresponding models have been imported and registered,
|
||||
call the function with the model classes as its arguments.
|
||||
|
||||
The function passed to this method must accept exactly n models as
|
||||
arguments, where n=len(model_keys).
|
||||
"""
|
||||
# Base case: no arguments, just execute the function.
|
||||
if not model_keys:
|
||||
function()
|
||||
# Recursive case: take the head of model_keys, wait for the
|
||||
# corresponding model class to be imported and registered, then apply
|
||||
# that argument to the supplied function. Pass the resulting partial
|
||||
# to lazy_model_operation() along with the remaining model args and
|
||||
# repeat until all models are loaded and all arguments are applied.
|
||||
else:
|
||||
next_model, *more_models = model_keys
|
||||
|
||||
# This will be executed after the class corresponding to next_model
|
||||
# has been imported and registered. The `func` attribute provides
|
||||
# duck-type compatibility with partials.
|
||||
def apply_next_model(model):
|
||||
next_function = partial(apply_next_model.func, model)
|
||||
self.lazy_model_operation(next_function, *more_models)
|
||||
apply_next_model.func = function
|
||||
|
||||
# If the model has already been imported and registered, partially
|
||||
# apply it to the function now. If not, add it to the list of
|
||||
# pending operations for the model, where it will be executed with
|
||||
# the model class as its sole argument once the model is ready.
|
||||
try:
|
||||
model_class = self.get_registered_model(*next_model)
|
||||
except LookupError:
|
||||
self._pending_operations[next_model].append(apply_next_model)
|
||||
else:
|
||||
apply_next_model(model_class)
|
||||
|
||||
def do_pending_operations(self, model):
|
||||
"""
|
||||
Take a newly-prepared model and pass it to each function waiting for
|
||||
it. This is called at the very end of Apps.register_model().
|
||||
"""
|
||||
key = model._meta.app_label, model._meta.model_name
|
||||
for function in self._pending_operations.pop(key, []):
|
||||
function(model)
|
||||
|
||||
|
||||
apps = Apps(installed_apps=None)
|
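A short sketch of how the registry above is queried once populated; the ("app_label", "modelname") key used with lazy_model_operation() is hypothetical.

from django.apps import apps

# Resolve a model class by app label and model name (case-insensitive).
User = apps.get_model('auth', 'User')

# Defer work until a model that may not be imported yet gets registered.
def on_song_ready(model):
    print('registered:', model._meta.label)

apps.lazy_model_operation(on_song_ready, ('music', 'song'))  # hypothetical model key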
@ -0,0 +1,5 @@
#!/usr/bin/env python
from django.core import management

if __name__ == "__main__":
    management.execute_from_command_line()
@ -0,0 +1,236 @@
|
||||
"""
|
||||
Settings and configuration for Django.
|
||||
|
||||
Read values from the module specified by the DJANGO_SETTINGS_MODULE environment
|
||||
variable, and then from django.conf.global_settings; see the global_settings.py
|
||||
for a list of all possible variables.
|
||||
"""
|
||||
|
||||
import importlib
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
|
||||
import django
|
||||
from django.conf import global_settings
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.utils.deprecation import RemovedInDjango31Warning
|
||||
from django.utils.functional import LazyObject, empty
|
||||
|
||||
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
|
||||
|
||||
FILE_CHARSET_DEPRECATED_MSG = (
|
||||
'The FILE_CHARSET setting is deprecated. Starting with Django 3.1, all '
|
||||
'files read from disk must be UTF-8 encoded.'
|
||||
)
|
||||
|
||||
|
||||
class SettingsReference(str):
|
||||
"""
|
||||
String subclass which references a current settings value. It's treated as
|
||||
the value in memory but serializes to a settings.NAME attribute reference.
|
||||
"""
|
||||
def __new__(self, value, setting_name):
|
||||
return str.__new__(self, value)
|
||||
|
||||
def __init__(self, value, setting_name):
|
||||
self.setting_name = setting_name
|
||||
|
||||
|
||||
class LazySettings(LazyObject):
|
||||
"""
|
||||
A lazy proxy for either global Django settings or a custom settings object.
|
||||
The user can manually configure settings prior to using them. Otherwise,
|
||||
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
|
||||
"""
|
||||
def _setup(self, name=None):
|
||||
"""
|
||||
Load the settings module pointed to by the environment variable. This
|
||||
is used the first time settings are needed, if the user hasn't
|
||||
configured settings manually.
|
||||
"""
|
||||
settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
|
||||
if not settings_module:
|
||||
desc = ("setting %s" % name) if name else "settings"
|
||||
raise ImproperlyConfigured(
|
||||
"Requested %s, but settings are not configured. "
|
||||
"You must either define the environment variable %s "
|
||||
"or call settings.configure() before accessing settings."
|
||||
% (desc, ENVIRONMENT_VARIABLE))
|
||||
|
||||
self._wrapped = Settings(settings_module)
|
||||
|
||||
def __repr__(self):
|
||||
# Hardcode the class name as otherwise it yields 'Settings'.
|
||||
if self._wrapped is empty:
|
||||
return '<LazySettings [Unevaluated]>'
|
||||
return '<LazySettings "%(settings_module)s">' % {
|
||||
'settings_module': self._wrapped.SETTINGS_MODULE,
|
||||
}
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""Return the value of a setting and cache it in self.__dict__."""
|
||||
if self._wrapped is empty:
|
||||
self._setup(name)
|
||||
val = getattr(self._wrapped, name)
|
||||
self.__dict__[name] = val
|
||||
return val
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
"""
|
||||
Set the value of setting. Clear all cached values if _wrapped changes
|
||||
(@override_settings does this) or clear single values when set.
|
||||
"""
|
||||
if name == '_wrapped':
|
||||
self.__dict__.clear()
|
||||
else:
|
||||
self.__dict__.pop(name, None)
|
||||
super().__setattr__(name, value)
|
||||
|
||||
def __delattr__(self, name):
|
||||
"""Delete a setting and clear it from cache if needed."""
|
||||
super().__delattr__(name)
|
||||
self.__dict__.pop(name, None)
|
||||
|
||||
def configure(self, default_settings=global_settings, **options):
|
||||
"""
|
||||
Called to manually configure the settings. The 'default_settings'
|
||||
parameter sets where to retrieve any unspecified values from (its
|
||||
argument must support attribute access (__getattr__)).
|
||||
"""
|
||||
if self._wrapped is not empty:
|
||||
raise RuntimeError('Settings already configured.')
|
||||
holder = UserSettingsHolder(default_settings)
|
||||
for name, value in options.items():
|
||||
if not name.isupper():
|
||||
raise TypeError('Setting %r must be uppercase.' % name)
|
||||
setattr(holder, name, value)
|
||||
self._wrapped = holder
|
||||
|
||||
@property
|
||||
def configured(self):
|
||||
"""Return True if the settings have already been configured."""
|
||||
return self._wrapped is not empty
|
||||
|
||||
@property
|
||||
def FILE_CHARSET(self):
|
||||
stack = traceback.extract_stack()
|
||||
# Show a warning if the setting is used outside of Django.
|
||||
# Stack index: -1 this line, -2 the caller.
|
||||
filename, _line_number, _function_name, _text = stack[-2]
|
||||
if not filename.startswith(os.path.dirname(django.__file__)):
|
||||
warnings.warn(
|
||||
FILE_CHARSET_DEPRECATED_MSG,
|
||||
RemovedInDjango31Warning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return self.__getattr__('FILE_CHARSET')
|
||||
|
||||
|
||||
class Settings:
|
||||
def __init__(self, settings_module):
|
||||
# update this dict from global settings (but only for ALL_CAPS settings)
|
||||
for setting in dir(global_settings):
|
||||
if setting.isupper():
|
||||
setattr(self, setting, getattr(global_settings, setting))
|
||||
|
||||
# store the settings module in case someone later cares
|
||||
self.SETTINGS_MODULE = settings_module
|
||||
|
||||
mod = importlib.import_module(self.SETTINGS_MODULE)
|
||||
|
||||
tuple_settings = (
|
||||
"INSTALLED_APPS",
|
||||
"TEMPLATE_DIRS",
|
||||
"LOCALE_PATHS",
|
||||
)
|
||||
self._explicit_settings = set()
|
||||
for setting in dir(mod):
|
||||
if setting.isupper():
|
||||
setting_value = getattr(mod, setting)
|
||||
|
||||
if (setting in tuple_settings and
|
||||
not isinstance(setting_value, (list, tuple))):
|
||||
raise ImproperlyConfigured("The %s setting must be a list or a tuple. " % setting)
|
||||
setattr(self, setting, setting_value)
|
||||
self._explicit_settings.add(setting)
|
||||
|
||||
if not self.SECRET_KEY:
|
||||
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
|
||||
|
||||
if self.is_overridden('FILE_CHARSET'):
|
||||
warnings.warn(FILE_CHARSET_DEPRECATED_MSG, RemovedInDjango31Warning)
|
||||
|
||||
if hasattr(time, 'tzset') and self.TIME_ZONE:
|
||||
# When we can, attempt to validate the timezone. If we can't find
|
||||
# this file, no check happens and it's harmless.
|
||||
zoneinfo_root = Path('/usr/share/zoneinfo')
|
||||
zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split('/'))
|
||||
if zoneinfo_root.exists() and not zone_info_file.exists():
|
||||
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
|
||||
# Move the time zone info into os.environ. See ticket #2315 for why
|
||||
# we don't do this unconditionally (breaks Windows).
|
||||
os.environ['TZ'] = self.TIME_ZONE
|
||||
time.tzset()
|
||||
|
||||
def is_overridden(self, setting):
|
||||
return setting in self._explicit_settings
|
||||
|
||||
def __repr__(self):
|
||||
return '<%(cls)s "%(settings_module)s">' % {
|
||||
'cls': self.__class__.__name__,
|
||||
'settings_module': self.SETTINGS_MODULE,
|
||||
}
|
||||
|
||||
|
||||
class UserSettingsHolder:
|
||||
"""Holder for user configured settings."""
|
||||
# SETTINGS_MODULE doesn't make much sense in the manually configured
|
||||
# (standalone) case.
|
||||
SETTINGS_MODULE = None
|
||||
|
||||
def __init__(self, default_settings):
|
||||
"""
|
||||
Requests for configuration variables not in this class are satisfied
|
||||
from the module specified in default_settings (if possible).
|
||||
"""
|
||||
self.__dict__['_deleted'] = set()
|
||||
self.default_settings = default_settings
|
||||
|
||||
def __getattr__(self, name):
|
||||
if not name.isupper() or name in self._deleted:
|
||||
raise AttributeError
|
||||
return getattr(self.default_settings, name)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
self._deleted.discard(name)
|
||||
if name == 'FILE_CHARSET':
|
||||
warnings.warn(FILE_CHARSET_DEPRECATED_MSG, RemovedInDjango31Warning)
|
||||
super().__setattr__(name, value)
|
||||
|
||||
def __delattr__(self, name):
|
||||
self._deleted.add(name)
|
||||
if hasattr(self, name):
|
||||
super().__delattr__(name)
|
||||
|
||||
def __dir__(self):
|
||||
return sorted(
|
||||
s for s in [*self.__dict__, *dir(self.default_settings)]
|
||||
if s not in self._deleted
|
||||
)
|
||||
|
||||
def is_overridden(self, setting):
|
||||
deleted = (setting in self._deleted)
|
||||
set_locally = (setting in self.__dict__)
|
||||
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
|
||||
return deleted or set_locally or set_on_default
|
||||
|
||||
def __repr__(self):
|
||||
return '<%(cls)s>' % {
|
||||
'cls': self.__class__.__name__,
|
||||
}
|
||||
|
||||
|
||||
settings = LazySettings()
|
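A sketch of configuring LazySettings manually before first access; the values are illustrative only.

from django.conf import settings

if not settings.configured:
    settings.configure(
        DEBUG=True,
        INSTALLED_APPS=['django.contrib.contenttypes', 'django.contrib.auth'],
        SECRET_KEY='dev-only-not-for-production',
    )

print(settings.DEBUG)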
@ -0,0 +1,3 @@
from django.contrib import admin

# Register your models here.
@ -0,0 +1,5 @@
from django.apps import AppConfig


class {{ camel_case_app_name }}Config(AppConfig):
    name = '{{ app_name }}'
@ -0,0 +1,3 @@
from django.db import models

# Create your models here.
@ -0,0 +1,3 @@
from django.test import TestCase

# Create your tests here.
@ -0,0 +1,3 @@
from django.shortcuts import render

# Create your views here.
@ -0,0 +1,639 @@
|
||||
"""
|
||||
Default Django settings. Override these with settings in the module pointed to
|
||||
by the DJANGO_SETTINGS_MODULE environment variable.
|
||||
"""
|
||||
|
||||
|
||||
# This is defined here as a do-nothing function because we can't import
|
||||
# django.utils.translation -- that module depends on the settings.
|
||||
def gettext_noop(s):
|
||||
return s
|
||||
|
||||
|
||||
####################
|
||||
# CORE #
|
||||
####################
|
||||
|
||||
DEBUG = False
|
||||
|
||||
# Whether the framework should propagate raw exceptions rather than catching
|
||||
# them. This is useful under some testing situations and should never be used
|
||||
# on a live site.
|
||||
DEBUG_PROPAGATE_EXCEPTIONS = False
|
||||
|
||||
# People who get code error notifications.
|
||||
# In the format [('Full Name', 'email@example.com'), ('Full Name', 'anotheremail@example.com')]
|
||||
ADMINS = []
|
||||
|
||||
# List of IP addresses, as strings, that:
|
||||
# * See debug comments, when DEBUG is true
|
||||
# * Receive x-headers
|
||||
INTERNAL_IPS = []
|
||||
|
||||
# Hosts/domain names that are valid for this site.
|
||||
# "*" matches anything, ".example.com" matches example.com and all subdomains
|
||||
ALLOWED_HOSTS = []
|
||||
|
||||
# Local time zone for this installation. All choices can be found here:
|
||||
# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
|
||||
# systems may support all possibilities). When USE_TZ is True, this is
|
||||
# interpreted as the default user time zone.
|
||||
TIME_ZONE = 'America/Chicago'
|
||||
|
||||
# If you set this to True, Django will use timezone-aware datetimes.
|
||||
USE_TZ = False
|
||||
|
||||
# Language code for this installation. All choices can be found here:
|
||||
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||
LANGUAGE_CODE = 'en-us'
|
||||
|
||||
# Languages we provide translations for, out of the box.
|
||||
LANGUAGES = [
|
||||
('af', gettext_noop('Afrikaans')),
|
||||
('ar', gettext_noop('Arabic')),
|
||||
('ast', gettext_noop('Asturian')),
|
||||
('az', gettext_noop('Azerbaijani')),
|
||||
('bg', gettext_noop('Bulgarian')),
|
||||
('be', gettext_noop('Belarusian')),
|
||||
('bn', gettext_noop('Bengali')),
|
||||
('br', gettext_noop('Breton')),
|
||||
('bs', gettext_noop('Bosnian')),
|
||||
('ca', gettext_noop('Catalan')),
|
||||
('cs', gettext_noop('Czech')),
|
||||
('cy', gettext_noop('Welsh')),
|
||||
('da', gettext_noop('Danish')),
|
||||
('de', gettext_noop('German')),
|
||||
('dsb', gettext_noop('Lower Sorbian')),
|
||||
('el', gettext_noop('Greek')),
|
||||
('en', gettext_noop('English')),
|
||||
('en-au', gettext_noop('Australian English')),
|
||||
('en-gb', gettext_noop('British English')),
|
||||
('eo', gettext_noop('Esperanto')),
|
||||
('es', gettext_noop('Spanish')),
|
||||
('es-ar', gettext_noop('Argentinian Spanish')),
|
||||
('es-co', gettext_noop('Colombian Spanish')),
|
||||
('es-mx', gettext_noop('Mexican Spanish')),
|
||||
('es-ni', gettext_noop('Nicaraguan Spanish')),
|
||||
('es-ve', gettext_noop('Venezuelan Spanish')),
|
||||
('et', gettext_noop('Estonian')),
|
||||
('eu', gettext_noop('Basque')),
|
||||
('fa', gettext_noop('Persian')),
|
||||
('fi', gettext_noop('Finnish')),
|
||||
('fr', gettext_noop('French')),
|
||||
('fy', gettext_noop('Frisian')),
|
||||
('ga', gettext_noop('Irish')),
|
||||
('gd', gettext_noop('Scottish Gaelic')),
|
||||
('gl', gettext_noop('Galician')),
|
||||
('he', gettext_noop('Hebrew')),
|
||||
('hi', gettext_noop('Hindi')),
|
||||
('hr', gettext_noop('Croatian')),
|
||||
('hsb', gettext_noop('Upper Sorbian')),
|
||||
('hu', gettext_noop('Hungarian')),
|
||||
('hy', gettext_noop('Armenian')),
|
||||
('ia', gettext_noop('Interlingua')),
|
||||
('id', gettext_noop('Indonesian')),
|
||||
('io', gettext_noop('Ido')),
|
||||
('is', gettext_noop('Icelandic')),
|
||||
('it', gettext_noop('Italian')),
|
||||
('ja', gettext_noop('Japanese')),
|
||||
('ka', gettext_noop('Georgian')),
|
||||
('kab', gettext_noop('Kabyle')),
|
||||
('kk', gettext_noop('Kazakh')),
|
||||
('km', gettext_noop('Khmer')),
|
||||
('kn', gettext_noop('Kannada')),
|
||||
('ko', gettext_noop('Korean')),
|
||||
('lb', gettext_noop('Luxembourgish')),
|
||||
('lt', gettext_noop('Lithuanian')),
|
||||
('lv', gettext_noop('Latvian')),
|
||||
('mk', gettext_noop('Macedonian')),
|
||||
('ml', gettext_noop('Malayalam')),
|
||||
('mn', gettext_noop('Mongolian')),
|
||||
('mr', gettext_noop('Marathi')),
|
||||
('my', gettext_noop('Burmese')),
|
||||
('nb', gettext_noop('Norwegian Bokmål')),
|
||||
('ne', gettext_noop('Nepali')),
|
||||
('nl', gettext_noop('Dutch')),
|
||||
('nn', gettext_noop('Norwegian Nynorsk')),
|
||||
('os', gettext_noop('Ossetic')),
|
||||
('pa', gettext_noop('Punjabi')),
|
||||
('pl', gettext_noop('Polish')),
|
||||
('pt', gettext_noop('Portuguese')),
|
||||
('pt-br', gettext_noop('Brazilian Portuguese')),
|
||||
('ro', gettext_noop('Romanian')),
|
||||
('ru', gettext_noop('Russian')),
|
||||
('sk', gettext_noop('Slovak')),
|
||||
('sl', gettext_noop('Slovenian')),
|
||||
('sq', gettext_noop('Albanian')),
|
||||
('sr', gettext_noop('Serbian')),
|
||||
('sr-latn', gettext_noop('Serbian Latin')),
|
||||
('sv', gettext_noop('Swedish')),
|
||||
('sw', gettext_noop('Swahili')),
|
||||
('ta', gettext_noop('Tamil')),
|
||||
('te', gettext_noop('Telugu')),
|
||||
('th', gettext_noop('Thai')),
|
||||
('tr', gettext_noop('Turkish')),
|
||||
('tt', gettext_noop('Tatar')),
|
||||
('udm', gettext_noop('Udmurt')),
|
||||
('uk', gettext_noop('Ukrainian')),
|
||||
('ur', gettext_noop('Urdu')),
|
||||
('uz', gettext_noop('Uzbek')),
|
||||
('vi', gettext_noop('Vietnamese')),
|
||||
('zh-hans', gettext_noop('Simplified Chinese')),
|
||||
('zh-hant', gettext_noop('Traditional Chinese')),
|
||||
]
|
||||
|
||||
# Languages using BiDi (right-to-left) layout
|
||||
LANGUAGES_BIDI = ["he", "ar", "fa", "ur"]
|
||||
|
||||
# If you set this to False, Django will make some optimizations so as not
|
||||
# to load the internationalization machinery.
|
||||
USE_I18N = True
|
||||
LOCALE_PATHS = []
|
||||
|
||||
# Settings for language cookie
|
||||
LANGUAGE_COOKIE_NAME = 'django_language'
|
||||
LANGUAGE_COOKIE_AGE = None
|
||||
LANGUAGE_COOKIE_DOMAIN = None
|
||||
LANGUAGE_COOKIE_PATH = '/'
|
||||
LANGUAGE_COOKIE_SECURE = False
|
||||
LANGUAGE_COOKIE_HTTPONLY = False
|
||||
LANGUAGE_COOKIE_SAMESITE = None
|
||||
|
||||
|
||||
# If you set this to True, Django will format dates, numbers and calendars
# according to the user's current locale.
|
||||
USE_L10N = False
|
||||
|
||||
# Not-necessarily-technical managers of the site. They get broken link
|
||||
# notifications and other various emails.
|
||||
MANAGERS = ADMINS
|
||||
|
||||
# Default charset to use for all HttpResponse objects, if a MIME type isn't
|
||||
# manually specified. It's used to construct the Content-Type header.
|
||||
DEFAULT_CHARSET = 'utf-8'
|
||||
|
||||
# Encoding of files read from disk (template and initial SQL files).
|
||||
FILE_CHARSET = 'utf-8'
|
||||
|
||||
# Email address that error messages come from.
|
||||
SERVER_EMAIL = 'root@localhost'
|
||||
|
||||
# Database connection info. If left empty, will default to the dummy backend.
|
||||
DATABASES = {}
|
||||
|
||||
# Classes used to implement DB routing behavior.
|
||||
DATABASE_ROUTERS = []
|
||||
|
||||
# The email backend to use. For possible shortcuts see django.core.mail.
|
||||
# The default is to use the SMTP backend.
|
||||
# Third-party backends can be specified by providing a Python path
|
||||
# to a module that defines an EmailBackend class.
|
||||
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
|
||||
|
||||
# Host for sending email.
|
||||
EMAIL_HOST = 'localhost'
|
||||
|
||||
# Port for sending email.
|
||||
EMAIL_PORT = 25
|
||||
|
||||
# Whether to send SMTP 'Date' header in the local time zone or in UTC.
|
||||
EMAIL_USE_LOCALTIME = False
|
||||
|
||||
# Optional SMTP authentication information for EMAIL_HOST.
|
||||
EMAIL_HOST_USER = ''
|
||||
EMAIL_HOST_PASSWORD = ''
|
||||
EMAIL_USE_TLS = False
|
||||
EMAIL_USE_SSL = False
|
||||
EMAIL_SSL_CERTFILE = None
|
||||
EMAIL_SSL_KEYFILE = None
|
||||
EMAIL_TIMEOUT = None
|
||||
|
||||
# List of strings representing installed apps.
|
||||
INSTALLED_APPS = []
|
||||
|
||||
TEMPLATES = []
|
||||
|
||||
# Default form rendering class.
|
||||
FORM_RENDERER = 'django.forms.renderers.DjangoTemplates'
|
||||
|
||||
# Default email address to use for various automated correspondence from
|
||||
# the site managers.
|
||||
DEFAULT_FROM_EMAIL = 'webmaster@localhost'
|
||||
|
||||
# Subject-line prefix for email messages sent with django.core.mail.mail_admins
|
||||
# or ...mail_managers. Make sure to include the trailing space.
|
||||
EMAIL_SUBJECT_PREFIX = '[Django] '
|
||||
|
||||
# Whether to append trailing slashes to URLs.
|
||||
APPEND_SLASH = True
|
||||
|
||||
# Whether to prepend the "www." subdomain to URLs that don't have it.
|
||||
PREPEND_WWW = False
|
||||
|
||||
# Override the server-derived value of SCRIPT_NAME
|
||||
FORCE_SCRIPT_NAME = None
|
||||
|
||||
# List of compiled regular expression objects representing User-Agent strings
|
||||
# that are not allowed to visit any page, systemwide. Use this for bad
|
||||
# robots/crawlers. Here are a few examples:
|
||||
# import re
|
||||
# DISALLOWED_USER_AGENTS = [
|
||||
# re.compile(r'^NaverBot.*'),
|
||||
# re.compile(r'^EmailSiphon.*'),
|
||||
# re.compile(r'^SiteSucker.*'),
|
||||
# re.compile(r'^sohu-search'),
|
||||
# ]
|
||||
DISALLOWED_USER_AGENTS = []
|
||||
|
||||
ABSOLUTE_URL_OVERRIDES = {}
|
||||
|
||||
# List of compiled regular expression objects representing URLs that need not
|
||||
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
|
||||
# import re
|
||||
# IGNORABLE_404_URLS = [
|
||||
# re.compile(r'^/apple-touch-icon.*\.png$'),
|
||||
# re.compile(r'^/favicon.ico$'),
|
||||
# re.compile(r'^/robots.txt$'),
|
||||
# re.compile(r'^/phpmyadmin/'),
|
||||
# re.compile(r'\.(cgi|php|pl)$'),
|
||||
# ]
|
||||
IGNORABLE_404_URLS = []
|
||||
|
||||
# A secret key for this particular Django installation. Used in secret-key
|
||||
# hashing algorithms. Set this in your settings, or Django will complain
|
||||
# loudly.
|
||||
SECRET_KEY = ''
|
||||
|
||||
# Default file storage mechanism that holds media.
|
||||
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
|
||||
|
||||
# Absolute filesystem path to the directory that will hold user-uploaded files.
|
||||
# Example: "/var/www/example.com/media/"
|
||||
MEDIA_ROOT = ''
|
||||
|
||||
# URL that handles the media served from MEDIA_ROOT.
|
||||
# Examples: "http://example.com/media/", "http://media.example.com/"
|
||||
MEDIA_URL = ''
|
||||
|
||||
# Absolute path to the directory static files should be collected to.
|
||||
# Example: "/var/www/example.com/static/"
|
||||
STATIC_ROOT = None
|
||||
|
||||
# URL that handles the static files served from STATIC_ROOT.
|
||||
# Example: "http://example.com/static/", "http://static.example.com/"
|
||||
STATIC_URL = None
|
||||
|
||||
# List of upload handler classes to be applied in order.
|
||||
FILE_UPLOAD_HANDLERS = [
|
||||
'django.core.files.uploadhandler.MemoryFileUploadHandler',
|
||||
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
|
||||
]
|
||||
|
||||
# Maximum size, in bytes, of a request before it will be streamed to the
|
||||
# file system instead of into memory.
|
||||
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
|
||||
|
||||
# Maximum size in bytes of request data (excluding file uploads) that will be
|
||||
# read before a SuspiciousOperation (RequestDataTooBig) is raised.
|
||||
DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
|
||||
|
||||
# Maximum number of GET/POST parameters that will be read before a
|
||||
# SuspiciousOperation (TooManyFieldsSent) is raised.
|
||||
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
|
||||
|
||||
# Directory in which upload streamed files will be temporarily saved. A value of
|
||||
# `None` will make Django use the operating system's default temporary directory
|
||||
# (i.e. "/tmp" on *nix systems).
|
||||
FILE_UPLOAD_TEMP_DIR = None
|
||||
|
||||
# The numeric mode to set newly-uploaded files to. The value should be a mode
|
||||
# you'd pass directly to os.chmod; see https://docs.python.org/library/os.html#files-and-directories.
|
||||
FILE_UPLOAD_PERMISSIONS = 0o644
|
||||
|
||||
# The numeric mode to assign to newly-created directories, when uploading files.
|
||||
# The value should be a mode as you'd pass to os.chmod;
|
||||
# see https://docs.python.org/library/os.html#files-and-directories.
|
||||
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
|
||||
|
||||
# Python module path where user will place custom format definition.
|
||||
# The directory where this setting is pointing should contain subdirectories
|
||||
# named as the locales, containing a formats.py file
|
||||
# (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use)
|
||||
FORMAT_MODULE_PATH = None
|
||||
|
||||
# Default formatting for date objects. See all available format strings here:
|
||||
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
DATE_FORMAT = 'N j, Y'
|
||||
|
||||
# Default formatting for datetime objects. See all available format strings here:
|
||||
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
DATETIME_FORMAT = 'N j, Y, P'
|
||||
|
||||
# Default formatting for time objects. See all available format strings here:
|
||||
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
TIME_FORMAT = 'P'
|
||||
|
||||
# Default formatting for date objects when only the year and month are relevant.
|
||||
# See all available format strings here:
|
||||
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
YEAR_MONTH_FORMAT = 'F Y'
|
||||
|
||||
# Default formatting for date objects when only the month and day are relevant.
|
||||
# See all available format strings here:
|
||||
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
MONTH_DAY_FORMAT = 'F j'
|
||||
|
||||
# Default short formatting for date objects. See all available format strings here:
|
||||
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
SHORT_DATE_FORMAT = 'm/d/Y'
|
||||
|
||||
# Default short formatting for datetime objects.
|
||||
# See all available format strings here:
|
||||
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
SHORT_DATETIME_FORMAT = 'm/d/Y P'
|
||||
|
||||
# Default formats to be used when parsing dates from input boxes, in order
|
||||
# See all available format string here:
|
||||
# https://docs.python.org/library/datetime.html#strftime-behavior
|
||||
# * Note that these format strings are different from the ones to display dates
|
||||
DATE_INPUT_FORMATS = [
|
||||
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
|
||||
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
|
||||
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
|
||||
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
|
||||
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
|
||||
]
|
||||
|
||||
# Default formats to be used when parsing times from input boxes, in order
|
||||
# See all available format string here:
|
||||
# https://docs.python.org/library/datetime.html#strftime-behavior
|
||||
# * Note that these format strings are different from the ones to display dates
|
||||
TIME_INPUT_FORMATS = [
|
||||
'%H:%M:%S', # '14:30:59'
|
||||
'%H:%M:%S.%f', # '14:30:59.000200'
|
||||
'%H:%M', # '14:30'
|
||||
]
|
||||
|
||||
# Default formats to be used when parsing dates and times from input boxes,
|
||||
# in order
|
||||
# See all available format string here:
|
||||
# https://docs.python.org/library/datetime.html#strftime-behavior
|
||||
# * Note that these format strings are different from the ones to display dates
|
||||
DATETIME_INPUT_FORMATS = [
|
||||
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
|
||||
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
|
||||
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
|
||||
'%Y-%m-%d', # '2006-10-25'
|
||||
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
|
||||
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
|
||||
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
|
||||
'%m/%d/%Y', # '10/25/2006'
|
||||
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
|
||||
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
|
||||
'%m/%d/%y %H:%M', # '10/25/06 14:30'
|
||||
'%m/%d/%y', # '10/25/06'
|
||||
]
|
||||
|
||||
# First day of week, to be used on calendars
|
||||
# 0 means Sunday, 1 means Monday...
|
||||
FIRST_DAY_OF_WEEK = 0
|
||||
|
||||
# Decimal separator symbol
|
||||
DECIMAL_SEPARATOR = '.'
|
||||
|
||||
# Boolean that sets whether to add thousand separator when formatting numbers
|
||||
USE_THOUSAND_SEPARATOR = False
|
||||
|
||||
# Number of digits that will be together, when splitting them by
|
||||
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
|
||||
NUMBER_GROUPING = 0
|
||||
|
||||
# Thousand separator symbol
|
||||
THOUSAND_SEPARATOR = ','
|
||||
|
||||
# The tablespaces to use for each model when not specified otherwise.
|
||||
DEFAULT_TABLESPACE = ''
|
||||
DEFAULT_INDEX_TABLESPACE = ''
|
||||
|
||||
# Default X-Frame-Options header value
|
||||
X_FRAME_OPTIONS = 'DENY'
|
||||
|
||||
USE_X_FORWARDED_HOST = False
|
||||
USE_X_FORWARDED_PORT = False
|
||||
|
||||
# The Python dotted path to the WSGI application that Django's internal server
|
||||
# (runserver) will use. If `None`, the return value of
|
||||
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
|
||||
# behavior as previous versions of Django. Otherwise this should point to an
|
||||
# actual WSGI application object.
|
||||
WSGI_APPLICATION = None
|
||||
|
||||
# If your Django app is behind a proxy that sets a header to specify secure
|
||||
# connections, AND that proxy ensures that user-submitted headers with the
|
||||
# same name are ignored (so that people can't spoof it), set this value to
|
||||
# a tuple of (header_name, header_value). For any requests that come in with
|
||||
# that header/value, request.is_secure() will return True.
|
||||
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
|
||||
# you may be opening yourself up to a security risk.
|
||||
SECURE_PROXY_SSL_HEADER = None
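# Editor's note (illustrative, not part of Django's defaults): a deployment
# sitting behind a TLS-terminating proxy such as nginx would typically set the
# widely used X-Forwarded-Proto convention in its own settings.py -- and only
# if the proxy strips any client-supplied copy of that header, per the warning above.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')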
|
||||
|
||||
##############
|
||||
# MIDDLEWARE #
|
||||
##############
|
||||
|
||||
# List of middleware to use. Order is important; in the request phase, these
|
||||
# middleware will be applied in the order given, and in the response
|
||||
# phase the middleware will be applied in reverse order.
|
||||
MIDDLEWARE = []
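# Editor's note: an illustrative project-level stack, roughly what
# `django-admin startproject` generates; shown here only to make the ordering
# comment above concrete (request phase top-to-bottom, response phase bottom-to-top).
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]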
|
||||
|
||||
############
|
||||
# SESSIONS #
|
||||
############
|
||||
|
||||
# Cache to store session data if using the cache session backend.
|
||||
SESSION_CACHE_ALIAS = 'default'
|
||||
# Cookie name. This can be whatever you want.
|
||||
SESSION_COOKIE_NAME = 'sessionid'
|
||||
# Age of cookie, in seconds (default: 2 weeks).
|
||||
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
|
||||
# A string like "example.com", or None for standard domain cookie.
|
||||
SESSION_COOKIE_DOMAIN = None
|
||||
# Whether the session cookie should be secure (https:// only).
|
||||
SESSION_COOKIE_SECURE = False
|
||||
# The path of the session cookie.
|
||||
SESSION_COOKIE_PATH = '/'
|
||||
# Whether to use the HttpOnly flag.
|
||||
SESSION_COOKIE_HTTPONLY = True
|
||||
# Whether to set the flag restricting cookie leaks on cross-site requests.
|
||||
# This can be 'Lax', 'Strict', or None to disable the flag.
|
||||
SESSION_COOKIE_SAMESITE = 'Lax'
|
||||
# Whether to save the session data on every request.
|
||||
SESSION_SAVE_EVERY_REQUEST = False
|
||||
# Whether a user's session cookie expires when the Web browser is closed.
|
||||
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
|
||||
# The module to store session data
|
||||
SESSION_ENGINE = 'django.contrib.sessions.backends.db'
|
||||
# Directory to store session files if using the file session module. If None,
|
||||
# the backend will use a sensible default.
|
||||
SESSION_FILE_PATH = None
|
||||
# The class used to serialize session data
|
||||
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
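# Editor's note: an illustrative project-level override of the session defaults
# above, not part of Django's source. The 'cached_db' backend writes sessions
# through the cache and falls back to the database.
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
SESSION_COOKIE_AGE = 60 * 60 * 24   # one day instead of the two-week default
SESSION_COOKIE_SECURE = True        # only send the cookie over HTTPS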
|
||||
|
||||
#########
|
||||
# CACHE #
|
||||
#########
|
||||
|
||||
# The cache backends to use.
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
}
|
||||
}
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX = ''
|
||||
CACHE_MIDDLEWARE_SECONDS = 600
|
||||
CACHE_MIDDLEWARE_ALIAS = 'default'
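# Editor's note: an illustrative override of the in-memory default above. The
# backend path is real; the LOCATION directory is hypothetical.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': '/var/tmp/django_cache',
        'TIMEOUT': 300,  # seconds a cached entry is kept
    }
}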
|
||||
|
||||
##################
|
||||
# AUTHENTICATION #
|
||||
##################
|
||||
|
||||
AUTH_USER_MODEL = 'auth.User'
|
||||
|
||||
AUTHENTICATION_BACKENDS = ['django.contrib.auth.backends.ModelBackend']
|
||||
|
||||
LOGIN_URL = '/accounts/login/'
|
||||
|
||||
LOGIN_REDIRECT_URL = '/accounts/profile/'
|
||||
|
||||
LOGOUT_REDIRECT_URL = None
|
||||
|
||||
# The number of days a password reset link is valid for
|
||||
PASSWORD_RESET_TIMEOUT_DAYS = 3
|
||||
|
||||
# The first hasher in this list is the preferred algorithm. Any
|
||||
# password using different algorithms will be converted automatically
|
||||
# upon login
|
||||
PASSWORD_HASHERS = [
|
||||
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
|
||||
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
|
||||
'django.contrib.auth.hashers.Argon2PasswordHasher',
|
||||
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
|
||||
]
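# Editor's note: to prefer Argon2, a project would simply move that hasher to
# the front of its own list (it needs the optional argon2-cffi package);
# existing PBKDF2 hashes are upgraded transparently at the next login, as the
# comment above describes. Illustrative, not part of Django's defaults.
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.Argon2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
]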
|
||||
|
||||
AUTH_PASSWORD_VALIDATORS = []
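# Editor's note: the validator set that `django-admin startproject` places in a
# new settings.py, shown here as a concrete example of what this list can hold.
AUTH_PASSWORD_VALIDATORS = [
    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator'},
    {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
    {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
]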
|
||||
|
||||
###########
|
||||
# SIGNING #
|
||||
###########
|
||||
|
||||
SIGNING_BACKEND = 'django.core.signing.TimestampSigner'
|
||||
|
||||
########
|
||||
# CSRF #
|
||||
########
|
||||
|
||||
# Dotted path to callable to be used as view when a request is
|
||||
# rejected by the CSRF middleware.
|
||||
CSRF_FAILURE_VIEW = 'django.views.csrf.csrf_failure'
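# Editor's note: a minimal sketch of a custom rejection view, shown inline only
# for illustration -- in a real project it would live in something like
# myproject/views.py (hypothetical path) and be referenced by dotted path below.
from django.http import HttpResponseForbidden

def example_csrf_failure(request, reason=""):
    # The CSRF middleware calls the configured view with the failing request
    # and a short reason string.
    return HttpResponseForbidden('CSRF verification failed: %s' % reason)

# CSRF_FAILURE_VIEW = 'myproject.views.csrf_failure'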
|
||||
|
||||
# Settings for CSRF cookie.
|
||||
CSRF_COOKIE_NAME = 'csrftoken'
|
||||
CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52
|
||||
CSRF_COOKIE_DOMAIN = None
|
||||
CSRF_COOKIE_PATH = '/'
|
||||
CSRF_COOKIE_SECURE = False
|
||||
CSRF_COOKIE_HTTPONLY = False
|
||||
CSRF_COOKIE_SAMESITE = 'Lax'
|
||||
CSRF_HEADER_NAME = 'HTTP_X_CSRFTOKEN'
|
||||
CSRF_TRUSTED_ORIGINS = []
|
||||
CSRF_USE_SESSIONS = False
|
||||
|
||||
############
|
||||
# MESSAGES #
|
||||
############
|
||||
|
||||
# Class to use as messages backend
|
||||
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
|
||||
|
||||
# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
|
||||
# django.contrib.messages to avoid imports in this settings file.
|
||||
|
||||
###########
|
||||
# LOGGING #
|
||||
###########
|
||||
|
||||
# The callable to use to configure logging
|
||||
LOGGING_CONFIG = 'logging.config.dictConfig'
|
||||
|
||||
# Custom logging configuration.
|
||||
LOGGING = {}
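# Editor's note: a small, valid dictConfig example of the kind a project would
# place in its own settings.py (everything to the console at INFO and above);
# purely illustrative, not a Django default.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {'class': 'logging.StreamHandler'},
    },
    'root': {
        'handlers': ['console'],
        'level': 'INFO',
    },
}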
|
||||
|
||||
# Default exception reporter filter class used in case none has been
|
||||
# specifically assigned to the HttpRequest instance.
|
||||
DEFAULT_EXCEPTION_REPORTER_FILTER = 'django.views.debug.SafeExceptionReporterFilter'
|
||||
|
||||
###########
|
||||
# TESTING #
|
||||
###########
|
||||
|
||||
# The name of the class to use to run the test suite
|
||||
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
|
||||
|
||||
# Apps that don't need to be serialized at test database creation time
|
||||
# (only apps with migrations are serialized to begin with)
|
||||
TEST_NON_SERIALIZED_APPS = []
|
||||
|
||||
############
|
||||
# FIXTURES #
|
||||
############
|
||||
|
||||
# The list of directories to search for fixtures
|
||||
FIXTURE_DIRS = []
|
||||
|
||||
###############
|
||||
# STATICFILES #
|
||||
###############
|
||||
|
||||
# A list of locations of additional static files
|
||||
STATICFILES_DIRS = []
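# Editor's note: an illustrative project-level value pointing at one extra
# source directory; the path is hypothetical (projects usually build it from
# their BASE_DIR).
STATICFILES_DIRS = ['/srv/myproject/static']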
|
||||
|
||||
# The default file storage backend used during the build process
|
||||
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
|
||||
|
||||
# List of finder classes that know how to find static files in
|
||||
# various locations.
|
||||
STATICFILES_FINDERS = [
|
||||
'django.contrib.staticfiles.finders.FileSystemFinder',
|
||||
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
|
||||
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
|
||||
]
|
||||
|
||||
##############
|
||||
# MIGRATIONS #
|
||||
##############
|
||||
|
||||
# Migration module overrides for apps, by app label.
|
||||
MIGRATION_MODULES = {}
|
||||
|
||||
#################
|
||||
# SYSTEM CHECKS #
|
||||
#################
|
||||
|
||||
# List of all issues generated by system checks that should be silenced. Light
|
||||
# issues like warnings, infos or debugs will not generate a message. Silencing
|
||||
# serious issues like errors and criticals does not result in hiding the
|
||||
# message, but Django will not stop you from e.g. running the server.
|
||||
SILENCED_SYSTEM_CHECKS = []
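# Editor's note: illustrative only -- check IDs are the ones printed by
# `manage.py check`; 'security.W004' (the HSTS warning) is used here as an
# assumed example of a deliberately silenced warning.
SILENCED_SYSTEM_CHECKS = ['security.W004']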
|
||||
|
||||
#######################
|
||||
# SECURITY MIDDLEWARE #
|
||||
#######################
|
||||
SECURE_BROWSER_XSS_FILTER = False
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = True
|
||||
SECURE_HSTS_INCLUDE_SUBDOMAINS = False
|
||||
SECURE_HSTS_PRELOAD = False
|
||||
SECURE_HSTS_SECONDS = 0
|
||||
SECURE_REDIRECT_EXEMPT = []
|
||||
SECURE_REFERRER_POLICY = None
|
||||
SECURE_SSL_HOST = None
|
||||
SECURE_SSL_REDIRECT = False
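# Editor's note: a commonly recommended hardened profile for HTTPS-only
# deployments, shown as an illustration of how these flags combine; the HSTS
# max-age is a deliberately small starting value, not a recommendation.
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 3600
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True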
|
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
File diff suppressed because it is too large
@ -0,0 +1,32 @@
|
||||
# This file is distributed under the same license as the Django package.
|
||||
#
|
||||
# The *_FORMAT strings use the Django date format syntax,
|
||||
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
DATE_FORMAT = 'j E Y'
|
||||
TIME_FORMAT = 'G:i'
|
||||
DATETIME_FORMAT = 'j E Y, G:i'
|
||||
YEAR_MONTH_FORMAT = 'F Y'
|
||||
MONTH_DAY_FORMAT = 'j F'
|
||||
SHORT_DATE_FORMAT = 'd.m.Y'
|
||||
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
|
||||
FIRST_DAY_OF_WEEK = 1 # Monday
|
||||
|
||||
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
|
||||
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
|
||||
DATE_INPUT_FORMATS = [
|
||||
'%d.%m.%Y', # '25.10.2006'
|
||||
'%d.%m.%y', # '25.10.06'
|
||||
]
|
||||
DATETIME_INPUT_FORMATS = [
|
||||
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
|
||||
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
|
||||
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
|
||||
'%d.%m.%Y', # '25.10.2006'
|
||||
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
|
||||
'%d.%m.%y %H:%M:%S.%f', # '25.10.06 14:30:59.000200'
|
||||
'%d.%m.%y %H:%M', # '25.10.06 14:30'
|
||||
'%d.%m.%y', # '25.10.06'
|
||||
]
|
||||
DECIMAL_SEPARATOR = ','
|
||||
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
|
||||
NUMBER_GROUPING = 3
|
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
File diff suppressed because it is too large
@ -0,0 +1,32 @@
|
||||
# This file is distributed under the same license as the Django package.
|
||||
#
|
||||
# The *_FORMAT strings use the Django date format syntax,
|
||||
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
DATE_FORMAT = 'j F, Y'
|
||||
TIME_FORMAT = 'g:i A'
|
||||
# DATETIME_FORMAT =
|
||||
YEAR_MONTH_FORMAT = 'F Y'
|
||||
MONTH_DAY_FORMAT = 'j F'
|
||||
SHORT_DATE_FORMAT = 'j M, Y'
|
||||
# SHORT_DATETIME_FORMAT =
|
||||
FIRST_DAY_OF_WEEK = 6 # Saturday
|
||||
|
||||
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
|
||||
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
|
||||
DATE_INPUT_FORMATS = [
|
||||
'%d/%m/%Y', # 25/10/2016
|
||||
'%d/%m/%y', # 25/10/16
|
||||
'%d-%m-%Y', # 25-10-2016
|
||||
'%d-%m-%y', # 25-10-16
|
||||
]
|
||||
TIME_INPUT_FORMATS = [
|
||||
'%H:%M:%S', # 14:30:59
|
||||
'%H:%M', # 14:30
|
||||
]
|
||||
DATETIME_INPUT_FORMATS = [
|
||||
'%d/%m/%Y %H:%M:%S', # 25/10/2006 14:30:59
|
||||
'%d/%m/%Y %H:%M', # 25/10/2006 14:30
|
||||
]
|
||||
DECIMAL_SEPARATOR = '.'
|
||||
THOUSAND_SEPARATOR = ','
|
||||
# NUMBER_GROUPING =
|
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
File diff suppressed because it is too large
@ -0,0 +1,21 @@
|
||||
# This file is distributed under the same license as the Django package.
|
||||
#
|
||||
# The *_FORMAT strings use the Django date format syntax,
|
||||
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
DATE_FORMAT = 'j. N Y.'
|
||||
TIME_FORMAT = 'G:i'
|
||||
DATETIME_FORMAT = 'j. N. Y. G:i T'
|
||||
YEAR_MONTH_FORMAT = 'F Y.'
|
||||
MONTH_DAY_FORMAT = 'j. F'
|
||||
SHORT_DATE_FORMAT = 'Y M j'
|
||||
# SHORT_DATETIME_FORMAT =
|
||||
# FIRST_DAY_OF_WEEK =
|
||||
|
||||
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
|
||||
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
|
||||
# DATE_INPUT_FORMATS =
|
||||
# TIME_INPUT_FORMATS =
|
||||
# DATETIME_INPUT_FORMATS =
|
||||
DECIMAL_SEPARATOR = ','
|
||||
THOUSAND_SEPARATOR = '.'
|
||||
# NUMBER_GROUPING =
|
Binary file not shown.
File diff suppressed because it is too large
@ -0,0 +1,30 @@
|
||||
# This file is distributed under the same license as the Django package.
|
||||
#
|
||||
# The *_FORMAT strings use the Django date format syntax,
|
||||
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
DATE_FORMAT = r'j \d\e F \d\e Y'
|
||||
TIME_FORMAT = 'G:i'
|
||||
DATETIME_FORMAT = r'j \d\e F \d\e Y \a \l\e\s G:i'
|
||||
YEAR_MONTH_FORMAT = r'F \d\e\l Y'
|
||||
MONTH_DAY_FORMAT = r'j \d\e F'
|
||||
SHORT_DATE_FORMAT = 'd/m/Y'
|
||||
SHORT_DATETIME_FORMAT = 'd/m/Y G:i'
|
||||
FIRST_DAY_OF_WEEK = 1 # Monday
|
||||
|
||||
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
|
||||
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
|
||||
DATE_INPUT_FORMATS = [
|
||||
# '31/12/2009', '31/12/09'
|
||||
'%d/%m/%Y', '%d/%m/%y'
|
||||
]
|
||||
DATETIME_INPUT_FORMATS = [
|
||||
'%d/%m/%Y %H:%M:%S',
|
||||
'%d/%m/%Y %H:%M:%S.%f',
|
||||
'%d/%m/%Y %H:%M',
|
||||
'%d/%m/%y %H:%M:%S',
|
||||
'%d/%m/%y %H:%M:%S.%f',
|
||||
'%d/%m/%y %H:%M',
|
||||
]
|
||||
DECIMAL_SEPARATOR = ','
|
||||
THOUSAND_SEPARATOR = '.'
|
||||
NUMBER_GROUPING = 3
|
Binary file not shown.
File diff suppressed because it is too large
@ -0,0 +1,42 @@
|
||||
# This file is distributed under the same license as the Django package.
|
||||
#
|
||||
# The *_FORMAT strings use the Django date format syntax,
|
||||
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
DATE_FORMAT = 'j. E Y'
|
||||
TIME_FORMAT = 'G:i'
|
||||
DATETIME_FORMAT = 'j. E Y G:i'
|
||||
YEAR_MONTH_FORMAT = 'F Y'
|
||||
MONTH_DAY_FORMAT = 'j. F'
|
||||
SHORT_DATE_FORMAT = 'd.m.Y'
|
||||
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
|
||||
FIRST_DAY_OF_WEEK = 1 # Monday
|
||||
|
||||
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
|
||||
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
|
||||
DATE_INPUT_FORMATS = [
|
||||
'%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06'
|
||||
'%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06'
|
||||
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
|
||||
]
|
||||
# Kept ISO formats as one is in first position
|
||||
TIME_INPUT_FORMATS = [
|
||||
'%H:%M:%S', # '04:30:59'
|
||||
'%H.%M', # '04.30'
|
||||
'%H:%M', # '04:30'
|
||||
]
|
||||
DATETIME_INPUT_FORMATS = [
|
||||
'%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59'
|
||||
'%d.%m.%Y %H:%M:%S.%f', # '05.01.2006 04:30:59.000200'
|
||||
'%d.%m.%Y %H.%M', # '05.01.2006 04.30'
|
||||
'%d.%m.%Y %H:%M', # '05.01.2006 04:30'
|
||||
'%d.%m.%Y', # '05.01.2006'
|
||||
'%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59'
|
||||
'%d. %m. %Y %H:%M:%S.%f', # '05. 01. 2006 04:30:59.000200'
|
||||
'%d. %m. %Y %H.%M', # '05. 01. 2006 04.30'
|
||||
'%d. %m. %Y %H:%M', # '05. 01. 2006 04:30'
|
||||
'%d. %m. %Y', # '05. 01. 2006'
|
||||
'%Y-%m-%d %H.%M', # '2006-01-05 04.30'
|
||||
]
|
||||
DECIMAL_SEPARATOR = ','
|
||||
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
|
||||
NUMBER_GROUPING = 3
|
Binary file not shown.
File diff suppressed because it is too large
@ -0,0 +1,35 @@
|
||||
# This file is distributed under the same license as the Django package.
|
||||
#
|
||||
# The *_FORMAT strings use the Django date format syntax,
|
||||
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
|
||||
DATE_FORMAT = 'j F Y' # '25 Hydref 2006'
|
||||
TIME_FORMAT = 'P' # '2:30 y.b.'
|
||||
DATETIME_FORMAT = 'j F Y, P' # '25 Hydref 2006, 2:30 y.b.'
|
||||
YEAR_MONTH_FORMAT = 'F Y' # 'Hydref 2006'
|
||||
MONTH_DAY_FORMAT = 'j F' # '25 Hydref'
|
||||
SHORT_DATE_FORMAT = 'd/m/Y' # '25/10/2006'
|
||||
SHORT_DATETIME_FORMAT = 'd/m/Y P' # '25/10/2006 2:30 y.b.'
|
||||
FIRST_DAY_OF_WEEK = 1 # 'Dydd Llun'
|
||||
|
||||
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
|
||||
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
|
||||
DATE_INPUT_FORMATS = [
|
||||
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
|
||||
]
|
||||
DATETIME_INPUT_FORMATS = [
|
||||
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
|
||||
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
|
||||
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
|
||||
'%Y-%m-%d', # '2006-10-25'
|
||||
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
|
||||
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
|
||||
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
|
||||
'%d/%m/%Y', # '25/10/2006'
|
||||
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
|
||||
'%d/%m/%y %H:%M:%S.%f', # '25/10/06 14:30:59.000200'
|
||||
'%d/%m/%y %H:%M', # '25/10/06 14:30'
|
||||
'%d/%m/%y', # '25/10/06'
|
||||
]
|
||||
DECIMAL_SEPARATOR = '.'
|
||||
THOUSAND_SEPARATOR = ','
|
||||
NUMBER_GROUPING = 3
|
Binary file not shown.
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.